diff --git a/node_modules/libnpmexec/node_modules/.bin/arborist b/node_modules/libnpmexec/node_modules/.bin/arborist deleted file mode 120000 index 752f4a756e0ec..0000000000000 --- a/node_modules/libnpmexec/node_modules/.bin/arborist +++ /dev/null @@ -1 +0,0 @@ -../@npmcli/arborist/bin/index.js \ No newline at end of file diff --git a/node_modules/libnpmexec/node_modules/.bin/node-gyp b/node_modules/libnpmexec/node_modules/.bin/node-gyp deleted file mode 120000 index 9b31a4fe437b3..0000000000000 --- a/node_modules/libnpmexec/node_modules/.bin/node-gyp +++ /dev/null @@ -1 +0,0 @@ -../node-gyp/bin/node-gyp.js \ No newline at end of file diff --git a/node_modules/libnpmexec/node_modules/.bin/pacote b/node_modules/libnpmexec/node_modules/.bin/pacote deleted file mode 120000 index e595831431bfc..0000000000000 --- a/node_modules/libnpmexec/node_modules/.bin/pacote +++ /dev/null @@ -1 +0,0 @@ -../pacote/lib/bin.js \ No newline at end of file diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/LICENSE b/node_modules/libnpmexec/node_modules/@npmcli/arborist/LICENSE deleted file mode 100644 index 13ff5b9eac5dc..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The ISC License - -Copyright npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - ---- - -Files and metadata contained in `test/fixtures/registry-mocks/content` are -downloaded from the public npm registry. These are the property of their -respective owners. The use and distribution of said artifacts are covered by -their associated licenses. 
diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/actual.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/actual.js deleted file mode 100644 index eb0495997a1b9..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/actual.js +++ /dev/null @@ -1,23 +0,0 @@ -const Arborist = require('../') -const print = require('./lib/print-tree.js') -const options = require('./lib/options.js') -require('./lib/logging.js') -require('./lib/timers.js') - -const start = process.hrtime() -new Arborist(options).loadActual(options).then(tree => { - const end = process.hrtime(start) - if (!process.argv.includes('--quiet')) { - print(tree) - } - - console.error(`read ${tree.inventory.size} deps in ${end[0] * 1000 + end[1] / 1e6}ms`) - if (options.save) { - tree.meta.save() - } - if (options.saveHidden) { - tree.meta.hiddenLockfile = true - tree.meta.filename = options.path + '/node_modules/.package-lock.json' - tree.meta.save() - } -}).catch(er => console.error(er)) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/audit.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/audit.js deleted file mode 100644 index d9ac532d3ed70..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/audit.js +++ /dev/null @@ -1,54 +0,0 @@ -const Arborist = require('../') - -const print = require('./lib/print-tree.js') -const options = require('./lib/options.js') -require('./lib/timers.js') -require('./lib/logging.js') - -const Vuln = require('../lib/vuln.js') -const printReport = report => { - for (const vuln of report.values()) { - console.log(printVuln(vuln)) - } - if (report.topVulns.size) { - console.log('\n# top-level vulnerabilities') - for (const vuln of report.topVulns.values()) { - console.log(printVuln(vuln)) - } - } -} - -const printVuln = vuln => { - return { - __proto__: { constructor: Vuln }, - name: vuln.name, - issues: [...vuln.advisories].map(a => printAdvisory(a)), - range: vuln.simpleRange, - nodes: [...vuln.nodes].map(node => `${node.name} ${node.location || '#ROOT'}`), - ...(vuln.topNodes.size === 0 ? {} : { - topNodes: [...vuln.topNodes].map(node => `${node.location || '#ROOT'}`), - }), - } -} - -const printAdvisory = a => `${a.title}${a.url ? 
' ' + a.url : ''}` - -const start = process.hrtime() -process.emit('time', 'audit script') -const arb = new Arborist(options) -arb.audit(options).then(tree => { - process.emit('timeEnd', 'audit script') - const end = process.hrtime(start) - if (options.fix) { - print(tree) - } - if (!options.quiet) { - printReport(arb.auditReport) - } - if (options.fix) { - console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - } - if (tree.meta && options.save) { - tree.meta.save() - } -}).catch(er => console.error(er)) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/dedupe.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/dedupe.js deleted file mode 100644 index b0e83459ef73f..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/dedupe.js +++ /dev/null @@ -1,49 +0,0 @@ -const Arborist = require('../') - -const options = require('./lib/options.js') -const print = require('./lib/print-tree.js') -require('./lib/logging.js') -require('./lib/timers.js') - -const printDiff = diff => { - const {depth} = require('treeverse') - depth({ - tree: diff, - visit: d => { - if (d.location === '') { - return - } - switch (d.action) { - case 'REMOVE': - console.error('REMOVE', d.actual.location) - break - case 'ADD': - console.error('ADD', d.ideal.location, d.ideal.resolved) - break - case 'CHANGE': - console.error('CHANGE', d.actual.location, { - from: d.actual.resolved, - to: d.ideal.resolved, - }) - break - } - }, - getChildren: d => d.children, - }) -} - -const start = process.hrtime() -process.emit('time', 'install') -const arb = new Arborist(options) -arb.dedupe(options).then(tree => { - process.emit('timeEnd', 'install') - const end = process.hrtime(start) - print(tree) - if (options.dryRun) { - printDiff(arb.diff) - } - console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - if (tree.meta && options.save) { - tree.meta.save() - } -}).catch(er => console.error(require('util').inspect(er, { depth: Infinity }))) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/funding.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/funding.js deleted file mode 100644 index d0f4f31654ae0..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/funding.js +++ /dev/null @@ -1,34 +0,0 @@ -const options = require('./lib/options.js') -require('./lib/logging.js') -require('./lib/timers.js') - -const Arborist = require('../') -const a = new Arborist(options) -const query = options._.shift() -const start = process.hrtime() -a.loadVirtual().then(tree => { - // only load the actual tree if the virtual one doesn't have modern metadata - if (!tree.meta || !(tree.meta.originalLockfileVersion >= 2)) { - console.error('old metadata, load actual') - throw 'load actual' - } else { - console.error('meta ok, return virtual tree') - return tree - } -}).catch(() => a.loadActual()).then(tree => { - const end = process.hrtime(start) - if (!query) { - for (const node of tree.inventory.values()) { - if (node.package.funding) { - console.log(node.name, node.location, node.package.funding) - } - } - } else { - for (const node of tree.inventory.query('name', query)) { - if (node.package.funding) { - console.log(node.name, node.location, node.package.funding) - } - } - } - console.error(`read ${tree.inventory.size} deps in ${end[0] * 1000 + end[1] / 1e6}ms`) -}) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/ideal.js 
b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/ideal.js deleted file mode 100644 index 5d1ed0dcd9dc6..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/ideal.js +++ /dev/null @@ -1,21 +0,0 @@ -const Arborist = require('../') - -const { inspect } = require('util') -const options = require('./lib/options.js') -const print = require('./lib/print-tree.js') -require('./lib/logging.js') -require('./lib/timers.js') - -const start = process.hrtime() -new Arborist(options).buildIdealTree(options).then(tree => { - const end = process.hrtime(start) - print(tree) - console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - if (tree.meta && options.save) { - tree.meta.save() - } -}).catch(er => { - const opt = { depth: Infinity, color: true } - console.error(er.code === 'ERESOLVE' ? inspect(er, opt) : er) - process.exitCode = 1 -}) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/index.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/index.js deleted file mode 100755 index 5449a50e67f62..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/index.js +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env node -const [cmd] = process.argv.splice(2, 1) - -const usage = () => `Arborist - the npm tree doctor - -Version: ${require('../package.json').version} - -# USAGE - arborist <cmd> [path] [options...] - -# COMMANDS - -* reify: reify ideal tree to node_modules (install, update, rm, ...) -* prune: prune the ideal tree and reify (like npm prune) -* ideal: generate and print the ideal tree -* actual: read and print the actual tree in node_modules -* virtual: read and print the virtual tree in the local shrinkwrap file -* shrinkwrap: load a local shrinkwrap and print its data -* audit: perform a security audit on project dependencies -* funding: query funding information in the local package tree. A second - positional argument after the path name can limit to a package name. -* license: query license information in the local package tree. A second - positional argument after the path name can limit to a license type. -* help: print this text - -# OPTIONS - -Most npm options are supported, but in camelCase rather than css-case. For -example, instead of '--dry-run', use '--dryRun'. - -Additionally: - -* --quiet will suppress the printing of package trees -* Instead of 'npm install <pkg>', use 'arborist reify --add=<pkg>'. - The '--add=<pkg>' option can be specified multiple times. -* Instead of 'npm rm <pkg>', use 'arborist reify --rm=<pkg>'. - The '--rm=<pkg>' option can be specified multiple times. -* Instead of 'npm update', use 'arborist reify --update-all'.
-* 'npm audit fix' is 'arborist audit --fix' -` - -const help = () => console.log(usage()) - -switch (cmd) { - case 'actual': - require('./actual.js') - break - case 'virtual': - require('./virtual.js') - break - case 'ideal': - require('./ideal.js') - break - case 'prune': - require('./prune.js') - break - case 'reify': - require('./reify.js') - break - case 'audit': - require('./audit.js') - break - case 'funding': - require('./funding.js') - break - case 'license': - require('./license.js') - break - case 'shrinkwrap': - require('./shrinkwrap.js') - break - case 'help': - case '-h': - case '--help': - help() - break - default: - process.exitCode = 1 - console.error(usage()) - break -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/logging.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/logging.js deleted file mode 100644 index 8183ece1fd119..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/logging.js +++ /dev/null @@ -1,42 +0,0 @@ -const options = require('./options.js') -const { quiet = false } = options -const { loglevel = quiet ? 'warn' : 'silly' } = options - -const levels = [ - 'silly', - 'verbose', - 'info', - 'timing', - 'http', - 'notice', - 'warn', - 'error', - 'silent', -] - -const levelMap = new Map(levels.reduce((set, level, index) => { - set.push([level, index], [index, level]) - return set -}, [])) - -const { inspect, format } = require('util') -const colors = process.stderr.isTTY -const magenta = colors ? msg => `\x1B[35m${msg}\x1B[39m` : m => m -if (loglevel !== 'silent') { - process.on('log', (level, ...args) => { - if (levelMap.get(level) < levelMap.get(loglevel)) { - return - } - const pref = `${process.pid} ${magenta(level)} ` - if (level === 'warn' && args[0] === 'ERESOLVE') { - args[2] = inspect(args[2], { depth: 10, colors }) - } else { - args = args.map(a => { - return typeof a === 'string' ? 
a - : inspect(a, { depth: 10, colors }) - }) - } - const msg = pref + format(...args).trim().split('\n').join(`\n${pref}`) - console.error(msg) - }) -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/options.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/options.js deleted file mode 100644 index 23e89ddce698b..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/options.js +++ /dev/null @@ -1,59 +0,0 @@ -const options = module.exports = { - path: undefined, - cache: `${process.env.HOME}/.npm/_cacache`, - _: [], -} - -for (const arg of process.argv.slice(2)) { - if (/^--add=/.test(arg)) { - options.add = options.add || [] - options.add.push(arg.substr('--add='.length)) - } else if (/^--rm=/.test(arg)) { - options.rm = options.rm || [] - options.rm.push(arg.substr('--rm='.length)) - } else if (arg === '--global') { - options.global = true - } else if (arg === '--global-style') { - options.globalStyle = true - } else if (arg === '--prefer-dedupe') { - options.preferDedupe = true - } else if (arg === '--legacy-peer-deps') { - options.legacyPeerDeps = true - } else if (arg === '--force') { - options.force = true - } else if (arg === '--update-all') { - options.update = options.update || {} - options.update.all = true - } else if (/^--update=/.test(arg)) { - options.update = options.update || {} - options.update.names = options.update.names || [] - options.update.names.push(arg.substr('--update='.length)) - } else if (/^--omit=/.test(arg)) { - options.omit = options.omit || [] - options.omit.push(arg.substr('--omit='.length)) - } else if (/^--before=/.test(arg)) { - options.before = new Date(arg.substr('--before='.length)) - } else if (/^-w.+/.test(arg)) { - options.workspaces = options.workspaces || [] - options.workspaces.push(arg.replace(/^-w/, '')) - } else if (/^--workspace=/.test(arg)) { - options.workspaces = options.workspaces || [] - options.workspaces.push(arg.replace(/^--workspace=/, '')) - } else if (/^--[^=]+=/.test(arg)) { - const [key, ...v] = arg.replace(/^--/, '').split('=') - const val = v.join('=') - options[key] = val === 'false' ? false : val === 'true' ? true : val - } else if (/^--.+/.test(arg)) { - options[arg.replace(/^--/, '')] = true - } else if (options.path === undefined) { - options.path = arg - } else { - options._.push(arg) - } -} - -if (options.path === undefined) { - options.path = '.' -} - -console.error(options) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/print-tree.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/print-tree.js deleted file mode 100644 index 1ea2a72187332..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/print-tree.js +++ /dev/null @@ -1,5 +0,0 @@ -const { inspect } = require('util') -const { quiet } = require('./options.js') - -module.exports = quiet ? () => {} - : tree => console.log(inspect(tree.toJSON(), { depth: Infinity })) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/timers.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/timers.js deleted file mode 100644 index 242431980e55c..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/lib/timers.js +++ /dev/null @@ -1,31 +0,0 @@ -const timers = Object.create(null) -const { format } = require('util') -const options = require('./options.js') - -process.on('time', name => { - if (timers[name]) { - throw new Error('conflicting timer! 
' + name) - } - timers[name] = process.hrtime() -}) - -const dim = process.stderr.isTTY ? msg => `\x1B[2m${msg}\x1B[22m` : m => m -const red = process.stderr.isTTY ? msg => `\x1B[31m${msg}\x1B[39m` : m => m -process.on('timeEnd', name => { - if (!timers[name]) { - throw new Error('timer not started! ' + name) - } - const res = process.hrtime(timers[name]) - delete timers[name] - const msg = format(`${process.pid} ${name}`, res[0] * 1e3 + res[1] / 1e6) - if (options.timers !== false) { - console.error(dim(msg)) - } -}) - -process.on('exit', () => { - for (const name of Object.keys(timers)) { - console.error(red('Dangling timer:'), name) - process.exitCode = 1 - } -}) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/license.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/license.js deleted file mode 100644 index 7fc08dd83eb5b..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/license.js +++ /dev/null @@ -1,38 +0,0 @@ -const Arborist = require('../') -const options = require('./lib/options.js') -require('./lib/logging.js') -require('./lib/timers.js') - -const a = new Arborist(options) -const query = options._.shift() - -a.loadVirtual().then(tree => { - // only load the actual tree if the virtual one doesn't have modern metadata - if (!tree.meta || !(tree.meta.originalLockfileVersion >= 2)) { - throw 'load actual' - } else { - return tree - } -}).catch((er) => { - console.error('loading actual tree', er) - return a.loadActual() -}).then(tree => { - if (!query) { - const set = [] - for (const license of tree.inventory.query('license')) { - set.push([tree.inventory.query('license', license).size, license]) - } - - for (const [count, license] of set.sort((a, b) => - a[1] && b[1] ? b[0] - a[0] || a[1].localeCompare(b[1], 'en') - : a[1] ? -1 - : b[1] ? 1 - : 0)) { - console.log(count, license) - } - } else { - for (const node of tree.inventory.query('license', query === 'undefined' ? 
undefined : query)) { - console.log(`${node.name} ${node.location} ${node.package.description || ''}`) - } - } -}) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/prune.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/prune.js deleted file mode 100644 index 4809a992388aa..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/prune.js +++ /dev/null @@ -1,49 +0,0 @@ -const Arborist = require('../') - -const options = require('./lib/options.js') -const print = require('./lib/print-tree.js') -require('./lib/logging.js') -require('./lib/timers.js') - -const printDiff = diff => { - const {depth} = require('treeverse') - depth({ - tree: diff, - visit: d => { - if (d.location === '') { - return - } - switch (d.action) { - case 'REMOVE': - console.error('REMOVE', d.actual.location) - break - case 'ADD': - console.error('ADD', d.ideal.location, d.ideal.resolved) - break - case 'CHANGE': - console.error('CHANGE', d.actual.location, { - from: d.actual.resolved, - to: d.ideal.resolved, - }) - break - } - }, - getChildren: d => d.children, - }) -} - -const start = process.hrtime() -process.emit('time', 'install') -const arb = new Arborist(options) -arb.prune(options).then(tree => { - process.emit('timeEnd', 'install') - const end = process.hrtime(start) - print(tree) - if (options.dryRun) { - printDiff(arb.diff) - } - console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - if (tree.meta && options.save) { - tree.meta.save() - } -}).catch(er => console.error(require('util').inspect(er, { depth: Infinity }))) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/reify.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/reify.js deleted file mode 100644 index 803bac978c69a..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/reify.js +++ /dev/null @@ -1,49 +0,0 @@ -const Arborist = require('../') - -const options = require('./lib/options.js') -const print = require('./lib/print-tree.js') -require('./lib/logging.js') -require('./lib/timers.js') - -const printDiff = diff => { - const {depth} = require('treeverse') - depth({ - tree: diff, - visit: d => { - if (d.location === '') { - return - } - switch (d.action) { - case 'REMOVE': - console.error('REMOVE', d.actual.location) - break - case 'ADD': - console.error('ADD', d.ideal.location, d.ideal.resolved) - break - case 'CHANGE': - console.error('CHANGE', d.actual.location, { - from: d.actual.resolved, - to: d.ideal.resolved, - }) - break - } - }, - getChildren: d => d.children, - }) -} - -const start = process.hrtime() -process.emit('time', 'install') -const arb = new Arborist(options) -arb.reify(options).then(tree => { - process.emit('timeEnd', 'install') - const end = process.hrtime(start) - print(tree) - if (options.dryRun) { - printDiff(arb.diff) - } - console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - if (tree.meta && options.save) { - tree.meta.save() - } -}).catch(er => console.error(require('util').inspect(er, { depth: Infinity }))) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/shrinkwrap.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/shrinkwrap.js deleted file mode 100644 index ee5ec24557947..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/shrinkwrap.js +++ /dev/null @@ -1,12 +0,0 @@ -const Shrinkwrap = require('../lib/shrinkwrap.js') -const options = require('./lib/options.js') 
-require('./lib/logging.js') -require('./lib/timers.js') - -const { quiet } = options -Shrinkwrap.load(options) - .then(s => quiet || console.log(JSON.stringify(s.data, 0, 2))) - .catch(er => { - console.error('shrinkwrap load failure', er) - process.exit(1) - }) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/virtual.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/virtual.js deleted file mode 100644 index 457c945e72c21..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/bin/virtual.js +++ /dev/null @@ -1,18 +0,0 @@ -const Arborist = require('../') - -const print = require('./lib/print-tree.js') -const options = require('./lib/options.js') -require('./lib/logging.js') -require('./lib/timers.js') - -const start = process.hrtime() -new Arborist(options).loadVirtual().then(tree => { - const end = process.hrtime(start) - if (!options.quiet) { - print(tree) - } - if (options.save) { - tree.meta.save() - } - console.error(`read ${tree.inventory.size} deps in ${end[0] * 1000 + end[1] / 1e6}ms`) -}).catch(er => console.error(er)) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js deleted file mode 100644 index 3c1cbd44abcc9..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js +++ /dev/null @@ -1,148 +0,0 @@ -// add and remove dependency specs to/from pkg manifest - -const localeCompare = require('@isaacs/string-locale-compare')('en') - -const add = ({pkg, add, saveBundle, saveType, log}) => { - for (const spec of add) { - addSingle({pkg, spec, saveBundle, saveType, log}) - } - - return pkg -} - -// Canonical source of both the map between saveType and where it correlates to -// in the package, and the names of all our dependencies attributes -const saveTypeMap = new Map([ - ['dev', 'devDependencies'], - ['optional', 'optionalDependencies'], - ['prod', 'dependencies'], - ['peerOptional', 'peerDependencies'], - ['peer', 'peerDependencies'], -]) - -const addSingle = ({pkg, spec, saveBundle, saveType, log}) => { - const { name, rawSpec } = spec - - // if the user does not give us a type, we infer which type(s) - // to keep based on the same order of priority we do when - // building the tree as defined in the _loadDeps method of - // the node class. 
- if (!saveType) { - saveType = inferSaveType(pkg, spec.name) - } - - if (saveType === 'prod') { - // a production dependency can only exist as production (rpj ensures it - // doesn't coexist w/ optional) - deleteSubKey(pkg, 'devDependencies', name, 'dependencies', log) - deleteSubKey(pkg, 'peerDependencies', name, 'dependencies', log) - } else if (saveType === 'dev') { - // a dev dependency may co-exist as peer, or optional, but not production - deleteSubKey(pkg, 'dependencies', name, 'devDependencies', log) - } else if (saveType === 'optional') { - // an optional dependency may co-exist as dev (rpj ensures it doesn't - // coexist w/ prod) - deleteSubKey(pkg, 'peerDependencies', name, 'optionalDependencies', log) - } else { // peer or peerOptional is all that's left - // a peer dependency may coexist as dev - deleteSubKey(pkg, 'dependencies', name, 'peerDependencies', log) - deleteSubKey(pkg, 'optionalDependencies', name, 'peerDependencies', log) - } - - const depType = saveTypeMap.get(saveType) - - pkg[depType] = pkg[depType] || {} - if (rawSpec !== '' || pkg[depType][name] === undefined) { - pkg[depType][name] = rawSpec || '*' - } - if (saveType === 'optional') { - // Affordance for previous npm versions that require this behaviour - pkg.dependencies = pkg.dependencies || {} - pkg.dependencies[name] = pkg.optionalDependencies[name] - } - - if (saveType === 'peer' || saveType === 'peerOptional') { - const pdm = pkg.peerDependenciesMeta || {} - if (saveType === 'peer' && pdm[name] && pdm[name].optional) { - pdm[name].optional = false - } else if (saveType === 'peerOptional') { - pdm[name] = pdm[name] || {} - pdm[name].optional = true - pkg.peerDependenciesMeta = pdm - } - // peerDeps are often also a devDep, so that they can be tested when - // using package managers that don't auto-install peer deps - if (pkg.devDependencies && pkg.devDependencies[name] !== undefined) { - pkg.devDependencies[name] = pkg.peerDependencies[name] - } - } - - if (saveBundle && saveType !== 'peer' && saveType !== 'peerOptional') { - // keep it sorted, keep it unique - const bd = new Set(pkg.bundleDependencies || []) - bd.add(spec.name) - pkg.bundleDependencies = [...bd].sort(localeCompare) - } -} - -// Finds where the package is already in the spec and infers saveType from that -const inferSaveType = (pkg, name) => { - for (const saveType of saveTypeMap.keys()) { - if (hasSubKey(pkg, saveTypeMap.get(saveType), name)) { - if ( - saveType === 'peerOptional' && - (!hasSubKey(pkg, 'peerDependenciesMeta', name) || - !pkg.peerDependenciesMeta[name].optional) - ) { - return 'peer' - } - return saveType - } - } - return 'prod' -} - -const { hasOwnProperty } = Object.prototype -const hasSubKey = (pkg, depType, name) => { - return pkg[depType] && hasOwnProperty.call(pkg[depType], name) -} - -// Removes a subkey and warns about it if it's being replaced -const deleteSubKey = (pkg, depType, name, replacedBy, log) => { - if (hasSubKey(pkg, depType, name)) { - if (replacedBy && log) { - log.warn('idealTree', `Removing ${depType}.${name} in favor of ${replacedBy}.${name}`) - } - delete pkg[depType][name] - - // clean up peerDepsMeta if we are removing something from peerDependencies - if (depType === 'peerDependencies' && pkg.peerDependenciesMeta) { - delete pkg.peerDependenciesMeta[name] - if (!Object.keys(pkg.peerDependenciesMeta).length) { - delete pkg.peerDependenciesMeta - } - } - - if (!Object.keys(pkg[depType]).length) { - delete pkg[depType] - } - } -} - -const rm = (pkg, rm) => { - for (const depType of new 
Set(saveTypeMap.values())) { - for (const name of rm) { - deleteSubKey(pkg, depType, name) - } - } - if (pkg.bundleDependencies) { - pkg.bundleDependencies = pkg.bundleDependencies - .filter(name => !rm.includes(name)) - if (!pkg.bundleDependencies.length) { - delete pkg.bundleDependencies - } - } - return pkg -} - -module.exports = { add, rm, saveTypeMap, hasSubKey } diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/audit.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/audit.js deleted file mode 100644 index eb4a3565531cc..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/audit.js +++ /dev/null @@ -1,43 +0,0 @@ -// mixin implementing the audit method - -const AuditReport = require('../audit-report.js') - -// shared with reify -const _global = Symbol.for('global') -const _workspaces = Symbol.for('workspaces') -const _includeWorkspaceRoot = Symbol.for('includeWorkspaceRoot') - -module.exports = cls => class Auditor extends cls { - async audit (options = {}) { - this.addTracker('audit') - if (this[_global]) { - throw Object.assign( - new Error('`npm audit` does not support testing globals'), - { code: 'EAUDITGLOBAL' } - ) - } - - // allow the user to set options on the ctor as well. - // XXX: deprecate separate method options objects. - options = { ...this.options, ...options } - - process.emit('time', 'audit') - const tree = await this.loadVirtual() - if (this[_workspaces] && this[_workspaces].length) { - options.filterSet = this.workspaceDependencySet( - tree, - this[_workspaces], - this[_includeWorkspaceRoot] - ) - } - if (!options.workspacesEnabled) { - options.filterSet = - this.excludeWorkspacesDependencySet(tree) - } - this.auditReport = await AuditReport.load(tree, options) - const ret = options.fix ? 
this.reify(options) : this.auditReport - process.emit('timeEnd', 'audit') - this.finishTracker('audit') - return ret - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js deleted file mode 100644 index 3e6a9838f8f40..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ /dev/null @@ -1,1471 +0,0 @@ -// mixin implementing the buildIdealTree method -const localeCompare = require('@isaacs/string-locale-compare')('en') -const rpj = require('read-package-json-fast') -const npa = require('npm-package-arg') -const pacote = require('pacote') -const cacache = require('cacache') -const promiseCallLimit = require('promise-call-limit') -const realpath = require('../../lib/realpath.js') -const { resolve, dirname } = require('path') -const { promisify } = require('util') -const treeCheck = require('../tree-check.js') -const readdir = promisify(require('readdir-scoped-modules')) -const fs = require('fs') -const lstat = promisify(fs.lstat) -const readlink = promisify(fs.readlink) -const { depth } = require('treeverse') - -const { - OK, - REPLACE, - CONFLICT, -} = require('../can-place-dep.js') -const PlaceDep = require('../place-dep.js') - -const debug = require('../debug.js') -const fromPath = require('../from-path.js') -const calcDepFlags = require('../calc-dep-flags.js') -const Shrinkwrap = require('../shrinkwrap.js') -const Node = require('../node.js') -const Link = require('../link.js') -const addRmPkgDeps = require('../add-rm-pkg-deps.js') -const optionalSet = require('../optional-set.js') -const {checkEngine, checkPlatform} = require('npm-install-checks') - -const relpath = require('../relpath.js') - -// note: some of these symbols are shared so we can hit -// them with unit tests and reuse them across mixins -const _complete = Symbol('complete') -const _depsSeen = Symbol('depsSeen') -const _depsQueue = Symbol('depsQueue') -const _currentDep = Symbol('currentDep') -const _updateAll = Symbol('updateAll') -const _mutateTree = Symbol('mutateTree') -const _flagsSuspect = Symbol.for('flagsSuspect') -const _workspaces = Symbol.for('workspaces') -const _prune = Symbol('prune') -const _preferDedupe = Symbol('preferDedupe') -const _legacyBundling = Symbol('legacyBundling') -const _parseSettings = Symbol('parseSettings') -const _initTree = Symbol('initTree') -const _applyUserRequests = Symbol('applyUserRequests') -const _applyUserRequestsToNode = Symbol('applyUserRequestsToNode') -const _inflateAncientLockfile = Symbol('inflateAncientLockfile') -const _buildDeps = Symbol('buildDeps') -const _buildDepStep = Symbol('buildDepStep') -const _nodeFromEdge = Symbol('nodeFromEdge') -const _nodeFromSpec = Symbol('nodeFromSpec') -const _fetchManifest = Symbol('fetchManifest') -const _problemEdges = Symbol('problemEdges') -const _manifests = Symbol('manifests') -const _loadWorkspaces = Symbol.for('loadWorkspaces') -const _linkFromSpec = Symbol('linkFromSpec') -const _loadPeerSet = Symbol('loadPeerSet') -const _updateNames = Symbol.for('updateNames') -const _fixDepFlags = Symbol('fixDepFlags') -const _resolveLinks = Symbol('resolveLinks') -const _rootNodeFromPackage = Symbol('rootNodeFromPackage') -const _add = Symbol('add') -const _resolvedAdd = Symbol.for('resolvedAdd') -const _queueNamedUpdates = Symbol('queueNamedUpdates') -const _queueVulnDependents = Symbol('queueVulnDependents') -const _avoidRange = 
Symbol('avoidRange') -const _shouldUpdateNode = Symbol('shouldUpdateNode') -const resetDepFlags = require('../reset-dep-flags.js') -const _loadFailures = Symbol('loadFailures') -const _pruneFailedOptional = Symbol('pruneFailedOptional') -const _linkNodes = Symbol('linkNodes') -const _follow = Symbol('follow') -const _globalStyle = Symbol('globalStyle') -const _globalRootNode = Symbol('globalRootNode') -const _isVulnerable = Symbol.for('isVulnerable') -const _usePackageLock = Symbol.for('usePackageLock') -const _rpcache = Symbol.for('realpathCache') -const _stcache = Symbol.for('statCache') -const _updateFilePath = Symbol('updateFilePath') -const _followSymlinkPath = Symbol('followSymlinkPath') -const _getRelpathSpec = Symbol('getRelpathSpec') -const _retrieveSpecName = Symbol('retrieveSpecName') -const _strictPeerDeps = Symbol('strictPeerDeps') -const _checkEngineAndPlatform = Symbol('checkEngineAndPlatform') -const _checkEngine = Symbol('checkEngine') -const _checkPlatform = Symbol('checkPlatform') -const _virtualRoots = Symbol('virtualRoots') -const _virtualRoot = Symbol('virtualRoot') -const _includeWorkspaceRoot = Symbol.for('includeWorkspaceRoot') - -const _failPeerConflict = Symbol('failPeerConflict') -const _explainPeerConflict = Symbol('explainPeerConflict') -const _edgesOverridden = Symbol('edgesOverridden') -// exposed symbol for unit testing the placeDep method directly -const _peerSetSource = Symbol.for('peerSetSource') - -// used by Reify mixin -const _force = Symbol.for('force') -const _explicitRequests = Symbol('explicitRequests') -const _global = Symbol.for('global') -const _idealTreePrune = Symbol.for('idealTreePrune') - -module.exports = cls => class IdealTreeBuilder extends cls { - constructor (options) { - super(options) - - // normalize trailing slash - const registry = options.registry || 'https://registry.npmjs.org' - options.registry = this.registry = registry.replace(/\/+$/, '') + '/' - - const { - follow = false, - force = false, - global = false, - globalStyle = false, - idealTree = null, - includeWorkspaceRoot = false, - legacyPeerDeps = false, - packageLock = true, - strictPeerDeps = false, - workspaces = [], - } = options - - this[_workspaces] = workspaces || [] - this[_force] = !!force - this[_strictPeerDeps] = !!strictPeerDeps - - this.idealTree = idealTree - this.legacyPeerDeps = legacyPeerDeps - - this[_usePackageLock] = packageLock - this[_global] = !!global - this[_globalStyle] = this[_global] || globalStyle - this[_follow] = !!follow - - if (this[_workspaces].length && this[_global]) { - throw new Error('Cannot operate on workspaces in global mode') - } - - this[_explicitRequests] = new Set() - this[_preferDedupe] = false - this[_legacyBundling] = false - this[_depsSeen] = new Set() - this[_depsQueue] = [] - this[_currentDep] = null - this[_updateNames] = [] - this[_updateAll] = false - this[_mutateTree] = false - this[_loadFailures] = new Set() - this[_linkNodes] = new Set() - this[_manifests] = new Map() - this[_edgesOverridden] = new Set() - this[_resolvedAdd] = [] - - // a map of each module in a peer set to the thing that depended on - // that set of peers in the first place. Use a WeakMap so that we - // don't hold onto references for nodes that are garbage collected. 
- this[_peerSetSource] = new WeakMap() - this[_virtualRoots] = new Map() - - this[_includeWorkspaceRoot] = includeWorkspaceRoot - } - - get explicitRequests () { - return new Set(this[_explicitRequests]) - } - - // public method - async buildIdealTree (options = {}) { - if (this.idealTree) { - return Promise.resolve(this.idealTree) - } - - // allow the user to set reify options on the ctor as well. - // XXX: deprecate separate reify() options object. - options = { ...this.options, ...options } - - // an empty array or any falsey value is the same as null - if (!options.add || options.add.length === 0) { - options.add = null - } - if (!options.rm || options.rm.length === 0) { - options.rm = null - } - - process.emit('time', 'idealTree') - - if (!options.add && !options.rm && !options.update && this[_global]) { - const er = new Error('global requires add, rm, or update option') - return Promise.reject(er) - } - - // first get the virtual tree, if possible. If there's a lockfile, then - // that defines the ideal tree, unless the root package.json is not - // satisfied by what the ideal tree provides. - // from there, we start adding nodes to it to satisfy the deps requested - // by the package.json in the root. - - this[_parseSettings](options) - - // start tracker block - this.addTracker('idealTree') - - try { - await this[_initTree]() - await this[_inflateAncientLockfile]() - await this[_applyUserRequests](options) - await this[_buildDeps]() - await this[_fixDepFlags]() - await this[_pruneFailedOptional]() - await this[_checkEngineAndPlatform]() - } finally { - process.emit('timeEnd', 'idealTree') - this.finishTracker('idealTree') - } - - return treeCheck(this.idealTree) - } - - async [_checkEngineAndPlatform] () { - for (const node of this.idealTree.inventory.values()) { - if (!node.optional) { - this[_checkEngine](node) - this[_checkPlatform](node) - } - } - } - - [_checkPlatform] (node) { - checkPlatform(node.package, this[_force]) - } - - [_checkEngine] (node) { - const { engineStrict, npmVersion, nodeVersion } = this.options - const c = () => - checkEngine(node.package, npmVersion, nodeVersion, this[_force]) - - if (engineStrict) { - c() - } else { - try { - c() - } catch (er) { - this.log.warn(er.code, er.message, { - package: er.pkgid, - required: er.required, - current: er.current, - }) - } - } - } - - [_parseSettings] (options) { - const update = options.update === true ? { all: true } - : Array.isArray(options.update) ? { names: options.update } - : options.update || {} - - if (update.all || !Array.isArray(update.names)) { - update.names = [] - } - - this[_complete] = !!options.complete - this[_preferDedupe] = !!options.preferDedupe - this[_legacyBundling] = !!options.legacyBundling - this[_updateNames] = update.names - - this[_updateAll] = update.all - // we prune by default unless explicitly set to boolean false - this[_prune] = options.prune !== false - - // set if we add anything, but also set here if we know we'll make - // changes and thus have to maybe prune later. - this[_mutateTree] = !!( - options.add || - options.rm || - update.all || - update.names.length - ) - } - - // load the initial tree, either the virtualTree from a shrinkwrap, - // or just the root node from a package.json - [_initTree] () { - process.emit('time', 'idealTree:init') - return ( - this[_global] ? 
this[_globalRootNode]() - : rpj(this.path + '/package.json').then( - pkg => this[_rootNodeFromPackage](pkg), - er => { - if (er.code === 'EJSONPARSE') { - throw er - } - return this[_rootNodeFromPackage]({}) - } - )) - .then(root => this[_loadWorkspaces](root)) - // ok to not have a virtual tree. probably initial install. - // When updating all, we load the shrinkwrap, but don't bother - // to build out the full virtual tree from it, since we'll be - // reconstructing it anyway. - .then(root => this[_global] ? root - : !this[_usePackageLock] || this[_updateAll] - ? Shrinkwrap.reset({ path: this.path }) - .then(meta => Object.assign(root, {meta})) - : this.loadVirtual({ root })) - - // if we don't have a lockfile to go from, then start with the - // actual tree, so we only make the minimum required changes. - // don't do this for global installs or updates, because in those - // cases we don't use a lockfile anyway. - // Load on a new Arborist object, so the Nodes aren't the same, - // or else it'll get super confusing when we change them! - .then(async root => { - if (!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk) { - await new this.constructor(this.options).loadActual({ root }) - const tree = root.target - // even though we didn't load it from a package-lock.json FILE, - // we still loaded it "from disk", meaning we have to reset - // dep flags before assuming that any mutations were reflected. - if (tree.children.size) { - root.meta.loadedFromDisk = true - } - } - return root - }) - - .then(tree => { - // null the virtual tree, because we're about to hack away at it - // if you want another one, load another copy. - this.idealTree = tree - this.virtualTree = null - process.emit('timeEnd', 'idealTree:init') - }) - } - - async [_globalRootNode] () { - const root = await this[_rootNodeFromPackage]({ dependencies: {} }) - // this is a gross kludge to handle the fact that we don't save - // metadata on the root node in global installs, because the "root" - // node is something like /usr/local/lib. - const meta = new Shrinkwrap({ path: this.path }) - meta.reset() - root.meta = meta - return root - } - - async [_rootNodeFromPackage] (pkg) { - // if the path doesn't exist, then we explode at this point. Note that - // this is not a problem for reify(), since it creates the root path - // before ever loading trees. - // TODO: make buildIdealTree() and loadActual handle a missing root path, - // or a symlink to a missing target, and let reify() create it as needed. - const real = await realpath(this.path, this[_rpcache], this[_stcache]) - const Cls = real === this.path ? Node : Link - const root = new Cls({ - path: this.path, - realpath: real, - pkg, - extraneous: false, - dev: false, - devOptional: false, - peer: false, - optional: false, - global: this[_global], - legacyPeerDeps: this.legacyPeerDeps, - }) - if (root.isLink) { - root.target = new Node({ - path: real, - realpath: real, - pkg, - extraneous: false, - dev: false, - devOptional: false, - peer: false, - optional: false, - global: this[_global], - legacyPeerDeps: this.legacyPeerDeps, - root, - }) - } - return root - } - - // process the add/rm requests by modifying the root node, and the - // update.names request by queueing nodes dependent on those named. 
- async [_applyUserRequests] (options) { - process.emit('time', 'idealTree:userRequests') - const tree = this.idealTree.target - - if (!this[_workspaces].length) { - await this[_applyUserRequestsToNode](tree, options) - } else { - const nodes = this.workspaceNodes(tree, this[_workspaces]) - if (this[_includeWorkspaceRoot]) { - nodes.push(tree) - } - const appliedRequests = nodes.map( - node => this[_applyUserRequestsToNode](node, options) - ) - await Promise.all(appliedRequests) - } - - process.emit('timeEnd', 'idealTree:userRequests') - } - - async [_applyUserRequestsToNode] (tree, options) { - // If we have a list of package names to update, and we know it's - // going to update them wherever they are, add any paths into those - // named nodes to the buildIdealTree queue. - if (!this[_global] && this[_updateNames].length) { - this[_queueNamedUpdates]() - } - - // global updates only update the globalTop nodes, but we need to know - // that they're there, and not reinstall the world unnecessarily. - const globalExplicitUpdateNames = [] - if (this[_global] && (this[_updateAll] || this[_updateNames].length)) { - const nm = resolve(this.path, 'node_modules') - for (const name of await readdir(nm).catch(() => [])) { - tree.package.dependencies = tree.package.dependencies || {} - const updateName = this[_updateNames].includes(name) - if (this[_updateAll] || updateName) { - if (updateName) { - globalExplicitUpdateNames.push(name) - } - const dir = resolve(nm, name) - const st = await lstat(dir) - .catch(/* istanbul ignore next */ er => null) - if (st && st.isSymbolicLink()) { - const target = await readlink(dir) - const real = resolve(dirname(dir), target) - tree.package.dependencies[name] = `file:${real}` - } else { - tree.package.dependencies[name] = '*' - } - } - } - } - - if (this.auditReport && this.auditReport.size > 0) { - await this[_queueVulnDependents](options) - } - - const { add, rm } = options - - if (rm && rm.length) { - addRmPkgDeps.rm(tree.package, rm) - for (const name of rm) { - this[_explicitRequests].add({ from: tree, name, action: 'DELETE' }) - } - } - - if (add && add.length) { - await this[_add](tree, options) - } - - // triggers a refresh of all edgesOut. this has to be done BEFORE - // adding the edges to explicitRequests, because the package setter - // resets all edgesOut. - if (add && add.length || rm && rm.length || this[_global]) { - tree.package = tree.package - } - - for (const spec of this[_resolvedAdd]) { - if (spec.tree === tree) { - this[_explicitRequests].add(tree.edgesOut.get(spec.name)) - } - } - for (const name of globalExplicitUpdateNames) { - this[_explicitRequests].add(tree.edgesOut.get(name)) - } - - this[_depsQueue].push(tree) - } - - // This returns a promise because we might not have the name yet, - // and need to call pacote.manifest to find the name. - [_add] (tree, {add, saveType = null, saveBundle = false}) { - // get the name for each of the specs in the list. - // ie, doing `foo@bar` we just return foo - // but if it's a url or git, we don't know the name until we - // fetch it and look in its manifest. - return Promise.all(add.map(async rawSpec => { - // We do NOT provide the path to npa here, because user-additions - // need to be resolved relative to the CWD the user is in. 
- const spec = await this[_retrieveSpecName](npa(rawSpec)) - .then(spec => this[_updateFilePath](spec)) - .then(spec => this[_followSymlinkPath](spec)) - spec.tree = tree - return spec - })).then(add => { - this[_resolvedAdd].push(...add) - // now add is a list of spec objects with names. - // find a home for each of them! - addRmPkgDeps.add({ - pkg: tree.package, - add, - saveBundle, - saveType, - path: this.path, - log: this.log, - }) - }) - } - - async [_retrieveSpecName] (spec) { - // if it's just @'' then we reload whatever's there, or get latest - // if it's an explicit tag, we need to install that specific tag version - const isTag = spec.rawSpec && spec.type === 'tag' - - if (spec.name && !isTag) { - return spec - } - - const mani = await pacote.manifest(spec, { ...this.options }) - // if it's a tag type, then we need to run it down to an actual version - if (isTag) { - return npa(`${mani.name}@${mani.version}`) - } - - spec.name = mani.name - return spec - } - - async [_updateFilePath] (spec) { - if (spec.type === 'file') { - return this[_getRelpathSpec](spec, spec.fetchSpec) - } - - return spec - } - - async [_followSymlinkPath] (spec) { - if (spec.type === 'directory') { - const real = await ( - realpath(spec.fetchSpec, this[_rpcache], this[_stcache]) - // TODO: create synthetic test case to simulate realpath failure - .catch(/* istanbul ignore next */() => null) - ) - - return this[_getRelpathSpec](spec, real) - } - return spec - } - - [_getRelpathSpec] (spec, filepath) { - /* istanbul ignore else - should also be covered by realpath failure */ - if (filepath) { - const { name } = spec - const tree = this.idealTree.target - spec = npa(`file:${relpath(tree.path, filepath)}`, tree.path) - spec.name = name - } - return spec - } - - // TODO: provide a way to fix bundled deps by exposing metadata about - // what's in the bundle at each published manifest. Without that, we - // can't possibly fix bundled deps without breaking a ton of other stuff, - // and leaving the user subject to getting it overwritten later anyway. - async [_queueVulnDependents] (options) { - for (const vuln of this.auditReport.values()) { - for (const node of vuln.nodes) { - const bundler = node.getBundler() - - // XXX this belongs in the audit report itself, not here. - // We shouldn't even get these things here, and they shouldn't - // be printed by npm-audit-report as if they can be fixed, because - // they can't. - if (bundler) { - this.log.warn(`audit fix ${node.name}@${node.version}`, - `${node.location}\nis a bundled dependency of\n${ - bundler.name}@${bundler.version} at ${bundler.location}\n` + - 'It cannot be fixed automatically.\n' + - `Check for updates to the ${bundler.name} package.`) - continue - } - - for (const edge of node.edgesIn) { - this.addTracker('idealTree', edge.from.name, edge.from.location) - this[_depsQueue].push(edge.from) - } - } - } - - // note any that can't be fixed at the root level without --force - // if there's a fix, we use that. otherwise, the user has to remove it, - // find a different thing, fix the upstream, etc. - // - // XXX: how to handle top nodes that aren't the root? Maybe the report - // just tells the user to cd into that directory and fix it? 
- if (this[_force] && this.auditReport && this.auditReport.topVulns.size) { - options.add = options.add || [] - options.rm = options.rm || [] - const nodesTouched = new Set() - for (const [name, topVuln] of this.auditReport.topVulns.entries()) { - const { - simpleRange, - topNodes, - fixAvailable, - } = topVuln - for (const node of topNodes) { - if (!node.isProjectRoot && !node.isWorkspace) { - // not something we're going to fix, sorry. have to cd into - // that directory and fix it yourself. - this.log.warn('audit', 'Manual fix required in linked project ' + - `at ./${node.location} for ${name}@${simpleRange}.\n` + - `'cd ./${node.location}' and run 'npm audit' for details.`) - continue - } - - if (!fixAvailable) { - this.log.warn('audit', `No fix available for ${name}@${simpleRange}`) - continue - } - - const { isSemVerMajor, version } = fixAvailable - const breakingMessage = isSemVerMajor - ? 'a SemVer major change' - : 'outside your stated dependency range' - this.log.warn('audit', `Updating ${name} to ${version},` + - `which is ${breakingMessage}.`) - - await this[_add](node, { add: [`${name}@${version}`] }) - nodesTouched.add(node) - } - } - for (const node of nodesTouched) { - node.package = node.package - } - } - } - - [_isVulnerable] (node) { - return this.auditReport && this.auditReport.isVulnerable(node) - } - - [_avoidRange] (name) { - if (!this.auditReport) { - return null - } - const vuln = this.auditReport.get(name) - if (!vuln) { - return null - } - return vuln.range - } - - [_queueNamedUpdates] () { - // ignore top nodes, since they are not loaded the same way, and - // probably have their own project associated with them. - - // for every node with one of the names on the list, we add its - // dependents to the queue to be evaluated. in buildDepStep, - // anything on the update names list will get refreshed, even if - // it isn't a problem. - - // XXX this could be faster by doing a series of inventory.query('name') - // calls rather than walking over everything in the tree. - const set = this.idealTree.inventory - .filter(n => this[_shouldUpdateNode](n)) - for (const node of set) { - for (const edge of node.edgesIn) { - this.addTracker('idealTree', edge.from.name, edge.from.location) - this[_depsQueue].push(edge.from) - } - } - } - - [_shouldUpdateNode] (node) { - return this[_updateNames].includes(node.name) && - !node.isTop && - !node.inDepBundle && - !node.inShrinkwrap - } - - async [_inflateAncientLockfile] () { - const { meta, inventory } = this.idealTree - const ancient = meta.ancientLockfile - const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2) - - if (inventory.size === 0 || !ancient && !old) { - return - } - - // if the lockfile is from node v5 or earlier, then we'll have to reload - // all the manifests of everything we encounter. this is costly, but at - // least it's just a one-time hit. - process.emit('time', 'idealTree:inflate') - - const heading = ancient ? 'ancient lockfile' : 'old lockfile' - this.log.warn(heading, - ` -The ${meta.type} file was created with an old version of npm, -so supplemental metadata must be fetched from the registry. - -This is a one-time fix-up, please be patient... 
-`) - - this.addTracker('idealTree:inflate') - const queue = [] - for (const node of inventory.values()) { - if (node.isProjectRoot) { - continue - } - - queue.push(async () => { - this.log.silly('inflate', node.location) - const { resolved, version, path, name, location, integrity } = node - // don't try to hit the registry for linked deps - const useResolved = resolved && ( - !version || resolved.startsWith('file:') - ) - const id = useResolved ? resolved - : version || `file:${node.path}` - const spec = npa.resolve(name, id, dirname(path)) - const sloc = location.substr('node_modules/'.length) - const t = `idealTree:inflate:${sloc}` - this.addTracker(t) - await pacote.manifest(spec, { - ...this.options, - resolved: resolved, - integrity: integrity, - fullMetadata: false, - }).then(mani => { - node.package = { ...mani, _id: `${mani.name}@${mani.version}` } - }).catch((er) => { - const warning = `Could not fetch metadata for ${name}@${id}` - this.log.warn(heading, warning, er) - }) - this.finishTracker(t) - }) - } - await promiseCallLimit(queue) - - // have to re-calc dep flags, because the nodes don't have edges - // until their packages get assigned, so everything looks extraneous - calcDepFlags(this.idealTree) - - // yes, yes, this isn't the "original" version, but now that it's been - // upgraded, we need to make sure we don't do the work to upgrade it - // again, since it's now as new as can be. - meta.originalLockfileVersion = 2 - this.finishTracker('idealTree:inflate') - process.emit('timeEnd', 'idealTree:inflate') - } - - // at this point we have a virtual tree with the actual root node's - // package deps, which may be partly or entirely incomplete, invalid - // or extraneous. - [_buildDeps] () { - process.emit('time', 'idealTree:buildDeps') - const tree = this.idealTree.target - this[_depsQueue].push(tree) - this.log.silly('idealTree', 'buildDeps') - this.addTracker('idealTree', tree.name, '') - return this[_buildDepStep]() - .then(() => process.emit('timeEnd', 'idealTree:buildDeps')) - } - - async [_buildDepStep] () { - // removes tracker of previous dependency in the queue - if (this[_currentDep]) { - const { location, name } = this[_currentDep] - process.emit('timeEnd', `idealTree:${location || '#root'}`) - this.finishTracker('idealTree', name, location) - this[_currentDep] = null - } - - if (!this[_depsQueue].length) { - return this[_resolveLinks]() - } - - // sort physically shallower deps up to the front of the queue, - // because they'll affect things deeper in, then alphabetical - this[_depsQueue].sort((a, b) => - (a.depth - b.depth) || localeCompare(a.path, b.path)) - - const node = this[_depsQueue].shift() - const bd = node.package.bundleDependencies - const hasBundle = bd && Array.isArray(bd) && bd.length - const { hasShrinkwrap } = node - - // if the node was already visited, or has since been removed from the - // tree, skip over it and process the rest of the queue. If a node has - // a shrinkwrap, also skip it, because it's going to get its deps - // satisfied by whatever's in that file anyway. - if (this[_depsSeen].has(node) || - node.root !== this.idealTree || - hasShrinkwrap && !this[_complete]) { - return this[_buildDepStep]() - } - - this[_depsSeen].add(node) - this[_currentDep] = node - process.emit('time', `idealTree:${node.location || '#root'}`) - - // if we're loading a _complete_ ideal tree, for a --package-lock-only - // installation for example, we have to crack open the tarball and - // look inside if it has bundle deps or shrinkwraps. 
note that this is - // not necessary during a reification, because we just update the - // ideal tree by reading bundles/shrinkwraps in place. - // Don't bother if the node is from the actual tree and hasn't - // been resolved, because we can't fetch it anyway, could be anything! - const crackOpen = this[_complete] && - node !== this.idealTree && - node.resolved && - (hasBundle || hasShrinkwrap) - if (crackOpen) { - const Arborist = this.constructor - const opt = { ...this.options } - await cacache.tmp.withTmp(this.cache, opt, async path => { - await pacote.extract(node.resolved, path, { - ...opt, - resolved: node.resolved, - integrity: node.integrity, - }) - - if (hasShrinkwrap) { - await new Arborist({ ...this.options, path }) - .loadVirtual({ root: node }) - } - - if (hasBundle) { - await new Arborist({ ...this.options, path }) - .loadActual({ root: node, ignoreMissing: true }) - } - }) - } - - // if any deps are missing or invalid, then we fetch the manifest for - // the thing we want, and build a new dep node from that. - // Then, find the ideal placement for that node. The ideal placement - // searches from the node's deps (or parent deps in the case of non-root - // peer deps), and walks up the tree until it finds the highest spot - // where it doesn't cause any conflicts. - // - // A conflict can be: - // - A node by that name already exists at that location. - // - The parent has a peer dep on that name - // - One of the node's peer deps conflicts at that location, unless the - // peer dep is met by a node at that location, which is fine. - // - // If we create a new node, then build its ideal deps as well. - // - // Note: this is the same "maximally naive" deduping tree-building - // algorithm that npm has used since v3. In a case like this: - // - // root -> (a@1, b@1||2) - // a -> (b@1) - // - // You'll end up with a tree like this: - // - // root - // +-- a@1 - // | +-- b@1 - // +-- b@2 - // - // rather than this, more deduped, but just as correct tree: - // - // root - // +-- a@1 - // +-- b@1 - // - // Another way to look at it is that this algorithm favors getting higher - // version deps at higher levels in the tree, even if that reduces - // potential deduplication. - // - // Set `preferDedupe: true` in the options to replace the shallower - // dep if allowed. - - const tasks = [] - const peerSource = this[_peerSetSource].get(node) || node - for (const edge of this[_problemEdges](node)) { - if (edge.overridden) { - continue - } - - // peerSetSource is only relevant when we have a peerEntryEdge - // otherwise we're setting regular non-peer deps as if they have - // a virtual root of whatever brought in THIS node. - // so we VR the node itself if the edge is not a peer - const source = edge.peer ? peerSource : node - - const virtualRoot = this[_virtualRoot](source, true) - // reuse virtual root if we already have one, but don't - // try to do the override ahead of time, since we MAY be able - // to create a more correct tree than the virtual root could. - const vrEdge = virtualRoot && virtualRoot.edgesOut.get(edge.name) - const vrDep = vrEdge && vrEdge.valid && vrEdge.to - // only re-use the virtualRoot if it's a peer edge we're placing. - // otherwise, we end up in situations where we override peer deps that - // we could have otherwise found homes for. 
Eg: - // xy -> (x, y) - // x -> PEER(z@1) - // y -> PEER(z@2) - // If xy is a dependency, we can resolve this like: - // project - // +-- xy - // | +-- y - // | +-- z@2 - // +-- x - // +-- z@1 - // But if x and y are loaded in the same virtual root, then they will - // be forced to agree on a version of z. - const required = new Set([edge.from]) - const parent = edge.peer ? virtualRoot : null - const dep = vrDep && vrDep.satisfies(edge) ? vrDep - : await this[_nodeFromEdge](edge, parent, null, required) - - /* istanbul ignore next */ - debug(() => { - if (!dep) { - throw new Error('no dep??') - } - }) - - tasks.push({edge, dep}) - } - - const placeDeps = tasks - .sort((a, b) => localeCompare(a.edge.name, b.edge.name)) - .map(({ edge, dep }) => new PlaceDep({ - edge, - dep, - - explicitRequest: this[_explicitRequests].has(edge), - updateNames: this[_updateNames], - auditReport: this.auditReport, - force: this[_force], - preferDedupe: this[_preferDedupe], - legacyBundling: this[_legacyBundling], - strictPeerDeps: this[_strictPeerDeps], - legacyPeerDeps: this.legacyPeerDeps, - globalStyle: this[_globalStyle], - })) - - const promises = [] - for (const pd of placeDeps) { - // placing a dep is actually a tree of placing the dep itself - // and all of its peer group that aren't already met by the tree - depth({ - tree: pd, - getChildren: pd => pd.children, - visit: pd => { - const { placed, edge, canPlace: cpd } = pd - // if we didn't place anything, nothing to do here - if (!placed) { - return - } - - // we placed something, that means we changed the tree - if (placed.errors.length) { - this[_loadFailures].add(placed) - } - this[_mutateTree] = true - if (cpd.canPlaceSelf === OK) { - for (const edgeIn of placed.edgesIn) { - if (edgeIn === edge) { - continue - } - const { from, valid, overridden } = edgeIn - if (!overridden && !valid && !this[_depsSeen].has(from)) { - this.addTracker('idealTree', from.name, from.location) - this[_depsQueue].push(edgeIn.from) - } - } - } else { - /* istanbul ignore else - should be only OK or REPLACE here */ - if (cpd.canPlaceSelf === REPLACE) { - // this may also create some invalid edges, for example if we're - // intentionally causing something to get nested which was - // previously placed in this location. - for (const edgeIn of placed.edgesIn) { - if (edgeIn === edge) { - continue - } - - const { valid, overridden } = edgeIn - if (!valid && !overridden) { - // if it's already been visited, we have to re-visit - // otherwise, just enqueue normally. - this[_depsSeen].delete(edgeIn.from) - this[_depsQueue].push(edgeIn.from) - } - } - } - } - - /* istanbul ignore if - should be impossible */ - if (cpd.canPlaceSelf === CONFLICT) { - debug(() => { - const er = new Error('placed with canPlaceSelf=CONFLICT') - throw Object.assign(er, { placeDep: pd }) - }) - return - } - - // lastly, also check for the missing deps of the node we placed, - // and any holes created by pruning out conflicted peer sets. - this[_depsQueue].push(placed) - for (const dep of pd.needEvaluation) { - this[_depsSeen].delete(dep) - this[_depsQueue].push(dep) - } - - // pre-fetch any problem edges, since we'll need these soon - // if it fails at this point, though, dont' worry because it - // may well be an optional dep that has gone missing. it'll - // fail later anyway. 
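A toy model of the "maximally naive" placement walk described in the long comment above may make the tradeoff concrete. Everything here is hypothetical scaffolding (a Map keyed by slash-joined paths standing in for node_modules nesting), not Arborist's Node or PlaceDep machinery:

// place `name@version`, requested by the node at `fromPath`, at the
// shallowest location that doesn't conflict with an existing node
const naivePlace = (tree, fromPath, name, version) => {
  let placement = [...fromPath, name]
  for (let i = fromPath.length - 1; i >= 0; i--) {
    const shallower = [...fromPath.slice(0, i), name]
    const existing = tree.get(shallower.join('/'))
    if (existing && existing !== version) {
      break // conflict: keep the deepest conflict-free spot found
    }
    placement = shallower
    if (existing) {
      break // same version already placed here: dedupe onto it
    }
  }
  tree.set(placement.join('/'), version)
}

const tree = new Map()
naivePlace(tree, [], 'a', '1')      // root dep a@1
naivePlace(tree, [], 'b', '2')      // root's b@1||2 edge resolved to 2
naivePlace(tree, ['a'], 'b', '1')   // a's b@1 must nest under a
console.log([...tree]) // [ ['a','1'], ['b','2'], ['a/b','1'] ]

This reproduces the first tree from the comment: b@2 wins the shallow spot because the root's edge is placed first, so a's b@1 has to nest below.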
- const from = fromPath(placed) - promises.push(...this[_problemEdges](placed).map(e => - this[_fetchManifest](npa.resolve(e.name, e.spec, from)) - .catch(er => null))) - }, - }) - } - - for (const { to } of node.edgesOut.values()) { - if (to && to.isLink && to.target) { - this[_linkNodes].add(to) - } - } - - await Promise.all(promises) - return this[_buildDepStep]() - } - - // loads a node from an edge, and then loads its peer deps (and their - // peer deps, on down the line) into a virtual root parent. - async [_nodeFromEdge] (edge, parent_, secondEdge, required) { - // create a virtual root node with the same deps as the node that - // is requesting this one, so that we can get all the peer deps in - // a context where they're likely to be resolvable. - // Note that the virtual root will also have virtual copies of the - // targets of any child Links, so that they resolve appropriately. - const parent = parent_ || this[_virtualRoot](edge.from) - - const spec = npa.resolve(edge.name, edge.spec, edge.from.path) - const first = await this[_nodeFromSpec](edge.name, spec, parent, edge) - - // we might have a case where the parent has a peer dependency on - // `foo@*` which resolves to v2, but another dep in the set has a - // peerDependency on `foo@1`. In that case, if we force it to be v2, - // we're unnecessarily triggering an ERESOLVE. - // If we have a second edge to worry about, and it's not satisfied - // by the first node, try a second and see if that satisfies the - // original edge here. - const spec2 = secondEdge && npa.resolve( - edge.name, - secondEdge.spec, - secondEdge.from.path - ) - const second = secondEdge && !secondEdge.valid - ? await this[_nodeFromSpec](edge.name, spec2, parent, secondEdge) - : null - - // pick the second one if they're both happy with that, otherwise first - const node = second && edge.valid ? second : first - // ensure the one we want is the one that's placed - node.parent = parent - - if (required.has(edge.from) && edge.type !== 'peerOptional' || - secondEdge && ( - required.has(secondEdge.from) && secondEdge.type !== 'peerOptional')) { - required.add(node) - } - - // keep track of the thing that caused this node to be included. - const src = parent.sourceReference - this[_peerSetSource].set(node, src) - - // do not load the peers along with the set if this is a global top pkg - // otherwise we'll be tempted to put peers as other top-level installed - // things, potentially clobbering what's there already, which is not - // what we want. the missing edges will be picked up on the next pass. - if (this[_global] && edge.from.isProjectRoot) { - return node - } - - // otherwise, we have to make sure that our peers can go along with us. - return this[_loadPeerSet](node, required) - } - - [_virtualRoot] (node, reuse = false) { - if (reuse && this[_virtualRoots].has(node)) { - return this[_virtualRoots].get(node) - } - - const vr = new Node({ - path: node.realpath, - sourceReference: node, - legacyPeerDeps: this.legacyPeerDeps, - }) - - // also need to set up any targets from any link deps, so that - // they are properly reflected in the virtual environment - for (const child of node.children.values()) { - if (child.isLink) { - new Node({ - path: child.realpath, - sourceReference: child.target, - root: vr, - }) - } - } - - this[_virtualRoots].set(node, vr) - return vr - } - - [_problemEdges] (node) { - // skip over any bundled deps, they're not our problem. 
- // Note that this WILL fetch bundled meta-deps which are also dependencies - // but not listed as bundled deps. When reifying, we first unpack any - // nodes that have bundleDependencies, then do a loadActual on them, move - // the nodes into the ideal tree, and then prune. So, fetching those - // possibly-bundled meta-deps at this point doesn't cause any worse - // problems than a few unnecessary packument fetches. - - // also skip over any nodes in the tree that failed to load, since those - // will crash the install later on anyway. - const bd = node.isProjectRoot || node.isWorkspace ? null - : node.package.bundleDependencies - const bundled = new Set(bd || []) - - return [...node.edgesOut.values()] - .filter(edge => { - // If it's included in a bundle, we take whatever is specified. - if (bundled.has(edge.name)) { - return false - } - - // If it's already been logged as a load failure, skip it. - if (edge.to && this[_loadFailures].has(edge.to)) { - return false - } - - // If it's shrinkwrapped, we use what the shrinkwap wants. - if (edge.to && edge.to.inShrinkwrap) { - return false - } - - // If the edge has no destination, that's a problem, unless - // if it's peerOptional and not explicitly requested. - if (!edge.to) { - return edge.type !== 'peerOptional' || - this[_explicitRequests].has(edge) - } - - // If the edge has an error, there's a problem. - if (!edge.valid) { - return true - } - - // user explicitly asked to update this package by name, problem - if (this[_updateNames].includes(edge.name)) { - return true - } - - // fixing a security vulnerability with this package, problem - if (this[_isVulnerable](edge.to)) { - return true - } - - // user has explicitly asked to install this package, problem - if (this[_explicitRequests].has(edge)) { - return true - } - - // No problems! - return false - }) - } - - async [_fetchManifest] (spec) { - const options = { - ...this.options, - avoid: this[_avoidRange](spec.name), - } - // get the intended spec and stored metadata from yarn.lock file, - // if available and valid. - spec = this.idealTree.meta.checkYarnLock(spec, options) - - if (this[_manifests].has(spec.raw)) { - return this[_manifests].get(spec.raw) - } else { - this.log.silly('fetch manifest', spec.raw) - const p = pacote.manifest(spec, options) - .then(mani => { - this[_manifests].set(spec.raw, mani) - return mani - }) - this[_manifests].set(spec.raw, p) - return p - } - } - - [_nodeFromSpec] (name, spec, parent, edge) { - // pacote will slap integrity on its options, so we have to clone - // the object so it doesn't get mutated. - // Don't bother to load the manifest for link deps, because the target - // might be within another package that doesn't exist yet. - const { legacyPeerDeps } = this - return spec.type === 'directory' - ? this[_linkFromSpec](name, spec, parent, edge) - : this[_fetchManifest](spec) - .then(pkg => new Node({ name, pkg, parent, legacyPeerDeps }), error => { - error.requiredBy = edge.from.location || '.' - - // failed to load the spec, either because of enotarget or - // fetch failure of some other sort. save it so we can verify - // later that it's optional, otherwise the error is fatal. 
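Stepping out of the error path for a moment: the _fetchManifest method above uses a promise-memoization trick worth isolating, in which the in-flight Promise is stored under the same cache key as the finished manifest, so concurrent requests for one spec share a single registry fetch. A reduced sketch, with pacote as the only real dependency and the cache local to the example:

const pacote = require('pacote')
const manifests = new Map()

const fetchManifest = (spec, options = {}) => {
  if (manifests.has(spec)) {
    // either a resolved manifest or a still-pending Promise; both are
    // fine to hand back, since callers await the result either way
    return manifests.get(spec)
  }
  const p = pacote.manifest(spec, options).then(mani => {
    manifests.set(spec, mani) // swap the Promise for the plain value
    return mani
  })
  manifests.set(spec, p)
  return p
}

// both of these resolve from one network request:
// await Promise.all([fetchManifest('abbrev@1.1.1'), fetchManifest('abbrev@1.1.1')])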
- const n = new Node({ - name, - parent, - error, - legacyPeerDeps, - }) - this[_loadFailures].add(n) - return n - }) - } - - [_linkFromSpec] (name, spec, parent, edge) { - const realpath = spec.fetchSpec - const { legacyPeerDeps } = this - return rpj(realpath + '/package.json').catch(() => ({})).then(pkg => { - const link = new Link({ name, parent, realpath, pkg, legacyPeerDeps }) - this[_linkNodes].add(link) - return link - }) - } - - // load all peer deps and meta-peer deps into the node's parent - // At the end of this, the node's peer-type outward edges are all - // resolved, and so are all of theirs, but other dep types are not. - // We prefer to get peer deps that meet the requiring node's dependency, - // if possible, since that almost certainly works (since that package was - // developed with this set of deps) and will typically be more restrictive. - // Note that the peers in the set can conflict either with each other, - // or with a direct dependency from the virtual root parent! In strict - // mode, this is always an error. In force mode, it never is, and we - // prefer the parent's non-peer dep over a peer dep, or the version that - // gets placed first. In non-strict mode, we behave strictly if the - // virtual root is based on the root project, and allow non-peer parent - // deps to override, but throw if no preference can be determined. - async [_loadPeerSet] (node, required) { - const peerEdges = [...node.edgesOut.values()] - // we typically only install non-optional peers, but we have to - // factor them into the peerSet so that we can avoid conflicts - .filter(e => e.peer && !(e.valid && e.to)) - .sort(({name: a}, {name: b}) => localeCompare(a, b)) - - for (const edge of peerEdges) { - // already placed this one, and we're happy with it. - if (edge.valid && edge.to) { - continue - } - - const parentEdge = node.parent.edgesOut.get(edge.name) - const {isProjectRoot, isWorkspace} = node.parent.sourceReference - const isMine = isProjectRoot || isWorkspace - const conflictOK = this[_force] || !isMine && !this[_strictPeerDeps] - - if (!edge.to) { - if (!parentEdge) { - // easy, just put the thing there - await this[_nodeFromEdge](edge, node.parent, null, required) - continue - } else { - // if the parent's edge is very broad like >=1, and the edge in - // question is something like 1.x, then we want to get a 1.x, not - // a 2.x. pass along the child edge as an advisory guideline. - // if the parent edge doesn't satisfy the child edge, and the - // child edge doesn't satisfy the parent edge, then we have - // a conflict. this is always a problem in strict mode, never - // in force mode, and a problem in non-strict mode if this isn't - // on behalf of our project. in all such cases, we warn at least. - const dep = await this[_nodeFromEdge]( - parentEdge, - node.parent, - edge, - required - ) - - // hooray! that worked! - if (edge.valid) { - continue - } - - // allow it. either we're overriding, or it's not something - // that will be installed by default anyway, and we'll fail when - // we get to the point where we need to, if we need to. - if (conflictOK || !required.has(dep)) { - edge.overridden = true - continue - } - - // problem - this[_failPeerConflict](edge, parentEdge) - } - } - - // There is something present already, and we're not happy about it - // See if the thing we WOULD be happy with is also going to satisfy - // the other dependents on the current node. 
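The "broad parent range versus narrower child range" case described above (a parent peer edge of >=1 next to a child edge of 1.x) reduces to ordinary semver range arithmetic. This standalone snippet uses the semver package (which the reify mixin in this same tree also requires) only to illustrate why the child edge is a safe advisory guideline:

const semver = require('semver')

// the two ranges overlap, so one version can satisfy both edges...
console.log(semver.intersects('>=1', '1.x'))  // true
// ...and any 1.x pick also satisfies the broad parent range
console.log(semver.satisfies('1.2.3', '>=1')) // true
// while a 2.x pick would break the child edge
console.log(semver.satisfies('2.0.0', '1.x')) // false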
- const current = edge.to - const dep = await this[_nodeFromEdge](edge, null, null, required) - if (dep.canReplace(current)) { - await this[_nodeFromEdge](edge, node.parent, null, required) - continue - } - - // at this point we know that there is a dep there, and - // we don't like it. always fail strictly, always allow forcibly or - // in non-strict mode if it's not our fault. don't warn here, because - // we are going to warn again when we place the deps, if we end up - // overriding for something else. If the thing that has this dep - // isn't also required, then there's a good chance we won't need it, - // so allow it for now and let it conflict if it turns out to actually - // be necessary for the installation. - if (conflictOK || !required.has(edge.from)) { - continue - } - - // ok, it's the root, or we're in unforced strict mode, so this is bad - this[_failPeerConflict](edge, parentEdge) - } - return node - } - - [_failPeerConflict] (edge, currentEdge) { - const expl = this[_explainPeerConflict](edge, currentEdge) - throw Object.assign(new Error('unable to resolve dependency tree'), expl) - } - - [_explainPeerConflict] (edge, currentEdge) { - const node = edge.from - const curNode = node.resolve(edge.name) - const current = curNode.explain() - return { - code: 'ERESOLVE', - current, - // it SHOULD be impossible to get here without a current node in place, - // but this at least gives us something report on when bugs creep into - // the tree handling logic. - currentEdge: currentEdge ? currentEdge.explain() : null, - edge: edge.explain(), - strictPeerDeps: this[_strictPeerDeps], - force: this[_force], - } - } - - // go through all the links in the this[_linkNodes] set - // for each one: - // - if outside the root, ignore it, assume it's fine, it's not our problem - // - if a node in the tree already, assign the target to that node. - // - if a path under an existing node, then assign that as the fsParent, - // and add it to the _depsQueue - // - // call buildDepStep if anything was added to the queue, otherwise we're done - [_resolveLinks] () { - for (const link of this[_linkNodes]) { - this[_linkNodes].delete(link) - - // link we never ended up placing, skip it - if (link.root !== this.idealTree) { - continue - } - - const tree = this.idealTree.target - const external = !link.target.isDescendantOf(tree) - - // outside the root, somebody else's problem, ignore it - if (external && !this[_follow]) { - continue - } - - // didn't find a parent for it or it has not been seen yet - // so go ahead and process it. - const unseenLink = (link.target.parent || link.target.fsParent) && - !this[_depsSeen].has(link.target) - - if (this[_follow] && - !link.target.parent && - !link.target.fsParent || - unseenLink) { - this.addTracker('idealTree', link.target.name, link.target.location) - this[_depsQueue].push(link.target) - } - } - - if (this[_depsQueue].length) { - return this[_buildDepStep]() - } - } - - [_fixDepFlags] () { - process.emit('time', 'idealTree:fixDepFlags') - const metaFromDisk = this.idealTree.meta.loadedFromDisk - const flagsSuspect = this[_flagsSuspect] - const mutateTree = this[_mutateTree] - // if the options set prune:false, then we don't prune, but we still - // mark the extraneous items in the tree if we modified it at all. - // If we did no modifications, we just iterate over the extraneous nodes. 
- // if we started with an empty tree, then the dep flags are already - // all set to true, and there can be nothing extraneous, so there's - // nothing to prune, because we built it from scratch. if we didn't - // add or remove anything, then also nothing to do. - if (metaFromDisk && mutateTree) { - resetDepFlags(this.idealTree) - } - - // update all the dev/optional/etc flags in the tree - // either we started with a fresh tree, or we - // reset all the flags to find the extraneous nodes. - // - // if we started from a blank slate, or changed something, then - // the dep flags will be all set to true. - if (!metaFromDisk || mutateTree) { - calcDepFlags(this.idealTree) - } else { - // otherwise just unset all the flags on the root node - // since they will sometimes have the default value - this.idealTree.extraneous = false - this.idealTree.dev = false - this.idealTree.optional = false - this.idealTree.devOptional = false - this.idealTree.peer = false - } - - // at this point, any node marked as extraneous should be pruned. - // if we started from a shrinkwrap, and then added/removed something, - // then the tree is suspect. Prune what is marked as extraneous. - // otherwise, don't bother. - const needPrune = metaFromDisk && (mutateTree || flagsSuspect) - if (this[_prune] && needPrune) { - this[_idealTreePrune]() - } - - process.emit('timeEnd', 'idealTree:fixDepFlags') - } - - [_idealTreePrune] () { - for (const node of this.idealTree.inventory.filter(n => n.extraneous)) { - node.parent = null - } - } - - [_pruneFailedOptional] () { - for (const node of this[_loadFailures]) { - if (!node.optional) { - throw node.errors[0] - } - - const set = optionalSet(node) - for (const node of set) { - node.parent = null - } - } - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/deduper.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/deduper.js deleted file mode 100644 index 1741c31a19a27..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/deduper.js +++ /dev/null @@ -1,19 +0,0 @@ -module.exports = cls => class Deduper extends cls { - async dedupe (options = {}) { - // allow the user to set options on the ctor as well. - // XXX: deprecate separate method options objects. - options = { ...this.options, ...options } - const tree = await this.loadVirtual().catch(() => this.loadActual()) - const names = [] - for (const name of tree.inventory.query('name')) { - if (tree.inventory.query('name', name).size > 1) { - names.push(name) - } - } - return this.reify({ - ...options, - preferDedupe: true, - update: { names }, - }) - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/index.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/index.js deleted file mode 100644 index ccfa7cad9c04d..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/index.js +++ /dev/null @@ -1,130 +0,0 @@ -// The arborist manages three trees: -// - actual -// - virtual -// - ideal -// -// The actual tree is what's present on disk in the node_modules tree -// and elsewhere that links may extend. -// -// The virtual tree is loaded from metadata (package.json and lock files). -// -// The ideal tree is what we WANT that actual tree to become. This starts -// with the virtual tree, and then applies the options requesting -// add/remove/update actions. 
-// -// To reify a tree, we calculate a diff between the ideal and actual trees, -// and then turn the actual tree into the ideal tree by taking the actions -// required. At the end of the reification process, the actualTree is -// updated to reflect the changes. -// -// Each tree has an Inventory at the root. Shrinkwrap is tracked by Arborist -// instance. It always refers to the actual tree, but is updated (and written -// to disk) on reification. - -// Each of the mixin "classes" adds functionality, but are not dependent on -// constructor call order. So, we just load them in an array, and build up -// the base class, so that the overall voltron class is easier to test and -// cover, and separation of concerns can be maintained. - -const {resolve} = require('path') -const {homedir} = require('os') -const procLog = require('proc-log') -const { saveTypeMap } = require('../add-rm-pkg-deps.js') - -const mixins = [ - require('../tracker.js'), - require('./pruner.js'), - require('./deduper.js'), - require('./audit.js'), - require('./build-ideal-tree.js'), - require('./load-workspaces.js'), - require('./load-actual.js'), - require('./load-virtual.js'), - require('./rebuild.js'), - require('./reify.js'), -] - -const Base = mixins.reduce((a, b) => b(a), require('events')) -const getWorkspaceNodes = require('../get-workspace-nodes.js') - -class Arborist extends Base { - constructor (options = {}) { - process.emit('time', 'arborist:ctor') - super(options) - this.options = { - nodeVersion: process.version, - ...options, - path: options.path || '.', - cache: options.cache || `${homedir()}/.npm/_cacache`, - packumentCache: options.packumentCache || new Map(), - log: options.log || procLog, - workspacesEnabled: options.workspacesEnabled !== false, - } - if (options.saveType && !saveTypeMap.get(options.saveType)) { - throw new Error(`Invalid saveType ${options.saveType}`) - } - this.cache = resolve(this.options.cache) - this.path = resolve(this.options.path) - process.emit('timeEnd', 'arborist:ctor') - } - - // returns an array of the actual nodes for all the workspaces - workspaceNodes (tree, workspaces) { - return getWorkspaceNodes(tree, workspaces, this.log) - } - - // returns a set of workspace nodes and all their deps - workspaceDependencySet (tree, workspaces, includeWorkspaceRoot) { - const wsNodes = this.workspaceNodes(tree, workspaces) - if (includeWorkspaceRoot) { - for (const edge of tree.edgesOut.values()) { - if (edge.type !== 'workspace' && edge.to) { - wsNodes.push(edge.to) - } - } - } - const set = new Set(wsNodes) - const extraneous = new Set() - for (const node of set) { - for (const edge of node.edgesOut.values()) { - const dep = edge.to - if (dep) { - set.add(dep) - if (dep.isLink) { - set.add(dep.target) - } - } - } - for (const child of node.children.values()) { - if (child.extraneous) { - extraneous.add(child) - } - } - } - for (const extra of extraneous) { - set.add(extra) - } - - return set - } - - excludeWorkspacesDependencySet (tree) { - const set = new Set() - for (const edge of tree.edgesOut.values()) { - if (edge.type !== 'workspace' && edge.to) { - set.add(edge.to) - } - } - for (const node of set) { - for (const edge of node.edgesOut.values()) { - if (edge.to) { - set.add(edge.to) - } - } - } - - return set - } -} - -module.exports = Arborist diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-actual.js deleted file mode 100644 index 
68e58af7d9858..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ /dev/null @@ -1,450 +0,0 @@ -// mix-in implementing the loadActual method - -const {relative, dirname, resolve, join, normalize} = require('path') - -const rpj = require('read-package-json-fast') -const {promisify} = require('util') -const readdir = promisify(require('readdir-scoped-modules')) -const walkUp = require('walk-up-path') -const ancestorPath = require('common-ancestor-path') -const treeCheck = require('../tree-check.js') - -const Shrinkwrap = require('../shrinkwrap.js') -const calcDepFlags = require('../calc-dep-flags.js') -const Node = require('../node.js') -const Link = require('../link.js') -const realpath = require('../realpath.js') - -const _loadFSNode = Symbol('loadFSNode') -const _newNode = Symbol('newNode') -const _newLink = Symbol('newLink') -const _loadFSTree = Symbol('loadFSTree') -const _loadFSChildren = Symbol('loadFSChildren') -const _findMissingEdges = Symbol('findMissingEdges') -const _findFSParents = Symbol('findFSParents') -const _resetDepFlags = Symbol('resetDepFlags') - -const _actualTreeLoaded = Symbol('actualTreeLoaded') -const _rpcache = Symbol.for('realpathCache') -const _stcache = Symbol.for('statCache') -const _topNodes = Symbol('linkTargets') -const _cache = Symbol('nodeLoadingCache') -const _loadActual = Symbol('loadActual') -const _loadActualVirtually = Symbol('loadActualVirtually') -const _loadActualActually = Symbol('loadActualActually') -const _loadWorkspaces = Symbol.for('loadWorkspaces') -const _loadWorkspaceTargets = Symbol('loadWorkspaceTargets') -const _actualTreePromise = Symbol('actualTreePromise') -const _actualTree = Symbol('actualTree') -const _transplant = Symbol('transplant') -const _transplantFilter = Symbol('transplantFilter') - -const _filter = Symbol('filter') -const _global = Symbol.for('global') -const _changePath = Symbol.for('_changePath') - -module.exports = cls => class ActualLoader extends cls { - constructor (options) { - super(options) - - this[_global] = !!options.global - - // the tree of nodes on disk - this.actualTree = options.actualTree - - // ensure when walking the tree that we don't call loadTree on the - // same actual node more than one time. - this[_actualTreeLoaded] = new Set() - - // caches for cached realpath calls - const cwd = process.cwd() - // assume that the cwd is real enough for our purposes - this[_rpcache] = new Map([[cwd, cwd]]) - this[_stcache] = new Map() - - // cache of nodes when loading the actualTree, so that we avoid - // loaded the same node multiple times when symlinks attack. - this[_cache] = new Map() - - // cache of link targets for setting fsParent links - // We don't do fsParent as a magic getter/setter, because - // it'd be too costly to keep up to date along the walk. - // And, we know that it can ONLY be relevant when the node - // is a target of a link, otherwise it'd be in a node_modules - // folder, so take advantage of that to limit the scans later. - this[_topNodes] = new Set() - } - - [_resetDepFlags] (tree, root) { - // reset all deps to extraneous prior to recalc - if (!root) { - for (const node of tree.inventory.values()) { - node.extraneous = true - } - } - - // only reset root flags if we're not re-rooting, - // otherwise leave as-is - calcDepFlags(tree, !root) - return tree - } - - // public method - async loadActual (options = {}) { - // allow the user to set options on the ctor as well. - // XXX: deprecate separate method options objects. 
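Since loadActual is part of the public surface assembled by these mixins, a short usage sketch may help orient the reader. The project path is a placeholder, and, per the comment above, options can be supplied on the constructor as well as on the call:

const Arborist = require('@npmcli/arborist')

const inspect = async () => {
  const arb = new Arborist({ path: './some-project' }) // placeholder path
  const actual = await arb.loadActual() // the tree as it exists on disk
  console.log(actual.children.size, 'top-level packages')
  for (const node of actual.inventory.values()) {
    if (node.extraneous) {
      console.log('extraneous:', node.location)
    }
  }
}
inspect().catch(console.error)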
- options = { ...this.options, ...options } - - // stash the promise so that we don't ever have more than one - // going at the same time. This is so that buildIdealTree can - // default to the actualTree if no shrinkwrap present, but - // reify() can still call buildIdealTree and loadActual in parallel - // safely. - return this.actualTree ? this.actualTree - : this[_actualTreePromise] ? this[_actualTreePromise] - : this[_actualTreePromise] = this[_loadActual](options) - .then(tree => this[_resetDepFlags](tree, options.root)) - .then(tree => this.actualTree = treeCheck(tree)) - } - - async [_loadActual] (options) { - // mostly realpath to throw if the root doesn't exist - const { - global = false, - filter = () => true, - root = null, - transplantFilter = () => true, - ignoreMissing = false, - } = options - this[_filter] = filter - this[_transplantFilter] = transplantFilter - - if (global) { - const real = await realpath(this.path, this[_rpcache], this[_stcache]) - const newNodeOrLink = this.path === real ? _newNode : _newLink - this[_actualTree] = await this[newNodeOrLink]({ - path: this.path, - realpath: real, - pkg: {}, - global, - }) - return this[_loadActualActually]({root, ignoreMissing, global}) - } - - // not in global mode, hidden lockfile is allowed, load root pkg too - this[_actualTree] = await this[_loadFSNode]({ - path: this.path, - real: await realpath(this.path, this[_rpcache], this[_stcache]), - }) - - // XXX only rely on this if the hidden lockfile is the newest thing? - // need some kind of heuristic, like if the package.json or sw have - // been touched more recently, then ignore it? This is a hazard if - // user switches back and forth between Arborist and another way of - // mutating the node_modules folder. - const meta = await Shrinkwrap.load({ - path: this[_actualTree].path, - hiddenLockfile: true, - }) - if (meta.loadedFromDisk) { - this[_actualTree].meta = meta - return this[_loadActualVirtually]({ root }) - } else { - const meta = await Shrinkwrap.load({ - path: this[_actualTree].path, - }) - this[_actualTree].meta = meta - return this[_loadActualActually]({ root, ignoreMissing }) - } - } - - async [_loadActualVirtually] ({ root }) { - // have to load on a new Arborist object, so we don't assign - // the virtualTree on this one! Also, the weird reference is because - // we can't easily get a ref to Arborist in this module, without - // creating a circular reference, since this class is a mixin used - // to build up the Arborist class itself. - await new this.constructor({...this.options}).loadVirtual({ - root: this[_actualTree], - }) - await this[_loadWorkspaces](this[_actualTree]) - - this[_transplant](root) - return this[_actualTree] - } - - async [_loadActualActually] ({ root, ignoreMissing, global }) { - await this[_loadFSTree](this[_actualTree]) - await this[_loadWorkspaces](this[_actualTree]) - await this[_loadWorkspaceTargets](this[_actualTree]) - if (!ignoreMissing) { - await this[_findMissingEdges]() - } - this[_findFSParents]() - this[_transplant](root) - - if (global) { - // need to depend on the children, or else all of them - // will end up being flagged as extraneous, since the - // global root isn't a "real" project - const tree = this[_actualTree] - const actualRoot = tree.isLink ? tree.target : tree - const { dependencies = {} } = actualRoot.package - for (const [name, kid] of actualRoot.children.entries()) { - const def = kid.isLink ? 
`file:${kid.realpath}` : '*' - dependencies[name] = dependencies[name] || def - } - actualRoot.package = { ...actualRoot.package, dependencies } - } - return this[_actualTree] - } - - // if there are workspace targets without Link nodes created, load - // the targets, so that we know what they are. - async [_loadWorkspaceTargets] (tree) { - if (!tree.workspaces || !tree.workspaces.size) { - return - } - - const promises = [] - for (const path of tree.workspaces.values()) { - if (!this[_cache].has(path)) { - const p = this[_loadFSNode]({ path, root: this[_actualTree] }) - .then(node => this[_loadFSTree](node)) - promises.push(p) - } - } - await Promise.all(promises) - } - - [_transplant] (root) { - if (!root || root === this[_actualTree]) { - return - } - - this[_actualTree][_changePath](root.path) - for (const node of this[_actualTree].children.values()) { - if (!this[_transplantFilter](node)) { - node.root = null - } - } - - root.replace(this[_actualTree]) - for (const node of this[_actualTree].fsChildren) { - node.root = this[_transplantFilter](node) ? root : null - } - - this[_actualTree] = root - } - - [_loadFSNode] ({ path, parent, real, root }) { - if (!real) { - return realpath(path, this[_rpcache], this[_stcache]) - .then( - real => this[_loadFSNode]({ path, parent, real, root }), - // if realpath fails, just provide a dummy error node - error => new Node({ error, path, realpath: path, parent, root }) - ) - } - - // cache temporarily holds a promise placeholder so we don't try to create - // the same node multiple times. this is rare to encounter, given the - // aggressive caching on realpath and lstat calls, but it's possible that - // it's already loaded as a tree top, and then gets its parent loaded - // later, if a symlink points deeper in the tree. - const cached = this[_cache].get(path) - if (cached && !cached.dummy) { - return Promise.resolve(cached).then(node => { - node.parent = parent - return node - }) - } - - const p = rpj(join(real, 'package.json')) - // soldier on if read-package-json raises an error - .then(pkg => [pkg, null], error => [null, error]) - .then(([pkg, error]) => { - return this[normalize(path) === real ? _newNode : _newLink]({ - legacyPeerDeps: this.legacyPeerDeps, - path, - realpath: real, - pkg, - error, - parent, - root, - }) - }) - .then(node => { - this[_cache].set(path, node) - return node - }) - - this[_cache].set(path, p) - return p - } - - // this is the way it is to expose a timing issue which is difficult to - // test otherwise. The creation of a Node may take slightly longer than - // the creation of a Link that targets it. If the Node has _begun_ its - // creation phase (and put a Promise in the cache) then the Link will - // get a Promise as its cachedTarget instead of an actual Node object. - // This is not a problem, because it gets resolved prior to returning - // the tree or attempting to load children. However, it IS remarkably - // difficult to get to happen in a test environment to verify reliably. - // Hence this kludge. - [_newNode] (options) { - // check it for an fsParent if it's a tree top. there's a decent chance - // it'll get parented later, making the fsParent scan a no-op, but better - // safe than sorry, since it's cheap. - const { parent, realpath } = options - if (!parent) { - this[_topNodes].add(realpath) - } - return process.env._TEST_ARBORIST_SLOW_LINK_TARGET_ === '1' - ? 
new Promise(res => setTimeout(() => res(new Node(options)), 100)) - : new Node(options) - } - - [_newLink] (options) { - const { realpath } = options - this[_topNodes].add(realpath) - const target = this[_cache].get(realpath) - const link = new Link({ ...options, target }) - - if (!target) { - this[_cache].set(realpath, link.target) - // if a link target points at a node outside of the root tree's - // node_modules hierarchy, then load that node as well. - return this[_loadFSTree](link.target).then(() => link) - } else if (target.then) { - target.then(node => link.target = node) - } - - return link - } - - [_loadFSTree] (node) { - const did = this[_actualTreeLoaded] - node = node.target - - // if a Link target has started, but not completed, then - // a Promise will be in the cache to indicate this. - if (node.then) { - return node.then(node => this[_loadFSTree](node)) - } - - // impossible except in pathological ELOOP cases - /* istanbul ignore if */ - if (did.has(node.realpath)) { - return Promise.resolve(node) - } - - did.add(node.realpath) - return this[_loadFSChildren](node) - .then(() => Promise.all( - [...node.children.entries()] - .filter(([name, kid]) => !did.has(kid.realpath)) - .map(([name, kid]) => this[_loadFSTree](kid)))) - } - - // create child nodes for all the entries in node_modules - // and attach them to the node as a parent - [_loadFSChildren] (node) { - const nm = resolve(node.realpath, 'node_modules') - return readdir(nm).then(kids => { - return Promise.all( - // ignore . dirs and retired scoped package folders - kids.filter(kid => !/^(@[^/]+\/)?\./.test(kid)) - .filter(kid => this[_filter](node, kid)) - .map(kid => this[_loadFSNode]({ - parent: node, - path: resolve(nm, kid), - }))) - }, - // error in the readdir is not fatal, just means no kids - () => {}) - } - - async [_findMissingEdges] () { - // try to resolve any missing edges by walking up the directory tree, - // checking for the package in each node_modules folder. stop at the - // root directory. - // The tricky move here is that we load a "dummy" node for the folder - // containing the node_modules folder, so that it can be assigned as - // the fsParent. It's a bad idea to *actually* load that full node, - // because people sometimes develop in ~/projects/node_modules/... - // so we'd end up loading a massive tree with lots of unrelated junk. - const nmContents = new Map() - const tree = this[_actualTree] - for (const node of tree.inventory.values()) { - const ancestor = ancestorPath(node.realpath, this.path) - - const depPromises = [] - for (const [name, edge] of node.edgesOut.entries()) { - const notMissing = !edge.missing && - !(edge.to && (edge.to.dummy || edge.to.parent !== node)) - if (notMissing) { - continue - } - - // start the walk from the dirname, because we would have found - // the dep in the loadFSTree step already if it was local. - for (const p of walkUp(dirname(node.realpath))) { - // only walk as far as the nearest ancestor - // this keeps us from going into completely unrelated - // places when a project is just missing something, but - // allows for finding the transitive deps of link targets. - // ie, if it has to go up and back out to get to the path - // from the nearest common ancestor, we've gone too far. 
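The cutoff applied just below is easier to see in isolation. This sketch walks up from a dependent's real path and stops once relative() shows the candidate directory has escaped the nearest common ancestor; walk-up-path and common-ancestor-path are the same packages this file already requires, and the paths are examples only:

const { relative, dirname } = require('path')
const walkUp = require('walk-up-path')
const ancestorPath = require('common-ancestor-path')

const nodeRealpath = '/work/proj/packages/app/node_modules/dep'
const projectPath = '/work/proj'
const ancestor = ancestorPath(nodeRealpath, projectPath) // '/work/proj'

for (const p of walkUp(dirname(nodeRealpath))) {
  // a relative path starting with '..' means p is outside the
  // common ancestor, i.e. the walk has gone too far
  if (ancestor && /^\.\.(?:[\\/]|$)/.test(relative(ancestor, p))) {
    break
  }
  console.log('would check', p + '/node_modules')
}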
- if (ancestor && /^\.\.(?:[\\/]|$)/.test(relative(ancestor, p))) { - break - } - - const entries = nmContents.get(p) || - await readdir(p + '/node_modules').catch(() => []) - nmContents.set(p, entries) - if (!entries.includes(name)) { - continue - } - - const d = this[_cache].has(p) ? await this[_cache].get(p) - : new Node({ path: p, root: node.root, dummy: true }) - this[_cache].set(p, d) - if (d.dummy) { - // it's a placeholder, so likely would not have loaded this dep, - // unless another dep in the tree also needs it. - const depPath = `${p}/node_modules/${name}` - const cached = this[_cache].get(depPath) - if (!cached || cached.dummy) { - depPromises.push(this[_loadFSNode]({ - path: depPath, - root: node.root, - parent: d, - }).then(node => this[_loadFSTree](node))) - } - } - break - } - } - await Promise.all(depPromises) - } - } - - // try to find a node that is the parent in a fs tree sense, but not a - // node_modules tree sense, of any link targets. this allows us to - // resolve deps that node will find, but a legacy npm view of the - // world would not have noticed. - [_findFSParents] () { - for (const path of this[_topNodes]) { - const node = this[_cache].get(path) - if (node && !node.parent && !node.fsParent) { - for (const p of walkUp(dirname(path))) { - if (this[_cache].has(p)) { - node.fsParent = this[_cache].get(p) - break - } - } - } - } - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js deleted file mode 100644 index f1960116737e8..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js +++ /dev/null @@ -1,315 +0,0 @@ -// mixin providing the loadVirtual method -const localeCompare = require('@isaacs/string-locale-compare')('en') - -const {resolve} = require('path') - -const nameFromFolder = require('@npmcli/name-from-folder') -const consistentResolve = require('../consistent-resolve.js') -const Shrinkwrap = require('../shrinkwrap.js') -const Node = require('../node.js') -const Link = require('../link.js') -const relpath = require('../relpath.js') -const calcDepFlags = require('../calc-dep-flags.js') -const rpj = require('read-package-json-fast') -const treeCheck = require('../tree-check.js') - -const loadFromShrinkwrap = Symbol('loadFromShrinkwrap') -const resolveNodes = Symbol('resolveNodes') -const resolveLinks = Symbol('resolveLinks') -const assignBundles = Symbol('assignBundles') -const loadRoot = Symbol('loadRoot') -const loadNode = Symbol('loadVirtualNode') -const loadLink = Symbol('loadVirtualLink') -const loadWorkspaces = Symbol.for('loadWorkspaces') -const loadWorkspacesVirtual = Symbol.for('loadWorkspacesVirtual') -const flagsSuspect = Symbol.for('flagsSuspect') -const reCalcDepFlags = Symbol('reCalcDepFlags') -const checkRootEdges = Symbol('checkRootEdges') -const rootOptionProvided = Symbol('rootOptionProvided') - -const depsToEdges = (type, deps) => - Object.entries(deps).map(d => [type, ...d]) - -module.exports = cls => class VirtualLoader extends cls { - constructor (options) { - super(options) - - // the virtual tree we load from a shrinkwrap - this.virtualTree = options.virtualTree - this[flagsSuspect] = false - } - - // public method - async loadVirtual (options = {}) { - if (this.virtualTree) { - return this.virtualTree - } - - // allow the user to set reify options on the ctor as well. - // XXX: deprecate separate reify() options object. 
- options = { ...this.options, ...options } - - if (options.root && options.root.meta) { - await this[loadFromShrinkwrap](options.root.meta, options.root) - return treeCheck(this.virtualTree) - } - - const s = await Shrinkwrap.load({ path: this.path }) - if (!s.loadedFromDisk && !options.root) { - const er = new Error('loadVirtual requires existing shrinkwrap file') - throw Object.assign(er, { code: 'ENOLOCK' }) - } - - // when building the ideal tree, we pass in a root node to this function - // otherwise, load it from the root package json or the lockfile - const { - root = await this[loadRoot](s), - } = options - - this[rootOptionProvided] = options.root - - await this[loadFromShrinkwrap](s, root) - return treeCheck(this.virtualTree) - } - - async [loadRoot] (s) { - const pj = this.path + '/package.json' - const pkg = await rpj(pj).catch(() => s.data.packages['']) || {} - return this[loadWorkspaces](this[loadNode]('', pkg)) - } - - async [loadFromShrinkwrap] (s, root) { - if (!this[rootOptionProvided]) { - // root is never any of these things, but might be a brand new - // baby Node object that never had its dep flags calculated. - root.extraneous = false - root.dev = false - root.optional = false - root.devOptional = false - root.peer = false - } else { - this[flagsSuspect] = true - } - - this[checkRootEdges](s, root) - root.meta = s - this.virtualTree = root - const {links, nodes} = this[resolveNodes](s, root) - await this[resolveLinks](links, nodes) - if (!(s.originalLockfileVersion >= 2)) { - this[assignBundles](nodes) - } - if (this[flagsSuspect]) { - this[reCalcDepFlags](nodes.values()) - } - return root - } - - [reCalcDepFlags] (nodes) { - // reset all dep flags - // can't use inventory here, because virtualTree might not be root - for (const node of nodes) { - if (node.isRoot || node === this[rootOptionProvided]) { - continue - } - node.extraneous = true - node.dev = true - node.optional = true - node.devOptional = true - node.peer = true - } - calcDepFlags(this.virtualTree, !this[rootOptionProvided]) - } - - // check the lockfile deps, and see if they match. if they do not - // then we have to reset dep flags at the end. for example, if the - // user manually edits their package.json file, then we need to know - // that the idealTree is no longer entirely trustworthy. - [checkRootEdges] (s, root) { - // loaded virtually from tree, no chance of being out of sync - // ancient lockfiles are critically damaged by this process, - // so we need to just hope for the best in those cases. 
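A reduced sketch of the comparison checkRootEdges performs below: normalize both sides to sorted [type, name, spec] tuples and mark the dep flags suspect on the first difference. The dependency data is invented, and plain String#localeCompare stands in for the @isaacs/string-locale-compare helper the real code uses:

const depsToEdges = (type, deps) =>
  Object.entries(deps).map(d => [type, ...d])

const byTypeThenName = ([atype, aname], [btype, bname]) =>
  atype.localeCompare(btype) || aname.localeCompare(bname)

const lockEdges = [
  ...depsToEdges('prod', { abbrev: '^1.0.0' }),
  ...depsToEdges('dev', { tap: '^15.0.0' }),
].sort(byTypeThenName)

// what package.json says now (the user bumped abbrev by hand)
const rootEdges = [
  ...depsToEdges('prod', { abbrev: '^2.0.0' }),
  ...depsToEdges('dev', { tap: '^15.0.0' }),
].sort(byTypeThenName)

const suspect = rootEdges.length !== lockEdges.length ||
  lockEdges.some((e, i) => e.some((v, j) => v !== rootEdges[i][j]))
console.log(suspect) // true: flags must be recalculated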
- if (!s.loadedFromDisk || s.ancientLockfile) { - return - } - - const lock = s.get('') - const prod = lock.dependencies || {} - const dev = lock.devDependencies || {} - const optional = lock.optionalDependencies || {} - const peer = lock.peerDependencies || {} - const peerOptional = {} - if (lock.peerDependenciesMeta) { - for (const [name, meta] of Object.entries(lock.peerDependenciesMeta)) { - if (meta.optional && peer[name] !== undefined) { - peerOptional[name] = peer[name] - delete peer[name] - } - } - } - for (const name of Object.keys(optional)) { - delete prod[name] - } - - const lockWS = [] - const workspaces = this[loadWorkspacesVirtual]({ - cwd: this.path, - lockfile: s.data, - }) - for (const [name, path] of workspaces.entries()) { - lockWS.push(['workspace', name, `file:${path}`]) - } - - const lockEdges = [ - ...depsToEdges('prod', prod), - ...depsToEdges('dev', dev), - ...depsToEdges('optional', optional), - ...depsToEdges('peer', peer), - ...depsToEdges('peerOptional', peerOptional), - ...lockWS, - ].sort(([atype, aname], [btype, bname]) => - localeCompare(atype, btype) || localeCompare(aname, bname)) - - const rootEdges = [...root.edgesOut.values()] - .map(e => [e.type, e.name, e.spec]) - .sort(([atype, aname], [btype, bname]) => - localeCompare(atype, btype) || localeCompare(aname, bname)) - - if (rootEdges.length !== lockEdges.length) { - // something added or removed - return this[flagsSuspect] = true - } - - for (let i = 0; i < lockEdges.length; i++) { - if (rootEdges[i][0] !== lockEdges[i][0] || - rootEdges[i][1] !== lockEdges[i][1] || - rootEdges[i][2] !== lockEdges[i][2]) { - return this[flagsSuspect] = true - } - } - } - - // separate out link metadatas, and create Node objects for nodes - [resolveNodes] (s, root) { - const links = new Map() - const nodes = new Map([['', root]]) - for (const [location, meta] of Object.entries(s.data.packages)) { - // skip the root because we already got it - if (!location) { - continue - } - - if (meta.link) { - links.set(location, meta) - } else { - nodes.set(location, this[loadNode](location, meta)) - } - } - return {links, nodes} - } - - // links is the set of metadata, and nodes is the map of non-Link nodes - // Set the targets to nodes in the set, if we have them (we might not) - async [resolveLinks] (links, nodes) { - for (const [location, meta] of links.entries()) { - const targetPath = resolve(this.path, meta.resolved) - const targetLoc = relpath(this.path, targetPath) - const target = nodes.get(targetLoc) - const link = this[loadLink](location, targetLoc, target, meta) - nodes.set(location, link) - nodes.set(targetLoc, link.target) - - // we always need to read the package.json for link targets - // outside node_modules because they can be changed by the local user - if (!link.target.parent) { - const pj = link.realpath + '/package.json' - const pkg = await rpj(pj).catch(() => null) - if (pkg) { - link.target.package = pkg - } - } - } - } - - [assignBundles] (nodes) { - for (const [location, node] of nodes) { - // Skip assignment of parentage for the root package - if (!location || node.isLink && !node.target.location) { - continue - } - const { name, parent, package: { inBundle }} = node - - if (!parent) { - continue - } - - // read inBundle from package because 'package' here is - // actually a v2 lockfile metadata entry. - // If the *parent* is also bundled, though, or if the parent has - // no dependency on it, then we assume that it's being pulled in - // just by virtue of its parent or a transitive dep being bundled. 
- const { package: ppkg } = parent - const { inBundle: parentBundled } = ppkg - if (inBundle && !parentBundled && parent.edgesOut.has(node.name)) { - if (!ppkg.bundleDependencies) { - ppkg.bundleDependencies = [name] - } else { - ppkg.bundleDependencies.push(name) - } - } - } - } - - [loadNode] (location, sw) { - const p = this.virtualTree ? this.virtualTree.realpath : this.path - const path = resolve(p, location) - // shrinkwrap doesn't include package name unless necessary - if (!sw.name) { - sw.name = nameFromFolder(path) - } - - const dev = sw.dev - const optional = sw.optional - const devOptional = dev || optional || sw.devOptional - const peer = sw.peer - - const node = new Node({ - legacyPeerDeps: this.legacyPeerDeps, - root: this.virtualTree, - path, - realpath: path, - integrity: sw.integrity, - resolved: consistentResolve(sw.resolved, this.path, path), - pkg: sw, - hasShrinkwrap: sw.hasShrinkwrap, - dev, - optional, - devOptional, - peer, - }) - // cast to boolean because they're undefined in the lock file when false - node.extraneous = !!sw.extraneous - node.devOptional = !!(sw.devOptional || sw.dev || sw.optional) - node.peer = !!sw.peer - node.optional = !!sw.optional - node.dev = !!sw.dev - return node - } - - [loadLink] (location, targetLoc, target, meta) { - const path = resolve(this.path, location) - const link = new Link({ - legacyPeerDeps: this.legacyPeerDeps, - path, - realpath: resolve(this.path, targetLoc), - target, - pkg: target && target.package, - }) - link.extraneous = target.extraneous - link.devOptional = target.devOptional - link.peer = target.peer - link.optional = target.optional - link.dev = target.dev - return link - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js deleted file mode 100644 index 0a7965ae30ca1..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js +++ /dev/null @@ -1,33 +0,0 @@ -const mapWorkspaces = require('@npmcli/map-workspaces') - -const _appendWorkspaces = Symbol('appendWorkspaces') -// shared ref used by other mixins/Arborist -const _loadWorkspaces = Symbol.for('loadWorkspaces') -const _loadWorkspacesVirtual = Symbol.for('loadWorkspacesVirtual') - -module.exports = cls => class MapWorkspaces extends cls { - [_appendWorkspaces] (node, workspaces) { - if (node && workspaces.size) { - node.workspaces = workspaces - } - - return node - } - - async [_loadWorkspaces] (node) { - if (node.workspaces) { - return node - } - - const workspaces = await mapWorkspaces({ - cwd: node.path, - pkg: node.package, - }) - - return this[_appendWorkspaces](node, workspaces) - } - - [_loadWorkspacesVirtual] (opts) { - return mapWorkspaces.virtual(opts) - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/pruner.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/pruner.js deleted file mode 100644 index 3e9d4f1bf66b0..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/pruner.js +++ /dev/null @@ -1,15 +0,0 @@ -const _idealTreePrune = Symbol.for('idealTreePrune') - -module.exports = cls => class Pruner extends cls { - async prune (options = {}) { - // allow the user to set options on the ctor as well. - // XXX: deprecate separate method options objects. 
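A usage sketch for the Pruner mixin's one public method, which chains pieces defined elsewhere in this stack (build the ideal tree, detach extraneous nodes, reify back to disk). The path is a placeholder:

const Arborist = require('@npmcli/arborist')

new Arborist({ path: './some-project' })
  .prune()
  .then(tree => console.log('pruned; tree now has', tree.inventory.size, 'nodes'))
  .catch(console.error)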
- options = { ...this.options, ...options } - - await this.buildIdealTree(options) - - this[_idealTreePrune]() - - return this.reify(options) - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/rebuild.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/rebuild.js deleted file mode 100644 index 6fa5c00116fc2..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/rebuild.js +++ /dev/null @@ -1,380 +0,0 @@ -// Arborist.rebuild({path = this.path}) will do all the binlinks and -// bundle building needed. Called by reify, and by `npm rebuild`. - -const localeCompare = require('@isaacs/string-locale-compare')('en') -const {depth: dfwalk} = require('treeverse') -const promiseAllRejectLate = require('promise-all-reject-late') -const rpj = require('read-package-json-fast') -const binLinks = require('bin-links') -const runScript = require('@npmcli/run-script') -const promiseCallLimit = require('promise-call-limit') -const {resolve} = require('path') -const { - isNodeGypPackage, - defaultGypInstallScript, -} = require('@npmcli/node-gyp') - -const boolEnv = b => b ? '1' : '' -const sortNodes = (a, b) => - (a.depth - b.depth) || localeCompare(a.path, b.path) - -const _workspaces = Symbol.for('workspaces') -const _build = Symbol('build') -const _resetQueues = Symbol('resetQueues') -const _rebuildBundle = Symbol('rebuildBundle') -const _ignoreScripts = Symbol('ignoreScripts') -const _binLinks = Symbol('binLinks') -const _oldMeta = Symbol('oldMeta') -const _createBinLinks = Symbol('createBinLinks') -const _doHandleOptionalFailure = Symbol('doHandleOptionalFailure') -const _linkAllBins = Symbol('linkAllBins') -const _runScripts = Symbol('runScripts') -const _buildQueues = Symbol('buildQueues') -const _addToBuildSet = Symbol('addToBuildSet') -const _checkBins = Symbol.for('checkBins') -const _queues = Symbol('queues') -const _scriptShell = Symbol('scriptShell') -const _includeWorkspaceRoot = Symbol.for('includeWorkspaceRoot') - -const _force = Symbol.for('force') - -// defined by reify mixin -const _handleOptionalFailure = Symbol.for('handleOptionalFailure') -const _trashList = Symbol.for('trashList') - -module.exports = cls => class Builder extends cls { - constructor (options) { - super(options) - - const { - ignoreScripts = false, - scriptShell, - binLinks = true, - rebuildBundle = true, - } = options - - this.scriptsRun = new Set() - this[_binLinks] = binLinks - this[_ignoreScripts] = !!ignoreScripts - this[_scriptShell] = scriptShell - this[_rebuildBundle] = !!rebuildBundle - this[_resetQueues]() - this[_oldMeta] = null - } - - async rebuild ({ nodes, handleOptionalFailure = false } = {}) { - // nothing to do if we're not building anything! - if (this[_ignoreScripts] && !this[_binLinks]) { - return - } - - // when building for the first time, as part of reify, we ignore - // failures in optional nodes, and just delete them. however, when - // running JUST a rebuild, we treat optional failures as real fails - this[_doHandleOptionalFailure] = handleOptionalFailure - - // if we don't have a set of nodes, then just rebuild - // the actual tree on disk. 
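When called with no nodes argument, rebuild() falls back to the actual tree on disk, as the branch below shows. A minimal usage sketch (placeholder path); scriptsRun collects a record of every lifecycle script that ran:

const Arborist = require('@npmcli/arborist')

const rebuildAll = async () => {
  const arb = new Arborist({ path: './some-project' })
  await arb.rebuild() // bin links plus preinstall/install/postinstall/prepare
  for (const run of arb.scriptsRun) {
    console.log(run.event, 'in', run.path, 'exited with', run.code)
  }
}
rebuildAll().catch(console.error)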
- if (!nodes) { - const tree = await this.loadActual() - if (this[_workspaces] && this[_workspaces].length) { - const filterSet = this.workspaceDependencySet( - tree, - this[_workspaces], - this[_includeWorkspaceRoot] - ) - nodes = tree.inventory.filter(node => filterSet.has(node)) - } else { - nodes = tree.inventory.values() - } - } - - // separates links nodes so that it can run - // prepare scripts and link bins in the expected order - process.emit('time', 'build') - const depNodes = new Set() - const linkNodes = new Set() - for (const node of nodes) { - // we skip the target nodes to that workspace in order to make sure - // we only run lifecycle scripts / place bin links once per workspace - if (node.isLink) { - linkNodes.add(node) - } else { - depNodes.add(node) - } - } - - await this[_build](depNodes, {}) - - if (linkNodes.size) { - this[_resetQueues]() - await this[_build](linkNodes, { type: 'links' }) - } - - process.emit('timeEnd', 'build') - } - - [_resetQueues] () { - this[_queues] = { - preinstall: [], - install: [], - postinstall: [], - prepare: [], - bin: [], - } - } - - async [_build] (nodes, { type = 'deps' }) { - process.emit('time', `build:${type}`) - - await this[_buildQueues](nodes) - if (!this[_ignoreScripts]) { - await this[_runScripts]('preinstall') - } - if (this[_binLinks] && type !== 'links') { - await this[_linkAllBins]() - } - - // links should also run prepare scripts and only link bins after that - if (type === 'links') { - await this[_runScripts]('prepare') - - if (this[_binLinks]) { - await this[_linkAllBins]() - } - } - - if (!this[_ignoreScripts]) { - await this[_runScripts]('install') - await this[_runScripts]('postinstall') - } - - process.emit('timeEnd', `build:${type}`) - } - - async [_buildQueues] (nodes) { - process.emit('time', 'build:queue') - const set = new Set() - - const promises = [] - for (const node of nodes) { - promises.push(this[_addToBuildSet](node, set)) - - // if it has bundle deps, add those too, if rebuildBundle - if (this[_rebuildBundle] !== false) { - const bd = node.package.bundleDependencies - if (bd && bd.length) { - dfwalk({ - tree: node, - leave: node => promises.push(this[_addToBuildSet](node, set)), - getChildren: node => [...node.children.values()], - filter: node => node.inBundle, - }) - } - } - } - await promiseAllRejectLate(promises) - - // now sort into the queues for the 4 things we have to do - // run in the same predictable order that buildIdealTree uses - // there's no particular reason for doing it in this order rather - // than another, but sorting *somehow* makes it consistent. - const queue = [...set].sort(sortNodes) - - for (const node of queue) { - const { package: { bin, scripts = {} } } = node.target - const { preinstall, install, postinstall, prepare } = scripts - const tests = { bin, preinstall, install, postinstall, prepare } - for (const [key, has] of Object.entries(tests)) { - if (has) { - this[_queues][key].push(node) - } - } - } - process.emit('timeEnd', 'build:queue') - } - - async [_checkBins] (node) { - // if the node is a global top, and we're not in force mode, then - // any existing bins need to either be missing, or a symlink into - // the node path. Otherwise a package can have a preinstall script - // that unlinks something, to allow them to silently overwrite system - // binaries, which is unsafe and insecure. 
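The guard below delegates the actual safety check to bin-links' checkBins, which rejects when an existing file in the global bin directory would be clobbered by anything other than a prior install of the same package. A sketch of calling it directly; the pkg and path values are placeholders, and the error code shown is only what the rejection is expected to carry:

const binLinks = require('bin-links')

binLinks.checkBins({
  pkg: { name: 'some-cli', bin: { 'some-cli': 'bin/cli.js' } }, // placeholder
  path: '/usr/local/lib/node_modules/some-cli',                 // placeholder
  top: true,
  global: true,
}).then(() => console.log('safe to link'))
  .catch(er => console.error('refusing to clobber:', er.code))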
- if (!node.globalTop || this[_force]) { - return - } - const { path, package: pkg } = node - await binLinks.checkBins({ pkg, path, top: true, global: true }) - } - - async [_addToBuildSet] (node, set, refreshed = false) { - if (set.has(node)) { - return - } - - if (this[_oldMeta] === null) { - const {root: {meta}} = node - this[_oldMeta] = meta && meta.loadedFromDisk && - !(meta.originalLockfileVersion >= 2) - } - - const { package: pkg, hasInstallScript } = node.target - const { gypfile, bin, scripts = {} } = pkg - - const { preinstall, install, postinstall, prepare } = scripts - const anyScript = preinstall || install || postinstall || prepare - if (!refreshed && !anyScript && (hasInstallScript || this[_oldMeta])) { - // we either have an old metadata (and thus might have scripts) - // or we have an indication that there's install scripts (but - // don't yet know what they are) so we have to load the package.json - // from disk to see what the deal is. Failure here just means - // no scripts to add, probably borked package.json. - // add to the set then remove while we're reading the pj, so we - // don't accidentally hit it multiple times. - set.add(node) - const pkg = await rpj(node.path + '/package.json').catch(() => ({})) - set.delete(node) - - const {scripts = {}} = pkg - node.package.scripts = scripts - return this[_addToBuildSet](node, set, true) - } - - // Rebuild node-gyp dependencies lacking an install or preinstall script - // note that 'scripts' might be missing entirely, and the package may - // set gypfile:false to avoid this automatic detection. - const isGyp = gypfile !== false && - !install && - !preinstall && - await isNodeGypPackage(node.path) - - if (bin || preinstall || install || postinstall || prepare || isGyp) { - if (bin) { - await this[_checkBins](node) - } - if (isGyp) { - scripts.install = defaultGypInstallScript - node.package.scripts = scripts - } - set.add(node) - } - } - - async [_runScripts] (event) { - const queue = this[_queues][event] - - if (!queue.length) { - return - } - - process.emit('time', `build:run:${event}`) - const stdio = this.options.foregroundScripts ? 'inherit' : 'pipe' - const limit = this.options.foregroundScripts ? 1 : undefined - await promiseCallLimit(queue.map(node => async () => { - const { - path, - integrity, - resolved, - optional, - peer, - dev, - devOptional, - package: pkg, - location, - } = node.target - - // skip any that we know we'll be deleting - if (this[_trashList].has(path)) { - return - } - - const timer = `build:run:${event}:${location}` - process.emit('time', timer) - this.log.info('run', pkg._id, event, location, pkg.scripts[event]) - const env = { - npm_package_resolved: resolved, - npm_package_integrity: integrity, - npm_package_json: resolve(path, 'package.json'), - npm_package_optional: boolEnv(optional), - npm_package_dev: boolEnv(dev), - npm_package_peer: boolEnv(peer), - npm_package_dev_optional: - boolEnv(devOptional && !dev && !optional), - } - const runOpts = { - event, - path, - pkg, - stdioString: true, - stdio, - env, - scriptShell: this[_scriptShell], - } - const p = runScript(runOpts).catch(er => { - const { code, signal } = er - this.log.info('run', pkg._id, event, {code, signal}) - throw er - }).then(({args, code, signal, stdout, stderr}) => { - this.scriptsRun.add({ - pkg, - path, - event, - cmd: args && args[args.length - 1], - env, - code, - signal, - stdout, - stderr, - }) - this.log.info('run', pkg._id, event, {code, signal}) - }) - - await (this[_doHandleOptionalFailure] - ? 
this[_handleOptionalFailure](node, p) - : p) - - process.emit('timeEnd', timer) - }), limit) - process.emit('timeEnd', `build:run:${event}`) - } - - async [_linkAllBins] () { - const queue = this[_queues].bin - if (!queue.length) { - return - } - - process.emit('time', 'build:link') - const promises = [] - // sort the queue by node path, so that the module-local collision - // detector in bin-links will always resolve the same way. - for (const node of queue.sort(sortNodes)) { - promises.push(this[_createBinLinks](node)) - } - - await promiseAllRejectLate(promises) - process.emit('timeEnd', 'build:link') - } - - async [_createBinLinks] (node) { - if (this[_trashList].has(node.path)) { - return - } - - process.emit('time', `build:link:${node.location}`) - - const p = binLinks({ - pkg: node.package, - path: node.path, - top: !!(node.isTop || node.globalTop), - force: this[_force], - global: !!node.globalTop, - }) - - await (this[_doHandleOptionalFailure] - ? this[_handleOptionalFailure](node, p) - : p) - - process.emit('timeEnd', `build:link:${node.location}`) - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/reify.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/reify.js deleted file mode 100644 index a279d8956f243..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ /dev/null @@ -1,1404 +0,0 @@ -// mixin implementing the reify method - -const onExit = require('../signal-handling.js') -const pacote = require('pacote') -const AuditReport = require('../audit-report.js') -const {subset, intersects} = require('semver') -const npa = require('npm-package-arg') -const debug = require('../debug.js') -const walkUp = require('walk-up-path') - -const {dirname, resolve, relative} = require('path') -const {depth: dfwalk} = require('treeverse') -const fs = require('fs') -const {promisify} = require('util') -const lstat = promisify(fs.lstat) -const symlink = promisify(fs.symlink) -const mkdirp = require('mkdirp-infer-owner') -const justMkdirp = require('mkdirp') -const moveFile = require('@npmcli/move-file') -const rimraf = promisify(require('rimraf')) -const PackageJson = require('@npmcli/package-json') -const packageContents = require('@npmcli/installed-package-contents') -const { checkEngine, checkPlatform } = require('npm-install-checks') -const _force = Symbol.for('force') - -const treeCheck = require('../tree-check.js') -const relpath = require('../relpath.js') -const Diff = require('../diff.js') -const retirePath = require('../retire-path.js') -const promiseAllRejectLate = require('promise-all-reject-late') -const optionalSet = require('../optional-set.js') -const calcDepFlags = require('../calc-dep-flags.js') -const { saveTypeMap, hasSubKey } = require('../add-rm-pkg-deps.js') - -const _retiredPaths = Symbol('retiredPaths') -const _retiredUnchanged = Symbol('retiredUnchanged') -const _sparseTreeDirs = Symbol('sparseTreeDirs') -const _sparseTreeRoots = Symbol('sparseTreeRoots') -const _savePrefix = Symbol('savePrefix') -const _retireShallowNodes = Symbol.for('retireShallowNodes') -const _getBundlesByDepth = Symbol('getBundlesByDepth') -const _registryResolved = Symbol('registryResolved') -const _addNodeToTrashList = Symbol('addNodeToTrashList') -const _workspaces = Symbol.for('workspaces') - -// shared by rebuild mixin -const _trashList = Symbol.for('trashList') -const _handleOptionalFailure = Symbol.for('handleOptionalFailure') -const _loadTrees = Symbol.for('loadTrees') - -// shared symbols 
for swapping out when testing -const _diffTrees = Symbol.for('diffTrees') -const _createSparseTree = Symbol.for('createSparseTree') -const _loadShrinkwrapsAndUpdateTrees = Symbol.for('loadShrinkwrapsAndUpdateTrees') -const _shrinkwrapInflated = Symbol('shrinkwrapInflated') -const _bundleUnpacked = Symbol('bundleUnpacked') -const _bundleMissing = Symbol('bundleMissing') -const _reifyNode = Symbol.for('reifyNode') -const _extractOrLink = Symbol('extractOrLink') -// defined by rebuild mixin -const _checkBins = Symbol.for('checkBins') -const _symlink = Symbol('symlink') -const _warnDeprecated = Symbol('warnDeprecated') -const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees') -const _submitQuickAudit = Symbol('submitQuickAudit') -const _awaitQuickAudit = Symbol('awaitQuickAudit') -const _unpackNewModules = Symbol.for('unpackNewModules') -const _moveContents = Symbol.for('moveContents') -const _moveBackRetiredUnchanged = Symbol.for('moveBackRetiredUnchanged') -const _build = Symbol.for('build') -const _removeTrash = Symbol.for('removeTrash') -const _renamePath = Symbol.for('renamePath') -const _rollbackRetireShallowNodes = Symbol.for('rollbackRetireShallowNodes') -const _rollbackCreateSparseTree = Symbol.for('rollbackCreateSparseTree') -const _rollbackMoveBackRetiredUnchanged = Symbol.for('rollbackMoveBackRetiredUnchanged') -const _saveIdealTree = Symbol.for('saveIdealTree') -const _saveLockFile = Symbol('saveLockFile') -const _copyIdealToActual = Symbol('copyIdealToActual') -const _addOmitsToTrashList = Symbol('addOmitsToTrashList') -const _packageLockOnly = Symbol('packageLockOnly') -const _dryRun = Symbol('dryRun') -const _validateNodeModules = Symbol('validateNodeModules') -const _nmValidated = Symbol('nmValidated') -const _validatePath = Symbol('validatePath') -const _reifyPackages = Symbol.for('reifyPackages') -const _includeWorkspaceRoot = Symbol.for('includeWorkspaceRoot') - -const _omitDev = Symbol('omitDev') -const _omitOptional = Symbol('omitOptional') -const _omitPeer = Symbol('omitPeer') - -const _global = Symbol.for('global') - -const _pruneBundledMetadeps = Symbol('pruneBundledMetadeps') - -// defined by Ideal mixin -const _resolvedAdd = Symbol.for('resolvedAdd') -const _usePackageLock = Symbol.for('usePackageLock') -const _formatPackageLock = Symbol.for('formatPackageLock') - -module.exports = cls => class Reifier extends cls { - constructor (options) { - super(options) - - const { - savePrefix = '^', - packageLockOnly = false, - dryRun = false, - formatPackageLock = true, - } = options - - this[_dryRun] = !!dryRun - this[_packageLockOnly] = !!packageLockOnly - this[_savePrefix] = savePrefix - this[_formatPackageLock] = !!formatPackageLock - - this.diff = null - this[_retiredPaths] = {} - this[_shrinkwrapInflated] = new Set() - this[_retiredUnchanged] = {} - this[_sparseTreeDirs] = new Set() - this[_sparseTreeRoots] = new Set() - this[_trashList] = new Set() - // the nodes we unpack to read their bundles - this[_bundleUnpacked] = new Set() - // child nodes we'd EXPECT to be included in a bundle, but aren't - this[_bundleMissing] = new Set() - this[_nmValidated] = new Set() - } - - // public method - async reify (options = {}) { - if (this[_packageLockOnly] && this[_global]) { - const er = new Error('cannot generate lockfile for global packages') - er.code = 'ESHRINKWRAPGLOBAL' - throw er - } - - const omit = new Set(options.omit || []) - this[_omitDev] = omit.has('dev') - this[_omitOptional] = omit.has('optional') - this[_omitPeer] = omit.has('peer') - - // 
start tracker block - this.addTracker('reify') - process.emit('time', 'reify') - await this[_validatePath]() - await this[_loadTrees](options) - await this[_diffTrees]() - await this[_reifyPackages]() - await this[_saveIdealTree](options) - await this[_copyIdealToActual]() - await this[_awaitQuickAudit]() - - this.finishTracker('reify') - process.emit('timeEnd', 'reify') - return treeCheck(this.actualTree) - } - - async [_validatePath] () { - // don't create missing dirs on dry runs - if (this[_packageLockOnly] || this[_dryRun]) { - return - } - - // we do NOT want to set ownership on this folder, especially - // recursively, because it can have other side effects to do that - // in a project directory. We just want to make it if it's missing. - await justMkdirp(resolve(this.path)) - - // do not allow the top-level node_modules to be a symlink - await this[_validateNodeModules](resolve(this.path, 'node_modules')) - } - - async [_reifyPackages] () { - // we don't submit the audit report or write to disk on dry runs - if (this[_dryRun]) { - return - } - - if (this[_packageLockOnly]) { - // we already have the complete tree, so just audit it now, - // and that's all we have to do here. - return this[_submitQuickAudit]() - } - - // ok, we're about to start touching the fs. need to roll back - // if we get an early termination. - let reifyTerminated = null - const removeHandler = onExit(({signal}) => { - // only call once. if signal hits twice, we just terminate - removeHandler() - reifyTerminated = Object.assign(new Error('process terminated'), { - signal, - }) - return false - }) - - // [rollbackfn, [...actions]] - // after each step, if the process was terminated, execute the rollback - // note that each rollback *also* calls the previous one when it's - // finished, and then the first one throws the error, so we only need - // a new rollback step when we have a new thing that must be done to - // revert the install. - const steps = [ - [_rollbackRetireShallowNodes, [ - _retireShallowNodes, - ]], - [_rollbackCreateSparseTree, [ - _createSparseTree, - _addOmitsToTrashList, - _loadShrinkwrapsAndUpdateTrees, - _loadBundlesAndUpdateTrees, - _submitQuickAudit, - _unpackNewModules, - ]], - [_rollbackMoveBackRetiredUnchanged, [ - _moveBackRetiredUnchanged, - _build, - ]], - ] - for (const [rollback, actions] of steps) { - for (const action of actions) { - try { - await this[action]() - if (reifyTerminated) { - throw reifyTerminated - } - } catch (er) { - await this[rollback](er) - /* istanbul ignore next - rollback throws, should never hit this */ - throw er - } - } - } - - // no rollback for this one, just exit with the error, since the - // install completed and can't be safely recovered at this point. - await this[_removeTrash]() - if (reifyTerminated) { - throw reifyTerminated - } - - // done modifying the file system, no need to keep listening for sigs - removeHandler() - } - - // when doing a local install, we load everything and figure it all out. - // when doing a global install, we *only* care about the explicit requests. - [_loadTrees] (options) { - process.emit('time', 'reify:loadTrees') - const bitOpt = { - ...options, - complete: this[_packageLockOnly] || this[_dryRun], - } - - // if we're only writing a package lock, then it doesn't matter what's here - if (this[_packageLockOnly]) { - return this.buildIdealTree(bitOpt) - .then(() => process.emit('timeEnd', 'reify:loadTrees')) - } - - const actualOpt = this[_global] ? 
{ - ignoreMissing: true, - global: true, - filter: (node, kid) => { - // if it's not the project root, and we have no explicit requests, - // then we're already into a nested dep, so we keep it - if (this.explicitRequests.size === 0 || !node.isProjectRoot) { - return true - } - - // if we added it as an edgeOut, then we want it - if (this.idealTree.edgesOut.has(kid)) { - return true - } - - // if it's an explicit request, then we want it - const hasExplicit = [...this.explicitRequests] - .some(edge => edge.name === kid) - if (hasExplicit) { - return true - } - - // ignore the rest of the global install folder - return false - }, - } : { ignoreMissing: true } - - if (!this[_global]) { - return Promise.all([ - this.loadActual(actualOpt), - this.buildIdealTree(bitOpt), - ]).then(() => process.emit('timeEnd', 'reify:loadTrees')) - } - - // the global install space tends to have a lot of stuff in it. don't - // load all of it, just what we care about. we won't be saving a - // hidden lockfile in there anyway. Note that we have to load ideal - // BEFORE loading actual, so that the actualOpt can use the - // explicitRequests which is set during buildIdealTree - return this.buildIdealTree(bitOpt) - .then(() => this.loadActual(actualOpt)) - .then(() => process.emit('timeEnd', 'reify:loadTrees')) - } - - [_diffTrees] () { - if (this[_packageLockOnly]) { - return - } - - process.emit('time', 'reify:diffTrees') - // XXX if we have an existing diff already, there should be a way - // to just invalidate the parts that changed, but avoid walking the - // whole tree again. - - const filterNodes = [] - if (this[_global] && this.explicitRequests.size) { - const idealTree = this.idealTree.target - const actualTree = this.actualTree.target - // we ONLY are allowed to make changes in the global top-level - // children where there's an explicit request. - for (const { name } of this.explicitRequests) { - const ideal = idealTree.children.get(name) - if (ideal) { - filterNodes.push(ideal) - } - const actual = actualTree.children.get(name) - if (actual) { - filterNodes.push(actual) - } - } - } else { - for (const ws of this[_workspaces]) { - const ideal = this.idealTree.children.get(ws) - if (ideal) { - filterNodes.push(ideal) - } - const actual = this.actualTree.children.get(ws) - if (actual) { - filterNodes.push(actual) - } - } - if (this[_includeWorkspaceRoot] && (this[_workspaces].length > 0)) { - for (const tree of [this.idealTree, this.actualTree]) { - for (const {type, to} of tree.edgesOut.values()) { - if (type !== 'workspace' && to) { - filterNodes.push(to) - } - } - } - } - } - - // find all the nodes that need to change between the actual - // and ideal trees. - this.diff = Diff.calculate({ - shrinkwrapInflated: this[_shrinkwrapInflated], - filterNodes, - actual: this.actualTree, - ideal: this.idealTree, - }) - - // we don't have to add 'removed' folders to the trashlist, because - // they'll be moved aside to a retirement folder, and then the retired - // folder will be deleted at the end. This is important when we have - // a folder like FOO being "removed" in favor of a folder like "foo", - // because if we remove node_modules/FOO on case-insensitive systems, - // it will remove the dep that we *want* at node_modules/foo. - - process.emit('timeEnd', 'reify:diffTrees') - } - - // add the node and all its bins to the list of things to be - // removed later on in the process. 
optionally, also mark them - // as a retired paths, so that we move them out of the way and - // replace them when rolling back on failure. - [_addNodeToTrashList] (node, retire = false) { - const paths = [node.path, ...node.binPaths] - const moves = this[_retiredPaths] - this.log.silly('reify', 'mark', retire ? 'retired' : 'deleted', paths) - for (const path of paths) { - if (retire) { - const retired = retirePath(path) - moves[path] = retired - this[_trashList].add(retired) - } else { - this[_trashList].add(path) - } - } - } - - // move aside the shallowest nodes in the tree that have to be - // changed or removed, so that we can rollback if necessary. - [_retireShallowNodes] () { - process.emit('time', 'reify:retireShallow') - const moves = this[_retiredPaths] = {} - for (const diff of this.diff.children) { - if (diff.action === 'CHANGE' || diff.action === 'REMOVE') { - // we'll have to clean these up at the end, so add them to the list - this[_addNodeToTrashList](diff.actual, true) - } - } - this.log.silly('reify', 'moves', moves) - const movePromises = Object.entries(moves) - .map(([from, to]) => this[_renamePath](from, to)) - return promiseAllRejectLate(movePromises) - .then(() => process.emit('timeEnd', 'reify:retireShallow')) - } - - [_renamePath] (from, to, didMkdirp = false) { - return moveFile(from, to) - .catch(er => { - // Occasionally an expected bin file might not exist in the package, - // or a shim/symlink might have been moved aside. If we've already - // handled the most common cause of ENOENT (dir doesn't exist yet), - // then just ignore any ENOENT. - if (er.code === 'ENOENT') { - return didMkdirp ? null : mkdirp(dirname(to)).then(() => - this[_renamePath](from, to, true)) - } else if (er.code === 'EEXIST') { - return rimraf(to).then(() => moveFile(from, to)) - } else { - throw er - } - }) - } - - [_rollbackRetireShallowNodes] (er) { - process.emit('time', 'reify:rollback:retireShallow') - const moves = this[_retiredPaths] - const movePromises = Object.entries(moves) - .map(([from, to]) => this[_renamePath](to, from)) - return promiseAllRejectLate(movePromises) - // ignore subsequent rollback errors - .catch(er => {}) - .then(() => process.emit('timeEnd', 'reify:rollback:retireShallow')) - .then(() => { - throw er - }) - } - - // adding to the trash list will skip reifying, and delete them - // if they are currently in the tree and otherwise untouched. - [_addOmitsToTrashList] () { - if (!this[_omitDev] && !this[_omitOptional] && !this[_omitPeer]) { - return - } - - process.emit('time', 'reify:trashOmits') - - const filter = node => - node.top.isProjectRoot && - (node.peer && this[_omitPeer] || - node.dev && this[_omitDev] || - node.optional && this[_omitOptional] || - node.devOptional && this[_omitOptional] && this[_omitDev]) - - for (const node of this.idealTree.inventory.filter(filter)) { - this[_addNodeToTrashList](node) - } - - process.emit('timeEnd', 'reify:trashOmits') - } - - [_createSparseTree] () { - process.emit('time', 'reify:createSparse') - // if we call this fn again, we look for the previous list - // so that we can avoid making the same directory multiple times - const leaves = this.diff.leaves - .filter(diff => { - return (diff.action === 'ADD' || diff.action === 'CHANGE') && - !this[_sparseTreeDirs].has(diff.ideal.path) && - !diff.ideal.isLink - }) - .map(diff => diff.ideal) - - // we check this in parallel, so guard against multiple attempts to - // retire the same path at the same time. 
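/* walk-up-path's walkUp yields the path itself and then each parent in
 * turn (paths illustrative):
 *
 *   [...walkUp('/prj/node_modules/a/node_modules/b')]
 *   //  '/prj/node_modules/a/node_modules/b',
 *   //  '/prj/node_modules/a/node_modules',
 *   //  '/prj/node_modules/a', '/prj/node_modules', '/prj', '/'
 *
 * The loop below breaks once it reaches node.top.path, and retires any
 * entry on the way up that exists but is not a directory (say, a file
 * squatting where node_modules/a must become a directory). dirsChecked
 * keeps the parallel per-leaf checks from lstat-ing the same dir twice. */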
- const dirsChecked = new Set() - return promiseAllRejectLate(leaves.map(async node => { - for (const d of walkUp(node.path)) { - if (d === node.top.path) { - break - } - if (dirsChecked.has(d)) { - continue - } - dirsChecked.add(d) - const st = await lstat(d).catch(er => null) - // this can happen if we have a link to a package with a name - // that the filesystem treats as if it is the same thing. - // would be nice to have conditional istanbul ignores here... - /* istanbul ignore next - defense in depth */ - if (st && !st.isDirectory()) { - const retired = retirePath(d) - this[_retiredPaths][d] = retired - this[_trashList].add(retired) - await this[_renamePath](d, retired) - } - } - const made = await mkdirp(node.path) - this[_sparseTreeDirs].add(node.path) - this[_sparseTreeRoots].add(made) - })) - .then(() => process.emit('timeEnd', 'reify:createSparse')) - } - - [_rollbackCreateSparseTree] (er) { - process.emit('time', 'reify:rollback:createSparse') - // cut the roots of the sparse tree that were created, not the leaves - const roots = this[_sparseTreeRoots] - // also delete the moves that we retired, so that we can move them back - const failures = [] - const targets = [...roots, ...Object.keys(this[_retiredPaths])] - const unlinks = targets - .map(path => rimraf(path).catch(er => failures.push([path, er]))) - return promiseAllRejectLate(unlinks) - .then(() => { - if (failures.length) { - this.log.warn('cleanup', 'Failed to remove some directories', failures) - } - }) - .then(() => process.emit('timeEnd', 'reify:rollback:createSparse')) - .then(() => this[_rollbackRetireShallowNodes](er)) - } - - // shrinkwrap nodes define their dependency branches with a file, so - // we need to unpack them, read that shrinkwrap file, and then update - // the tree by calling loadVirtual with the node as the root. - [_loadShrinkwrapsAndUpdateTrees] () { - const seen = this[_shrinkwrapInflated] - const shrinkwraps = this.diff.leaves - .filter(d => (d.action === 'CHANGE' || d.action === 'ADD' || !d.action) && - d.ideal.hasShrinkwrap && !seen.has(d.ideal) && - !this[_trashList].has(d.ideal.path)) - - if (!shrinkwraps.length) { - return - } - - process.emit('time', 'reify:loadShrinkwraps') - - const Arborist = this.constructor - return promiseAllRejectLate(shrinkwraps.map(diff => { - const node = diff.ideal - seen.add(node) - return diff.action ? this[_reifyNode](node) : node - })) - .then(nodes => promiseAllRejectLate(nodes.map(node => new Arborist({ - ...this.options, - path: node.path, - }).loadVirtual({ root: node })))) - // reload the diff and sparse tree because the ideal tree changed - .then(() => this[_diffTrees]()) - .then(() => this[_createSparseTree]()) - .then(() => this[_addOmitsToTrashList]()) - .then(() => this[_loadShrinkwrapsAndUpdateTrees]()) - .then(() => process.emit('timeEnd', 'reify:loadShrinkwraps')) - } - - // create a symlink for Links, extract for Nodes - // return the node object, since we usually want that - // handle optional dep failures here - // If node is in trash list, skip it - // If reifying fails, and the node is optional, add it and its optionalSet - // to the trash list - // Always return the node. 
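/* Decision summary for _reifyNode below:
 *
 *   node.path in the trash list -> return the node untouched
 *   node.optional               -> run checkEngine/checkPlatform first,
 *                                  always non-forced, so an incompatible
 *                                  optional dep throws here and gets
 *                                  trashed instead of failing the install
 *   node.isLink                 -> symlink; anything else -> pacote.extract
 *
 * All failures funnel through _handleOptionalFailure: fatal for required
 * deps, trash-and-continue for an optional subtree. */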
- [_reifyNode] (node) { - if (this[_trashList].has(node.path)) { - return node - } - - const timer = `reifyNode:${node.location}` - process.emit('time', timer) - this.addTracker('reify', node.name, node.location) - - const { npmVersion, nodeVersion } = this.options - const p = Promise.resolve() - .then(async () => { - // when we reify an optional node, check the engine and platform - // first. be sure to ignore the --force and --engine-strict flags, - // since we always want to skip any optional packages we can't install. - // these checks throwing will result in a rollback and removal - // of the mismatches - if (node.optional) { - checkEngine(node.package, npmVersion, nodeVersion, false) - checkPlatform(node.package, false) - } - await this[_checkBins](node) - await this[_extractOrLink](node) - await this[_warnDeprecated](node) - }) - - return this[_handleOptionalFailure](node, p) - .then(() => { - this.finishTracker('reify', node.name, node.location) - process.emit('timeEnd', timer) - return node - }) - } - - // do not allow node_modules to be a symlink - async [_validateNodeModules] (nm) { - if (this[_force] || this[_nmValidated].has(nm)) { - return - } - const st = await lstat(nm).catch(() => null) - if (!st || st.isDirectory()) { - this[_nmValidated].add(nm) - return - } - this.log.warn('reify', 'Removing non-directory', nm) - await rimraf(nm) - } - - async [_extractOrLink] (node) { - // in normal cases, node.resolved should *always* be set by now. - // however, it is possible when a lockfile is damaged, or very old, - // or in some other race condition bugs in npm v6, that a previously - // bundled dependency will have just a version, but no resolved value, - // and no 'bundled: true' setting. - // Do the best with what we have, or else remove it from the tree - // entirely, since we can't possibly reify it. - const res = node.resolved ? `${node.name}@${this[_registryResolved](node.resolved)}` - : node.packageName && node.version - ? `${node.packageName}@${node.version}` - : null - - // no idea what this thing is. remove it from the tree. - if (!res) { - const warning = 'invalid or damaged lockfile detected\n' + - 'please re-try this operation once it completes\n' + - 'so that the damage can be corrected, or perform\n' + - 'a fresh install with no lockfile if the problem persists.' 
- this.log.warn('reify', warning) - this.log.verbose('reify', 'unrecognized node in tree', node.path) - node.parent = null - node.fsParent = null - this[_addNodeToTrashList](node) - return - } - - const nm = resolve(node.parent.path, 'node_modules') - await this[_validateNodeModules](nm) - - if (node.isLink) { - await rimraf(node.path) - await this[_symlink](node) - } else { - await debug(async () => { - const st = await lstat(node.path).catch(e => null) - if (st && !st.isDirectory()) { - debug.log('unpacking into a non-directory', node) - throw Object.assign(new Error('ENOTDIR: not a directory'), { - code: 'ENOTDIR', - path: node.path, - }) - } - }) - await pacote.extract(res, node.path, { - ...this.options, - resolved: node.resolved, - integrity: node.integrity, - }) - } - } - - async [_symlink] (node) { - const dir = dirname(node.path) - const target = node.realpath - const rel = relative(dir, target) - await mkdirp(dir) - return symlink(rel, node.path, 'junction') - } - - [_warnDeprecated] (node) { - const {_id, deprecated} = node.package - if (deprecated) { - this.log.warn('deprecated', `${_id}: ${deprecated}`) - } - } - - // if the node is optional, then the failure of the promise is nonfatal - // just add it and its optional set to the trash list. - [_handleOptionalFailure] (node, p) { - return (node.optional ? p.catch(er => { - const set = optionalSet(node) - for (node of set) { - this.log.verbose('reify', 'failed optional dependency', node.path) - this[_addNodeToTrashList](node) - } - }) : p).then(() => node) - } - - [_registryResolved] (resolved) { - // the default registry url is a magic value meaning "the currently - // configured registry". - // - // XXX: use a magic string that isn't also a valid value, like - // ${REGISTRY} or something. This has to be threaded through the - // Shrinkwrap and Node classes carefully, so for now, just treat - // the default reg as the magical animal that it has been. - return resolved && resolved - .replace(/^https?:\/\/registry.npmjs.org\//, this.registry) - } - - // bundles are *sort of* like shrinkwraps, in that the branch is defined - // by the contents of the package. however, in their case, rather than - // shipping a virtual tree that must be reified, they ship an entire - // reified actual tree that must be unpacked and not modified. - [_loadBundlesAndUpdateTrees] ( - depth = 0, bundlesByDepth = this[_getBundlesByDepth]() - ) { - if (depth === 0) { - process.emit('time', 'reify:loadBundles') - } - - const maxBundleDepth = bundlesByDepth.get('maxBundleDepth') - if (depth > maxBundleDepth) { - // if we did something, then prune the tree and update the diffs - if (maxBundleDepth !== -1) { - this[_pruneBundledMetadeps](bundlesByDepth) - this[_diffTrees]() - } - process.emit('timeEnd', 'reify:loadBundles') - return - } - - // skip any that have since been removed from the tree, eg by a - // shallower bundle overwriting them with a bundled meta-dep. 
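/* Shape of bundlesByDepth, built by _getBundlesByDepth below
 * (illustrative):
 *
 *   Map {
 *     'maxBundleDepth' => 2,
 *     1 => [ Node(a) ],           // a has bundleDependencies
 *     2 => [ Node(x), Node(y) ],
 *   }
 *
 * Levels are processed shallowest-first precisely because a shallower
 * bundle, once unpacked, can overwrite deeper nodes; hence the re-filter
 * against the current root and trash list at each level. */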
- const set = (bundlesByDepth.get(depth) || []) - .filter(node => node.root === this.idealTree && - node.target !== node.root && - !this[_trashList].has(node.path)) - - if (!set.length) { - return this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth) - } - - // extract all the nodes with bundles - return promiseAllRejectLate(set.map(node => { - this[_bundleUnpacked].add(node) - return this[_reifyNode](node) - })) - // then load their unpacked children and move into the ideal tree - .then(nodes => - promiseAllRejectLate(nodes.map(async node => { - const arb = new this.constructor({ - ...this.options, - path: node.path, - }) - const notTransplanted = new Set(node.children.keys()) - await arb.loadActual({ - root: node, - // don't transplant any sparse folders we created - // loadActual will set node.package to {} for empty directories - // if by chance there are some empty folders in the node_modules - // tree for some other reason, then ok, ignore those too. - transplantFilter: node => { - if (node.package._id) { - // it's actually in the bundle if it gets transplanted - notTransplanted.delete(node.name) - return true - } else { - return false - } - }, - }) - for (const name of notTransplanted) { - this[_bundleMissing].add(node.children.get(name)) - } - }))) - // move onto the next level of bundled items - .then(() => this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth)) - } - - [_getBundlesByDepth] () { - const bundlesByDepth = new Map() - let maxBundleDepth = -1 - dfwalk({ - tree: this.diff, - visit: diff => { - const node = diff.ideal - if (!node) { - return - } - if (node.isProjectRoot) { - return - } - - const { bundleDependencies } = node.package - if (bundleDependencies && bundleDependencies.length) { - maxBundleDepth = Math.max(maxBundleDepth, node.depth) - if (!bundlesByDepth.has(node.depth)) { - bundlesByDepth.set(node.depth, [node]) - } else { - bundlesByDepth.get(node.depth).push(node) - } - } - }, - getChildren: diff => diff.children, - }) - - bundlesByDepth.set('maxBundleDepth', maxBundleDepth) - return bundlesByDepth - } - - // https://github.com/npm/cli/issues/1597#issuecomment-667639545 - [_pruneBundledMetadeps] (bundlesByDepth) { - const bundleShadowed = new Set() - - // Example dep graph: - // root -> (a, c) - // a -> BUNDLE(b) - // b -> c - // c -> b - // - // package tree: - // root - // +-- a - // | +-- b(1) - // | +-- c(1) - // +-- b(2) - // +-- c(2) - // 1. mark everything that's shadowed by anything in the bundle. This - // marks b(2) and c(2). - // 2. anything with edgesIn from outside the set, mark not-extraneous, - // remove from set. This unmarks c(2). - // 3. continue until no change - // 4. remove everything in the set from the tree. 
b(2) is pruned - - // create the list of nodes shadowed by children of bundlers - for (const bundles of bundlesByDepth.values()) { - // skip the 'maxBundleDepth' item - if (!Array.isArray(bundles)) { - continue - } - for (const node of bundles) { - for (const name of node.children.keys()) { - const shadow = node.parent.resolve(name) - if (!shadow) { - continue - } - bundleShadowed.add(shadow) - shadow.extraneous = true - } - } - } - - // lib -> (a@1.x) BUNDLE(a@1.2.3 (b@1.2.3)) - // a@1.2.3 -> (b@1.2.3) - // a@1.3.0 -> (b@2) - // b@1.2.3 -> () - // b@2 -> (c@2) - // - // root - // +-- lib - // | +-- a@1.2.3 - // | +-- b@1.2.3 - // +-- b@2 <-- shadowed, now extraneous - // +-- c@2 <-- also shadowed, because only dependent is shadowed - for (const shadow of bundleShadowed) { - for (const shadDep of shadow.edgesOut.values()) { - /* istanbul ignore else - pretty unusual situation, just being - * defensive here. Would mean that a bundled dep has a dependency - * that is unmet. which, weird, but if you bundle it, we take - * whatever you put there and assume the publisher knows best. */ - if (shadDep.to) { - bundleShadowed.add(shadDep.to) - shadDep.to.extraneous = true - } - } - } - - let changed - do { - changed = false - for (const shadow of bundleShadowed) { - for (const edge of shadow.edgesIn) { - if (!bundleShadowed.has(edge.from)) { - shadow.extraneous = false - bundleShadowed.delete(shadow) - changed = true - break - } - } - } - } while (changed) - - for (const shadow of bundleShadowed) { - this[_addNodeToTrashList](shadow) - shadow.root = null - } - } - - [_submitQuickAudit] () { - if (this.options.audit === false) { - return this.auditReport = null - } - - // we submit the quick audit at this point in the process, as soon as - // we have all the deps resolved, so that it can overlap with the other - // actions as much as possible. Stash the promise, which we resolve - // before finishing the reify() and returning the tree. Thus, we do - // NOT return the promise, as the intent is for this to run in parallel - // with the reification, and be resolved at a later time. - process.emit('time', 'reify:audit') - const options = { ...this.options } - const tree = this.idealTree - - // if we're operating on a workspace, only audit the workspace deps - if (this[_workspaces] && this[_workspaces].length) { - options.filterSet = this.workspaceDependencySet( - tree, - this[_workspaces], - this[_includeWorkspaceRoot] - ) - } - - this.auditReport = AuditReport.load(tree, options) - .then(res => { - process.emit('timeEnd', 'reify:audit') - this.auditReport = res - }) - } - - // return the promise if we're waiting for it, or the replaced result - [_awaitQuickAudit] () { - return this.auditReport - } - - // ok! actually unpack stuff into their target locations! - // The sparse tree has already been created, so we walk the diff - // kicking off each unpack job. If any fail, we rimraf the sparse - // tree entirely and try to put everything back where it was. - [_unpackNewModules] () { - process.emit('time', 'reify:unpack') - const unpacks = [] - dfwalk({ - tree: this.diff, - visit: diff => { - // no unpacking if we don't want to change this thing - if (diff.action !== 'CHANGE' && diff.action !== 'ADD') { - return - } - - const node = diff.ideal - const bd = this[_bundleUnpacked].has(node) - const sw = this[_shrinkwrapInflated].has(node) - const bundleMissing = this[_bundleMissing].has(node) - - // check whether we still need to unpack this one. 
- // test the inDepBundle last, since that's potentially a tree walk. - const doUnpack = node && // can't unpack if removed! - // root node already exists - !node.isRoot && - // already unpacked to read bundle - !bd && - // already unpacked to read sw - !sw && - // already unpacked by another dep's bundle - (bundleMissing || !node.inDepBundle) - - if (doUnpack) { - unpacks.push(this[_reifyNode](node)) - } - }, - getChildren: diff => diff.children, - }) - return promiseAllRejectLate(unpacks) - .then(() => process.emit('timeEnd', 'reify:unpack')) - } - - // This is the part where we move back the unchanging nodes that were - // the children of a node that did change. If this fails, the rollback - // is a three-step process. First, we try to move the retired unchanged - // nodes BACK to their retirement folders, then delete the sparse tree, - // then move everything out of retirement. - [_moveBackRetiredUnchanged] () { - // get a list of all unchanging children of any shallow retired nodes - // if they are not the ancestor of any node in the diff set, then the - // directory won't already exist, so just rename it over. - // This is sort of an inverse diff tree, of all the nodes where - // the actualTree and idealTree _don't_ differ, starting from the - // shallowest nodes that we moved aside in the first place. - process.emit('time', 'reify:unretire') - const moves = this[_retiredPaths] - this[_retiredUnchanged] = {} - return promiseAllRejectLate(this.diff.children.map(diff => { - // skip if nothing was retired - if (diff.action !== 'CHANGE' && diff.action !== 'REMOVE') { - return - } - - const { path: realFolder } = diff.actual - const retireFolder = moves[realFolder] - /* istanbul ignore next - should be impossible */ - debug(() => { - if (!retireFolder) { - const er = new Error('trying to un-retire but not retired') - throw Object.assign(er, { - realFolder, - retireFolder, - actual: diff.actual, - ideal: diff.ideal, - action: diff.action, - }) - } - }) - - this[_retiredUnchanged][retireFolder] = [] - return promiseAllRejectLate(diff.unchanged.map(node => { - // no need to roll back links, since we'll just delete them anyway - if (node.isLink) { - return mkdirp(dirname(node.path)).then(() => this[_reifyNode](node)) - } - - // will have been moved/unpacked along with bundler - if (node.inDepBundle && !this[_bundleMissing].has(node)) { - return - } - - this[_retiredUnchanged][retireFolder].push(node) - - const rel = relative(realFolder, node.path) - const fromPath = resolve(retireFolder, rel) - // if it has bundleDependencies, then make node_modules. otherwise - // skip it. - const bd = node.package.bundleDependencies - const dir = bd && bd.length ? 
node.path + '/node_modules' : node.path - return mkdirp(dir).then(() => this[_moveContents](node, fromPath)) - })) - })) - .then(() => process.emit('timeEnd', 'reify:unretire')) - } - - // move the contents from the fromPath to the node.path - [_moveContents] (node, fromPath) { - return packageContents({ - path: fromPath, - depth: 1, - packageJsonCache: new Map([[fromPath + '/package.json', node.package]]), - }).then(res => promiseAllRejectLate(res.map(path => { - const rel = relative(fromPath, path) - const to = resolve(node.path, rel) - return this[_renamePath](path, to) - }))) - } - - [_rollbackMoveBackRetiredUnchanged] (er) { - const moves = this[_retiredPaths] - // flip the mapping around to go back - const realFolders = new Map(Object.entries(moves).map(([k, v]) => [v, k])) - const promises = Object.entries(this[_retiredUnchanged]) - .map(([retireFolder, nodes]) => promiseAllRejectLate(nodes.map(node => { - const realFolder = realFolders.get(retireFolder) - const rel = relative(realFolder, node.path) - const fromPath = resolve(retireFolder, rel) - return this[_moveContents]({ ...node, path: fromPath }, node.path) - }))) - return promiseAllRejectLate(promises) - .then(() => this[_rollbackCreateSparseTree](er)) - } - - [_build] () { - process.emit('time', 'reify:build') - - // for all the things being installed, run their appropriate scripts - // run in tip->root order, so as to be more likely to build a node's - // deps before attempting to build it itself - const nodes = [] - dfwalk({ - tree: this.diff, - leave: diff => { - if (!diff.ideal.isProjectRoot) { - nodes.push(diff.ideal) - } - }, - // process adds before changes, ignore removals - getChildren: diff => diff && diff.children, - filter: diff => diff.action === 'ADD' || diff.action === 'CHANGE', - }) - - // pick up link nodes from the unchanged list as we want to run their - // scripts in every install despite of having a diff status change - for (const node of this.diff.unchanged) { - const tree = node.root.target - - // skip links that only live within node_modules as they are most - // likely managed by packages we installed, we only want to rebuild - // unchanged links we directly manage - if (node.isLink && node.target.fsTop === tree) { - nodes.push(node) - } - } - - return this.rebuild({ nodes, handleOptionalFailure: true }) - .then(() => process.emit('timeEnd', 'reify:build')) - } - - // the tree is pretty much built now, so it's cleanup time. - // remove the retired folders, and any deleted nodes - // If this fails, there isn't much we can do but tell the user about it. - // Thankfully, it's pretty unlikely that it'll fail, since rimraf is a tank. - [_removeTrash] () { - process.emit('time', 'reify:trash') - const promises = [] - const failures = [] - const rm = path => rimraf(path).catch(er => failures.push([path, er])) - - for (const path of this[_trashList]) { - promises.push(rm(path)) - } - - return promiseAllRejectLate(promises).then(() => { - if (failures.length) { - this.log.warn('cleanup', 'Failed to remove some directories', failures) - } - }) - .then(() => process.emit('timeEnd', 'reify:trash')) - } - - // last but not least, we save the ideal tree metadata to the package-lock - // or shrinkwrap file, and any additions or removals to package.json - async [_saveIdealTree] (options) { - // the ideal tree is actualized now, hooray! - // it still contains all the references to optional nodes that were removed - // for install failures. 
Those still end up in the shrinkwrap, so we - // save it first, then prune out the optional trash, and then return it. - - // support save=false option - if (options.save === false || this[_global] || this[_dryRun]) { - return false - } - - process.emit('time', 'reify:save') - - const updatedTrees = new Set() - - // resolvedAdd is the list of user add requests, but with names added - // to things like git repos and tarball file/urls. However, if the - // user requested 'foo@', and we have a foo@file:../foo, then we should - // end up saving the spec we actually used, not whatever they gave us. - if (this[_resolvedAdd].length) { - for (const { name, tree: addTree } of this[_resolvedAdd]) { - // addTree either the root, or a workspace - const edge = addTree.edgesOut.get(name) - const pkg = addTree.package - const req = npa.resolve(name, edge.spec, addTree.realpath) - const {rawSpec, subSpec} = req - - const spec = subSpec ? subSpec.rawSpec : rawSpec - const child = edge.to - - // if we tried to install an optional dep, but it was a version - // that we couldn't resolve, this MAY be missing. if we haven't - // blown up by now, it's because it was not a problem, though, so - // just move on. - if (!child) { - continue - } - - let newSpec - if (req.registry) { - const version = child.version - const prefixRange = version ? this[_savePrefix] + version : '*' - // if we installed a range, then we save the range specified - // if it is not a subset of the ^x.y.z. eg, installing a range - // of `1.x <1.2.3` will not be saved as `^1.2.0`, because that - // would allow versions outside the requested range. Tags and - // specific versions save with the save-prefix. - const isRange = (subSpec || req).type === 'range' - - let range = spec - if ( - !isRange || - spec === '*' || - subset(prefixRange, spec, { loose: true }) - ) { - range = prefixRange - } - - const pname = child.packageName - const alias = name !== pname - newSpec = alias ? `npm:${pname}@${range}` : range - } else if (req.hosted) { - // save the git+https url if it has auth, otherwise shortcut - const h = req.hosted - const opt = { noCommittish: false } - if (h.https && h.auth) { - newSpec = `git+${h.https(opt)}` - } else { - newSpec = h.shortcut(opt) - } - } else if (req.type === 'directory' || req.type === 'file') { - // save the relative path in package.json - // Normally saveSpec is updated with the proper relative - // path already, but it's possible to specify a full absolute - // path initially, in which case we can end up with the wrong - // thing, so just get the ultimate fetchSpec and relativize it. 
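/* e.g. (illustrative, hedged): a dep added as /home/me/foo from a
 * project at /home/me/proj resolves to a fetchSpec of '/home/me/foo'
 * (possibly 'file:'-prefixed); stripping the prefix and relativizing
 * against addTree.realpath saves 'file:../foo', rather than baking an
 * absolute path into package.json. */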
- const p = req.fetchSpec.replace(/^file:/, '') - const rel = relpath(addTree.realpath, p) - newSpec = `file:${rel}` - } else { - newSpec = req.saveSpec - } - - if (options.saveType) { - const depType = saveTypeMap.get(options.saveType) - pkg[depType][name] = newSpec - // rpj will have moved it here if it was in both - // if it is empty it will be deleted later - if (options.saveType === 'prod' && pkg.optionalDependencies) { - delete pkg.optionalDependencies[name] - } - } else { - if (hasSubKey(pkg, 'dependencies', name)) { - pkg.dependencies[name] = newSpec - } - - if (hasSubKey(pkg, 'devDependencies', name)) { - pkg.devDependencies[name] = newSpec - // don't update peer or optional if we don't have to - if (hasSubKey(pkg, 'peerDependencies', name) && !intersects(newSpec, pkg.peerDependencies[name])) { - pkg.peerDependencies[name] = newSpec - } - - if (hasSubKey(pkg, 'optionalDependencies', name) && !intersects(newSpec, pkg.optionalDependencies[name])) { - pkg.optionalDependencies[name] = newSpec - } - } else { - if (hasSubKey(pkg, 'peerDependencies', name)) { - pkg.peerDependencies[name] = newSpec - } - - if (hasSubKey(pkg, 'optionalDependencies', name)) { - pkg.optionalDependencies[name] = newSpec - } - } - } - - updatedTrees.add(addTree) - } - } - - // preserve indentation, if possible - const { - [Symbol.for('indent')]: indent, - } = this.idealTree.package - const format = indent === undefined ? ' ' : indent - - const saveOpt = { - format: (this[_formatPackageLock] && format) ? format - : this[_formatPackageLock], - } - - const promises = [this[_saveLockFile](saveOpt)] - - const updatePackageJson = async (tree) => { - const pkgJson = await PackageJson.load(tree.path) - .catch(() => new PackageJson(tree.path)) - const { - dependencies = {}, - devDependencies = {}, - optionalDependencies = {}, - peerDependencies = {}, - } = tree.package - - pkgJson.update({ - dependencies, - devDependencies, - optionalDependencies, - peerDependencies, - }) - await pkgJson.save() - } - - // grab any from explicitRequests that had deps removed - for (const { from: tree } of this.explicitRequests) { - updatedTrees.add(tree) - } - - for (const tree of updatedTrees) { - // refresh the edges so they have the correct specs - tree.package = tree.package - promises.push(updatePackageJson(tree)) - } - - await Promise.all(promises) - process.emit('timeEnd', 'reify:save') - return true - } - - async [_saveLockFile] (saveOpt) { - if (!this[_usePackageLock]) { - return - } - - const { meta } = this.idealTree - - return meta.save(saveOpt) - } - - async [_copyIdealToActual] () { - // clean up any trash that is still in the tree - for (const path of this[_trashList]) { - const loc = relpath(this.idealTree.realpath, path) - const node = this.idealTree.inventory.get(loc) - if (node && node.root === this.idealTree) { - node.parent = null - } - } - - // if we filtered to only certain nodes, then anything ELSE needs - // to be untouched in the resulting actual tree, even if it differs - // in the idealTree. Copy over anything that was in the actual and - // was not changed, delete anything in the ideal and not actual. - // Then we move the entire idealTree over to this.actualTree, and - // save the hidden lockfile. 
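/* Illustrative case for the filtered handling below: after something
 * like `npm install --workspace=a`, diff.filterSet covers only a's dep
 * subtree. A node_modules/b that exists on disk but differs in the
 * idealTree was deliberately left alone, so its actual node must be
 * rerooted into the tree about to be saved; otherwise the hidden
 * lockfile would describe changes that were never written to disk. */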
- if (this.diff && this.diff.filterSet.size) { - const reroot = new Set() - - const { filterSet } = this.diff - const seen = new Set() - for (const [loc, ideal] of this.idealTree.inventory.entries()) { - seen.add(loc) - - // if it's an ideal node from the filter set, then skip it - // because we already made whatever changes were necessary - if (filterSet.has(ideal)) { - continue - } - - // otherwise, if it's not in the actualTree, then it's not a thing - // that we actually added. And if it IS in the actualTree, then - // it's something that we left untouched, so we need to record - // that. - const actual = this.actualTree.inventory.get(loc) - if (!actual) { - ideal.root = null - } else { - if ([...actual.linksIn].some(link => filterSet.has(link))) { - seen.add(actual.location) - continue - } - const { realpath, isLink } = actual - if (isLink && ideal.isLink && ideal.realpath === realpath) { - continue - } else { - reroot.add(actual) - } - } - } - - // now find any actual nodes that may not be present in the ideal - // tree, but were left behind by virtue of not being in the filter - for (const [loc, actual] of this.actualTree.inventory.entries()) { - if (seen.has(loc)) { - continue - } - seen.add(loc) - - // we know that this is something that ISN'T in the idealTree, - // or else we will have addressed it in the previous loop. - // If it's in the filterSet, that means we intentionally removed - // it, so nothing to do here. - if (filterSet.has(actual)) { - continue - } - - reroot.add(actual) - } - - // go through the rerooted actual nodes, and move them over. - for (const actual of reroot) { - actual.root = this.idealTree - } - - // prune out any tops that lack a linkIn, they are no longer relevant. - for (const top of this.idealTree.tops) { - if (top.linksIn.size === 0) { - top.root = null - } - } - - // need to calculate dep flags, since nodes may have been marked - // as extraneous or otherwise incorrect during transit. 
- calcDepFlags(this.idealTree) - } - - // save the ideal's meta as a hidden lockfile after we actualize it - this.idealTree.meta.filename = - this.idealTree.realpath + '/node_modules/.package-lock.json' - this.idealTree.meta.hiddenLockfile = true - - this.actualTree = this.idealTree - this.idealTree = null - - if (!this[_global]) { - await this.actualTree.meta.save() - } - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/audit-report.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/audit-report.js deleted file mode 100644 index de97cdc29e3a5..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/audit-report.js +++ /dev/null @@ -1,416 +0,0 @@ -// an object representing the set of vulnerabilities in a tree -/* eslint camelcase: "off" */ - -const localeCompare = require('@isaacs/string-locale-compare')('en') -const npa = require('npm-package-arg') -const pickManifest = require('npm-pick-manifest') - -const Vuln = require('./vuln.js') -const Calculator = require('@npmcli/metavuln-calculator') - -const _getReport = Symbol('getReport') -const _fixAvailable = Symbol('fixAvailable') -const _checkTopNode = Symbol('checkTopNode') -const _init = Symbol('init') -const _omit = Symbol('omit') -const procLog = require('proc-log') - -const fetch = require('npm-registry-fetch') - -class AuditReport extends Map { - static load (tree, opts) { - return new AuditReport(tree, opts).run() - } - - get auditReportVersion () { - return 2 - } - - toJSON () { - const obj = { - auditReportVersion: this.auditReportVersion, - vulnerabilities: {}, - metadata: { - vulnerabilities: { - info: 0, - low: 0, - moderate: 0, - high: 0, - critical: 0, - total: this.size, - }, - dependencies: { - prod: 0, - dev: 0, - optional: 0, - peer: 0, - peerOptional: 0, - total: this.tree.inventory.size - 1, - }, - }, - } - - for (const node of this.tree.inventory.values()) { - const { dependencies } = obj.metadata - let prod = true - for (const type of [ - 'dev', - 'optional', - 'peer', - 'peerOptional', - ]) { - if (node[type]) { - dependencies[type]++ - prod = false - } - } - if (prod) { - dependencies.prod++ - } - } - - // if it doesn't have any topVulns, then it's fixable with audit fix - // for each topVuln, figure out if it's fixable with audit fix --force, - // or if we have to just delete the thing, and if the fix --force will - // require a semver major update. 
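/* Abbreviated shape of the v2 report assembled here; the per-vuln fields
 * come from vuln.toJSON() in vuln.js, which is outside this hunk, so
 * treat that field list as a hedged sketch:
 *
 *   {
 *     auditReportVersion: 2,
 *     vulnerabilities: {
 *       <name>: { name, severity, via, range, nodes, fixAvailable, ... },
 *     },
 *     metadata: {
 *       vulnerabilities: { info, low, moderate, high, critical, total },
 *       dependencies: { prod, dev, optional, peer, peerOptional, total },
 *     },
 *   }
 */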
- const vulnerabilities = [] - for (const [name, vuln] of this.entries()) { - vulnerabilities.push([name, vuln.toJSON()]) - obj.metadata.vulnerabilities[vuln.severity]++ - } - - obj.vulnerabilities = vulnerabilities - .sort(([a], [b]) => localeCompare(a, b)) - .reduce((set, [name, vuln]) => { - set[name] = vuln - return set - }, {}) - - return obj - } - - constructor (tree, opts = {}) { - super() - const { omit } = opts - this[_omit] = new Set(omit || []) - this.topVulns = new Map() - - this.calculator = new Calculator(opts) - this.error = null - this.options = opts - this.log = opts.log || procLog - this.tree = tree - this.filterSet = opts.filterSet - } - - async run () { - this.report = await this[_getReport]() - this.log.silly('audit report', this.report) - if (this.report) { - await this[_init]() - } - return this - } - - isVulnerable (node) { - const vuln = this.get(node.packageName) - return !!(vuln && vuln.isVulnerable(node)) - } - - async [_init] () { - process.emit('time', 'auditReport:init') - - const promises = [] - for (const [name, advisories] of Object.entries(this.report)) { - for (const advisory of advisories) { - promises.push(this.calculator.calculate(name, advisory)) - } - } - - // now the advisories are calculated with a set of versions - // and the packument. turn them into our style of vuln objects - // which also have the affected nodes, and also create entries - // for all the metavulns that we find from dependents. - const advisories = new Set(await Promise.all(promises)) - const seen = new Set() - for (const advisory of advisories) { - const { name, range } = advisory - - // don't flag the exact same name/range more than once - // adding multiple advisories with the same range is fine, but no - // need to search for nodes we already would have added. - const k = `${name}@${range}` - if (seen.has(k)) { - continue - } - - seen.add(k) - - const vuln = this.get(name) || new Vuln({ name, advisory }) - if (this.has(name)) { - vuln.addAdvisory(advisory) - } - super.set(name, vuln) - - const p = [] - for (const node of this.tree.inventory.query('packageName', name)) { - if (!shouldAudit(node, this[_omit], this.filterSet)) { - continue - } - - // if not vulnerable by this advisory, keep searching - if (!advisory.testVersion(node.version)) { - continue - } - - // we will have loaded the source already if this is a metavuln - if (advisory.type === 'metavuln') { - vuln.addVia(this.get(advisory.dependency)) - } - - // already marked this one, no need to do it again - if (vuln.nodes.has(node)) { - continue - } - - // haven't marked this one yet. get its dependents. - vuln.nodes.add(node) - for (const { from: dep, spec } of node.edgesIn) { - if (dep.isTop && !vuln.topNodes.has(dep)) { - this[_checkTopNode](dep, vuln, spec) - } else { - // calculate a metavuln, if necessary - const calc = this.calculator.calculate(dep.packageName, advisory) - p.push(calc.then(meta => { - if (meta.testVersion(dep.version, spec)) { - advisories.add(meta) - } - })) - } - } - } - await Promise.all(p) - - // make sure we actually got something. if not, remove it - // this can happen if you are loading from a lockfile created by - // npm v5, since it lists the current version of all deps, - // rather than the range that is actually depended upon, - // or if using --omit with the older audit endpoint. 
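/* Context for the metavuln handling above: a metavuln is a package that
 * is only transitively vulnerable, because every version of it in the
 * flagged range depends on an advisory's vulnerable range. The
 * calculator emits it as a synthetic advisory (type 'metavuln') that is
 * pushed back into `advisories` and walked like any registry advisory,
 * so the vulnerability propagates up to the direct dependencies. */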
- if (this.get(name).nodes.size === 0) { - this.delete(name) - continue - } - - // if the vuln is valid, but THIS advisory doesn't apply to any of - // the nodes it references, then remove it from the advisory list. - // happens when using omit with old audit endpoint. - for (const advisory of vuln.advisories) { - const relevant = [...vuln.nodes] - .some(n => advisory.testVersion(n.version)) - if (!relevant) { - vuln.deleteAdvisory(advisory) - } - } - } - process.emit('timeEnd', 'auditReport:init') - } - - [_checkTopNode] (topNode, vuln, spec) { - vuln.fixAvailable = this[_fixAvailable](topNode, vuln, spec) - - if (vuln.fixAvailable !== true) { - // now we know the top node is vulnerable, and cannot be - // upgraded out of the bad place without --force. But, there's - // no need to add it to the actual vulns list, because nothing - // depends on root. - this.topVulns.set(vuln.name, vuln) - vuln.topNodes.add(topNode) - } - } - - // check whether the top node is vulnerable. - // check whether we can get out of the bad place with --force, and if - // so, whether that update is SemVer Major - [_fixAvailable] (topNode, vuln, spec) { - // this will always be set to at least {name, versions:{}} - const paku = vuln.packument - - if (!vuln.testSpec(spec)) { - return true - } - - // similarly, even if we HAVE a packument, but we're looking for it - // somewhere other than the registry, and we got something vulnerable, - // then we're stuck with it. - const specObj = npa(spec) - if (!specObj.registry) { - return false - } - - if (specObj.subSpec) { - spec = specObj.subSpec.rawSpec - } - - // We don't provide fixes for top nodes other than root, but we - // still check to see if the node is fixable with a different version, - // and if that is a semver major bump. 
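/* npm-pick-manifest options used below:
 *
 *   avoid: vuln.range  -> never pick a version inside the vulnerable range
 *   avoidStrict: true  -> throw rather than fall back to an avoided version
 *   before: null       -> ignore any date pinning for this one check
 *
 * On success, _isSemVerMajor on the chosen manifest says whether escaping
 * the vulnerability requires a breaking upgrade; the {name, version,
 * isSemVerMajor} result is what surfaces as fixAvailable. */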
- try { - const { - _isSemVerMajor: isSemVerMajor, - version, - name, - } = pickManifest(paku, spec, { - ...this.options, - before: null, - avoid: vuln.range, - avoidStrict: true, - }) - return {name, version, isSemVerMajor} - } catch (er) { - return false - } - } - - set () { - throw new Error('do not call AuditReport.set() directly') - } - - // convert a quick-audit into a bulk advisory listing - static auditToBulk (report) { - if (!report.advisories) { - // tack on the report json where the response body would go - throw Object.assign(new Error('Invalid advisory report'), { - body: JSON.stringify(report), - }) - } - - const bulk = {} - const {advisories} = report - for (const advisory of Object.values(advisories)) { - const { - id, - url, - title, - severity = 'high', - vulnerable_versions = '*', - module_name: name, - } = advisory - bulk[name] = bulk[name] || [] - bulk[name].push({id, url, title, severity, vulnerable_versions}) - } - - return bulk - } - - async [_getReport] () { - // if we're not auditing, just return false - if (this.options.audit === false || this.tree.inventory.size === 1) { - return null - } - - process.emit('time', 'auditReport:getReport') - try { - try { - // first try the super fast bulk advisory listing - const body = prepareBulkData(this.tree, this[_omit], this.filterSet) - this.log.silly('audit', 'bulk request', body) - - // no sense asking if we don't have anything to audit, - // we know it'll be empty - if (!Object.keys(body).length) { - return null - } - - const res = await fetch('/-/npm/v1/security/advisories/bulk', { - ...this.options, - registry: this.options.auditRegistry || this.options.registry, - method: 'POST', - gzip: true, - body, - }) - - return await res.json() - } catch (er) { - this.log.silly('audit', 'bulk request failed', String(er.body)) - // that failed, try the quick audit endpoint - const body = prepareData(this.tree, this.options) - const res = await fetch('/-/npm/v1/security/audits/quick', { - ...this.options, - registry: this.options.auditRegistry || this.options.registry, - method: 'POST', - gzip: true, - body, - }) - return AuditReport.auditToBulk(await res.json()) - } - } catch (er) { - this.log.verbose('audit error', er) - this.log.silly('audit error', String(er.body)) - this.error = er - return null - } finally { - process.emit('timeEnd', 'auditReport:getReport') - } - } -} - -// return true if we should audit this one -const shouldAudit = (node, omit, filterSet) => - !node.version ? false - : node.isRoot ? false - : filterSet && filterSet.size !== 0 && !filterSet.has(node) ? false - : omit.size === 0 ? 
true - : !( // otherwise, just ensure we're not omitting this one - node.dev && omit.has('dev') || - node.optional && omit.has('optional') || - node.devOptional && omit.has('dev') && omit.has('optional') || - node.peer && omit.has('peer') - ) - -const prepareBulkData = (tree, omit, filterSet) => { - const payload = {} - for (const name of tree.inventory.query('packageName')) { - const set = new Set() - for (const node of tree.inventory.query('packageName', name)) { - if (!shouldAudit(node, omit, filterSet)) { - continue - } - - set.add(node.version) - } - if (set.size) { - payload[name] = [...set] - } - } - return payload -} - -const prepareData = (tree, opts) => { - const { npmVersion: npm_version } = opts - const node_version = process.version - const { platform, arch } = process - const { NODE_ENV: node_env } = process.env - const data = tree.meta.commit() - // the legacy audit endpoint doesn't support any kind of pre-filtering - // we just have to get the advisories and skip over them in the report - return { - name: data.name, - version: data.version, - requires: { - ...(tree.package.devDependencies || {}), - ...(tree.package.peerDependencies || {}), - ...(tree.package.optionalDependencies || {}), - ...(tree.package.dependencies || {}), - }, - dependencies: data.dependencies, - metadata: { - node_version, - npm_version, - platform, - arch, - node_env, - }, - } -} - -module.exports = AuditReport diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/calc-dep-flags.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/calc-dep-flags.js deleted file mode 100644 index 95ecc8a617b08..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/calc-dep-flags.js +++ /dev/null @@ -1,115 +0,0 @@ -const { depth } = require('treeverse') - -const calcDepFlags = (tree, resetRoot = true) => { - if (resetRoot) { - tree.dev = false - tree.optional = false - tree.devOptional = false - tree.peer = false - } - const ret = depth({ - tree, - visit: node => calcDepFlagsStep(node), - filter: node => node, - getChildren: (node, tree) => - [...tree.edgesOut.values()].map(edge => edge.to), - }) - return ret -} - -const calcDepFlagsStep = (node) => { - // This rewalk is necessary to handle cases where devDep and optional - // or normal dependency graphs overlap deep in the dep graph. - // Since we're only walking through deps that are not already flagged - // as non-dev/non-optional, it's typically a very shallow traversal - node.extraneous = false - resetParents(node, 'extraneous') - resetParents(node, 'dev') - resetParents(node, 'peer') - resetParents(node, 'devOptional') - resetParents(node, 'optional') - - // for links, map their hierarchy appropriately - if (node.isLink) { - node.target.dev = node.dev - node.target.optional = node.optional - node.target.devOptional = node.devOptional - node.target.peer = node.peer - return calcDepFlagsStep(node.target) - } - - node.edgesOut.forEach(({peer, optional, dev, to}) => { - // if the dep is missing, then its flags are already maximally unset - if (!to) { - return - } - - // everything with any kind of edge into it is not extraneous - to.extraneous = false - - // devOptional is the *overlap* of the dev and optional tree. - // however, for convenience and to save an extra rewalk, we leave - // it set when we are in *either* tree, and then omit it from the - // package-lock if either dev or optional are set. 
- const unsetDevOpt = !node.devOptional && !node.dev && !node.optional && - !dev && !optional - - // if we are not in the devOpt tree, then we're also not in - // either the dev or opt trees - const unsetDev = unsetDevOpt || !node.dev && !dev - const unsetOpt = unsetDevOpt || - !node.optional && !optional - const unsetPeer = !node.peer && !peer - - if (unsetPeer) { - unsetFlag(to, 'peer') - } - - if (unsetDevOpt) { - unsetFlag(to, 'devOptional') - } - - if (unsetDev) { - unsetFlag(to, 'dev') - } - - if (unsetOpt) { - unsetFlag(to, 'optional') - } - }) - - return node -} - -const resetParents = (node, flag) => { - if (node[flag]) { - return - } - - for (let p = node; p && (p === node || p[flag]); p = p.resolveParent) { - p[flag] = false - } -} - -// typically a short walk, since it only traverses deps that -// have the flag set. -const unsetFlag = (node, flag) => { - if (node[flag]) { - node[flag] = false - depth({ - tree: node, - visit: node => { - node.extraneous = node[flag] = false - if (node.isLink) { - node.target.extraneous = node.target[flag] = false - } - }, - getChildren: node => [...node.target.edgesOut.values()] - .filter(edge => edge.to && edge.to[flag] && - (flag !== 'peer' && edge.type === 'peer' || edge.type === 'prod')) - .map(edge => edge.to), - }) - } -} - -module.exports = calcDepFlags diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/can-place-dep.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/can-place-dep.js deleted file mode 100644 index 6be59093c034f..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/can-place-dep.js +++ /dev/null @@ -1,431 +0,0 @@ -// Internal methods used by buildIdealTree. -// Answer the question: "can I put this dep here?" -// -// IMPORTANT: *nothing* in this class should *ever* modify or mutate the tree -// at all. The contract here is strictly limited to read operations. We call -// this in the process of walking through the ideal tree checking many -// different potential placement targets for a given node. If a change is made -// to the tree along the way, that can cause serious problems! -// -// In order to enforce this restriction, in debug mode, canPlaceDep() will -// snapshot the tree at the start of the process, and then at the end, will -// verify that it still matches the snapshot, and throw an error if any changes -// occurred. -// -// The algorithm is roughly like this: -// - check the node itself: -// - if there is no version present, and no conflicting edges from target, -// OK, provided all peers can be placed at or above the target. -// - if the current version matches, KEEP -// - if there is an older version present, which can be replaced, then -// - if satisfying and preferDedupe? KEEP -// - else: REPLACE -// - if there is a newer version present, and preferDedupe, REPLACE -// - if the version present satisfies the edge, KEEP -// - else: CONFLICT -// - if the node is not in conflict, check each of its peers: -// - if the peer can be placed in the target, continue -// - else if the peer can be placed in a parent, and there is no other -// conflicting version shadowing it, continue -// - else CONFLICT -// - If the peers are not in conflict, return the original node's value -// -// An exception to this logic is that if the target is the deepest location -// that a node can be placed, and the conflicting node can be placed deeper, -// then we will return REPLACE rather than CONFLICT, and Arborist will queue -// the replaced node for resolution elsewhere. 
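// [Editorial note] A minimal standalone sketch of the decision table described
// in the comment above, assuming plain semver version strings and ranges. The
// function `decidePlacement` and its arguments are hypothetical illustrations,
// not part of Arborist's API; the real checkCanPlace* methods below also weigh
// peers, targets, and edge overrides.
const semver = require('semver')
const decidePlacement = (current, candidate, range, preferDedupe) => {
  if (!current) return 'OK'                 // empty slot: just place it
  if (current === candidate) return 'KEEP'  // exact version already present
  if (semver.gt(candidate, current)) {      // current is older than candidate
    // replaceable older version: keep it only when deduping and it satisfies
    return preferDedupe && semver.satisfies(current, range) ? 'KEEP' : 'REPLACE'
  }
  if (preferDedupe) return 'REPLACE'        // newer present: dedupe to older
  if (semver.satisfies(current, range)) return 'KEEP'
  return 'CONFLICT'
}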
- -const localeCompare = require('@isaacs/string-locale-compare')('en') -const semver = require('semver') -const debug = require('./debug.js') -const peerEntrySets = require('./peer-entry-sets.js') -const deepestNestingTarget = require('./deepest-nesting-target.js') - -const CONFLICT = Symbol('CONFLICT') -const OK = Symbol('OK') -const REPLACE = Symbol('REPLACE') -const KEEP = Symbol('KEEP') - -class CanPlaceDep { - // dep is a dep that we're trying to place. it should already live in - // a virtual tree where its peer set is loaded as children of the root. - // target is the actual place where we're trying to place this dep - // in a node_modules folder. - // edge is the edge that we're trying to satisfy with this placement. - // parent is the CanPlaceDep object of the entry node when placing a peer. - constructor (options) { - const { - dep, - target, - edge, - preferDedupe, - parent = null, - peerPath = [], - explicitRequest = false, - } = options - - debug(() => { - if (!dep) { - throw new Error('no dep provided to CanPlaceDep') - } - - if (!target) { - throw new Error('no target provided to CanPlaceDep') - } - - if (!edge) { - throw new Error('no edge provided to CanPlaceDep') - } - - this._treeSnapshot = JSON.stringify([...target.root.inventory.entries()] - .map(([loc, {packageName, version, resolved}]) => { - return [loc, packageName, version, resolved] - }).sort(([a], [b]) => localeCompare(a, b))) - }) - - // the result of whether we can place it or not - this.canPlace = null - // if peers conflict, but this one doesn't, then that is useful info - this.canPlaceSelf = null - - this.dep = dep - this.target = target - this.edge = edge - this.explicitRequest = explicitRequest - - // preventing cycles when we check peer sets - this.peerPath = peerPath - // we always prefer to dedupe peers, because they are trying - // a bit harder to be singletons. - this.preferDedupe = !!preferDedupe || edge.peer - this.parent = parent - this.children = [] - - this.isSource = target === this.peerSetSource - this.name = edge.name - this.current = target.children.get(this.name) - this.targetEdge = target.edgesOut.get(this.name) - this.conflicts = new Map() - - // check if this dep was already subject to a peerDep override while - // building the peerSet. - this.edgeOverride = !dep.satisfies(edge) - - this.canPlace = this.checkCanPlace() - if (!this.canPlaceSelf) { - this.canPlaceSelf = this.canPlace - } - - debug(() => { - const treeSnapshot = JSON.stringify([...target.root.inventory.entries()] - .map(([loc, {packageName, version, resolved}]) => { - return [loc, packageName, version, resolved] - }).sort(([a], [b]) => localeCompare(a, b))) - /* istanbul ignore if */ - if (this._treeSnapshot !== treeSnapshot) { - throw Object.assign(new Error('tree changed in CanPlaceDep'), { - expect: this._treeSnapshot, - actual: treeSnapshot, - }) - } - }) - } - - checkCanPlace () { - const { target, targetEdge, current, dep } = this - - // if the dep failed to load, we're going to fail the build or - // prune it out anyway, so just move forward placing/replacing it. - if (dep.errors.length) { - return current ? REPLACE : OK - } - - // cannot place peers inside their dependents, except for tops - if (targetEdge && targetEdge.peer && !target.isTop) { - return CONFLICT - } - - if (targetEdge && !dep.satisfies(targetEdge) && targetEdge !== this.edge) { - return CONFLICT - } - - return current ? 
this.checkCanPlaceCurrent() : this.checkCanPlaceNoCurrent() - } - - // we know that the target has a dep by this name in its node_modules - // already. Can return KEEP, REPLACE, or CONFLICT. - checkCanPlaceCurrent () { - const { preferDedupe, explicitRequest, current, target, edge, dep } = this - - if (dep.matches(current)) { - if (current.satisfies(edge) || this.edgeOverride) { - return explicitRequest ? REPLACE : KEEP - } - } - - const { version: curVer } = current - const { version: newVer } = dep - const tryReplace = curVer && newVer && semver.gte(newVer, curVer) - if (tryReplace && dep.canReplace(current)) { - /* XXX-istanbul ignore else - It's extremely rare that a replaceable - * node would be a conflict, if the current one wasn't a conflict, - * but it is theoretically possible if peer deps are pinned. In - * that case we treat it like any other conflict, and keep trying */ - const cpp = this.canPlacePeers(REPLACE) - if (cpp !== CONFLICT) { - return cpp - } - } - - // ok, can't replace the current with new one, but maybe current is ok? - if (current.satisfies(edge) && (!explicitRequest || preferDedupe)) { - return KEEP - } - - // if we prefer deduping, then try replacing newer with older - if (preferDedupe && !tryReplace && dep.canReplace(current)) { - const cpp = this.canPlacePeers(REPLACE) - if (cpp !== CONFLICT) { - return cpp - } - } - - // Check for interesting cases! - // First, is this the deepest place that this thing can go, and NOT the - // deepest place where the conflicting dep can go? If so, replace it, - // and let it re-resolve deeper in the tree. - const myDeepest = this.deepestNestingTarget - - // ok, i COULD be placed deeper, so leave the current one alone. - if (target !== myDeepest) { - return CONFLICT - } - - // if we are not checking a peerDep, then we MUST place it here, in the - // target that has a non-peer dep on it. - if (!edge.peer && target === edge.from) { - return this.canPlacePeers(REPLACE) - } - - // if we aren't placing a peer in a set, then we're done here. - // This is ignored because it SHOULD be redundant, as far as I can tell, - // with the deepest target and target===edge.from tests. But until we - // can prove that isn't possible, this condition is here for safety. - /* istanbul ignore if - allegedly impossible */ - if (!this.parent && !edge.peer) { - return CONFLICT - } - - // check the deps in the peer group for each edge into that peer group - // if ALL of them can be pushed deeper, or if it's ok to replace its - // members with the contents of the new peer group, then we're good. - let canReplace = true - for (const [entryEdge, currentPeers] of peerEntrySets(current)) { - if (entryEdge === this.edge || entryEdge === this.peerEntryEdge) { - continue - } - - // First, see if it's ok to just replace the peerSet entirely. - // we do this by walking out from the entryEdge, because in a case like - // this: - // - // v -> PEER(a@1||2) - // a@1 -> PEER(b@1) - // a@2 -> PEER(b@2) - // b@1 -> PEER(a@1) - // b@2 -> PEER(a@2) - // - // root - // +-- v - // +-- a@2 - // +-- b@2 - // - // Trying to place a peer group of (a@1, b@1) would fail to note that - // they can be replaced, if we did it by looping 1 by 1. If we are - // replacing something, we don't have to check its peer deps, because - // the peerDeps in the placed peerSet will presumably satisfy. 
- const entryNode = entryEdge.to - const entryRep = dep.parent.children.get(entryNode.name) - if (entryRep) { - if (entryRep.canReplace(entryNode, dep.parent.children.keys())) { - continue - } - } - - let canClobber = !entryRep - if (!entryRep) { - const peerReplacementWalk = new Set([entryNode]) - OUTER: for (const currentPeer of peerReplacementWalk) { - for (const edge of currentPeer.edgesOut.values()) { - if (!edge.peer || !edge.valid) { - continue - } - const rep = dep.parent.children.get(edge.name) - if (!rep) { - if (edge.to) { - peerReplacementWalk.add(edge.to) - } - continue - } - if (!rep.satisfies(edge)) { - canClobber = false - break OUTER - } - } - } - } - if (canClobber) { - continue - } - - // ok, we can't replace, but maybe we can nest the current set deeper? - let canNestCurrent = true - for (const currentPeer of currentPeers) { - if (!canNestCurrent) { - break - } - - // still possible to nest this peerSet - const curDeep = deepestNestingTarget(entryEdge.from, currentPeer.name) - if (curDeep === target || target.isDescendantOf(curDeep)) { - canNestCurrent = false - canReplace = false - } - if (canNestCurrent) { - continue - } - } - } - - // if we can nest or replace all the current peer groups, we can replace. - if (canReplace) { - return this.canPlacePeers(REPLACE) - } - - return CONFLICT - } - - checkCanPlaceNoCurrent () { - const { target, peerEntryEdge, dep, name } = this - - // check to see what that name resolves to here, and who may depend on - // being able to reach it by crawling up past the parent. we know - // that it's not the target's direct child node, and if it was a direct - // dep of the target, we would have conflicted earlier. - const current = target !== peerEntryEdge.from && target.resolve(name) - if (current) { - for (const edge of current.edgesIn.values()) { - if (edge.from.isDescendantOf(target) && edge.valid) { - if (!dep.satisfies(edge)) { - return CONFLICT - } - } - } - } - - // no objections, so this is fine as long as peers are ok here. - return this.canPlacePeers(OK) - } - - get deepestNestingTarget () { - const start = this.parent ? this.parent.deepestNestingTarget - : this.edge.from - return deepestNestingTarget(start, this.name) - } - - get conflictChildren () { - return this.allChildren.filter(c => c.canPlace === CONFLICT) - } - - get allChildren () { - const set = new Set(this.children) - for (const child of set) { - for (const grandchild of child.children) { - set.add(grandchild) - } - } - return [...set] - } - - get top () { - return this.parent ? this.parent.top : this - } - - // check if peers can go here. returns state or CONFLICT - canPlacePeers (state) { - this.canPlaceSelf = state - if (this._canPlacePeers) { - return this._canPlacePeers - } - - // TODO: represent peerPath in ERESOLVE error somehow? - const peerPath = [...this.peerPath, this.dep] - let sawConflict = false - for (const peerEdge of this.dep.edgesOut.values()) { - if (!peerEdge.peer || !peerEdge.to || peerPath.includes(peerEdge.to)) { - continue - } - const peer = peerEdge.to - // it may be the case that the *initial* dep can be nested, but a peer - // of that dep needs to be placed shallower, because the target has - // a peer dep on the peer as well. 
- const target = deepestNestingTarget(this.target, peer.name) - const cpp = new CanPlaceDep({ - dep: peer, - target, - parent: this, - edge: peerEdge, - peerPath, - // always place peers in preferDedupe mode - preferDedupe: true, - }) - /* istanbul ignore next */ - debug(() => { - if (this.children.some(c => c.dep === cpp.dep)) { - throw new Error('checking same dep repeatedly') - } - }) - this.children.push(cpp) - - if (cpp.canPlace === CONFLICT) { - sawConflict = true - } - } - - this._canPlacePeers = sawConflict ? CONFLICT : state - return this._canPlacePeers - } - - // what is the node that is causing this peerSet to be placed? - get peerSetSource () { - return this.parent ? this.parent.peerSetSource : this.edge.from - } - - get peerEntryEdge () { - return this.top.edge - } - - static get CONFLICT () { - return CONFLICT - } - - static get OK () { - return OK - } - - static get REPLACE () { - return REPLACE - } - - static get KEEP () { - return KEEP - } - - get description () { - const { canPlace } = this - return canPlace && canPlace.description || - /* istanbul ignore next - old node affordance */ canPlace - } -} - -module.exports = CanPlaceDep diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/case-insensitive-map.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/case-insensitive-map.js deleted file mode 100644 index 016ce6017b01e..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/case-insensitive-map.js +++ /dev/null @@ -1,50 +0,0 @@ -// package children are represented with a Map object, but many file systems -// are case-insensitive and unicode-normalizing, so we need to treat -// node.children.get('FOO') and node.children.get('foo') as the same thing. - -const _keys = Symbol('keys') -const _normKey = Symbol('normKey') -const normalize = s => s.normalize('NFKD').toLowerCase() -const OGMap = Map -module.exports = class Map extends OGMap { - constructor (items = []) { - super() - this[_keys] = new OGMap() - for (const [key, val] of items) { - this.set(key, val) - } - } - - [_normKey] (key) { - return typeof key === 'string' ? normalize(key) : key - } - - get (key) { - const normKey = this[_normKey](key) - return this[_keys].has(normKey) ? super.get(this[_keys].get(normKey)) - : undefined - } - - set (key, val) { - const normKey = this[_normKey](key) - if (this[_keys].has(normKey)) { - super.delete(this[_keys].get(normKey)) - } - this[_keys].set(normKey, key) - return super.set(key, val) - } - - delete (key) { - const normKey = this[_normKey](key) - if (this[_keys].has(normKey)) { - const prevKey = this[_keys].get(normKey) - this[_keys].delete(normKey) - return super.delete(prevKey) - } - } - - has (key) { - const normKey = this[_normKey](key) - return this[_keys].has(normKey) && super.has(this[_keys].get(normKey)) - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/consistent-resolve.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/consistent-resolve.js deleted file mode 100644 index e34e40a46d002..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/consistent-resolve.js +++ /dev/null @@ -1,39 +0,0 @@ -// take a path and a resolved value, and turn it into a resolution from -// the given new path. This is used with converting a package.json's -// relative file: path into one suitable for a lockfile, or between -// lockfiles, and for converting hosted git repos to a consistent url type. 
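// [Editorial note] Hypothetical before/after pairs for the conversion the
// comment above describes; the exact output strings depend on
// npm-package-arg's parsing, so these are plausible illustrations rather
// than verified output.
//   consistentResolve('file:../shared/dep', '/work/pkg-a', '/work/pkg-b', true)
//     -> a 'file:' spec re-relativized from pkg-a's location to pkg-b's
//   consistentResolve('github:user/repo#deadbeef', '/work/pkg-a')
//     -> a 'git+https://...' or 'git+ssh://...' URL with the committish kept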
-const npa = require('npm-package-arg') -const relpath = require('./relpath.js') -const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => { - if (!resolved) { - return null - } - - try { - const hostedOpt = { noCommittish: false } - const { - fetchSpec, - saveSpec, - type, - hosted, - rawSpec, - raw, - } = npa(resolved, fromPath) - const isPath = type === 'file' || type === 'directory' - return isPath && !relPaths ? `file:${fetchSpec}` - : isPath ? 'file:' + (toPath ? relpath(toPath, fetchSpec) : fetchSpec) - : hosted ? `git+${ - hosted.auth ? hosted.https(hostedOpt) : hosted.sshurl(hostedOpt) - }` - : type === 'git' ? saveSpec - // always return something. 'foo' is interpreted as 'foo@' otherwise. - : rawSpec === '' && raw.slice(-1) !== '@' ? raw - // just strip off the name, but otherwise return as-is - : rawSpec - } catch (_) { - // whatever we passed in was not acceptable to npa. - // leave it 100% untouched. - return resolved - } -} -module.exports = consistentResolve diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/debug.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/debug.js deleted file mode 100644 index aeda7229d5e8c..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/debug.js +++ /dev/null @@ -1,31 +0,0 @@ -// certain assertions we should do only when testing arborist itself, because -// they are too expensive or aggressive and would break user programs if we -// miss a situation where they are actually valid. -// -// call like this: -// -// /* istanbul ignore next - debug check */ -// debug(() => { -// if (someExpensiveCheck) -// throw new Error('expensive check should have returned false') -// }) - -// run in debug mode if explicitly requested, running arborist tests, -// or working in the arborist project directory. - -const debug = process.env.ARBORIST_DEBUG !== '0' && ( - process.env.ARBORIST_DEBUG === '1' || - /\barborist\b/.test(process.env.NODE_DEBUG || '') || - process.env.npm_package_name === '@npmcli/arborist' && - ['test', 'snap'].includes(process.env.npm_lifecycle_event) || - process.cwd() === require('path').resolve(__dirname, '..') -) - -module.exports = debug ? fn => fn() : () => {} -const red = process.stderr.isTTY ? msg => `\x1B[31m${msg}\x1B[39m` : m => m -module.exports.log = (...msg) => module.exports(() => { - const { format } = require('util') - const prefix = `\n${process.pid} ${red(format(msg.shift()))} ` - msg = (prefix + format(...msg).trim().split('\n').join(prefix)).trim() - console.error(msg) -}) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js deleted file mode 100644 index 2c6647f5db7ba..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js +++ /dev/null @@ -1,18 +0,0 @@ -// given a starting node, what is the *deepest* target where name could go? -// This is not on the Node class for the simple reason that we sometimes -// need to check the deepest *potential* target for a Node that is not yet -// added to the tree where we are checking. 
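// [Editorial note] Concretely (hypothetical tree): when placing a dep named
// 'b' for a node at root/node_modules/a, the loop below walks a's ancestry
// and returns the first ancestor that is the project root, a global top, or
// has no peer edge on 'b'; ancestors whose edge on 'b' is a peer edge are
// skipped, since a peer cannot nest inside its own dependent.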
-const deepestNestingTarget = (start, name) => { - for (const target of start.ancestry()) { - // note: this will skip past the first target if edge is peer - if (target.isProjectRoot || !target.resolveParent || target.globalTop) { - return target - } - const targetEdge = target.edgesOut.get(name) - if (!targetEdge || !targetEdge.peer) { - return target - } - } -} - -module.exports = deepestNestingTarget diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/dep-valid.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/dep-valid.js deleted file mode 100644 index d80437f20c8e4..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/dep-valid.js +++ /dev/null @@ -1,134 +0,0 @@ -// Do not rely on package._fields, so that we don't throw -// false failures if a tree is generated by other clients. -// Only relies on child.resolved, which MAY come from -// client-specific package.json meta _fields, but most of -// the time will be pulled out of a lockfile - -const semver = require('semver') -const npa = require('npm-package-arg') -const {relative} = require('path') -const fromPath = require('./from-path.js') - -const depValid = (child, requested, requestor) => { - // NB: we don't do much to verify 'tag' type requests. - // Just verify that we got a remote resolution. Presumably, it - // came from a registry and was tagged at some point. - - if (typeof requested === 'string') { - try { - // tarball/dir must have resolved to the same tgz on disk, but for - // file: deps that depend on other files/dirs, we must resolve the - // location based on the *requestor* file/dir, not where it ends up. - // '' is equivalent to '*' - requested = npa.resolve(child.name, requested || '*', fromPath(requestor)) - } catch (er) { - // Not invalid because the child doesn't match, but because - // the spec itself is not supported. Nothing would match, - // so the edge is definitely not valid and never can be. - er.dependency = child.name - er.requested = requested - requestor.errors.push(er) - return false - } - } - - // if the lockfile is super old, or hand-modified, - // then it's possible to hit this state. 
- if (!requested) { - const er = new Error('Invalid dependency specifier') - er.dependency = child.name - er.requested = requested - requestor.errors.push(er) - return false - } - - switch (requested.type) { - case 'range': - if (requested.fetchSpec === '*') { - return true - } - // fallthrough - case 'version': - // if it's a version or a range other than '*', semver it - return semver.satisfies(child.version, requested.fetchSpec, true) - - case 'directory': - // directory must be a link to the specified folder - return !!child.isLink && - relative(child.realpath, requested.fetchSpec) === '' - - case 'file': - return tarballValid(child, requested, requestor) - - case 'alias': - // check that the alias target is valid - return depValid(child, requested.subSpec, requestor) - - case 'tag': - // if it's a tag, we just verify that it has a tarball resolution - // presumably, it came from the registry and was tagged at some point - return child.resolved && npa(child.resolved).type === 'remote' - - case 'remote': - // verify that we got it from the desired location - return child.resolved === requested.fetchSpec - - case 'git': { - // if it's a git type, verify that they're the same repo - // - // if it specifies a definite commit, then it must have the - // same commit to be considered the same repo - // - // if it has a #semver: specifier, verify that the - // version in the package is in the semver range - const resRepo = npa(child.resolved || '') - const resHost = resRepo.hosted - const reqHost = requested.hosted - const reqCommit = /^[a-fA-F0-9]{40}$/.test(requested.gitCommittish || '') - const nc = { noCommittish: !reqCommit } - const sameRepo = - resHost ? reqHost && reqHost.ssh(nc) === resHost.ssh(nc) - : resRepo.fetchSpec === requested.fetchSpec - - return !sameRepo ? false - : !requested.gitRange ? true - : semver.satisfies(child.package.version, requested.gitRange, { - loose: true, - }) - } - - default: // unpossible, just being cautious - break - } - - const er = new Error('Unsupported dependency type') - er.dependency = child.name - er.requested = requested - requestor.errors.push(er) - return false -} - -const tarballValid = (child, requested, requestor) => { - if (child.isLink) { - return false - } - - if (child.resolved) { - return child.resolved.replace(/\\/g, '/') === `file:${requested.fetchSpec.replace(/\\/g, '/')}` - } - - // if we have a legacy mutated package.json file. we can't be 100% - // sure that it resolved to the same file, but if it was the same - // request, that's a pretty good indicator of sameness. - if (child.package._requested) { - return child.package._requested.saveSpec === requested.saveSpec - } - - // ok, we're probably dealing with some legacy cruft here, not much - // we can do at this point unfortunately. - return false -} - -module.exports = (child, requested, accept, requestor) => - depValid(child, requested, requestor) || - (typeof accept === 'string' ? depValid(child, accept, requestor) : false) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/diff.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/diff.js deleted file mode 100644 index 0d17bde9583ac..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/diff.js +++ /dev/null @@ -1,306 +0,0 @@ -// a tree representing the difference between two trees -// A Diff node's parent is not necessarily the parent of -// the node location it refers to, but rather the highest level -// node that needs to be either changed or removed. 
-// Thus, the root Diff node is the shallowest change required -// for a given branch of the tree being mutated. - -const {depth} = require('treeverse') -const {existsSync} = require('fs') - -const ssri = require('ssri') - -class Diff { - constructor ({actual, ideal, filterSet, shrinkwrapInflated}) { - this.filterSet = filterSet - this.shrinkwrapInflated = shrinkwrapInflated - this.children = [] - this.actual = actual - this.ideal = ideal - if (this.ideal) { - this.resolved = this.ideal.resolved - this.integrity = this.ideal.integrity - } - this.action = getAction(this) - this.parent = null - // the set of leaf nodes that we rake up to the top level - this.leaves = [] - // the set of nodes that don't change in this branch of the tree - this.unchanged = [] - // the set of nodes that will be removed in this branch of the tree - this.removed = [] - } - - static calculate ({ - actual, - ideal, - filterNodes = [], - shrinkwrapInflated = new Set(), - }) { - // if there's a filterNode, then: - // - get the path from the root to the filterNode. The root or - // root.target should have an edge either to the filterNode or - // a link to the filterNode. If not, abort. Add the path to the - // filterSet. - // - Add set of Nodes depended on by the filterNode to filterSet. - // - Anything outside of that set should be ignored by getChildren - const filterSet = new Set() - const extraneous = new Set() - for (const filterNode of filterNodes) { - const { root } = filterNode - if (root !== ideal && root !== actual) { - throw new Error('invalid filterNode: outside idealTree/actualTree') - } - const rootTarget = root.target - const edge = [...rootTarget.edgesOut.values()].filter(e => { - return e.to && (e.to === filterNode || e.to.target === filterNode) - })[0] - filterSet.add(root) - filterSet.add(rootTarget) - filterSet.add(ideal) - filterSet.add(actual) - if (edge && edge.to) { - filterSet.add(edge.to) - filterSet.add(edge.to.target) - } - filterSet.add(filterNode) - - depth({ - tree: filterNode, - visit: node => filterSet.add(node), - getChildren: node => { - node = node.target - const loc = node.location - const idealNode = ideal.inventory.get(loc) - const ideals = !idealNode ? [] - : [...idealNode.edgesOut.values()].filter(e => e.to).map(e => e.to) - const actualNode = actual.inventory.get(loc) - const actuals = !actualNode ? [] - : [...actualNode.edgesOut.values()].filter(e => e.to).map(e => e.to) - if (actualNode) { - for (const child of actualNode.children.values()) { - if (child.extraneous) { - extraneous.add(child) - } - } - } - - return ideals.concat(actuals) - }, - }) - } - for (const extra of extraneous) { - filterSet.add(extra) - } - - return depth({ - tree: new Diff({actual, ideal, filterSet, shrinkwrapInflated}), - getChildren, - leave, - }) - } -} - -const getAction = ({actual, ideal}) => { - if (!ideal) { - return 'REMOVE' - } - - // bundled meta-deps are copied over to the ideal tree when we visit it, - // so they'll appear to be missing here. There's no need to handle them - // in the diff, though, because they'll be replaced at reify time anyway - // Otherwise, add the missing node. - if (!actual) { - return ideal.inDepBundle ? 
 null : 'ADD' - } - - // always ignore the root node - if (ideal.isRoot && actual.isRoot) { - return null - } - - // if the versions don't match, it's a change no matter what - if (ideal.version !== actual.version) { - return 'CHANGE' - } - - const binsExist = ideal.binPaths.every((path) => existsSync(path)) - - // top nodes, links, and git deps won't have integrity, but do have resolved - // if neither node has integrity, the bins exist, and either (a) neither - // node has a resolved value or (b) they both do and match, then we can - // leave this one alone since we already know the versions match due to - // the condition above. The "neither has resolved" case (a) cannot be - // treated as a 'mark CHANGE and refetch', because shrinkwraps, bundles, - // and link deps may lack this information, and we don't want to try to - // go to the registry for something that isn't there. - const noIntegrity = !ideal.integrity && !actual.integrity - const noResolved = !ideal.resolved && !actual.resolved - const resolvedMatch = ideal.resolved && ideal.resolved === actual.resolved - if (noIntegrity && binsExist && (resolvedMatch || noResolved)) { - return null - } - - // otherwise, verify that it's the same bits - // note that if ideal has integrity, and resolved doesn't, we treat - // that as a 'change', so that it gets re-fetched and locked down. - const integrityMismatch = !ideal.integrity || !actual.integrity || - !ssri.parse(ideal.integrity).match(actual.integrity) - if (integrityMismatch || !binsExist) { - return 'CHANGE' - } - - return null -} - -const allChildren = node => { - if (!node) { - return new Map() - } - - // if the node is root, and also a link, then what we really - // want is to traverse the target's children - if (node.isRoot && node.isLink) { - return allChildren(node.target) - } - - const kids = new Map() - for (const n of [node, ...node.fsChildren]) { - for (const kid of n.children.values()) { - kids.set(kid.path, kid) - } - } - return kids -} - -// functions for the walk options when we traverse the trees -// to create the diff tree -const getChildren = diff => { - const children = [] - const { - actual, - ideal, - unchanged, - removed, - filterSet, - shrinkwrapInflated, - } = diff - - // Note: we DON'T diff fsChildren themselves, because they are either - // included in the package contents, or part of some other project, and - // will never appear in legacy shrinkwraps anyway. but we _do_ include the - // child nodes of fsChildren, because those are nodes that we are typically - // responsible for installing. - const actualKids = allChildren(actual) - const idealKids = allChildren(ideal) - - if (ideal && ideal.hasShrinkwrap && !shrinkwrapInflated.has(ideal)) { - // Guaranteed to get a diff.leaves here, because we will always - // be called with a proper Diff object when ideal has a shrinkwrap - // that has not been inflated. 
- diff.leaves.push(diff) - return children - } - - const paths = new Set([...actualKids.keys(), ...idealKids.keys()]) - for (const path of paths) { - const actual = actualKids.get(path) - const ideal = idealKids.get(path) - diffNode({ - actual, - ideal, - children, - unchanged, - removed, - filterSet, - shrinkwrapInflated, - }) - } - - if (diff.leaves && !children.length) { - diff.leaves.push(diff) - } - - return children -} - -const diffNode = ({ - actual, - ideal, - children, - unchanged, - removed, - filterSet, - shrinkwrapInflated, -}) => { - if (filterSet.size && !(filterSet.has(ideal) || filterSet.has(actual))) { - return - } - - const action = getAction({actual, ideal}) - - // if it's a match, then get its children - // otherwise, this is the child diff node - if (action || (!shrinkwrapInflated.has(ideal) && ideal.hasShrinkwrap)) { - if (action === 'REMOVE') { - removed.push(actual) - } - children.push(new Diff({actual, ideal, filterSet, shrinkwrapInflated})) - } else { - unchanged.push(ideal) - // !*! Weird dirty hack warning !*! - // - // Bundled deps aren't loaded in the ideal tree, because we don't know - // what they are going to be without unpacking. Swap them over now if - // the bundling node isn't changing, so we don't prune them later. - // - // It's a little bit dirty to be doing this here, since it means that - // diffing trees can mutate them, but otherwise we have to walk over - // all unchanging bundlers and correct the diff later, so it's more - // efficient to just fix it while we're passing through already. - // - // Note that moving over a bundled dep will break the links to other - // deps under this parent, which may have been transitively bundled. - // Breaking those links means that we'll no longer see the transitive - // dependency, meaning that it won't appear as bundled any longer! - // In order to not end up dropping transitively bundled deps, we have - // to get the list of nodes to move, then move them all at once, rather - // than moving them one at a time in the first loop. - const bd = ideal.package.bundleDependencies - if (actual && bd && bd.length) { - const bundledChildren = [] - for (const node of actual.children.values()) { - if (node.inBundle) { - bundledChildren.push(node) - } - } - for (const node of bundledChildren) { - node.parent = ideal - } - } - children.push(...getChildren({ - actual, - ideal, - unchanged, - removed, - filterSet, - shrinkwrapInflated, - })) - } -} - -// set the parentage in the leave step so that we aren't attaching -// child nodes only to remove them later. also bubble up the unchanged -// nodes so that we can move them out of staging in the reification step. 
-const leave = (diff, children) => { - children.forEach(kid => { - kid.parent = diff - diff.leaves.push(...kid.leaves) - diff.unchanged.push(...kid.unchanged) - diff.removed.push(...kid.removed) - }) - diff.children = children - return diff -} - -module.exports = Diff diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/edge.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/edge.js deleted file mode 100644 index 777ecc44a7c00..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/edge.js +++ /dev/null @@ -1,245 +0,0 @@ -// An edge in the dependency graph -// Represents a dependency relationship of some kind - -const util = require('util') -const npa = require('npm-package-arg') -const depValid = require('./dep-valid.js') -const _from = Symbol('_from') -const _to = Symbol('_to') -const _type = Symbol('_type') -const _spec = Symbol('_spec') -const _accept = Symbol('_accept') -const _name = Symbol('_name') -const _error = Symbol('_error') -const _loadError = Symbol('_loadError') -const _setFrom = Symbol('_setFrom') -const _explain = Symbol('_explain') -const _explanation = Symbol('_explanation') - -const types = new Set([ - 'prod', - 'dev', - 'optional', - 'peer', - 'peerOptional', - 'workspace', -]) - -class ArboristEdge {} -const printableEdge = (edge) => { - const edgeFrom = edge.from && edge.from.location - const edgeTo = edge.to && edge.to.location - - return Object.assign(new ArboristEdge(), { - name: edge.name, - spec: edge.spec, - type: edge.type, - ...(edgeFrom != null ? { from: edgeFrom } : {}), - ...(edgeTo ? { to: edgeTo } : {}), - ...(edge.error ? { error: edge.error } : {}), - ...(edge.overridden ? { overridden: true } : {}), - }) -} - -class Edge { - constructor (options) { - const { type, name, spec, accept, from } = options - - if (typeof spec !== 'string') { - throw new TypeError('must provide string spec') - } - - if (type === 'workspace' && npa(spec).type !== 'directory') { - throw new TypeError('workspace edges must be a symlink') - } - - this[_spec] = spec - - if (accept !== undefined) { - if (typeof accept !== 'string') { - throw new TypeError('accept field must be a string if provided') - } - this[_accept] = accept || '*' - } - - if (typeof name !== 'string') { - throw new TypeError('must provide dependency name') - } - this[_name] = name - - if (!types.has(type)) { - throw new TypeError( - `invalid type: ${type}\n` + - `(valid types are: ${Edge.types.join(', ')})`) - } - this[_type] = type - if (!from) { - throw new TypeError('must provide "from" node') - } - this[_setFrom](from) - this[_error] = this[_loadError]() - this.overridden = false - } - - satisfiedBy (node) { - return node.name === this.name && - depValid(node, this.spec, this.accept, this.from) - } - - explain (seen = []) { - if (this[_explanation]) { - return this[_explanation] - } - - return this[_explanation] = this[_explain](seen) - } - - // return the edge data, and an explanation of how that edge came to be here - [_explain] (seen) { - const { error, from, bundled } = this - return { - type: this.type, - name: this.name, - spec: this.spec, - ...(bundled ? { bundled } : {}), - ...(error ? { error } : {}), - ...(from ? 
{ from: from.explain(null, seen) } : {}), - } - } - - get bundled () { - if (!this.from) { - return false - } - const { package: { bundleDependencies = [] } } = this.from - return bundleDependencies.includes(this.name) - } - - get workspace () { - return this[_type] === 'workspace' - } - - get prod () { - return this[_type] === 'prod' - } - - get dev () { - return this[_type] === 'dev' - } - - get optional () { - return this[_type] === 'optional' || this[_type] === 'peerOptional' - } - - get peer () { - return this[_type] === 'peer' || this[_type] === 'peerOptional' - } - - get type () { - return this[_type] - } - - get name () { - return this[_name] - } - - get spec () { - return this[_spec] - } - - get accept () { - return this[_accept] - } - - get valid () { - return !this.error - } - - get missing () { - return this.error === 'MISSING' - } - - get invalid () { - return this.error === 'INVALID' - } - - get peerLocal () { - return this.error === 'PEER LOCAL' - } - - get error () { - this[_error] = this[_error] || this[_loadError]() - return this[_error] === 'OK' ? null : this[_error] - } - - [_loadError] () { - return !this[_to] ? (this.optional ? null : 'MISSING') - : this.peer && this.from === this.to.parent && !this.from.isTop ? 'PEER LOCAL' - : !this.satisfiedBy(this.to) ? 'INVALID' - : 'OK' - } - - reload (hard = false) { - this[_explanation] = null - const newTo = this[_from].resolve(this.name) - if (newTo !== this[_to]) { - if (this[_to]) { - this[_to].edgesIn.delete(this) - } - this[_to] = newTo - this[_error] = this[_loadError]() - if (this[_to]) { - this[_to].addEdgeIn(this) - } - } else if (hard) { - this[_error] = this[_loadError]() - } - } - - detach () { - this[_explanation] = null - if (this[_to]) { - this[_to].edgesIn.delete(this) - } - this[_from].edgesOut.delete(this.name) - this[_to] = null - this[_error] = 'DETACHED' - this[_from] = null - } - - [_setFrom] (node) { - this[_explanation] = null - this[_from] = node - if (node.edgesOut.has(this.name)) { - node.edgesOut.get(this.name).detach() - } - node.addEdgeOut(this) - this.reload() - } - - get from () { - return this[_from] - } - - get to () { - return this[_to] - } - - toJSON () { - return printableEdge(this) - } - - [util.inspect.custom] () { - return this.toJSON() - } -} - -Edge.types = [...types] -Edge.errors = [ - 'DETACHED', - 'MISSING', - 'PEER LOCAL', - 'INVALID', -] - -module.exports = Edge diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/from-path.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/from-path.js deleted file mode 100644 index 08be583d1a49d..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/from-path.js +++ /dev/null @@ -1,13 +0,0 @@ -// file dependencies need their dependencies resolved based on the -// location where the tarball was found, not the location where they -// end up getting installed. directory (ie, symlink) deps also need -// to be resolved based on their targets, but that's what realpath is - -const {dirname} = require('path') -const npa = require('npm-package-arg') - -const fromPath = (node, spec) => - spec && spec.type === 'file' ? 
dirname(spec.fetchSpec) - : node.realpath - -module.exports = node => fromPath(node, node.resolved && npa(node.resolved)) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/gather-dep-set.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/gather-dep-set.js deleted file mode 100644 index 2c85a640fddfb..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/gather-dep-set.js +++ /dev/null @@ -1,43 +0,0 @@ -// Given a set of nodes in a tree, and a filter function to test -// incoming edges to the dep set that should be ignored otherwise. -// -// find the set of deps that are only depended upon by nodes in the set, or -// their dependencies, or edges that are ignored. -// -// Used when figuring out what to prune when replacing a node with a newer -// version, or when an optional dep fails to install. - -const gatherDepSet = (set, edgeFilter) => { - const deps = new Set(set) - - // add the full set of dependencies. note that this loop will continue - // as the deps set increases in size. - for (const node of deps) { - for (const edge of node.edgesOut.values()) { - if (edge.to && edgeFilter(edge)) { - deps.add(edge.to) - } - } - } - - // now remove all nodes in the set that have a dependant outside the set - // if any change is made, then re-check - // continue until no changes made, or deps set evaporates fully. - let changed = true - while (changed === true && deps.size > 0) { - changed = false - for (const dep of deps) { - for (const edge of dep.edgesIn) { - if (!deps.has(edge.from) && edgeFilter(edge)) { - changed = true - deps.delete(dep) - break - } - } - } - } - - return deps -} - -module.exports = gatherDepSet diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/get-workspace-nodes.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/get-workspace-nodes.js deleted file mode 100644 index 6db489f69c518..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/get-workspace-nodes.js +++ /dev/null @@ -1,33 +0,0 @@ -// Get the actual nodes corresponding to a root node's child workspaces, -// given a list of workspace names. -const relpath = require('./relpath.js') -const getWorkspaceNodes = (tree, workspaces, log) => { - const wsMap = tree.workspaces - if (!wsMap) { - log.warn('workspaces', 'filter set, but no workspaces present') - return [] - } - - const nodes = [] - for (const name of workspaces) { - const path = wsMap.get(name) - if (!path) { - log.warn('workspaces', `${name} in filter set, but not in workspaces`) - continue - } - - const loc = relpath(tree.realpath, path) - const node = tree.inventory.get(loc) - - if (!node) { - log.warn('workspaces', `${name} in filter set, but no workspace folder present`) - continue - } - - nodes.push(node) - } - - return nodes -} - -module.exports = getWorkspaceNodes diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/index.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/index.js deleted file mode 100644 index c7b07ce28e4df..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/index.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = require('./arborist/index.js') -module.exports.Arborist = module.exports -module.exports.Node = require('./node.js') -module.exports.Link = require('./link.js') -module.exports.Edge = require('./edge.js') -module.exports.Shrinkwrap = require('./shrinkwrap.js') -// XXX export the other classes, too. shrinkwrap, diff, etc. -// they're handy! 
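// [Editorial note] A short consumer sketch of the exports listed above. The
// project path is hypothetical; loadActual() and the inventory property are
// the same API exercised by this package's own bin/ scripts.
const Arborist = require('@npmcli/arborist')
const arb = new Arborist({ path: '/some/project' })
arb.loadActual().then(tree => {
  // tree is the root Node; its inventory indexes every node under it
  console.log(`loaded ${tree.inventory.size} nodes`)
})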
diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/inventory.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/inventory.js deleted file mode 100644 index 34b6f98a8b286..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/inventory.js +++ /dev/null @@ -1,125 +0,0 @@ -// a class to manage an inventory and set of indexes of -// a set of objects based on specific fields. -// primary is the primary index key. -// keys is the set of fields to be able to query. -const _primaryKey = Symbol('_primaryKey') -const _index = Symbol('_index') -const defaultKeys = ['name', 'license', 'funding', 'realpath', 'packageName'] -const { hasOwnProperty } = Object.prototype -const debug = require('./debug.js') - -// handling for the outdated "licenses" array, just pick the first one -// also support the alternative spelling "licence" -const getLicense = pkg => { - if (pkg) { - const lic = pkg.license || pkg.licence - if (lic) { - return lic - } - const lics = pkg.licenses || pkg.licences - if (Array.isArray(lics)) { - return lics[0] - } - } -} - -class Inventory extends Map { - constructor (opt = {}) { - const { primary, keys } = opt - super() - this[_primaryKey] = primary || 'location' - this[_index] = (keys || defaultKeys).reduce((index, i) => { - index.set(i, new Map()) - return index - }, new Map()) - } - - get primaryKey () { - return this[_primaryKey] - } - - get indexes () { - return [...this[_index].keys()] - } - - * filter (fn) { - for (const node of this.values()) { - if (fn(node)) { - yield node - } - } - } - - add (node) { - const root = super.get('') - if (root && node.root !== root && node.root !== root.root) { - debug(() => { - throw Object.assign(new Error('adding external node to inventory'), { - root: root.path, - node: node.path, - nodeRoot: node.root.path, - }) - }) - return - } - - const current = super.get(node[this.primaryKey]) - if (current) { - if (current === node) { - return - } - this.delete(current) - } - super.set(node[this.primaryKey], node) - for (const [key, map] of this[_index].entries()) { - // if the node has the value, but it's false, then use that - const val_ = hasOwnProperty.call(node, key) ? node[key] - : key === 'license' ? getLicense(node.package) - : node[key] ? node[key] - : node.package && node.package[key] - const val = typeof val_ === 'string' ? val_ - : !val_ || typeof val_ !== 'object' ? val_ - : key === 'license' ? val_.type - : key === 'funding' ? val_.url - : /* istanbul ignore next - not used */ val_ - const set = map.get(val) || new Set() - set.add(node) - map.set(val, set) - } - } - - delete (node) { - if (!this.has(node)) { - return - } - - super.delete(node[this.primaryKey]) - for (const [key, map] of this[_index].entries()) { - const val = node[key] !== undefined ? node[key] - : (node[key] || (node.package && node.package[key])) - const set = map.get(val) - if (set) { - set.delete(node) - if (set.size === 0) { - map.delete(node[key]) - } - } - } - } - - query (key, val) { - const map = this[_index].get(key) - return map && (arguments.length === 2 ? 
map.get(val) : map.keys()) || - new Set() - } - - has (node) { - return super.get(node[this.primaryKey]) === node - } - - set (k, v) { - throw new Error('direct set() not supported, use inventory.add(node)') - } -} - -module.exports = Inventory diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/link.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/link.js deleted file mode 100644 index 0289e04151ef5..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/link.js +++ /dev/null @@ -1,145 +0,0 @@ -const debug = require('./debug.js') -const relpath = require('./relpath.js') -const Node = require('./node.js') -const _loadDeps = Symbol.for('Arborist.Node._loadDeps') -const _target = Symbol.for('_target') -const {dirname} = require('path') -// defined by Node class -const _delistFromMeta = Symbol.for('_delistFromMeta') -const _refreshLocation = Symbol.for('_refreshLocation') -class Link extends Node { - constructor (options) { - const { root, realpath, target, parent, fsParent } = options - - if (!realpath && !(target && target.path)) { - throw new TypeError('must provide realpath for Link node') - } - - super({ - ...options, - realpath: realpath || target.path, - root: root || (parent ? parent.root - : fsParent ? fsParent.root - : target ? target.root - : null), - }) - - if (target) { - this.target = target - } else if (this.realpath === this.root.path) { - this.target = this.root - } else { - this.target = new Node({ - ...options, - path: realpath, - parent: null, - fsParent: null, - root: this.root, - }) - } - } - - get version () { - return this.target ? this.target.version : this.package.version || '' - } - - get target () { - return this[_target] - } - - set target (target) { - const current = this[_target] - if (target === current) { - return - } - - if (current && current.then) { - debug(() => { - throw Object.assign(new Error('cannot set target while awaiting'), { - path: this.path, - realpath: this.realpath, - }) - }) - } - - if (target && target.then) { - // can set to a promise during an async tree build operation - // wait until then to assign it. - this[_target] = target - target.then(node => { - this[_target] = null - this.target = node - }) - return - } - - if (!target) { - if (current && current.linksIn) { - current.linksIn.delete(this) - } - if (this.path) { - this[_delistFromMeta]() - this[_target] = null - this.package = {} - this[_refreshLocation]() - } else { - this[_target] = null - } - return - } - - if (!this.path) { - // temp node pending assignment to a tree - // we know it's not in the inventory yet, because no path. - if (target.path) { - this.realpath = target.path - } else { - target.path = target.realpath = this.realpath - } - target.root = this.root - this[_target] = target - target.linksIn.add(this) - this.package = target.package - return - } - - // have to refresh metadata, because either realpath or package - // is very likely changing. - this[_delistFromMeta]() - this.package = target.package - this.realpath = target.path - this[_refreshLocation]() - - target.root = this.root - } - - // a link always resolves to the relative path to its target - get resolved () { - // the path/realpath guard is there for the benefit of setting - // these things in the "wrong" order - return this.path && this.realpath - ? 
`file:${relpath(dirname(this.path), this.realpath)}` - : null - } - - set resolved (r) {} - - // deps are resolved on the target, not the Link - // so this is a no-op - [_loadDeps] () {} - - // links can't have children, only their targets can - // fix it to an empty list so that we can still call - // things that iterate over them, just as a no-op - get children () { - return new Map() - } - - set children (c) {} - - get isLink () { - return true - } -} - -module.exports = Link diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/node.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/node.js deleted file mode 100644 index a872f24805b59..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/node.js +++ /dev/null @@ -1,1393 +0,0 @@ -// inventory, path, realpath, root, and parent -// -// node.root is a reference to the root module in the tree (ie, typically the -// cwd project folder) -// -// node.location is the /-delimited path from the root module to the node. In -// the case of link targets that may be outside of the root's package tree, -// this can include some number of /../ path segments. The location of the -// root module is always '.'. node.location thus never contains drive letters -// or absolute paths, and is portable within a given project, suitable for -// inclusion in lockfiles and metadata. -// -// node.path is the path to the place where this node lives on disk. It is -// system-specific and absolute. -// -// node.realpath is the path to where the module actually resides on disk. In -// the case of non-link nodes, node.realpath is equivalent to node.path. In -// the case of link nodes, it is equivalent to node.target.path. -// -// Setting node.parent will set the node's root to the parent's root, as well -// as updating edgesIn and edgesOut to reload dependency resolutions as needed, -// and setting node.path to parent.path/node_modules/name. -// -// node.inventory is a Map of name to a Set() of all the nodes under a given -// root by that name. It's empty for non-root nodes, and changing the root -// reference will remove it from the old root's inventory and add it to the new -// one. This map is useful for cases like `npm update foo` or `npm ls foo` -// where we need to quickly find all instances of a given package name within a -// tree. 
- -const semver = require('semver') -const nameFromFolder = require('@npmcli/name-from-folder') -const Edge = require('./edge.js') -const Inventory = require('./inventory.js') -const {normalize} = require('read-package-json-fast') -const {getPaths: getBinPaths} = require('bin-links') -const npa = require('npm-package-arg') -const debug = require('./debug.js') -const gatherDepSet = require('./gather-dep-set.js') -const treeCheck = require('./tree-check.js') -const walkUp = require('walk-up-path') - -const {resolve, relative, dirname, basename} = require('path') -const util = require('util') -const _package = Symbol('_package') -const _parent = Symbol('_parent') -const _target = Symbol.for('_target') -const _fsParent = Symbol('_fsParent') -const _loadDepType = Symbol('_loadDepType') -const _loadWorkspaces = Symbol('_loadWorkspaces') -const _reloadNamedEdges = Symbol('_reloadNamedEdges') -// overridden by Link class -const _loadDeps = Symbol.for('Arborist.Node._loadDeps') -const _root = Symbol('_root') -const _refreshLocation = Symbol.for('_refreshLocation') -const _changePath = Symbol.for('_changePath') -// used by Link class as well -const _delistFromMeta = Symbol.for('_delistFromMeta') -const _global = Symbol.for('global') -const _workspaces = Symbol('_workspaces') -const _explain = Symbol('_explain') -const _explanation = Symbol('_explanation') -const _meta = Symbol('_meta') - -const relpath = require('./relpath.js') -const consistentResolve = require('./consistent-resolve.js') - -const printableTree = require('./printable.js') -const CaseInsensitiveMap = require('./case-insensitive-map.js') - -class Node { - constructor (options) { - // NB: path can be null if it's a link target - const { - root, - path, - realpath, - parent, - error, - meta, - fsParent, - resolved, - integrity, - // allow setting name explicitly when we haven't set a path yet - name, - children, - fsChildren, - legacyPeerDeps = false, - linksIn, - hasShrinkwrap, - extraneous = true, - dev = true, - optional = true, - devOptional = true, - peer = true, - global = false, - dummy = false, - sourceReference = null, - } = options - - // true if part of a global install - this[_global] = global - - this[_workspaces] = null - - this.errors = error ? [error] : [] - - // this will usually be null, except when modeling a - // package's dependencies in a virtual root. - this.sourceReference = sourceReference - - const pkg = sourceReference ? sourceReference.package - : normalize(options.pkg || {}) - - this.name = name || - nameFromFolder(path || pkg.name || realpath) || - pkg.name || - null - - // should be equal if not a link - this.path = path ? resolve(path) : null - - if (!this.name && (!this.path || this.path !== dirname(this.path))) { - throw new TypeError('could not detect node name from path or package') - } - - this.realpath = !this.isLink ? this.path : resolve(realpath) - - this.resolved = resolved || null - if (!this.resolved) { - // note: this *only* works for non-file: deps, so we avoid even - // trying here. - // file: deps are tracked in package.json with _resolved set to the - // full path to the tarball or link target. However, if the package - // is checked into git or moved to another location, that's 100% not - // portable at all! The _where and _location don't provide much help, - // since _location is just where the module ended up in the tree, - // and _where can be different than the actual root if it's a - // meta-dep deeper in the dependency graph.
- // - // If we don't have the other oldest indicators of legacy npm, then it's - // probably what we're getting from pacote, which IS trustworthy. - // - // Otherwise, hopefully a shrinkwrap will help us out. - const resolved = consistentResolve(pkg._resolved) - if (resolved && !(/^file:/.test(resolved) && pkg._where)) { - this.resolved = resolved - } - } - this.integrity = integrity || pkg._integrity || null - this.hasShrinkwrap = hasShrinkwrap || pkg._hasShrinkwrap || false - this.legacyPeerDeps = legacyPeerDeps - - this.children = new CaseInsensitiveMap() - this.fsChildren = new Set() - this.inventory = new Inventory({}) - this.tops = new Set() - this.linksIn = new Set(linksIn || []) - - // these three are set by an Arborist taking a catalog - // after the tree is built. We don't get this along the way, - // because they have a tendency to change as new children are - // added, especially when they're deduped. Eg, a dev dep may be - // a 3-levels-deep dependency of a non-dev dep. If we calc the - // flags along the way, then they'll tend to be invalid by the - // time we need to look at them. - if (!dummy) { - this.dev = dev - this.optional = optional - this.devOptional = devOptional - this.peer = peer - this.extraneous = extraneous - this.dummy = false - } else { - // true if this is a placeholder for the purpose of serving as a - // fsParent to link targets that get their deps resolved outside - // the root tree folder. - this.dummy = true - this.dev = false - this.optional = false - this.devOptional = false - this.peer = false - this.extraneous = false - } - - this.edgesIn = new Set() - this.edgesOut = new CaseInsensitiveMap() - - // have to set the internal package ref before assigning the parent, - // because this.package is read when adding to inventory - this[_package] = pkg && typeof pkg === 'object' ? pkg : {} - - // only relevant for the root and top nodes - this.meta = meta - - // Note: this is _slightly_ less efficient for the initial tree - // building than it could be, but in exchange, it's a much simpler - // algorithm. - // If this node has a bunch of children, and those children satisfy - // its various deps, then we're going to _first_ create all the - // edges, and _then_ assign the children into place, re-resolving - // them all in _reloadNamedEdges. - // A more efficient, but more complicated, approach would be to - // flag this node as being a part of a tree build, so it could - // hold off on resolving its deps until its children are in place. - - // call the parent setter - // Must be set prior to calling _loadDeps, because top-ness is relevant - - // will also assign root if present on the parent - this[_parent] = null - this.parent = parent || null - - this[_fsParent] = null - this.fsParent = fsParent || null - - // see parent/root setters below. - // root is set to parent's root if we have a parent, otherwise if it's - // null, then it's set to the node itself. 
- if (!parent && !fsParent) { - this.root = root || null - } - - // mostly a convenience for testing, but also a way to create - // trees in a more declarative way than setting parent on each - if (children) { - for (const c of children) { - new Node({ ...c, parent: this }) - } - } - if (fsChildren) { - for (const c of fsChildren) { - new Node({ ...c, fsParent: this }) - } - } - - // now load all the dep edges - this[_loadDeps]() - } - - get meta () { - return this[_meta] - } - - set meta (meta) { - this[_meta] = meta - if (meta) { - meta.add(this) - } - } - - get global () { - return this.root[_global] - } - - // true for packages installed directly in the global node_modules folder - get globalTop () { - return this.global && this.parent && this.parent.isProjectRoot - } - - get workspaces () { - return this[_workspaces] - } - - set workspaces (workspaces) { - // deletes edges if they already exist - if (this[_workspaces]) { - for (const name of this[_workspaces].keys()) { - if (!workspaces.has(name)) { - this.edgesOut.get(name).detach() - } - } - } - - this[_workspaces] = workspaces - this[_loadWorkspaces]() - this[_loadDeps]() - } - - get binPaths () { - if (!this.parent) { - return [] - } - - return getBinPaths({ - pkg: this[_package], - path: this.path, - global: this.global, - top: this.globalTop, - }) - } - - get hasInstallScript () { - const {hasInstallScript, scripts} = this.package - const {install, preinstall, postinstall} = scripts || {} - return !!(hasInstallScript || install || preinstall || postinstall) - } - - get version () { - return this[_package].version || '' - } - - get packageName () { - return this[_package].name || null - } - - get pkgid () { - const { name = '', version = '' } = this.package - // root package will prefer package name over folder name, - // and never be called an alias. - const { isProjectRoot } = this - const myname = isProjectRoot ? name || this.name - : this.name - const alias = !isProjectRoot && name && myname !== name ? `npm:${name}@` - : '' - return `${myname}@${alias}${version}` - } - - get package () { - return this[_package] - } - - set package (pkg) { - // just detach them all. we could make this _slightly_ more efficient - // by only detaching the ones that changed, but we'd still have to walk - // them all, and the comparison logic gets a bit tricky. we generally - // only do this more than once at the root level, so the resolve() calls - // are only one level deep, and there's not much to be saved, anyway. - // simpler to just toss them all out. - for (const edge of this.edgesOut.values()) { - edge.detach() - } - - this[_explanation] = null - /* istanbul ignore next - should be impossible */ - if (!pkg || typeof pkg !== 'object') { - debug(() => { - throw new Error('setting Node.package to non-object') - }) - pkg = {} - } - this[_package] = pkg - this[_loadWorkspaces]() - this[_loadDeps]() - // do a hard reload, since the dependents may now be valid or invalid - // as a result of the package change. - this.edgesIn.forEach(edge => edge.reload(true)) - } - - // node.explain(nodes seen already, edge we're trying to satisfy) - // if edge is not specified, it lists every edge into the node. - explain (edge = null, seen = []) { - if (this[_explanation]) { - return this[_explanation] - } - - return this[_explanation] = this[_explain](edge, seen) - } - - [_explain] (edge, seen) { - if (this.isProjectRoot && !this.sourceReference) { - return { - location: this.path, - } - } - - const why = { - name: this.isProjectRoot || this.isTop ? 
this.packageName : this.name, - version: this.package.version, - } - if (this.errors.length || !this.packageName || !this.package.version) { - why.errors = this.errors.length ? this.errors : [ - new Error('invalid package: lacks name and/or version'), - ] - why.package = this.package - } - - if (this.root.sourceReference) { - const {name, version} = this.root.package - why.whileInstalling = { - name, - version, - path: this.root.sourceReference.path, - } - } - - if (this.sourceReference) { - return this.sourceReference.explain(edge, seen) - } - - if (seen.includes(this)) { - return why - } - - why.location = this.location - why.isWorkspace = this.isWorkspace - - // make a new list each time. we can revisit, but not loop. - seen = seen.concat(this) - - why.dependents = [] - if (edge) { - why.dependents.push(edge.explain(seen)) - } else { - // ignore invalid edges, since those aren't satisfied by this thing, - // and are not keeping it held in this spot anyway. - const edges = [] - for (const edge of this.edgesIn) { - if (!edge.valid && !edge.from.isProjectRoot) { - continue - } - - edges.push(edge) - } - for (const edge of edges) { - why.dependents.push(edge.explain(seen)) - } - } - - if (this.linksIn.size) { - why.linksIn = [...this.linksIn].map(link => link[_explain](edge, seen)) - } - - return why - } - - isDescendantOf (node) { - for (let p = this; p; p = p.resolveParent) { - if (p === node) { - return true - } - } - return false - } - - getBundler (path = []) { - // made a cycle, definitely not bundled! - if (path.includes(this)) { - return null - } - - path.push(this) - - const parent = this[_parent] - if (!parent) { - return null - } - - const pBundler = parent.getBundler(path) - if (pBundler) { - return pBundler - } - - const ppkg = parent.package - const bd = ppkg && ppkg.bundleDependencies - // explicit bundling - if (Array.isArray(bd) && bd.includes(this.name)) { - return parent - } - - // deps that are deduped up to the bundling level are bundled. - // however, if they get their dep met further up than that, - // then they are not bundled. Ie, installing a package with - // unmet bundled deps will not cause your deps to be bundled. - for (const edge of this.edgesIn) { - const eBundler = edge.from.getBundler(path) - if (!eBundler) { - continue - } - - if (eBundler === parent) { - return eBundler - } - } - - return null - } - - get inBundle () { - return !!this.getBundler() - } - - // when reifying, if a package is technically in a bundleDependencies list, - // but that list is the root project, we still have to install it. This - // getter returns true if it's in a dependency's bundle list, not the root's. - get inDepBundle () { - const bundler = this.getBundler() - return !!bundler && bundler !== this.root - } - - get isWorkspace () { - if (this.isProjectRoot) { - return false - } - const { root } = this - const { type, to } = root.edgesOut.get(this.packageName) || {} - return type === 'workspace' && to && (to.target === this || to === this) - } - - get isRoot () { - return this === this.root - } - - get isProjectRoot () { - // only treat as project root if it's the actual link that is the root, - // or the target of the root link, but NOT if it's another link to the - // same root that happens to be somewhere else. 
- return this === this.root || this === this.root.target - } - - * ancestry () { - for (let anc = this; anc; anc = anc.resolveParent) { - yield anc - } - } - - set root (root) { - // setting to null means this is the new root - // should only ever be one step - while (root && root.root !== root) { - root = root.root - } - - root = root || this - - // delete from current root inventory - this[_delistFromMeta]() - - // can't set the root (yet) if there's no way to determine location - // this allows us to do new Node({...}) and then set the root later. - // just make the assignment so we don't lose it, and move on. - if (!this.path || !root.realpath || !root.path) { - return this[_root] = root - } - - // temporarily become a root node - this[_root] = this - - // break all linksIn, we're going to re-set them if needed later - for (const link of this.linksIn) { - link[_target] = null - this.linksIn.delete(link) - } - - // temporarily break this link as well, we'll re-set if possible later - const { target } = this - if (this.isLink) { - if (target) { - target.linksIn.delete(this) - if (target.root === this) { - target[_delistFromMeta]() - } - } - this[_target] = null - } - - // if this is part of a cascading root set, then don't do this bit - // but if the parent/fsParent is in a different set, we have to break - // that reference before proceeding - if (this.parent && this.parent.root !== root) { - this.parent.children.delete(this.name) - this[_parent] = null - } - if (this.fsParent && this.fsParent.root !== root) { - this.fsParent.fsChildren.delete(this) - this[_fsParent] = null - } - - if (root === this) { - this[_refreshLocation]() - } else { - // setting to some different node. - const loc = relpath(root.realpath, this.path) - const current = root.inventory.get(loc) - - // clobber whatever is there now - if (current) { - current.root = null - } - - this[_root] = root - // set this.location and add to inventory - this[_refreshLocation]() - - // try to find our parent/fsParent in the new root inventory - for (const p of walkUp(dirname(this.path))) { - if (p === this.path) { - continue - } - const ploc = relpath(root.realpath, p) - const parent = root.inventory.get(ploc) - if (parent) { - /* istanbul ignore next - impossible */ - if (parent.isLink) { - debug(() => { - throw Object.assign(new Error('assigning parentage to link'), { - path: this.path, - parent: parent.path, - parentReal: parent.realpath, - }) - }) - continue - } - const childLoc = `${ploc}${ploc ? '/' : ''}node_modules/${this.name}` - const isParent = this.location === childLoc - if (isParent) { - const oldChild = parent.children.get(this.name) - if (oldChild && oldChild !== this) { - oldChild.root = null - } - if (this.parent) { - this.parent.children.delete(this.name) - this.parent[_reloadNamedEdges](this.name) - } - parent.children.set(this.name, this) - this[_parent] = parent - // don't do it for links, because they don't have a target yet - // we'll hit them up a bit later on. - if (!this.isLink) { - parent[_reloadNamedEdges](this.name) - } - } else { - /* istanbul ignore if - should be impossible, since we break - * all fsParent/child relationships when moving? 
*/ - if (this.fsParent) { - this.fsParent.fsChildren.delete(this) - } - parent.fsChildren.add(this) - this[_fsParent] = parent - } - break - } - } - - // if it doesn't have a parent, it's a top node - if (!this.parent) { - root.tops.add(this) - } else { - root.tops.delete(this) - } - - // assign parentage for any nodes that need to have this as a parent - // this can happen when we have a node at nm/a/nm/b added *before* - // the node at nm/a, which might have the root node as a fsParent. - // we can't rely on the public setter here, because it calls into - // this function to set up these references! - const nmloc = `${this.location}${this.location ? '/' : ''}node_modules/` - const isChild = n => n.location === nmloc + n.name - // check dirname so that /foo isn't treated as the fsparent of /foo-bar - const isFsChild = n => { - return dirname(n.path).startsWith(this.path) && - n !== this && - !n.parent && - (!n.fsParent || - n.fsParent === this || - dirname(this.path).startsWith(n.fsParent.path)) - } - const isKid = n => isChild(n) || isFsChild(n) - - // only walk top nodes, since anything else already has a parent. - for (const child of root.tops) { - if (!isKid(child)) { - continue - } - - // set up the internal parentage links - if (this.isLink) { - child.root = null - } else { - // can't possibly have a parent, because it's in tops - if (child.fsParent) { - child.fsParent.fsChildren.delete(child) - } - child[_fsParent] = null - if (isChild(child)) { - this.children.set(child.name, child) - child[_parent] = this - root.tops.delete(child) - } else { - this.fsChildren.add(child) - child[_fsParent] = this - } - } - } - - // look for any nodes with the same realpath. either they're links - // to that realpath, or a thing at that realpath if we're adding a link - // (if we're adding a regular node, we already deleted the old one) - for (const node of root.inventory.query('realpath', this.realpath)) { - if (node === this) { - continue - } - - /* istanbul ignore next - should be impossible */ - debug(() => { - if (node.root !== root) { - throw new Error('inventory contains node from other root') - } - }) - - if (this.isLink) { - const target = node.target - this[_target] = target - this[_package] = target.package - target.linksIn.add(this) - // reload edges here, because now we have a target - if (this.parent) { - this.parent[_reloadNamedEdges](this.name) - } - break - } else { - /* istanbul ignore else - should be impossible */ - if (node.isLink) { - node[_target] = this - node[_package] = this.package - this.linksIn.add(node) - if (node.parent) { - node.parent[_reloadNamedEdges](node.name) - } - } else { - debug(() => { - throw Object.assign(new Error('duplicate node in root setter'), { - path: this.path, - realpath: this.realpath, - root: root.realpath, - }) - }) - } - } - } - } - - // reload all edgesIn where the root doesn't match, so we don't have - // cross-tree dependency graphs - for (const edge of this.edgesIn) { - if (edge.from.root !== root) { - edge.reload() - } - } - // reload all edgesOut where root doesn't match, or is missing, since - // it might not be missing in the new tree - for (const edge of this.edgesOut.values()) { - if (!edge.to || edge.to.root !== root) { - edge.reload() - } - } - - // now make sure our family comes along for the ride!
- const family = new Set([ - ...this.fsChildren, - ...this.children.values(), - ...this.inventory.values(), - ].filter(n => n !== this)) - - for (const child of family) { - if (child.root !== root) { - child[_delistFromMeta]() - child[_parent] = null - this.children.delete(child.name) - child[_fsParent] = null - this.fsChildren.delete(child) - for (const l of child.linksIn) { - l[_target] = null - child.linksIn.delete(l) - } - } - } - for (const child of family) { - if (child.root !== root) { - child.root = root - } - } - - // if we had a target, and didn't find one in the new root, then bring - // it over as well, but only if we're setting the link into a new root, - // as we don't want to lose the target any time we remove a link. - if (this.isLink && target && !this.target && root !== this) { - target.root = root - } - - // tree should always be valid upon root setter completion. - treeCheck(this) - treeCheck(root) - } - - get root () { - return this[_root] || this - } - - [_loadWorkspaces] () { - if (!this[_workspaces]) { - return - } - - for (const [name, path] of this[_workspaces].entries()) { - new Edge({ from: this, name, spec: `file:${path}`, type: 'workspace' }) - } - } - - [_loadDeps] () { - // Caveat! Order is relevant! - // Packages in optionalDependencies are optional. - // Packages in both deps and devDeps are required. - // Note the subtle breaking change from v6: it is no longer possible - // to have a different spec for a devDep than production dep. - - // Linked targets that are disconnected from the tree are tops, - // but don't have a 'path' field, only a 'realpath', because we - // don't know their canonical location. We don't need their devDeps. - const pd = this.package.peerDependencies - if (pd && typeof pd === 'object' && !this.legacyPeerDeps) { - const pm = this.package.peerDependenciesMeta || {} - const peerDependencies = {} - const peerOptional = {} - for (const [name, dep] of Object.entries(pd)) { - if (pm[name] && pm[name].optional) { - peerOptional[name] = dep - } else { - peerDependencies[name] = dep - } - } - this[_loadDepType](peerDependencies, 'peer') - this[_loadDepType](peerOptional, 'peerOptional') - } - - this[_loadDepType](this.package.dependencies, 'prod') - this[_loadDepType](this.package.optionalDependencies, 'optional') - - const { globalTop, isTop, path, sourceReference } = this - const { - globalTop: srcGlobalTop, - isTop: srcTop, - path: srcPath, - } = sourceReference || {} - const thisDev = isTop && !globalTop && path - const srcDev = !sourceReference || srcTop && !srcGlobalTop && srcPath - if (thisDev && srcDev) { - this[_loadDepType](this.package.devDependencies, 'dev') - } - } - - [_loadDepType] (deps, type) { - const ad = this.package.acceptDependencies || {} - // Because of the order in which _loadDeps runs, we always want to - // prioritize a new edge over an existing one - for (const [name, spec] of Object.entries(deps || {})) { - const current = this.edgesOut.get(name) - if (!current || current.type !== 'workspace') { - new Edge({ from: this, name, spec, accept: ad[name], type }) - } - } - } - - get fsParent () { - const parent = this[_fsParent] - /* istanbul ignore next - should be impossible */ - debug(() => { - if (parent === this) { - throw new Error('node set to its own fsParent') - } - }) - return parent - } - - set fsParent (fsParent) { - if (!fsParent) { - if (this[_fsParent]) { - this.root = null - } - return - } - - debug(() => { - if (fsParent === this) { - throw new Error('setting node to its own fsParent') - } - - if 
(fsParent.realpath === this.realpath) { - throw new Error('setting fsParent to same path') - } - - // the initial set MUST be an actual walk-up from the realpath - // subsequent sets will re-root on the new fsParent's path. - if (!this[_fsParent] && this.realpath.indexOf(fsParent.realpath) !== 0) { - throw Object.assign(new Error('setting fsParent improperly'), { - path: this.path, - realpath: this.realpath, - fsParent: { - path: fsParent.path, - realpath: fsParent.realpath, - }, - }) - } - }) - - if (fsParent.isLink) { - fsParent = fsParent.target - } - - // setting a thing to its own fsParent is not normal, but no-op for safety - if (this === fsParent || fsParent.realpath === this.realpath) { - return - } - - // nothing to do - if (this[_fsParent] === fsParent) { - return - } - - const oldFsParent = this[_fsParent] - const newPath = !oldFsParent ? this.path - : resolve(fsParent.path, relative(oldFsParent.path, this.path)) - const nmPath = resolve(fsParent.path, 'node_modules', this.name) - - // this is actually the parent, set that instead - if (newPath === nmPath) { - this.parent = fsParent - return - } - - const pathChange = newPath !== this.path - - // remove from old parent/fsParent - const oldParent = this.parent - const oldName = this.name - if (this.parent) { - this.parent.children.delete(this.name) - this[_parent] = null - } - if (this.fsParent) { - this.fsParent.fsChildren.delete(this) - this[_fsParent] = null - } - - // update this.path/realpath for this and all children/fsChildren - if (pathChange) { - this[_changePath](newPath) - } - - if (oldParent) { - oldParent[_reloadNamedEdges](oldName) - } - - // clobbers anything at that path, resets all appropriate references - this.root = fsParent.root - } - - // is it safe to replace one node with another? check the edges to - // make sure no one will get upset. Note that the node might end up - // having its own unmet dependencies, if the new node has new deps. - // Note that there are cases where Arborist will opt to insert a node - // into the tree even though this function returns false! This is - // necessary when a root dependency is added or updated, or when a - // root dependency brings peer deps along with it. In that case, we - // will go ahead and create the invalid state, and then try to resolve - // it with more tree construction, because it's a user request. - canReplaceWith (node, ignorePeers = []) { - if (node.name !== this.name) { - return false - } - - if (node.packageName !== this.packageName) { - return false - } - - ignorePeers = new Set(ignorePeers) - - // gather up all the deps of this node and that are only depended - // upon by deps of this node. those ones don't count, since - // they'll be replaced if this node is replaced anyway. - const depSet = gatherDepSet([this], e => e.to !== this && e.valid) - - for (const edge of this.edgesIn) { - // when replacing peer sets, we need to be able to replace the entire - // peer group, which means we ignore incoming edges from other peers - // within the replacement set. 
- const ignored = !this.isTop && - edge.from.parent === this.parent && - edge.peer && - ignorePeers.has(edge.from.name) - if (ignored) { - continue - } - - // only care about edges that don't originate from this node - if (!depSet.has(edge.from) && !edge.satisfiedBy(node)) { - return false - } - } - - return true - } - - canReplace (node, ignorePeers) { - return node.canReplaceWith(this, ignorePeers) - } - - // return true if it's safe to remove this node, because anything that - // is depending on it would be fine with the thing that they would resolve - // to if it was removed, or nothing is depending on it in the first place. - canDedupe (preferDedupe = false) { - // not allowed to mess with shrinkwraps or bundles - if (this.inDepBundle || this.inShrinkwrap) { - return false - } - - // it's a top level pkg, or a dep of one - if (!this.resolveParent || !this.resolveParent.resolveParent) { - return false - } - - // no one wants it, remove it - if (this.edgesIn.size === 0) { - return true - } - - const other = this.resolveParent.resolveParent.resolve(this.name) - - // nothing else, need this one - if (!other) { - return false - } - - // if it's the same thing, then always fine to remove - if (other.matches(this)) { - return true - } - - // if the other thing can't replace this, then skip it - if (!other.canReplace(this)) { - return false - } - - // if we prefer dedupe, or if the version is greater/equal, take the other - if (preferDedupe || semver.gte(other.version, this.version)) { - return true - } - - return false - } - - satisfies (requested) { - if (requested instanceof Edge) { - return this.name === requested.name && requested.satisfiedBy(this) - } - - const parsed = npa(requested) - const { name = this.name, rawSpec: spec } = parsed - return this.name === name && this.satisfies(new Edge({ - from: new Node({ path: this.root.realpath }), - type: 'prod', - name, - spec, - })) - } - - matches (node) { - // if the nodes are literally the same object, obviously a match. - if (node === this) { - return true - } - - // if the names don't match, they're different things, even if - // the package contents are identical. - if (node.name !== this.name) { - return false - } - - // if they're links, they match if the targets match - if (this.isLink) { - return node.isLink && this.target.matches(node.target) - } - - // if they're two project root nodes, they're different if the paths differ - if (this.isProjectRoot && node.isProjectRoot) { - return this.path === node.path - } - - // if the integrity matches, then they're the same. - if (this.integrity && node.integrity) { - return this.integrity === node.integrity - } - - // if no integrity, check resolved - if (this.resolved && node.resolved) { - return this.resolved === node.resolved - } - - // if no resolved, check both package name and version - // otherwise, conclude that they are different things - return this.packageName && node.packageName && - this.packageName === node.packageName && - this.version && node.version && - this.version === node.version - } - - // replace this node with the supplied argument - // Useful when mutating an ideal tree, so we can avoid having to call - // the parent/root setters more than necessary. - replaceWith (node) { - node.replace(this) - } - - replace (node) { - this[_delistFromMeta]() - - // if the name matches, but is not identical, we are intending to clobber - // something case-insensitively, so merely setting name and path won't - // have the desired effect. 
just set the path so it'll collide in the - // parent's children map, and leave it at that. - const nameMatch = node.parent && - node.parent.children.get(this.name) === node - if (nameMatch) { - this.path = resolve(node.parent.path, 'node_modules', this.name) - } else { - this.path = node.path - this.name = node.name - } - - if (!this.isLink) { - this.realpath = this.path - } - this[_refreshLocation]() - - // keep children when a node replaces another - if (!this.isLink) { - for (const kid of node.children.values()) { - kid.parent = this - } - } - - if (!node.isRoot) { - this.root = node.root - } - - treeCheck(this) - } - - get inShrinkwrap () { - return this.parent && - (this.parent.hasShrinkwrap || this.parent.inShrinkwrap) - } - - get parent () { - const parent = this[_parent] - /* istanbul ignore next - should be impossible */ - debug(() => { - if (parent === this) { - throw new Error('node set to its own parent') - } - }) - return parent - } - - // This setter keeps everything in order when we move a node from - // one point in a logical tree to another. Edges get reloaded, - // metadata updated, etc. It's also called when we *replace* a node - // with another by the same name (eg, to update or dedupe). - // This does a couple of walks out on the node_modules tree, recursing - // into child nodes. However, as setting the parent is typically done - // with nodes that don't have many children, and (deduped) package - // trees tend to be broad rather than deep, it's not that bad. - // The only walk that starts from the parent rather than this node is - // limited by edge name. - set parent (parent) { - // when setting to null, just remove it from the tree entirely - if (!parent) { - // but only delete it if we actually had a parent in the first place - // otherwise it's just setting to null when it's already null - if (this[_parent]) { - this.root = null - } - return - } - - if (parent.isLink) { - parent = parent.target - } - - // setting a thing to its own parent is not normal, but no-op for safety - if (this === parent) { - return - } - - const oldParent = this[_parent] - - // nothing to do - if (oldParent === parent) { - return - } - - // ok now we know something is actually changing, and parent is not a link - const newPath = resolve(parent.path, 'node_modules', this.name) - const pathChange = newPath !== this.path - - // remove from old parent/fsParent - if (oldParent) { - oldParent.children.delete(this.name) - this[_parent] = null - } - if (this.fsParent) { - this.fsParent.fsChildren.delete(this) - this[_fsParent] = null - } - - // update this.path/realpath for this and all children/fsChildren - if (pathChange) { - this[_changePath](newPath) - } - - // clobbers anything at that path, resets all appropriate references - this.root = parent.root - } - - // Call this before changing path or updating the _root reference. - // Removes the node from its root's metadata and inventory.
- [_delistFromMeta] () { - const root = this.root - if (!root.realpath || !this.path) { - return - } - root.inventory.delete(this) - root.tops.delete(this) - if (root.meta) { - root.meta.delete(this.path) - } - /* istanbul ignore next - should be impossible */ - debug(() => { - if ([...root.inventory.values()].includes(this)) { - throw new Error('failed to delist') - } - }) - } - - // update this.path/realpath and the paths of all children/fsChildren - [_changePath] (newPath) { - // have to de-list before changing paths - this[_delistFromMeta]() - const oldPath = this.path - this.path = newPath - const namePattern = /(?:^|\/|\\)node_modules[\\/](@[^/\\]+[\\/][^\\/]+|[^\\/]+)$/ - const nameChange = newPath.match(namePattern) - if (nameChange && this.name !== nameChange[1]) { - this.name = nameChange[1].replace(/\\/g, '/') - } - - // if we move a link target, update link realpaths - if (!this.isLink) { - this.realpath = newPath - for (const link of this.linksIn) { - link[_delistFromMeta]() - link.realpath = newPath - link[_refreshLocation]() - } - } - // if we move /x to /y, then a module at /x/a/b becomes /y/a/b - for (const child of this.fsChildren) { - child[_changePath](resolve(newPath, relative(oldPath, child.path))) - } - for (const [name, child] of this.children.entries()) { - child[_changePath](resolve(newPath, 'node_modules', name)) - } - - this[_refreshLocation]() - } - - // Called whenever the root/parent is changed. - // NB: need to remove from former root's meta/inventory and then update - // this.path BEFORE calling this method! - [_refreshLocation] () { - const root = this.root - const loc = relpath(root.realpath, this.path) - - this.location = loc - - root.inventory.add(this) - if (root.meta) { - root.meta.add(this) - } - } - - addEdgeOut (edge) { - this.edgesOut.set(edge.name, edge) - } - - addEdgeIn (edge) { - this.edgesIn.add(edge) - - // try to get metadata from the yarn.lock file - if (this.root.meta) { - this.root.meta.addEdge(edge) - } - } - - [_reloadNamedEdges] (name, rootLoc = this.location) { - const edge = this.edgesOut.get(name) - // if we don't have an edge, do nothing, but keep descending - const rootLocResolved = edge && edge.to && - edge.to.location === `${rootLoc}/node_modules/${edge.name}` - const sameResolved = edge && this.resolve(name) === edge.to - const recheck = rootLocResolved || !sameResolved - if (edge && recheck) { - edge.reload(true) - } - for (const c of this.children.values()) { - c[_reloadNamedEdges](name, rootLoc) - } - - for (const c of this.fsChildren) { - c[_reloadNamedEdges](name, rootLoc) - } - } - - get isLink () { - return false - } - - get target () { - return this - } - - set target (n) { - debug(() => { - throw Object.assign(new Error('cannot set target on non-Link Nodes'), { - path: this.path, - }) - }) - } - - get depth () { - return this.isTop ? 0 : this.parent.depth + 1 - } - - get isTop () { - return !this.parent || this.globalTop - } - - get top () { - return this.isTop ? this : this.parent.top - } - - get isFsTop () { - return !this.fsParent - } - - get fsTop () { - return this.isFsTop ? 
this : this.fsParent.fsTop - } - - get resolveParent () { - return this.parent || this.fsParent - } - - resolve (name) { - /* istanbul ignore next - should be impossible, - * but I keep doing this mistake in tests */ - debug(() => { - if (typeof name !== 'string' || !name) { - throw new Error('non-string passed to Node.resolve') - } - }) - const mine = this.children.get(name) - if (mine) { - return mine - } - const resolveParent = this.resolveParent - if (resolveParent) { - return resolveParent.resolve(name) - } - return null - } - - inNodeModules () { - const rp = this.realpath - const name = this.name - const scoped = name.charAt(0) === '@' - const d = dirname(rp) - const nm = scoped ? dirname(d) : d - const dir = dirname(nm) - const base = scoped ? `${basename(d)}/${basename(rp)}` : basename(rp) - return base === name && basename(nm) === 'node_modules' ? dir : false - } - - toJSON () { - return printableTree(this) - } - - [util.inspect.custom] () { - return this.toJSON() - } -} - -module.exports = Node diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/optional-set.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/optional-set.js deleted file mode 100644 index 9f5184ea02442..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/optional-set.js +++ /dev/null @@ -1,38 +0,0 @@ -// when an optional dep fails to install, we need to remove the branch of the -// graph up to the first optionalDependencies, as well as any nodes that are -// only required by other nodes in the set. -// -// This function finds the set of nodes that will need to be removed in that -// case. -// -// Note that this is *only* going to work with trees where calcDepFlags -// has been called, because we rely on the node.optional flag. - -const gatherDepSet = require('./gather-dep-set.js') -const optionalSet = node => { - if (!node.optional) { - return new Set() - } - - // start with the node, then walk up the dependency graph until we - // get to the boundaries that define the optional set. since the - // node is optional, we know that all paths INTO this area of the - // graph are optional, but there may be non-optional dependencies - // WITHIN the area. - const set = new Set([node]) - for (const node of set) { - for (const edge of node.edgesIn) { - if (!edge.optional) { - set.add(edge.from) - } - } - } - - // now that we've hit the boundary, gather the rest of the nodes in - // the optional section. that's the set of dependencies that are only - // depended upon by other nodes within the set, or optional dependencies - // from outside the set. - return gatherDepSet(set, edge => !edge.optional) -} - -module.exports = optionalSet diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/peer-entry-sets.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/peer-entry-sets.js deleted file mode 100644 index 2c4322ee678ca..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/peer-entry-sets.js +++ /dev/null @@ -1,77 +0,0 @@ -// Given a node in a tree, return all of the peer dependency sets that -// it is a part of, with the entry (top or non-peer) edges into the sets -// identified. -// -// With this information, we can determine whether it is appropriate to -// replace the entire peer set with another (and remove the old one), -// push the set deeper into the tree, and so on. 
-// -// Returns a Map of { edge => Set(peerNodes) }, - -const peerEntrySets = node => { - // this is the union of all peer groups that the node is a part of - // later, we identify all of the entry edges, and create a set of - // 1 or more overlapping sets that this node is a part of. - const unionSet = new Set([node]) - for (const node of unionSet) { - for (const edge of node.edgesOut.values()) { - if (edge.valid && edge.peer && edge.to) { - unionSet.add(edge.to) - } - } - for (const edge of node.edgesIn) { - if (edge.valid && edge.peer) { - unionSet.add(edge.from) - } - } - } - const entrySets = new Map() - for (const peer of unionSet) { - for (const edge of peer.edgesIn) { - // if not valid, it doesn't matter anyway. either it's been previously - // overridden, or it's the thing we're interested in replacing. - if (!edge.valid) { - continue - } - // this is the entry point into the peer set - if (!edge.peer || edge.from.isTop) { - // get the subset of peer brought in by this peer entry edge - const sub = new Set([peer]) - for (const peer of sub) { - for (const edge of peer.edgesOut.values()) { - if (edge.valid && edge.peer && edge.to) { - sub.add(edge.to) - } - } - } - // if this subset does not include the node we are focused on, - // then it is not relevant for our purposes. Example: - // - // a -> (b, c, d) - // b -> PEER(d) b -> d -> e -> f <-> g - // c -> PEER(f, h) c -> (f <-> g, h -> g) - // d -> PEER(e) d -> e -> f <-> g - // e -> PEER(f) - // f -> PEER(g) - // g -> PEER(f) - // h -> PEER(g) - // - // The unionSet(e) will include c, but we don't actually care about - // it. We only expanded to the edge of the peer nodes in order to - // find the entry edges that caused the inclusion of peer sets - // including (e), so we want: - // Map{ - // Edge(a->b) => Set(b, d, e, f, g) - // Edge(a->d) => Set(d, e, f, g) - // } - if (sub.has(node)) { - entrySets.set(edge, sub) - } - } - } - } - - return entrySets -} - -module.exports = peerEntrySets diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/place-dep.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/place-dep.js deleted file mode 100644 index 6edd94a38e749..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/place-dep.js +++ /dev/null @@ -1,625 +0,0 @@ -// Given a dep, a node that depends on it, and the edge representing that -// dependency, place the dep somewhere in the node's tree, and all of its -// peer dependencies. -// -// Handles all of the tree updating needed to place the dep, including -// removing replaced nodes, pruning now-extraneous or invalidated nodes, -// and saves a set of what was placed and what needs re-evaluation as -// a result. - -const localeCompare = require('@isaacs/string-locale-compare')('en') -const log = require('proc-log') -const deepestNestingTarget = require('./deepest-nesting-target.js') -const CanPlaceDep = require('./can-place-dep.js') -const { - KEEP, - CONFLICT, -} = CanPlaceDep -const debug = require('./debug.js') - -const Link = require('./link.js') -const gatherDepSet = require('./gather-dep-set.js') -const peerEntrySets = require('./peer-entry-sets.js') - -class PlaceDep { - constructor (options) { - const { - dep, - edge, - parent = null, - } = options - this.name = edge.name - this.dep = dep - this.edge = edge - this.canPlace = null - - this.target = null - this.placed = null - - // inherit all these fields from the parent to ensure consistency. 
- const { - preferDedupe, - force, - explicitRequest, - updateNames, - auditReport, - legacyBundling, - strictPeerDeps, - legacyPeerDeps, - globalStyle, - } = parent || options - Object.assign(this, { - preferDedupe, - force, - explicitRequest, - updateNames, - auditReport, - legacyBundling, - strictPeerDeps, - legacyPeerDeps, - globalStyle, - }) - - this.children = [] - this.parent = parent - this.peerConflict = null - - this.needEvaluation = new Set() - - this.checks = new Map() - - this.place() - } - - place () { - const { - edge, - dep, - preferDedupe, - globalStyle, - legacyBundling, - explicitRequest, - updateNames, - checks, - } = this - - // nothing to do if the edge is fine as it is - if (edge.to && - !edge.error && - !explicitRequest && - !updateNames.includes(edge.name) && - !this.isVulnerable(edge.to)) { - return - } - - // walk up the tree until we hit either a top/root node, or a place - // where the dep is not a peer dep. - const start = this.getStartNode() - - let canPlace = null - let canPlaceSelf = null - for (const target of start.ancestry()) { - // if the current location has a peerDep on it, then we can't place here - // this is pretty rare to hit, since we always prefer deduping peers, - // and the getStartNode will start us out above any peers from the - // thing that depends on it. but we could hit it with something like: - // - // a -> (b@1, c@1) - // +-- c@1 - // +-- b -> PEEROPTIONAL(v) (c@2) - // +-- c@2 -> (v) - // - // So we check if we can place v under c@2, that's fine. - // Then we check under b, and can't, because of the optional peer dep. - // but we CAN place it under a, so the correct thing to do is keep - // walking up the tree. - const targetEdge = target.edgesOut.get(edge.name) - if (!target.isTop && targetEdge && targetEdge.peer) { - continue - } - - const cpd = new CanPlaceDep({ - dep, - edge, - // note: this sets the parent's canPlace as the parent of this - // canPlace, but it does NOT add this canPlace to the parent's - // children. This way, we can know that it's a peer dep, and - // get the top edge easily, while still maintaining the - // tree of checks that factored into the original decision. - parent: this.parent && this.parent.canPlace, - target, - preferDedupe, - explicitRequest: this.explicitRequest, - }) - checks.set(target, cpd) - - // It's possible that a "conflict" is a conflict among the *peers* of - // a given node we're trying to place, but there actually is no current - // node. Eg, - // root -> (a, b) - // a -> PEER(c) - // b -> PEER(d) - // d -> PEER(c@2) - // We place (a), and get a peer of (c) along with it. - // then we try to place (b), and get CONFLICT in the check, because - // of the conflicting peer from (b)->(d)->(c@2). In that case, we - // should treat (b) and (d) as OK, and place them in the last place - // where they did not themselves conflict, and skip c@2 if conflict - // is ok by virtue of being forced or not ours and not strict. - if (cpd.canPlaceSelf !== CONFLICT) { - canPlaceSelf = cpd - } - - // we found a place this can go, along with all its peer friends. - // we break when we get the first conflict - if (cpd.canPlace !== CONFLICT) { - canPlace = cpd - } else { - break - } - - // if it's a load failure, just plop it in the first place attempted, - // since we're going to crash the build or prune it out anyway. - // but, this will frequently NOT be a successful canPlace, because - // it'll have no version or other information. 
- if (dep.errors.length) { - break - } - - // nest packages like npm v1 and v2 - // very disk-inefficient - if (legacyBundling) { - break - } - - // when installing globally, or just in global style, we never place - // deps above the first level. - if (globalStyle) { - const rp = target.resolveParent - if (rp && rp.isProjectRoot) { - break - } - } - } - - Object.assign(this, { - canPlace, - canPlaceSelf, - }) - this.current = edge.to - - // if we can't find a target, that means that the last place checked, - // and all the places before it, had a conflict. - if (!canPlace) { - // if not forced, or it's our dep, or strictPeerDeps is set, then - // this is an ERESOLVE error. - if (!this.conflictOk) { - return this.failPeerConflict() - } - - // ok! we're gonna allow the conflict, but we should still warn - // if we have a current, then we treat CONFLICT as a KEEP. - // otherwise, we just skip it. Only warn on the one that actually - // could not be placed somewhere. - if (!canPlaceSelf) { - this.warnPeerConflict() - return - } - - this.canPlace = canPlaceSelf - } - - // now we have a target, a tree of CanPlaceDep results for the peer group, - // and we are ready to go - this.placeInTree() - } - - placeInTree () { - const { - dep, - canPlace, - edge, - } = this - - /* istanbul ignore next */ - if (!canPlace) { - debug(() => { - throw new Error('canPlace not set, but trying to place in tree') - }) - return - } - - const { target } = canPlace - - log.silly( - 'placeDep', - target.location || 'ROOT', - `${dep.name}@${dep.version}`, - canPlace.description, - `for: ${this.edge.from.package._id || this.edge.from.location}`, - `want: ${edge.spec || '*'}` - ) - - const placementType = canPlace.canPlace === CONFLICT - ? canPlace.canPlaceSelf - : canPlace.canPlace - - // if we're placing in the tree with --force, we can get here even though - // it's a conflict. Treat it as a KEEP, but warn and move on. - if (placementType === KEEP) { - // this was an overridden peer dep - if (edge.peer && !edge.valid) { - this.warnPeerConflict() - } - - // if we get a KEEP in a update scenario, then we MAY have something - // already duplicating this unnecessarily! For example: - // ``` - // root (dep: y@1) - // +-- x (dep: y@1.1) - // | +-- y@1.1.0 (replacing with 1.1.2, got KEEP at the root) - // +-- y@1.1.2 (updated already from 1.0.0) - // ``` - // Now say we do `reify({update:['y']})`, and the latest version is - // 1.1.2, which we now have in the root. We'll try to place y@1.1.2 - // first in x, then in the root, ending with KEEP, because we already - // have it. In that case, we ought to REMOVE the nm/x/nm/y node, because - // it is an unnecessary duplicate. - this.pruneDedupable(target) - return - } - - // we were told to place it here in the target, so either it does not - // already exist in the tree, OR it's shadowed. - // handle otherwise unresolvable dependency nesting loops by - // creating a symbolic link - // a1 -> b1 -> a2 -> b2 -> a1 -> ... - // instead of nesting forever, when the loop occurs, create - // a symbolic link to the earlier instance - for (let p = target; p; p = p.resolveParent) { - if (p.matches(dep) && !p.isTop) { - this.placed = new Link({ parent: target, target: p }) - return - } - } - - // XXX if we are replacing SOME of a peer entry group, we will need to - // remove any that are not being replaced and will now be invalid, and - // re-evaluate them deeper into the tree. 
- - const virtualRoot = dep.parent - this.placed = new dep.constructor({ - name: dep.name, - pkg: dep.package, - resolved: dep.resolved, - integrity: dep.integrity, - legacyPeerDeps: this.legacyPeerDeps, - error: dep.errors[0], - ...(dep.isLink ? { target: dep.target, realpath: dep.realpath } : {}), - }) - - this.oldDep = target.children.get(this.name) - if (this.oldDep) { - this.replaceOldDep() - } else { - this.placed.parent = target - } - - // if it's an overridden peer dep, warn about it - if (edge.peer && !this.placed.satisfies(edge)) { - this.warnPeerConflict() - } - - // If the edge is not an error, then we're updating something, and - // MAY end up putting a better/identical node further up the tree in - // a way that causes an unnecessary duplication. If so, remove the - // now-unnecessary node. - if (edge.valid && edge.to && edge.to !== this.placed) { - this.pruneDedupable(edge.to, false) - } - - // in case we just made some duplicates that can be removed, - // prune anything deeper in the tree that can be replaced by this - for (const node of target.root.inventory.query('name', this.name)) { - if (node.isDescendantOf(target) && !node.isTop) { - this.pruneDedupable(node, false) - // only walk the direct children of the ones we kept - if (node.root === target.root) { - for (const kid of node.children.values()) { - this.pruneDedupable(kid, false) - } - } - } - } - - // also place its unmet or invalid peer deps at this location - // loop through any peer deps from the thing we just placed, and place - // those ones as well. it's safe to do this with the virtual nodes, - // because we're copying rather than moving them out of the virtual root, - // otherwise they'd be gone and the peer set would change throughout - // this loop. - for (const peerEdge of this.placed.edgesOut.values()) { - if (peerEdge.valid || !peerEdge.peer || peerEdge.overridden) { - continue - } - - const peer = virtualRoot.children.get(peerEdge.name) - - // Note: if the virtualRoot *doesn't* have the peer, then that means - // it's an optional peer dep. If it's not being properly met (ie, - // peerEdge.valid is false), then this is likely heading for an - // ERESOLVE error, unless it can walk further up the tree. - if (!peer) { - continue - } - - // overridden peerEdge, just accept what's there already - if (!peer.satisfies(peerEdge)) { - continue - } - - this.children.push(new PlaceDep({ - parent: this, - dep: peer, - node: this.placed, - edge: peerEdge, - })) - } - } - - replaceOldDep () { - const target = this.oldDep.parent - - // XXX handle replacing an entire peer group? - // what about cases where we need to push some other peer groups deeper - // into the tree? all the tree updating should be done here, and track - // all the things that we add and remove, so that we can know what - // to re-evaluate. - - // if we're replacing, we should also remove any nodes for edges that - // are now invalid, and where this (or its deps) is the only dependent, - // and also recurse on that pruning. Otherwise leaving that dep node - // around can result in spurious conflicts pushing nodes deeper into - // the tree than needed in the case of cycles that will be removed - // later anyway. - const oldDeps = [] - for (const [name, edge] of this.oldDep.edgesOut.entries()) { - if (!this.placed.edgesOut.has(name) && edge.to) { - oldDeps.push(...gatherDepSet([edge.to], e => e.to !== edge.to)) - } - } - - // gather all peer edgesIn which are at this level, and will not be - // satisfied by the new dependency. 
Those are the peer sets that need - // to be either warned about (if they cannot go deeper), or removed and - // re-placed (if they can). - const prunePeerSets = [] - for (const edge of this.oldDep.edgesIn) { - if (this.placed.satisfies(edge) || - !edge.peer || - edge.from.parent !== target || - edge.overridden) { - // not a peer dep, not invalid, or not from this level, so it's fine - // to just let it re-evaluate as a problemEdge later, or let it be - // satisfied by the new dep being placed. - continue - } - for (const entryEdge of peerEntrySets(edge.from).keys()) { - // either this one needs to be pruned and re-evaluated, or marked - // as overridden and warned about. If the entryEdge comes in from - // the root, then we have to leave it alone, and in that case, it - // will have already warned or crashed by getting to this point. - const entryNode = entryEdge.to - const deepestTarget = deepestNestingTarget(entryNode) - if (deepestTarget !== target && !entryEdge.from.isRoot) { - prunePeerSets.push(...gatherDepSet([entryNode], e => { - return e.to !== entryNode && !e.overridden - })) - } else { - this.warnPeerConflict(edge, this.dep) - } - } - } - - this.placed.replace(this.oldDep) - this.pruneForReplacement(this.placed, oldDeps) - for (const dep of prunePeerSets) { - for (const edge of dep.edgesIn) { - this.needEvaluation.add(edge.from) - } - dep.root = null - } - } - - pruneForReplacement (node, oldDeps) { - // gather up all the now-invalid/extraneous edgesOut, as long as they are - // only depended upon by the old node/deps - const invalidDeps = new Set([...node.edgesOut.values()] - .filter(e => e.to && !e.valid).map(e => e.to)) - for (const dep of oldDeps) { - const set = gatherDepSet([dep], e => e.to !== dep && e.valid) - for (const dep of set) { - invalidDeps.add(dep) - } - } - - // ignore dependency edges from the node being replaced, but - // otherwise filter the set down to just the set with no - // dependencies from outside the set, except the node in question. - const deps = gatherDepSet(invalidDeps, edge => - edge.from !== node && edge.to !== node && edge.valid) - - // now just delete whatever's left, because it's junk - for (const dep of deps) { - dep.root = null - } - } - - // prune all the nodes in a branch of the tree that can be safely removed - // This is only the most basic duplication detection; it finds if there - // is another satisfying node further up the tree, and if so, dedupes. - // Even in legacyBundling mode, we do this amount of deduplication. - pruneDedupable (node, descend = true) { - if (node.canDedupe(this.preferDedupe)) { - // gather up all deps that have no valid edges in from outside - // the dep set, except for this node we're deduping, so that we - // also prune deps that would be made extraneous. - const deps = gatherDepSet([node], e => e.to !== node && e.valid) - for (const node of deps) { - node.root = null - } - return - } - if (descend) { - // sort these so that they're deterministically ordered - // otherwise, resulting tree shape is dependent on the order - // in which they happened to be resolved. 
- const nodeSort = (a, b) => localeCompare(a.location, b.location) - - const children = [...node.children.values()].sort(nodeSort) - for (const child of children) { - this.pruneDedupable(child) - } - const fsChildren = [...node.fsChildren].sort(nodeSort) - for (const topNode of fsChildren) { - const children = [...topNode.children.values()].sort(nodeSort) - for (const child of children) { - this.pruneDedupable(child) - } - } - } - } - - get conflictOk () { - return this.force || (!this.isMine && !this.strictPeerDeps) - } - - get isMine () { - const { edge } = this.top - const { from: node } = edge - - if (node.isWorkspace || node.isProjectRoot) { - return true - } - - if (!edge.peer) { - return false - } - - // re-entry case. check if any non-peer edges come from the project, - // or any entryEdges on peer groups are from the root. - let hasPeerEdges = false - for (const edge of node.edgesIn) { - if (edge.peer) { - hasPeerEdges = true - continue - } - if (edge.from.isWorkspace || edge.from.isProjectRoot) { - return true - } - } - if (hasPeerEdges) { - for (const edge of peerEntrySets(node).keys()) { - if (edge.from.isWorkspace || edge.from.isProjectRoot) { - return true - } - } - } - - return false - } - - warnPeerConflict (edge, dep) { - edge = edge || this.edge - dep = dep || this.dep - edge.overridden = true - const expl = this.explainPeerConflict(edge, dep) - log.warn('ERESOLVE', 'overriding peer dependency', expl) - } - - failPeerConflict (edge, dep) { - edge = edge || this.top.edge - dep = dep || this.top.dep - const expl = this.explainPeerConflict(edge, dep) - throw Object.assign(new Error('could not resolve'), expl) - } - - explainPeerConflict (edge, dep) { - const { from: node } = edge - const curNode = node.resolve(edge.name) - - const expl = { - code: 'ERESOLVE', - edge: edge.explain(), - dep: dep.explain(edge), - } - - if (this.parent) { - // this is the conflicted peer - expl.current = curNode && curNode.explain(edge) - expl.peerConflict = this.current && this.current.explain(this.edge) - } else { - expl.current = curNode && curNode.explain() - if (this.canPlaceSelf && this.canPlaceSelf.canPlaceSelf !== CONFLICT) { - // failed while checking for a child dep - const cps = this.canPlaceSelf - for (const peer of cps.conflictChildren) { - if (peer.current) { - expl.peerConflict = { - current: peer.current.explain(), - peer: peer.dep.explain(peer.edge), - } - break - } - } - } else { - expl.peerConflict = { - current: this.current && this.current.explain(), - peer: this.dep.explain(this.edge), - } - } - } - - const { - strictPeerDeps, - force, - isMine, - } = this - Object.assign(expl, { - strictPeerDeps, - force, - isMine, - }) - - // XXX decorate more with this.canPlace and this.canPlaceSelf, - // this.checks, this.children, walk over conflicted peers, etc. - return expl - } - - getStartNode () { - // if we are a peer, then we MUST be at least as shallow as the - // peer dependent - const from = this.parent ? this.parent.getStartNode() : this.edge.from - return deepestNestingTarget(from, this.name) - } - - get top () { - return this.parent ? 
this.parent.top : this - } - - isVulnerable (node) { - return this.auditReport && this.auditReport.isVulnerable(node) - } - - get allChildren () { - const set = new Set(this.children) - for (const child of set) { - for (const grandchild of child.children) { - set.add(grandchild) - } - } - return [...set] - } -} - -module.exports = PlaceDep diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/printable.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/printable.js deleted file mode 100644 index 74925d96d2587..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/printable.js +++ /dev/null @@ -1,189 +0,0 @@ -// helper function to output a clearer visualization -// of the current node and its descendents - -const localeCompare = require('@isaacs/string-locale-compare')('en') -const util = require('util') -const relpath = require('./relpath.js') - -class ArboristNode { - constructor (tree, path) { - this.name = tree.name - if (tree.packageName && tree.packageName !== this.name) { - this.packageName = tree.packageName - } - if (tree.version) { - this.version = tree.version - } - this.location = tree.location - this.path = tree.path - if (tree.realpath !== this.path) { - this.realpath = tree.realpath - } - if (tree.resolved !== null) { - this.resolved = tree.resolved - } - if (tree.extraneous) { - this.extraneous = true - } - if (tree.dev) { - this.dev = true - } - if (tree.optional) { - this.optional = true - } - if (tree.devOptional && !tree.dev && !tree.optional) { - this.devOptional = true - } - if (tree.peer) { - this.peer = true - } - if (tree.inBundle) { - this.bundled = true - } - if (tree.inDepBundle) { - this.bundler = tree.getBundler().location - } - if (tree.isProjectRoot) { - this.isProjectRoot = true - } - if (tree.isWorkspace) { - this.isWorkspace = true - } - const bd = tree.package && tree.package.bundleDependencies - if (bd && bd.length) { - this.bundleDependencies = bd - } - if (tree.inShrinkwrap) { - this.inShrinkwrap = true - } else if (tree.hasShrinkwrap) { - this.hasShrinkwrap = true - } - if (tree.error) { - this.error = treeError(tree.error) - } - if (tree.errors && tree.errors.length) { - this.errors = tree.errors.map(treeError) - } - - // edgesOut sorted by name - if (tree.edgesOut.size) { - this.edgesOut = new Map([...tree.edgesOut.entries()] - .sort(([a], [b]) => localeCompare(a, b)) - .map(([name, edge]) => [name, new EdgeOut(edge)])) - } - - // edgesIn sorted by location - if (tree.edgesIn.size) { - this.edgesIn = new Set([...tree.edgesIn] - .sort((a, b) => localeCompare(a.from.location, b.from.location)) - .map(edge => new EdgeIn(edge))) - } - - if (tree.workspaces && tree.workspaces.size) { - this.workspaces = new Map([...tree.workspaces.entries()] - .map(([name, path]) => [name, relpath(tree.root.realpath, path)])) - } - - // fsChildren sorted by path - if (tree.fsChildren.size) { - this.fsChildren = new Set([...tree.fsChildren] - .sort(({path: a}, {path: b}) => localeCompare(a, b)) - .map(tree => printableTree(tree, path))) - } - - // children sorted by name - if (tree.children.size) { - this.children = new Map([...tree.children.entries()] - .sort(([a], [b]) => localeCompare(a, b)) - .map(([name, tree]) => [name, printableTree(tree, path)])) - } - } -} - -class ArboristVirtualNode extends ArboristNode { - constructor (tree, path) { - super(tree, path) - this.sourceReference = printableTree(tree.sourceReference, path) - } -} - -class ArboristLink extends ArboristNode { - constructor (tree, path) { - 
super(tree, path) - this.target = printableTree(tree.target, path) - } -} - -const treeError = ({code, path}) => ({ - code, - ...(path ? { path } : {}), -}) - -// print out edges without dumping the full node all over again -// this base class will toJSON as a plain old object, but the -// util.inspect() output will be a bit cleaner -class Edge { - constructor (edge) { - this.type = edge.type - this.name = edge.name - this.spec = edge.spec || '*' - if (edge.error) { - this.error = edge.error - } - if (edge.overridden) { - this.overridden = edge.overridden - } - } -} - -// don't care about 'from' for edges out -class EdgeOut extends Edge { - constructor (edge) { - super(edge) - this.to = edge.to && edge.to.location - } - - [util.inspect.custom] () { - return `{ ${this.type} ${this.name}@${this.spec}${ - this.to ? ' -> ' + this.to : '' - }${ - this.error ? ' ' + this.error : '' - }${ - this.overridden ? ' overridden' : '' - } }` - } -} - -// don't care about 'to' for edges in -class EdgeIn extends Edge { - constructor (edge) { - super(edge) - this.from = edge.from && edge.from.location - } - - [util.inspect.custom] () { - return `{ ${this.from || '""'} ${this.type} ${this.name}@${this.spec}${ - this.error ? ' ' + this.error : '' - }${ - this.overridden ? ' overridden' : '' - } }` - } -} - -const printableTree = (tree, path = []) => { - if (!tree) { - return tree - } - - const Cls = tree.isLink ? ArboristLink - : tree.sourceReference ? ArboristVirtualNode - : ArboristNode - if (path.includes(tree)) { - const obj = Object.create(Cls.prototype) - return Object.assign(obj, { location: tree.location }) - } - path.push(tree) - return new Cls(tree, path) -} - -module.exports = printableTree diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/realpath.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/realpath.js deleted file mode 100644 index bc4bbbce38485..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/realpath.js +++ /dev/null @@ -1,98 +0,0 @@ -// look up the realpath, but cache stats to minimize overhead -// If the parent folder is in the realpath cache, then we just -// lstat the child, since there's no need to do a full realpath -// This is not a separate module, and is much simpler than Node's -// built-in fs.realpath, because we only care about symbolic links, -// so we can handle many fewer edge cases. - -const fs = require('fs') -const promisify = require('util').promisify -const readlink = promisify(fs.readlink) -const lstat = promisify(fs.lstat) -const { resolve, basename, dirname } = require('path') - -const realpathCached = (path, rpcache, stcache, depth) => { - // just a safety against extremely deep eloops - /* istanbul ignore next */ - if (depth > 2000) { - throw eloop(path) - } - - path = resolve(path) - if (rpcache.has(path)) { - return Promise.resolve(rpcache.get(path)) - } - - const dir = dirname(path) - const base = basename(path) - - if (base && rpcache.has(dir)) { - return realpathChild(dir, base, rpcache, stcache, depth) - } - - // if it's the root, then we know it's real - if (!base) { - rpcache.set(dir, dir) - return Promise.resolve(dir) - } - - // the parent, what is that? - // find out, and then come back. 
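// [editor's note: annotation, not part of the deleted file] i.e. resolve the
// parent directory first (which populates rpcache), then retry this path so it
// hits the realpathChild fast path. A hedged usage sketch of this module,
// assuming the two Maps are shared across lookups:
//
//   const realpath = require('./realpath.js')
//   const rpcache = new Map() // path -> resolved real path
//   const stcache = new Map() // path -> lstat result (or pending promise)
//   realpath('/proj/node_modules/dep', rpcache, stcache, 0)
//     .then(real => console.log(real))
//
// Sharing the caches is the point: once a parent directory is resolved, every
// sibling lookup under it costs a single lstat instead of a full walk.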
- return realpathCached(dir, rpcache, stcache, depth + 1).then(() => - realpathCached(path, rpcache, stcache, depth + 1)) -} - -const lstatCached = (path, stcache) => { - if (stcache.has(path)) { - return Promise.resolve(stcache.get(path)) - } - - const p = lstat(path).then(st => { - stcache.set(path, st) - return st - }) - stcache.set(path, p) - return p -} - -// This is a slight fib, as it doesn't actually occur during a stat syscall. -// But file systems are giant piles of lies, so whatever. -const eloop = path => - Object.assign(new Error( - `ELOOP: too many symbolic links encountered, stat '${path}'`), { - errno: -62, - syscall: 'stat', - code: 'ELOOP', - path: path, - }) - -const realpathChild = (dir, base, rpcache, stcache, depth) => { - const realdir = rpcache.get(dir) - // that unpossible - /* istanbul ignore next */ - if (typeof realdir === 'undefined') { - throw new Error('in realpathChild without parent being in realpath cache') - } - - const realish = resolve(realdir, base) - return lstatCached(realish, stcache).then(st => { - if (!st.isSymbolicLink()) { - rpcache.set(resolve(dir, base), realish) - return realish - } - - return readlink(realish).then(target => { - const resolved = resolve(realdir, target) - if (realish === resolved) { - throw eloop(realish) - } - - return realpathCached(resolved, rpcache, stcache, depth + 1) - }).then(real => { - rpcache.set(resolve(dir, base), real) - return real - }) - }) -} - -module.exports = realpathCached diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/relpath.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/relpath.js deleted file mode 100644 index 33e12ff6f6ca8..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/relpath.js +++ /dev/null @@ -1,3 +0,0 @@ -const {relative} = require('path') -const relpath = (from, to) => relative(from, to).replace(/\\/g, '/') -module.exports = relpath diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/reset-dep-flags.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/reset-dep-flags.js deleted file mode 100644 index e259e901a5625..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/reset-dep-flags.js +++ /dev/null @@ -1,15 +0,0 @@ -// Sometimes we need to actually do a walk from the root, because you can -// have a cycle of deps that all depend on each other, but no path from root. -// Also, since the ideal tree is loaded from the shrinkwrap, it had extraneous -// flags set false that might now be actually extraneous, and dev/optional -// flags that are also now incorrect. This method sets all flags to true, so -// we can find the set that is actually extraneous. 
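// [editor's note: annotation, not part of the deleted file] a sketch of the
// intended call pattern: pessimistically set every flag, then let a root-first
// walk clear the flags on everything actually reachable. `calcDepFlags` is
// assumed to be this package's lib/calc-dep-flags.js:
//
//   const resetDepFlags = require('./reset-dep-flags.js')
//   const calcDepFlags = require('./calc-dep-flags.js')
//   resetDepFlags(tree)  // everything marked extraneous/dev/optional/peer
//   calcDepFlags(tree)   // walk from root, un-marking reachable nodes
//   const extraneous = [...tree.inventory.values()].filter(n => n.extraneous)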
-module.exports = tree => { - for (const node of tree.inventory.values()) { - node.extraneous = true - node.dev = true - node.devOptional = true - node.peer = true - node.optional = true - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/retire-path.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/retire-path.js deleted file mode 100644 index a6ce867d78940..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/retire-path.js +++ /dev/null @@ -1,19 +0,0 @@ -const crypto = require('crypto') -const {dirname, basename, resolve} = require('path') - -// use sha1 because it's faster, and collisions extremely unlikely anyway -const pathSafeHash = s => - crypto.createHash('sha1') - .update(s) - .digest('base64') - .replace(/[^a-zA-Z0-9]+/g, '') - .substr(0, 8) - -const retirePath = from => { - const d = dirname(from) - const b = basename(from) - const hash = pathSafeHash(from) - return resolve(d, `.${b}-${hash}`) -} - -module.exports = retirePath diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/shrinkwrap.js deleted file mode 100644 index ed28130249ea0..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/shrinkwrap.js +++ /dev/null @@ -1,1073 +0,0 @@ -// a module that manages a shrinkwrap file (npm-shrinkwrap.json or -// package-lock.json). - -// Increment whenever the lockfile version updates -// v1 - npm <=6 -// v2 - arborist v1, npm v7, backwards compatible with v1, add 'packages' -// v3 will drop the 'dependencies' field, backwards comp with v2, not v1 -// -// We cannot bump to v3 until npm v6 is out of common usage, and -// definitely not before npm v8. - -const localeCompare = require('@isaacs/string-locale-compare')('en') -const lockfileVersion = 2 - -// for comparing nodes to yarn.lock entries -const mismatch = (a, b) => a && b && a !== b - -// this.tree => the root node for the tree (ie, same path as this) -// - Set the first time we do `this.add(node)` for a path matching this.path -// -// this.add(node) => -// - decorate the node with the metadata we have, if we have it, and it matches -// - add to the map of nodes needing to be committed, so that subsequent -// changes are captured when we commit that location's metadata. -// -// this.commit() => -// - commit all nodes awaiting update to their metadata entries -// - re-generate this.data and this.yarnLock based on this.tree -// -// Note that between this.add() and this.commit(), `this.data` will be out of -// date! Always call `commit()` before relying on it. -// -// After calling this.commit(), any nodes not present in the tree will have -// been removed from the shrinkwrap data as well. 
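// [editor's note: annotation, not part of the deleted file] the lifecycle
// described above, as a minimal usage sketch of the class's public surface
// (all of these methods appear in the code below):
//
//   const Shrinkwrap = require('./shrinkwrap.js')
//   const meta = await Shrinkwrap.load({ path: '/some/project' })
//   meta.add(node)     // queue node for update; meta.data is now stale...
//   meta.commit()      // ...until commit() regenerates data (and yarnLock)
//   await meta.save()  // write package-lock.json / npm-shrinkwrap.json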
- -const procLog = require('proc-log') -const YarnLock = require('./yarn-lock.js') -const {promisify} = require('util') -const rimraf = promisify(require('rimraf')) -const fs = require('fs') -const readFile = promisify(fs.readFile) -const writeFile = promisify(fs.writeFile) -const stat = promisify(fs.stat) -const readdir_ = promisify(fs.readdir) -const readlink = promisify(fs.readlink) - -// XXX remove when drop support for node v10 -const lstat = promisify(fs.lstat) -/* istanbul ignore next - version specific polyfill */ -const readdir = async (path, opt) => { - if (!opt || !opt.withFileTypes) { - return readdir_(path, opt) - } - const ents = await readdir_(path, opt) - if (typeof ents[0] === 'string') { - return Promise.all(ents.map(async ent => { - return Object.assign(await lstat(path + '/' + ent), { name: ent }) - })) - } - return ents -} - -const { resolve, basename } = require('path') -const specFromLock = require('./spec-from-lock.js') -const versionFromTgz = require('./version-from-tgz.js') -const npa = require('npm-package-arg') -const rpj = require('read-package-json-fast') -const parseJSON = require('parse-conflict-json') - -const stringify = require('json-stringify-nice') -const swKeyOrder = [ - 'name', - 'version', - 'lockfileVersion', - 'resolved', - 'integrity', - 'requires', - 'packages', - 'dependencies', -] - -// used to rewrite from yarn registry to npm registry -const yarnRegRe = /^https?:\/\/registry.yarnpkg.com\// -const npmRegRe = /^https?:\/\/registry.npmjs.org\// - -// sometimes resolved: is weird or broken, or something npa can't handle -const specFromResolved = resolved => { - try { - return npa(resolved) - } catch (er) { - return {} - } -} - -const relpath = require('./relpath.js') - -const consistentResolve = require('./consistent-resolve.js') - -const maybeReadFile = file => { - return readFile(file, 'utf8').then(d => d, er => { - /* istanbul ignore else - can't test without breaking module itself */ - if (er.code === 'ENOENT') { - return '' - } else { - throw er - } - }) -} - -const maybeStatFile = file => { - return stat(file).then(st => st.isFile(), er => { - /* istanbul ignore else - can't test without breaking module itself */ - if (er.code === 'ENOENT') { - return null - } else { - throw er - } - }) -} - -const pkgMetaKeys = [ - // note: name is included if necessary, for alias packages - 'version', - 'dependencies', - 'peerDependencies', - 'peerDependenciesMeta', - 'optionalDependencies', - 'bundleDependencies', - 'acceptDependencies', - 'funding', - 'engines', - 'os', - 'cpu', - '_integrity', - 'license', - '_hasShrinkwrap', - 'hasInstallScript', - 'bin', - 'deprecated', - 'workspaces', -] - -const nodeMetaKeys = [ - 'integrity', - 'inBundle', - 'hasShrinkwrap', - 'hasInstallScript', -] - -const metaFieldFromPkg = (pkg, key) => { - const val = pkg[key] - // get the license type, not an object - return (key === 'license' && val && typeof val === 'object' && val.type) - ? val.type - // skip empty objects and falsey values - : (val && !(typeof val === 'object' && !Object.keys(val).length)) ? 
val - : null -} - -// check to make sure that there are no packages newer than the hidden lockfile -const assertNoNewer = async (path, data, lockTime, dir = path, seen = null) => { - const base = basename(dir) - const isNM = dir !== path && base === 'node_modules' - const isScope = dir !== path && !isNM && base.charAt(0) === '@' - const isParent = dir === path || isNM || isScope - - const rel = relpath(path, dir) - if (dir !== path) { - const dirTime = (await stat(dir)).mtime - if (dirTime > lockTime) { - throw 'out of date, updated: ' + rel - } - if (!isScope && !isNM && !data.packages[rel]) { - throw 'missing from lockfile: ' + rel - } - seen.add(rel) - } else { - seen = new Set([rel]) - } - - const parent = isParent ? dir : resolve(dir, 'node_modules') - const children = dir === path - ? Promise.resolve([{name: 'node_modules', isDirectory: () => true }]) - : readdir(parent, { withFileTypes: true }) - - return children.catch(() => []) - .then(ents => Promise.all(ents.map(async ent => { - const child = resolve(parent, ent.name) - if (ent.isDirectory() && !/^\./.test(ent.name)) { - await assertNoNewer(path, data, lockTime, child, seen) - } else if (ent.isSymbolicLink()) { - const target = resolve(parent, await readlink(child)) - const tstat = await stat(target).catch( - /* istanbul ignore next - windows */ () => null) - seen.add(relpath(path, child)) - /* istanbul ignore next - windows cannot do this */ - if (tstat && tstat.isDirectory() && !seen.has(relpath(path, target))) { - await assertNoNewer(path, data, lockTime, target, seen) - } - } - }))) - .then(() => { - if (dir !== path) { - return - } - - // assert that all the entries in the lockfile were seen - for (const loc of new Set(Object.keys(data.packages))) { - if (!seen.has(loc)) { - throw 'missing from node_modules: ' + loc - } - } - }) -} - -const _awaitingUpdate = Symbol('_awaitingUpdate') -const _updateWaitingNode = Symbol('_updateWaitingNode') -const _lockFromLoc = Symbol('_lockFromLoc') -const _pathToLoc = Symbol('_pathToLoc') -const _loadAll = Symbol('_loadAll') -const _metaFromLock = Symbol('_metaFromLock') -const _resolveMetaNode = Symbol('_resolveMetaNode') -const _fixDependencies = Symbol('_fixDependencies') -const _buildLegacyLockfile = Symbol('_buildLegacyLockfile') -const _filenameSet = Symbol('_filenameSet') -const _maybeRead = Symbol('_maybeRead') -const _maybeStat = Symbol('_maybeStat') -class Shrinkwrap { - static load (options) { - return new Shrinkwrap(options).load() - } - - static get keyOrder () { - return swKeyOrder - } - - static reset (options) { - // still need to know if it was loaded from the disk, but don't - // bother reading it if we're gonna just throw it away. - const s = new Shrinkwrap(options) - s.reset() - - return s[_maybeStat]().then(([sw, lock]) => { - s.filename = resolve(s.path, - (s.hiddenLockfile ? 'node_modules/.package-lock' - : s.shrinkwrapOnly || sw ? 
'npm-shrinkwrap' - : 'package-lock') + '.json') - s.loadedFromDisk = !!(sw || lock) - s.type = basename(s.filename) - return s - }) - } - - static metaFromNode (node, path) { - if (node.isLink) { - return { - resolved: relpath(path, node.realpath), - link: true, - } - } - - const meta = {} - pkgMetaKeys.forEach(key => { - const val = metaFieldFromPkg(node.package, key) - if (val) { - meta[key.replace(/^_/, '')] = val - } - }) - // we only include name if different from the node path name, and for the - // root to help prevent churn based on the name of the directory the - // project is in - const pname = node.packageName - if (pname && (node === node.root || pname !== node.name)) { - meta.name = pname - } - - if (node.isTop && node.package.devDependencies) { - meta.devDependencies = node.package.devDependencies - } - - nodeMetaKeys.forEach(key => { - if (node[key]) { - meta[key] = node[key] - } - }) - - const resolved = consistentResolve(node.resolved, node.path, path, true) - if (resolved) { - meta.resolved = resolved - } - - if (node.extraneous) { - meta.extraneous = true - } else { - if (node.peer) { - meta.peer = true - } - if (node.dev) { - meta.dev = true - } - if (node.optional) { - meta.optional = true - } - if (node.devOptional && !node.dev && !node.optional) { - meta.devOptional = true - } - } - return meta - } - - constructor (options = {}) { - const { - path, - indent = 2, - newline = '\n', - shrinkwrapOnly = false, - hiddenLockfile = false, - log = procLog, - } = options - - this.log = log - this[_awaitingUpdate] = new Map() - this.tree = null - this.path = resolve(path || '.') - this.filename = null - this.data = null - this.indent = indent - this.newline = newline - this.loadedFromDisk = false - this.type = null - this.yarnLock = null - this.hiddenLockfile = hiddenLockfile - this.loadingError = null - // only load npm-shrinkwrap.json in dep trees, not package-lock - this.shrinkwrapOnly = shrinkwrapOnly - } - - // check to see if a spec is present in the yarn.lock file, and if so, - // if we should use it, and what it should resolve to. This is only - // done when we did not load a shrinkwrap from disk. Also, decorate - // the options object if provided with the resolved and integrity that - // we expect. - checkYarnLock (spec, options = {}) { - spec = npa(spec) - const { yarnLock, loadedFromDisk } = this - const useYarnLock = yarnLock && !loadedFromDisk - const fromYarn = useYarnLock && yarnLock.entries.get(spec.raw) - if (fromYarn && fromYarn.version) { - // if it's the yarn or npm default registry, use the version as - // our effective spec. if it's any other kind of thing, use that. - const {resolved, version, integrity} = fromYarn - const isYarnReg = spec.registry && yarnRegRe.test(resolved) - const isnpmReg = spec.registry && !isYarnReg && npmRegRe.test(resolved) - const isReg = isnpmReg || isYarnReg - // don't use the simple version if the "registry" url is - // something else entirely! - const tgz = isReg && versionFromTgz(spec.name, resolved) || {} - const yspec = tgz.name === spec.name && tgz.version === version ? version - : isReg && tgz.name && tgz.version ? `npm:${tgz.name}@${tgz.version}` - : resolved - if (yspec) { - options.resolved = resolved.replace(yarnRegRe, 'https://registry.npmjs.org/') - options.integrity = integrity - return npa(`${spec.name}@${yspec}`) - } - } - return spec - } - - // throw away the shrinkwrap data so we can start fresh - // still worth doing a load() first so we know which files to write. 
- reset () { - this.tree = null - this[_awaitingUpdate] = new Map() - this.originalLockfileVersion = lockfileVersion - this.data = { - lockfileVersion, - requires: true, - packages: {}, - dependencies: {}, - } - } - - [_filenameSet] () { - return this.shrinkwrapOnly ? [ - this.path + '/npm-shrinkwrap.json', - ] : this.hiddenLockfile ? [ - null, - this.path + '/node_modules/.package-lock.json', - ] : [ - this.path + '/npm-shrinkwrap.json', - this.path + '/package-lock.json', - this.path + '/yarn.lock', - ] - } - - [_maybeRead] () { - return Promise.all(this[_filenameSet]().map(fn => fn && maybeReadFile(fn))) - } - - [_maybeStat] () { - // throw away yarn, we only care about lock or shrinkwrap when checking - // this way, since we're not actually loading the full lock metadata - return Promise.all(this[_filenameSet]().slice(0, 2) - .map(fn => fn && maybeStatFile(fn))) - } - - load () { - // we don't need to load package-lock.json except for top of tree nodes, - // only npm-shrinkwrap.json. - return this[_maybeRead]().then(([sw, lock, yarn]) => { - const data = sw || lock || '' - - // use shrinkwrap only for deps, otherwise prefer package-lock - // and ignore npm-shrinkwrap if both are present. - // TODO: emit a warning here or something if both are present. - this.filename = resolve(this.path, - (this.hiddenLockfile ? 'node_modules/.package-lock' - : this.shrinkwrapOnly || sw ? 'npm-shrinkwrap' - : 'package-lock') + '.json') - - this.type = basename(this.filename) - this.loadedFromDisk = !!data - - if (yarn) { - this.yarnLock = new YarnLock() - // ignore invalid yarn data. we'll likely clobber it later anyway. - try { - this.yarnLock.parse(yarn) - } catch (_) {} - } - - return data ? parseJSON(data) : {} - }).then(async data => { - // don't use detect-indent, just pick the first line. - // if the file starts with {" then we have an indent of '', ie, none - // which will default to 2 at save time. - const { - [Symbol.for('indent')]: indent, - [Symbol.for('newline')]: newline, - } = data - this.indent = indent !== undefined ? indent : this.indent - this.newline = newline !== undefined ? newline : this.newline - - if (!this.hiddenLockfile || !data.packages) { - return data - } - - // add a few ms just to account for jitter - const lockTime = +(await stat(this.filename)).mtime + 10 - await assertNoNewer(this.path, data, lockTime) - - // all good! hidden lockfile is the newest thing in here. - return data - }).catch(er => { - const rel = relpath(this.path, this.filename) - this.log.verbose('shrinkwrap', `failed to load ${rel}`, er) - this.loadingError = er - this.loadedFromDisk = false - this.ancientLockfile = false - return {} - }).then(lock => { - this.data = { - ...lock, - lockfileVersion, - requires: true, - packages: lock.packages || {}, - ...(this.hiddenLockfile ? {} : {dependencies: lock.dependencies || {}}), - } - this.originalLockfileVersion = lock.lockfileVersion - this.ancientLockfile = this.loadedFromDisk && - !(lock.lockfileVersion >= 2) && !lock.requires - - // load old lockfile deps into the packages listing - if (lock.dependencies && !lock.packages) { - return rpj(this.path + '/package.json').then(pkg => pkg, er => ({})) - .then(pkg => { - this[_loadAll]('', null, this.data) - this[_fixDependencies](pkg) - }) - } - }) - .then(() => this) - } - - [_loadAll] (location, name, lock) { - // migrate a v1 package lock to the new format. 
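// [editor's note: annotation, not part of the deleted file] roughly, the
// recursion below flattens a nested v1 lockfile entry such as
//
//   { dependencies: { foo: { version: '1.0.0',
//       dependencies: { bar: { version: '2.0.0' } } } } }
//
// into v2-style `packages` entries keyed by node_modules location:
//
//   { 'node_modules/foo': { version: '1.0.0' },
//     'node_modules/foo/node_modules/bar': { version: '2.0.0' } }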
- const meta = this[_metaFromLock](location, name, lock) - // dependencies nested under a link are actually under the link target - if (meta.link) { - location = meta.resolved - } - if (lock.dependencies) { - for (const [name, dep] of Object.entries(lock.dependencies)) { - const loc = location + (location ? '/' : '') + 'node_modules/' + name - this[_loadAll](loc, name, dep) - } - } - } - - // v1 lockfiles track the optional/dev flags, but they don't tell us - // which thing had what kind of dep on what other thing, so we need - // to correct that now, or every link will be considered prod - [_fixDependencies] (pkg) { - // we need the root package.json because legacy shrinkwraps just - // have requires:true at the root level, which is even less useful - // than merging all dep types into one object. - const root = this.data.packages[''] - pkgMetaKeys.forEach(key => { - const val = metaFieldFromPkg(pkg, key) - const k = key.replace(/^_/, '') - if (val) { - root[k] = val - } - }) - - for (const [loc, meta] of Object.entries(this.data.packages)) { - if (!meta.requires || !loc) { - continue - } - - // resolve each require to a meta entry - // if this node isn't optional, but the dep is, then it's an optionalDep - // likewise for dev deps. - // This isn't perfect, but it's a pretty good approximation, and at - // least gets us out of having all 'prod' edges, which throws off the - // buildIdealTree process - for (const [name, spec] of Object.entries(meta.requires)) { - const dep = this[_resolveMetaNode](loc, name) - // this overwrites the false value set above - const depType = dep && dep.optional && !meta.optional - ? 'optionalDependencies' - : /* istanbul ignore next - dev deps are only for the root level */ - dep && dep.dev && !meta.dev ? 'devDependencies' - // also land here if the dep just isn't in the tree, which maybe - // should be an error, since it means that the shrinkwrap is - // invalid, but we can't do much better without any info. - : 'dependencies' - meta[depType] = meta[depType] || {} - meta[depType][name] = spec - } - delete meta.requires - } - } - - [_resolveMetaNode] (loc, name) { - for (let path = loc; true; path = path.replace(/(^|\/)[^/]*$/, '')) { - const check = `${path}${path ? '/' : ''}node_modules/${name}` - if (this.data.packages[check]) { - return this.data.packages[check] - } - - if (!path) { - break - } - } - return null - } - - [_lockFromLoc] (lock, path, i = 0) { - if (!lock) { - return null - } - - if (path[i] === '') { - i++ - } - - if (i >= path.length) { - return lock - } - - if (!lock.dependencies) { - return null - } - - return this[_lockFromLoc](lock.dependencies[path[i]], path, i + 1) - } - - // pass in a path relative to the root path, or an absolute path, - // get back a /-normalized location based on root path. 
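// [editor's note: annotation, not part of the deleted file] illustrative
// behavior, assuming this.path === '/proj':
//
//   this[_pathToLoc]('node_modules/foo')        // -> 'node_modules/foo'
//   this[_pathToLoc]('/proj/node_modules/foo')  // -> 'node_modules/foo'
//
// relpath() (earlier in this diff) also converts backslashes to forward
// slashes, so these locations compare equal across platforms.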
- [_pathToLoc] (path) { - return relpath(this.path, resolve(this.path, path)) - } - - delete (nodePath) { - if (!this.data) { - throw new Error('run load() before getting or setting data') - } - const location = this[_pathToLoc](nodePath) - this[_awaitingUpdate].delete(location) - - delete this.data.packages[location] - const path = location.split(/(?:^|\/)node_modules\//) - const name = path.pop() - const pLock = this[_lockFromLoc](this.data, path) - if (pLock && pLock.dependencies) { - delete pLock.dependencies[name] - } - } - - get (nodePath) { - if (!this.data) { - throw new Error('run load() before getting or setting data') - } - - const location = this[_pathToLoc](nodePath) - if (this[_awaitingUpdate].has(location)) { - this[_updateWaitingNode](location) - } - - // first try to get from the newer spot, which we know has - // all the things we need. - if (this.data.packages[location]) { - return this.data.packages[location] - } - - // otherwise, fall back to the legacy metadata, and hope for the best - // get the node in the shrinkwrap corresponding to this spot - const path = location.split(/(?:^|\/)node_modules\//) - const name = path[path.length - 1] - const lock = this[_lockFromLoc](this.data, path) - - return this[_metaFromLock](location, name, lock) - } - - [_metaFromLock] (location, name, lock) { - // This function tries as hard as it can to figure out the metadata - // from a lockfile which may be outdated or incomplete. Since v1 - // lockfiles used the "version" field to contain a variety of - // different possible types of data, this gets a little complicated. - if (!lock) { - return {} - } - - // try to figure out a npm-package-arg spec from the lockfile entry - // This will return null if we could not get anything valid out of it. - const spec = specFromLock(name, lock, this.path) - - if (spec.type === 'directory') { - // the "version" was a file: url to a non-tarball path - // this is a symlink dep. We don't store much metadata - // about symlinks, just the target. - const target = relpath(this.path, spec.fetchSpec) - this.data.packages[location] = { - link: true, - resolved: target, - } - // also save the link target, omitting version since we don't know - // what it is, but we know it isn't a link to itself! - if (!this.data.packages[target]) { - this[_metaFromLock](target, name, { ...lock, version: null }) - } - return this.data.packages[location] - } - - const meta = {} - // when calling loadAll we'll change these into proper dep objects - if (lock.requires && typeof lock.requires === 'object') { - meta.requires = lock.requires - } - - if (lock.optional) { - meta.optional = true - } - if (lock.dev) { - meta.dev = true - } - - // the root will typically have a name from the root project's - // package.json file. - if (location === '') { - meta.name = lock.name - } - - // if we have integrity, save it now. - if (lock.integrity) { - meta.integrity = lock.integrity - } - - if (lock.version && !lock.integrity) { - // this is usually going to be a git url or symlink, but it could - // also be a registry dependency that did not have integrity at - // the time it was saved. - // Symlinks were already handled above, so that leaves git. - // - // For git, always save the full SSH url. we'll actually fetch the - // tgz most of the time, since it's faster, but it won't work for - // private repos, and we can't get back to the ssh from the tgz, - // so we store the ssh instead. 
- // For unknown git hosts, just resolve to the raw spec in lock.version - if (spec.type === 'git') { - meta.resolved = consistentResolve(spec, this.path, this.path) - - // return early because there is nothing else we can do with this - return this.data.packages[location] = meta - } else if (spec.registry) { - // registry dep that didn't save integrity. grab the version, and - // fall through to pick up the resolved and potentially name. - meta.version = lock.version - } - // only other possible case is a tarball without integrity. - // fall through to do what we can with the filename later. - } - - // at this point, we know that the spec is either a registry dep - // (ie, version, because locking, which means a resolved url), - // or a remote dep, or file: url. Remote deps and file urls - // have a fetchSpec equal to the fully resolved thing. - // Registry deps, we take what's in the lockfile. - if (lock.resolved || (spec.type && !spec.registry)) { - if (spec.registry) { - meta.resolved = lock.resolved - } else if (spec.type === 'file') { - meta.resolved = consistentResolve(spec, this.path, this.path, true) - } else if (spec.fetchSpec) { - meta.resolved = spec.fetchSpec - } - } - - // at this point, if still we don't have a version, do our best to - // infer it from the tarball url/file. This works a surprising - // amount of the time, even though it's not guaranteed. - if (!meta.version) { - if (spec.type === 'file' || spec.type === 'remote') { - const fromTgz = versionFromTgz(spec.name, spec.fetchSpec) || - versionFromTgz(spec.name, meta.resolved) - if (fromTgz) { - meta.version = fromTgz.version - if (fromTgz.name !== name) { - meta.name = fromTgz.name - } - } - } else if (spec.type === 'alias') { - meta.name = spec.subSpec.name - meta.version = spec.subSpec.fetchSpec - } else if (spec.type === 'version') { - meta.version = spec.fetchSpec - } - // ok, I did my best! good luck! - } - - if (lock.bundled) { - meta.inBundle = true - } - - // save it for next time - return this.data.packages[location] = meta - } - - add (node) { - if (!this.data) { - throw new Error('run load() before getting or setting data') - } - - // will be actually updated on read - const loc = relpath(this.path, node.path) - if (node.path === this.path) { - this.tree = node - } - - // if we have metadata about this node, and it's a match, then - // try to decorate it. - if (node.resolved === null || node.integrity === null) { - const { - resolved, - integrity, - hasShrinkwrap, - version, - } = this.get(node.path) - - const pathFixed = !resolved ? null - : !/^file:/.test(resolved) ? resolved - // resolve onto the metadata path - : `file:${resolve(this.path, resolved.substr(5))}` - - // if we have one, only set the other if it matches - // otherwise it could be for a completely different thing. 
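// [editor's note: annotation, not part of the deleted file] e.g. if the
// lockfile entry carries integrity 'sha512-AAA...' but the node already has
// 'sha512-BBB...', none of the lockfile metadata is trusted and the else
// branch below falls back to Shrinkwrap.metaFromNode(); a value missing on
// either side never disqualifies the match.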
- const resolvedOk = !resolved || !node.resolved || - node.resolved === pathFixed - const integrityOk = !integrity || !node.integrity || - node.integrity === integrity - const versionOk = !version || !node.version || version === node.version - - const allOk = (resolved || integrity || version) && - resolvedOk && integrityOk && versionOk - - if (allOk) { - node.resolved = node.resolved || pathFixed || null - node.integrity = node.integrity || integrity || null - node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false - } else { - // try to read off the package or node itself - const { - resolved, - integrity, - hasShrinkwrap, - } = Shrinkwrap.metaFromNode(node, this.path) - node.resolved = node.resolved || resolved || null - node.integrity = node.integrity || integrity || null - node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false - } - } - this[_awaitingUpdate].set(loc, node) - } - - addEdge (edge) { - if (!this.yarnLock || !edge.valid) { - return - } - - const { to: node } = edge - - // if it's already set up, nothing to do - if (node.resolved !== null && node.integrity !== null) { - return - } - - // if the yarn lock is empty, nothing to do - if (!this.yarnLock.entries || !this.yarnLock.entries.size) { - return - } - - // we relativize the path here because that's how it shows up in the lock - // XXX how is this different from pathFixed above?? - const pathFixed = !node.resolved ? null - : !/file:/.test(node.resolved) ? node.resolved - : consistentResolve(node.resolved, node.path, this.path, true) - - const spec = npa(`${node.name}@${edge.spec}`) - const entry = this.yarnLock.entries.get(`${node.name}@${edge.spec}`) - - if (!entry || - mismatch(node.version, entry.version) || - mismatch(node.integrity, entry.integrity) || - mismatch(pathFixed, entry.resolved)) { - return - } - - if (entry.resolved && yarnRegRe.test(entry.resolved) && spec.registry) { - entry.resolved = entry.resolved.replace(yarnRegRe, 'https://registry.npmjs.org/') - } - - node.integrity = node.integrity || entry.integrity || null - node.resolved = node.resolved || - consistentResolve(entry.resolved, this.path, node.path) || null - - this[_awaitingUpdate].set(relpath(this.path, node.path), node) - } - - [_updateWaitingNode] (loc) { - const node = this[_awaitingUpdate].get(loc) - this[_awaitingUpdate].delete(loc) - this.data.packages[loc] = Shrinkwrap.metaFromNode(node, this.path) - } - - commit () { - if (this.tree) { - if (this.yarnLock) { - this.yarnLock.fromTree(this.tree) - } - const root = Shrinkwrap.metaFromNode(this.tree.target, this.path) - this.data.packages = {} - if (Object.keys(root).length) { - this.data.packages[''] = root - } - for (const node of this.tree.root.inventory.values()) { - // only way this.tree is not root is if the root is a link to it - if (node === this.tree || node.isRoot || node.location === '') { - continue - } - const loc = relpath(this.path, node.path) - this.data.packages[loc] = Shrinkwrap.metaFromNode(node, this.path) - } - } else if (this[_awaitingUpdate].size > 0) { - for (const loc of this[_awaitingUpdate].keys()) { - this[_updateWaitingNode](loc) - } - } - - // hidden lockfiles don't include legacy metadata or a root entry - if (this.hiddenLockfile) { - delete this.data.packages[''] - delete this.data.dependencies - } else if (this.tree) { - this[_buildLegacyLockfile](this.tree, this.data) - } - - return this.data - } - - [_buildLegacyLockfile] (node, lock, path = []) { - if (node === this.tree) { - // the root node - lock.name = node.packageName || 
node.name - if (node.version) { - lock.version = node.version - } - } - - // npm v6 and before tracked 'from', meaning "the request that led - // to this package being installed". However, that's inherently - // racey and non-deterministic in a world where deps are deduped - // ahead of fetch time. In order to maintain backwards compatibility - // with v6 in the lockfile, we do this trick where we pick a valid - // dep link out of the edgesIn set. Choose the edge with the fewest - // number of `node_modules` sections in the requestor path, and then - // lexically sort afterwards. - const edge = [...node.edgesIn].filter(e => e.valid).sort((a, b) => { - const aloc = a.from.location.split('node_modules') - const bloc = b.from.location.split('node_modules') - /* istanbul ignore next - sort calling order is indeterminate */ - return aloc.length > bloc.length ? 1 - : bloc.length > aloc.length ? -1 - : localeCompare(aloc[aloc.length - 1], bloc[bloc.length - 1]) - })[0] - - const res = consistentResolve(node.resolved, this.path, this.path, true) - const rSpec = specFromResolved(res) - - // if we don't have anything (ie, it's extraneous) then use the resolved - // value as if that was where we got it from, since at least it's true. - // if we don't have either, just an empty object so nothing matches below. - // This will effectively just save the version and resolved, as if it's - // a standard version/range dep, which is a reasonable default. - const spec = !edge ? rSpec - : npa.resolve(node.name, edge.spec, edge.from.realpath) - - if (node.isLink) { - lock.version = `file:${relpath(this.path, node.realpath)}` - } else if (spec && (spec.type === 'file' || spec.type === 'remote')) { - lock.version = spec.saveSpec - } else if (spec && spec.type === 'git' || rSpec.type === 'git') { - lock.version = node.resolved - /* istanbul ignore else - don't think there are any cases where a git - * spec (or indeed, ANY npa spec) doesn't have a .raw member */ - if (spec.raw) { - lock.from = spec.raw - } - } else if (!node.isRoot && - node.package && - node.packageName && - node.packageName !== node.name) { - lock.version = `npm:${node.packageName}@${node.version}` - } else if (node.package && node.version) { - lock.version = node.version - } - - if (node.inDepBundle) { - lock.bundled = true - } - - // when we didn't resolve to git, file, or dir, and didn't request - // git, file, dir, or remote, then the resolved value is necessary. - if (node.resolved && - !node.isLink && - rSpec.type !== 'git' && - rSpec.type !== 'file' && - rSpec.type !== 'directory' && - spec.type !== 'directory' && - spec.type !== 'git' && - spec.type !== 'file' && - spec.type !== 'remote') { - lock.resolved = node.resolved - } - - if (node.integrity) { - lock.integrity = node.integrity - } - - if (node.extraneous) { - lock.extraneous = true - } else if (!node.isLink) { - if (node.peer) { - lock.peer = true - } - - if (node.devOptional && !node.dev && !node.optional) { - lock.devOptional = true - } - - if (node.dev) { - lock.dev = true - } - - if (node.optional) { - lock.optional = true - } - } - - const depender = node.target - if (depender.edgesOut.size > 0) { - if (node !== this.tree) { - const entries = [...depender.edgesOut.entries()] - lock.requires = entries.reduce((set, [k, v]) => { - // omit peer deps from legacy lockfile requires field, because - // npm v6 doesn't handle peer deps, and this triggers some bad - // behavior if the dep can't be found in the dependencies list. 
- const { spec, peer } = v - if (peer) { - return set - } - if (spec.startsWith('file:')) { - // turn absolute file: paths into relative paths from the node - // this especially shows up with workspace edges when the root - // node is also a workspace in the set. - const p = resolve(node.realpath, spec.substr('file:'.length)) - set[k] = `file:${relpath(node.realpath, p)}` - } else { - set[k] = spec - } - return set - }, {}) - } else { - lock.requires = true - } - } - - // now we walk the children, putting them in the 'dependencies' object - const {children} = node.target - if (!children.size) { - delete lock.dependencies - } else { - const kidPath = [...path, node.realpath] - const dependencies = {} - // skip any that are already in the descent path, so cyclical link - // dependencies don't blow up with ELOOP. - let found = false - for (const [name, kid] of children.entries()) { - if (path.includes(kid.realpath)) { - continue - } - dependencies[name] = this[_buildLegacyLockfile](kid, {}, kidPath) - found = true - } - if (found) { - lock.dependencies = dependencies - } - } - return lock - } - - save (options = {}) { - if (!this.data) { - throw new Error('run load() before saving data') - } - - const { format = true } = options - const defaultIndent = this.indent || 2 - const indent = format === true ? defaultIndent - : format || 0 - const eol = format ? this.newline || '\n' : '' - const data = this.commit() - const json = stringify(data, swKeyOrder, indent).replace(/\n/g, eol) - return Promise.all([ - writeFile(this.filename, json).catch(er => { - if (this.hiddenLockfile) { - // well, we did our best. - // if we reify, and there's nothing there, then it might be lacking - // a node_modules folder, but then the lockfile is not important. - // Remove the file, so that in case there WERE deps, but we just - // failed to update the file for some reason, it's not out of sync. - return rimraf(this.filename) - } - throw er - }), - this.yarnLock && this.yarnLock.entries.size && - writeFile(this.path + '/yarn.lock', this.yarnLock.toString()), - ]) - } -} - -module.exports = Shrinkwrap diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/signal-handling.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/signal-handling.js deleted file mode 100644 index 0afbb05dcfc64..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/signal-handling.js +++ /dev/null @@ -1,70 +0,0 @@ -const signals = require('./signals.js') - -// for testing, expose the process being used -module.exports = Object.assign(fn => setup(fn), { process }) - -// do all of this in a setup function so that we can call it -// multiple times for multiple reifies that might be going on. -// Otherwise, Arborist.reify() is a global action, which is a -// new constraint we'd be adding with this behavior. -const setup = fn => { - const { process } = module.exports - - const sigListeners = { loaded: false } - - const unload = () => { - if (!sigListeners.loaded) { - return - } - for (const sig of signals) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - } - process.removeListener('beforeExit', onBeforeExit) - sigListeners.loaded = false - } - - const onBeforeExit = () => { - // this trick ensures that we exit with the same signal we caught - // Ie, if you press ^C and npm gets a SIGINT, we'll do the rollback - // and then exit with a SIGINT signal once we've removed the handler. 
- // The timeout is there because signals are asynchronous, so we need - // the process to NOT exit on its own, which means we have to have - // something keeping the event loop looping. Hence this hack. - unload() - process.kill(process.pid, signalReceived) - setTimeout(() => {}, 500) - } - - let signalReceived = null - const listener = (sig, fn) => () => { - signalReceived = sig - - // if we exit normally, but caught a signal which would have been fatal, - // then re-send it once we're done with whatever cleanup we have to do. - unload() - if (process.listeners(sig).length < 1) { - process.once('beforeExit', onBeforeExit) - } - - fn({ signal: sig }) - } - - // do the actual loading here - for (const sig of signals) { - sigListeners[sig] = listener(sig, fn) - const max = process.getMaxListeners() - try { - // if we call this a bunch of times, avoid triggering the warning - const { length } = process.listeners(sig) - if (length >= max) { - process.setMaxListeners(length + 1) - } - process.on(sig, sigListeners[sig]) - } catch (er) {} - } - sigListeners.loaded = true - - return unload -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/signals.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/signals.js deleted file mode 100644 index 8dcd585c4c065..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/signals.js +++ /dev/null @@ -1,58 +0,0 @@ -// copied from signal-exit - -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. -// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. -// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. -// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. - -const platform = global.__ARBORIST_FAKE_PLATFORM__ || process.platform - -module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM', -] - -if (platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. 
- // see #21 - // 'SIGPROF' - ) -} - -if (platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/spec-from-lock.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/spec-from-lock.js deleted file mode 100644 index 789741976269d..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/spec-from-lock.js +++ /dev/null @@ -1,32 +0,0 @@ -const npa = require('npm-package-arg') - -// extracted from npm v6 lib/install/realize-shrinkwrap-specifier.js -const specFromLock = (name, lock, where) => { - try { - if (lock.version) { - const spec = npa.resolve(name, lock.version, where) - if (lock.integrity || spec.type === 'git') { - return spec - } - } - if (lock.from) { - // legacy metadata includes "from", but not integrity - const spec = npa.resolve(name, lock.from, where) - if (spec.registry && lock.version) { - return npa.resolve(name, lock.version, where) - } else if (!lock.resolved) { - return spec - } - } - if (lock.resolved) { - return npa.resolve(name, lock.resolved, where) - } - } catch (_) { } - try { - return npa.resolve(name, lock.version, where) - } catch (_) { - return {} - } -} - -module.exports = specFromLock diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/tracker.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/tracker.js deleted file mode 100644 index b50f06eaa5518..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/tracker.js +++ /dev/null @@ -1,109 +0,0 @@ -const _progress = Symbol('_progress') -const _onError = Symbol('_onError') -const procLog = require('proc-log') - -module.exports = cls => class Tracker extends cls { - constructor (options = {}) { - super(options) - this.log = options.log || procLog - this[_progress] = new Map() - } - - addTracker (section, subsection = null, key = null) { - // TrackerGroup type object not found - if (!this.log.newGroup) { - return - } - - if (section === null || section === undefined) { - this[_onError](`Tracker can't be null or undefined`) - } - - if (key === null) { - key = subsection - } - - const hasTracker = this[_progress].has(section) - const hasSubtracker = this[_progress].has(`${section}:${key}`) - - if (hasTracker && subsection === null) { - // 0. existing tracker, no subsection - this[_onError](`Tracker "${section}" already exists`) - } else if (!hasTracker && subsection === null) { - // 1. no existing tracker, no subsection - // Create a new tracker from this.log - // starts progress bar - if (this[_progress].size === 0) { - this.log.enableProgress() - } - - this[_progress].set(section, this.log.newGroup(section)) - } else if (!hasTracker && subsection !== null) { - // 2. no parent tracker and subsection - this[_onError](`Parent tracker "${section}" does not exist`) - } else if (!hasTracker || !hasSubtracker) { - // 3. existing parent tracker, no subsection tracker - // Create a new subtracker in this[_progress] from parent tracker - this[_progress].set(`${section}:${key}`, - this[_progress].get(section).newGroup(`${section}:${subsection}`) - ) - } - // 4. 
existing parent tracker, existing subsection tracker - // skip it - } - - finishTracker (section, subsection = null, key = null) { - // TrackerGroup type object not found - if (!this.log.newGroup) { - return - } - - if (section === null || section === undefined) { - this[_onError](`Tracker can't be null or undefined`) - } - - if (key === null) { - key = subsection - } - - const hasTracker = this[_progress].has(section) - const hasSubtracker = this[_progress].has(`${section}:${key}`) - - // 0. parent tracker exists, no subsection - // Finish parent tracker and remove from this[_progress] - if (hasTracker && subsection === null) { - // check if parent tracker does - // not have any remaining children - const keys = this[_progress].keys() - for (const key of keys) { - if (key.match(new RegExp(section + ':'))) { - this.finishTracker(section, key) - } - } - - // remove parent tracker - this[_progress].get(section).finish() - this[_progress].delete(section) - - // remove progress bar if all - // trackers are finished - if (this[_progress].size === 0) { - this.log.disableProgress() - } - } else if (!hasTracker && subsection === null) { - // 1. no existing parent tracker, no subsection - this[_onError](`Tracker "${section}" does not exist`) - } else if (!hasTracker || hasSubtracker) { - // 2. subtracker exists - // Finish subtracker and remove from this[_progress] - this[_progress].get(`${section}:${key}`).finish() - this[_progress].delete(`${section}:${key}`) - } - // 3. existing parent tracker, no subsection - } - - [_onError] (msg) { - this.log.disableProgress() - throw new Error(msg) - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/tree-check.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/tree-check.js deleted file mode 100644 index 44b5484c68240..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/tree-check.js +++ /dev/null @@ -1,155 +0,0 @@ -const debug = require('./debug.js') - -const checkTree = (tree, checkUnreachable = true) => { - const log = [['START TREE CHECK', tree.path]] - - // this can only happen in tests where we have a "tree" object - // that isn't actually a tree. 
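// [editor's note: annotation, not part of the deleted file] in normal
// operation this checker never runs at all: the bottom of this file exports
// an identity function and only swaps in checkTree under debug, i.e.
//
//   module.exports = tree => tree              // production: no-op
//   debug(() => module.exports = checkTree)    // debug builds: full check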
- if (!tree.root || !tree.root.inventory) { - return tree - } - - const { inventory } = tree.root - const seen = new Set() - const check = (node, via = tree, viaType = 'self') => { - log.push([ - 'CHECK', - node && node.location, - via && via.location, - viaType, - 'seen=' + seen.has(node), - 'promise=' + !!(node && node.then), - 'root=' + !!(node && node.isRoot), - ]) - - if (!node || seen.has(node) || node.then) { - return - } - - seen.add(node) - - if (node.isRoot && node !== tree.root) { - throw Object.assign(new Error('double root'), { - node: node.path, - realpath: node.realpath, - tree: tree.path, - root: tree.root.path, - via: via.path, - viaType, - log, - }) - } - - if (node.root !== tree.root) { - throw Object.assign(new Error('node from other root in tree'), { - node: node.path, - realpath: node.realpath, - tree: tree.path, - root: tree.root.path, - via: via.path, - viaType, - otherRoot: node.root && node.root.path, - log, - }) - } - - if (!node.isRoot && node.inventory.size !== 0) { - throw Object.assign(new Error('non-root has non-zero inventory'), { - node: node.path, - tree: tree.path, - root: tree.root.path, - via: via.path, - viaType, - inventory: [...node.inventory.values()].map(node => - [node.path, node.location]), - log, - }) - } - - if (!node.isRoot && !inventory.has(node) && !node.dummy) { - throw Object.assign(new Error('not in inventory'), { - node: node.path, - tree: tree.path, - root: tree.root.path, - via: via.path, - viaType, - log, - }) - } - - const devEdges = [...node.edgesOut.values()].filter(e => e.dev) - if (!node.isTop && devEdges.length) { - throw Object.assign(new Error('dev edges on non-top node'), { - node: node.path, - tree: tree.path, - root: tree.root.path, - via: via.path, - viaType, - devEdges: devEdges.map(e => [e.type, e.name, e.spec, e.error]), - log, - }) - } - - if (node.path === tree.root.path && node !== tree.root) { - throw Object.assign(new Error('node with same path as root'), { - node: node.path, - tree: tree.path, - root: tree.root.path, - via: via.path, - viaType, - log, - }) - } - - if (!node.isLink && node.path !== node.realpath) { - throw Object.assign(new Error('non-link with mismatched path/realpath'), { - node: node.path, - tree: tree.path, - realpath: node.realpath, - root: tree.root.path, - via: via.path, - viaType, - log, - }) - } - - const { parent, fsParent, target } = node - check(parent, node, 'parent') - check(fsParent, node, 'fsParent') - check(target, node, 'target') - log.push(['CHILDREN', node.location, ...node.children.keys()]) - for (const kid of node.children.values()) { - check(kid, node, 'children') - } - for (const kid of node.fsChildren) { - check(kid, node, 'fsChildren') - } - for (const link of node.linksIn) { - check(link, node, 'linksIn') - } - for (const top of node.tops) { - check(top, node, 'tops') - } - log.push(['DONE', node.location]) - } - check(tree) - if (checkUnreachable) { - for (const node of inventory.values()) { - if (!seen.has(node) && node !== tree.root) { - throw Object.assign(new Error('unreachable in inventory'), { - node: node.path, - realpath: node.realpath, - location: node.location, - root: tree.root.path, - tree: tree.path, - log, - }) - } - } - } - return tree -} - -// should only ever run this check in debug mode -module.exports = tree => tree -debug(() => module.exports = checkTree) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/version-from-tgz.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/version-from-tgz.js deleted file mode 100644 
index d5d8f7345c9bb..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/version-from-tgz.js +++ /dev/null @@ -1,48 +0,0 @@ -/* eslint node/no-deprecated-api: "off" */ -const semver = require('semver') -const {basename} = require('path') -const {parse} = require('url') -module.exports = (name, tgz) => { - const base = basename(tgz) - if (!base.endsWith('.tgz')) { - return null - } - - const u = parse(tgz) - if (/^https?:/.test(u.protocol)) { - // registry url? check for most likely pattern. - // either /@foo/bar/-/bar-1.2.3.tgz or - // /foo/-/foo-1.2.3.tgz, and fall through to - // basename checking. Note that registries can - // be mounted below the root url, so /a/b/-/x/y/foo/-/foo-1.2.3.tgz - // is a potential option. - const tfsplit = u.path.substr(1).split('/-/') - if (tfsplit.length > 1) { - const afterTF = tfsplit.pop() - if (afterTF === base) { - const pre = tfsplit.pop() - const preSplit = pre.split(/\/|%2f/i) - const project = preSplit.pop() - const scope = preSplit.pop() - return versionFromBaseScopeName(base, scope, project) - } - } - } - - const split = name.split(/\/|%2f/i) - const project = split.pop() - const scope = split.pop() - return versionFromBaseScopeName(base, scope, project) -} - -const versionFromBaseScopeName = (base, scope, name) => { - if (!base.startsWith(name + '-')) { - return null - } - - const parsed = semver.parse(base.substring(name.length + 1, base.length - 4)) - return parsed ? { - name: scope && scope.charAt(0) === '@' ? `${scope}/${name}` : name, - version: parsed.version, - } : null -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/vuln.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/vuln.js deleted file mode 100644 index a818cf318eb89..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/vuln.js +++ /dev/null @@ -1,214 +0,0 @@ -// An object representing a vulnerability either as the result of an -// advisory or due to the package in question depending exclusively on -// vulnerable versions of a dep. -// -// - name: package name -// - range: Set of vulnerable versions -// - nodes: Set of nodes affected -// - effects: Set of vulns triggered by this one -// - advisories: Set of advisories (including metavulns) causing this vuln. -// All of the entries in via are vulnerability objects returned by -// @npmcli/metavuln-calculator -// - via: dependency vulns which cause this one - -const {satisfies, simplifyRange} = require('semver') -const semverOpt = { loose: true, includePrerelease: true } - -const localeCompare = require('@isaacs/string-locale-compare')('en') -const npa = require('npm-package-arg') -const _range = Symbol('_range') -const _simpleRange = Symbol('_simpleRange') -const _fixAvailable = Symbol('_fixAvailable') - -const severities = new Map([ - ['info', 0], - ['low', 1], - ['moderate', 2], - ['high', 3], - ['critical', 4], - [null, -1], -]) - -for (const [name, val] of severities.entries()) { - severities.set(val, name) -} - -class Vuln { - constructor ({ name, advisory }) { - this.name = name - this.via = new Set() - this.advisories = new Set() - this.severity = null - this.effects = new Set() - this.topNodes = new Set() - this[_range] = null - this[_simpleRange] = null - this.nodes = new Set() - // assume a fix is available unless it hits a top node - // that locks it in place, setting this false or {isSemVerMajor, version}. 
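version-from-tgz.js above recovers a package name and version from a tarball filename of the registry shape .../<name>/-/<name>-<version>.tgz. A simplified sketch of that core step (ignoring the /-/ URL handling and %2f-encoded scopes); assumes the semver package is installed:

const semver = require('semver')

const fromTarballName = (name, base) => {
  const short = name.split('/').pop() // strip any @scope/ prefix
  if (!base.endsWith('.tgz') || !base.startsWith(short + '-')) {
    return null
  }
  const parsed = semver.parse(base.slice(short.length + 1, -4))
  return parsed ? { name, version: parsed.version } : null
}

console.log(fromTarballName('@npmcli/arborist', 'arborist-2.10.0.tgz'))
// -> { name: '@npmcli/arborist', version: '2.10.0' }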
- this[_fixAvailable] = true - this.addAdvisory(advisory) - this.packument = advisory.packument - this.versions = advisory.versions - } - - get fixAvailable () { - return this[_fixAvailable] - } - - set fixAvailable (f) { - this[_fixAvailable] = f - // if there's a fix available for this at the top level, it means that - // it will also fix the vulns that led to it being there. to get there, - // we set the vias to the most "strict" of fix availables. - // - false: no fix is available - // - {name, version, isSemVerMajor} fix requires -f, is semver major - // - {name, version} fix requires -f, not semver major - // - true: fix does not require -f - for (const v of this.via) { - // don't blow up on loops - if (v.fixAvailable === f) { - continue - } - - if (f === false) { - v.fixAvailable = f - } else if (v.fixAvailable === true) { - v.fixAvailable = f - } else if (typeof f === 'object' && ( - typeof v.fixAvailable !== 'object' || !v.fixAvailable.isSemVerMajor)) { - v.fixAvailable = f - } - } - } - - get isDirect () { - for (const node of this.nodes.values()) { - for (const edge of node.edgesIn) { - if (edge.from.isProjectRoot || edge.from.isWorkspace) { - return true - } - } - } - return false - } - - testSpec (spec) { - const specObj = npa(spec) - if (!specObj.registry) { - return true - } - - if (specObj.subSpec) { - spec = specObj.subSpec.rawSpec - } - - for (const v of this.versions) { - if (satisfies(v, spec) && !satisfies(v, this.range, semverOpt)) { - return false - } - } - return true - } - - toJSON () { - return { - name: this.name, - severity: this.severity, - isDirect: this.isDirect, - // just loop over the advisories, since via is only Vuln objects, - // and calculated advisories have all the info we need - via: [...this.advisories].map(v => v.type === 'metavuln' ? 
v.dependency : { - ...v, - versions: undefined, - vulnerableVersions: undefined, - id: undefined, - }).sort((a, b) => - localeCompare(String(a.source || a), String(b.source || b))), - effects: [...this.effects].map(v => v.name).sort(localeCompare), - range: this.simpleRange, - nodes: [...this.nodes].map(n => n.location).sort(localeCompare), - fixAvailable: this[_fixAvailable], - } - } - - addVia (v) { - this.via.add(v) - v.effects.add(this) - // call the setter since we might add vias _after_ setting fixAvailable - this.fixAvailable = this.fixAvailable - } - - deleteVia (v) { - this.via.delete(v) - v.effects.delete(this) - } - - deleteAdvisory (advisory) { - this.advisories.delete(advisory) - // make sure we have the max severity of all the vulns causing this one - this.severity = null - this[_range] = null - this[_simpleRange] = null - // refresh severity - for (const advisory of this.advisories) { - this.addAdvisory(advisory) - } - - // remove any effects that are no longer relevant - const vias = new Set([...this.advisories].map(a => a.dependency)) - for (const via of this.via) { - if (!vias.has(via.name)) { - this.deleteVia(via) - } - } - } - - addAdvisory (advisory) { - this.advisories.add(advisory) - const sev = severities.get(advisory.severity) - this[_range] = null - this[_simpleRange] = null - if (sev > severities.get(this.severity)) { - this.severity = advisory.severity - } - } - - get range () { - return this[_range] || - (this[_range] = [...this.advisories].map(v => v.range).join(' || ')) - } - - get simpleRange () { - if (this[_simpleRange] && this[_simpleRange] === this[_range]) { - return this[_simpleRange] - } - - const versions = [...this.advisories][0].versions - const range = this.range - const simple = simplifyRange(versions, range, semverOpt) - return this[_simpleRange] = this[_range] = simple - } - - isVulnerable (node) { - if (this.nodes.has(node)) { - return true - } - - const { version } = node.package - if (!version) { - return false - } - - for (const v of this.advisories) { - if (v.testVersion(version)) { - this.nodes.add(node) - return true - } - } - - return false - } -} - -module.exports = Vuln diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/yarn-lock.js b/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/yarn-lock.js deleted file mode 100644 index 1eed0664083ac..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/lib/yarn-lock.js +++ /dev/null @@ -1,349 +0,0 @@ -// parse a yarn lock file -// basic format -// -// <spec>[, <spec> ...]: -// <key> <value> -// <subkey>: -// <key> <value> -// -// Assume that any key or value might be quoted, though that's only done -// in practice if certain chars are in the string. Quoting unnecessarily -// does not cause problems for yarn, so that's what we do when we write -// it back. -// -// The data format would support nested objects, but at this time, it -// appears that yarn does not use that for anything, so in the interest -// of a simpler parser algorithm, this implementation only supports a -// single layer of sub objects. -// -// This doesn't deterministically define the shape of the tree, and so -// cannot be used (on its own) for Arborist.loadVirtual. -// But it can give us resolved, integrity, and version, which is useful -// for Arborist.loadActual and for building the ideal tree. 
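Concretely, an entry of the shape this header comment describes looks like the following (values illustrative; note the parse() code below also strips legacy "#<shasum>" suffixes from resolved):

# yarn lockfile v1

"ansi-styles@^4.0.0", "ansi-styles@^4.1.0":
  version "4.3.0"
  resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz"
  integrity sha512-...
  dependencies:
    color-convert "^2.0.1"

Multiple request specs that resolve to one package collapse onto a single entry, which is exactly the dedup problem addEntryFromNode() wrestles with further below.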
-// -// At the very least, when a yarn.lock file is present, we update it -// along the way, and save it back in Shrinkwrap.save() -// -// NIHing this rather than using @yarnpkg/lockfile because that module -// is an impenetrable 10kloc of webpack flow output, which is overkill -// for something relatively simple and tailored to Arborist's use case. - -const localeCompare = require('@isaacs/string-locale-compare')('en') -const consistentResolve = require('./consistent-resolve.js') -const {dirname} = require('path') -const {breadth} = require('treeverse') - -// sort a key/value object into a string of JSON stringified keys and vals -const sortKV = obj => Object.keys(obj) - .sort(localeCompare) - .map(k => ` ${JSON.stringify(k)} ${JSON.stringify(obj[k])}`) - .join('\n') - -// for checking against previous entries -const match = (p, n) => - p.integrity && n.integrity ? p.integrity === n.integrity - : p.resolved && n.resolved ? p.resolved === n.resolved - : p.version && n.version ? p.version === n.version - : true - -const prefix = -`# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 - - -` - -const nullSymbol = Symbol('null') -class YarnLock { - static parse (data) { - return new YarnLock().parse(data) - } - - static fromTree (tree) { - return new YarnLock().fromTree(tree) - } - - constructor () { - this.entries = null - this.endCurrent() - } - - endCurrent () { - this.current = null - this.subkey = nullSymbol - } - - parse (data) { - const ENTRY_START = /^[^\s].*:$/ - const SUBKEY = /^ {2}[^\s]+:$/ - const SUBVAL = /^ {4}[^\s]+ .+$/ - const METADATA = /^ {2}[^\s]+ .+$/ - this.entries = new Map() - this.current = null - const linere = /([^\r\n]*)\r?\n/gm - let match - let lineNum = 0 - if (!/\n$/.test(data)) { - data += '\n' - } - while (match = linere.exec(data)) { - const line = match[1] - lineNum++ - if (line.charAt(0) === '#') { - continue - } - if (line === '') { - this.endCurrent() - continue - } - if (ENTRY_START.test(line)) { - this.endCurrent() - const specs = this.splitQuoted(line.slice(0, -1), /, */) - this.current = new YarnLockEntry(specs) - specs.forEach(spec => this.entries.set(spec, this.current)) - continue - } - if (SUBKEY.test(line)) { - this.subkey = line.slice(2, -1) - this.current[this.subkey] = {} - continue - } - if (SUBVAL.test(line) && this.current && this.current[this.subkey]) { - const subval = this.splitQuoted(line.trimLeft(), ' ') - if (subval.length === 2) { - this.current[this.subkey][subval[0]] = subval[1] - continue - } - } - // any other metadata - if (METADATA.test(line) && this.current) { - const metadata = this.splitQuoted(line.trimLeft(), ' ') - if (metadata.length === 2) { - // strip off the legacy shasum hashes - if (metadata[0] === 'resolved') { - metadata[1] = metadata[1].replace(/#.*/, '') - } - this.current[metadata[0]] = metadata[1] - continue - } - } - - throw Object.assign(new Error('invalid or corrupted yarn.lock file'), { - position: match.index, - content: match[0], - line: lineNum, - }) - } - this.endCurrent() - return this - } - - splitQuoted (str, delim) { - // a,"b,c",d"e,f => ['a','"b','c"','d"e','f'] => ['a','b,c','d"e','f'] - const split = str.split(delim) - const out = [] - let o = 0 - for (let i = 0; i < split.length; i++) { - const chunk = split[i] - if (/^".*"$/.test(chunk)) { - out[o++] = chunk.trim().slice(1, -1) - } else if (/^"/.test(chunk)) { - let collect = chunk.trimLeft().slice(1) - while (++i < split.length) { - const n = split[i] - // something that is not a slash, followed by an even 
number - // of slashes then a " then end => ending on an unescaped " - if (/[^\\](\\\\)*"$/.test(n)) { - collect += n.trimRight().slice(0, -1) - break - } else { - collect += n - } - } - out[o++] = collect - } else { - out[o++] = chunk.trim() - } - } - return out - } - - toString () { - return prefix + [...new Set([...this.entries.values()])] - .map(e => e.toString()) - .sort(localeCompare).join('\n\n') + '\n' - } - - fromTree (tree) { - this.entries = new Map() - // walk the tree in a deterministic order, breadth-first, alphabetical - breadth({ - tree, - visit: node => this.addEntryFromNode(node), - getChildren: node => [...node.children.values(), ...node.fsChildren] - .sort((a, b) => a.depth - b.depth || localeCompare(a.name, b.name)), - }) - return this - } - - addEntryFromNode (node) { - const specs = [...node.edgesIn] - .map(e => `${node.name}@${e.spec}`) - .sort(localeCompare) - - // Note: - // yarn will do excessive duplication in a case like this: - // root -> (x@1.x, y@1.x, z@1.x) - // y@1.x -> (x@1.1, z@2.x) - // z@1.x -> () - // z@2.x -> (x@1.x) - // - // where x@1.2 exists, because the "x@1.x" spec will *always* resolve - // to x@1.2, which doesn't work for y's dep on x@1.1, so you'll get this: - // - // root - // +-- x@1.2.0 - // +-- y - // | +-- x@1.1.0 - // | +-- z@2 - // | +-- x@1.2.0 - // +-- z@1 - // - // instead of this more deduped tree that arborist builds by default: - // - // root - // +-- x@1.2.0 (dep is x@1.x, from root) - // +-- y - // | +-- x@1.1.0 - // | +-- z@2 (dep on x@1.x deduped to x@1.1.0 under y) - // +-- z@1 - // - // In order to not create an invalid yarn.lock file with conflicting - // entries, AND not tell yarn to create an invalid tree, we need to - // ignore the x@1.x spec coming from z, since it's already in the entries. - // - // So, if the integrity and resolved don't match a previous entry, skip it. - // We call this method on shallower nodes first, so this is fine. - const n = this.entryDataFromNode(node) - let priorEntry = null - const newSpecs = [] - for (const s of specs) { - const prev = this.entries.get(s) - // no previous entry for this spec at all, so it's new - if (!prev) { - // if we saw a match already, then assign this spec to it as well - if (priorEntry) { - priorEntry.addSpec(s) - } else { - newSpecs.push(s) - } - continue - } - - const m = match(prev, n) - // there was a prior entry, but a different thing. skip this one - if (!m) { - continue - } - - // previous matches, but first time seeing it, so already has this spec. - // go ahead and add all the previously unseen specs, though - if (!priorEntry) { - priorEntry = prev - for (const s of newSpecs) { - priorEntry.addSpec(s) - this.entries.set(s, priorEntry) - } - newSpecs.length = 0 - continue - } - - // have a prior entry matching n, and matching the prev we just saw - // add the spec to it - priorEntry.addSpec(s) - this.entries.set(s, priorEntry) - } - - // if we never found a matching prior, then this is a whole new thing - if (!priorEntry) { - const entry = Object.assign(new YarnLockEntry(newSpecs), n) - for (const s of newSpecs) { - this.entries.set(s, entry) - } - } else { - // pick up any new info that we got for this node, so that we can - // decorate with integrity/resolved/etc. 
- Object.assign(priorEntry, n) - } - } - - entryDataFromNode (node) { - const n = {} - if (node.package.dependencies) { - n.dependencies = node.package.dependencies - } - if (node.package.optionalDependencies) { - n.optionalDependencies = node.package.optionalDependencies - } - if (node.version) { - n.version = node.version - } - if (node.resolved) { - n.resolved = consistentResolve( - node.resolved, - node.isLink ? dirname(node.path) : node.path, - node.root.path, - true - ) - } - if (node.integrity) { - n.integrity = node.integrity - } - - return n - } - - static get Entry () { - return YarnLockEntry - } -} - -const _specs = Symbol('_specs') -class YarnLockEntry { - constructor (specs) { - this[_specs] = new Set(specs) - this.resolved = null - this.version = null - this.integrity = null - this.dependencies = null - this.optionalDependencies = null - } - - toString () { - // sort objects to the bottom, then alphabetical - return ([...this[_specs]] - .sort(localeCompare) - .map(JSON.stringify).join(', ') + - ':\n' + - Object.getOwnPropertyNames(this) - .filter(prop => this[prop] !== null) - .sort( - (a, b) => - /* istanbul ignore next - sort call order is unpredictable */ - (typeof this[a] === 'object') === (typeof this[b] === 'object') - ? localeCompare(a, b) - : typeof this[a] === 'object' ? 1 : -1) - .map(prop => - typeof this[prop] !== 'object' - ? ` ${JSON.stringify(prop)} ${JSON.stringify(this[prop])}\n` - : Object.keys(this[prop]).length === 0 ? '' - : ` ${prop}:\n` + sortKV(this[prop]) + '\n') - .join('')).trim() - } - - addSpec (spec) { - this[_specs].add(spec) - } -} - -module.exports = YarnLock diff --git a/node_modules/libnpmexec/node_modules/@npmcli/arborist/package.json b/node_modules/libnpmexec/node_modules/@npmcli/arborist/package.json deleted file mode 100644 index f912d2af11590..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/arborist/package.json +++ /dev/null @@ -1,95 +0,0 @@ -{ - "name": "@npmcli/arborist", - "version": "2.10.0", - "description": "Manage node_modules trees", - "dependencies": { - "@isaacs/string-locale-compare": "^1.0.1", - "@npmcli/installed-package-contents": "^1.0.7", - "@npmcli/map-workspaces": "^1.0.2", - "@npmcli/metavuln-calculator": "^1.1.0", - "@npmcli/move-file": "^1.1.0", - "@npmcli/name-from-folder": "^1.0.1", - "@npmcli/node-gyp": "^1.0.1", - "@npmcli/package-json": "^1.0.1", - "@npmcli/run-script": "^1.8.2", - "bin-links": "^2.2.1", - "cacache": "^15.0.3", - "common-ancestor-path": "^1.0.1", - "json-parse-even-better-errors": "^2.3.1", - "json-stringify-nice": "^1.1.4", - "mkdirp": "^1.0.4", - "mkdirp-infer-owner": "^2.0.0", - "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.1.5", - "npm-pick-manifest": "^6.1.0", - "npm-registry-fetch": "^11.0.0", - "pacote": "^11.3.5", - "parse-conflict-json": "^1.1.1", - "proc-log": "^1.0.0", - "promise-all-reject-late": "^1.0.0", - "promise-call-limit": "^1.0.1", - "read-package-json-fast": "^2.0.2", - "readdir-scoped-modules": "^1.1.0", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "ssri": "^8.0.1", - "treeverse": "^1.0.4", - "walk-up-path": "^1.0.0" - }, - "devDependencies": { - "@npmcli/lint": "^1.0.2", - "benchmark": "^2.1.4", - "chalk": "^4.1.0", - "minify-registry-metadata": "^2.1.0", - "tap": "^15.0.9", - "tcompare": "^5.0.6" - }, - "scripts": { - "test": "npm run test-only --", - "test-only": "tap", - "posttest": "npm run lint --", - "snap": "tap", - "postsnap": "npm run lintfix --", - "test-proxy": "ARBORIST_TEST_PROXY=1 tap --snapshot", - "preversion": "npm test", - 
"postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "eslint": "eslint", - "lint": "npm run npmclilint -- \"lib/**/*.*js\" \"bin/**/*.*js\" \"test/*.*js\" \"test/arborist/*.*js\"", - "lintfix": "npm run lint -- --fix", - "benchmark": "node scripts/benchmark.js", - "benchclean": "rm -rf scripts/benchmark/*/", - "npmclilint": "npmcli-lint" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/arborist" - }, - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "lib/**/*.js", - "bin/**/*.js" - ], - "main": "lib/index.js", - "bin": { - "arborist": "bin/index.js" - }, - "//": "sk test-env locale to catch locale-specific sorting", - "tap": { - "after": "test/fixtures/cleanup.js", - "coverage-map": "map.js", - "test-env": [ - "NODE_OPTIONS=--no-warnings", - "LC_ALL=sk" - ], - "node-arg": [ - "--no-warnings", - "--no-deprecation" - ], - "timeout": "240" - }, - "engines": { - "node": ">= 10" - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/LICENSE b/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/LICENSE deleted file mode 100644 index 19cec97b18468..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/advisory.js b/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/advisory.js deleted file mode 100644 index d0900e3732846..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/advisory.js +++ /dev/null @@ -1,405 +0,0 @@ -const hash = require('./hash.js') -const semver = require('semver') -const semverOpt = { includePrerelease: true, loose: true } -const getDepSpec = require('./get-dep-spec.js') - -// any fields that we don't want in the cache need to be hidden -const _source = Symbol('source') -const _packument = Symbol('packument') -const _versionVulnMemo = Symbol('versionVulnMemo') -const _updated = Symbol('updated') -const _options = Symbol('options') -const _specVulnMemo = Symbol('specVulnMemo') -const _testVersion = Symbol('testVersion') -const _testVersions = Symbol('testVersions') -const _calculateRange = Symbol('calculateRange') -const _markVulnerable = Symbol('markVulnerable') -const _testSpec = Symbol('testSpec') - -class Advisory { - constructor (name, source, options = {}) { - this.source = source.id - this[_source] = source - this[_options] = options - this.name = name - if (!source.name) - source.name = name - - this.dependency = source.name - - if (this.type === 'advisory') { - this.title = source.title - this.url = source.url - } else { - this.title = `Depends on vulnerable versions of ${source.name}` - this.url = null - } - - this.severity = source.severity || 'high' - this.versions = [] - this.vulnerableVersions = [] - - // advisories have the range, metavulns do not - // if an advisory doesn't specify range, assume all are vulnerable - this.range = this.type === 'advisory' ? source.vulnerable_versions || '*' - : null - - this.id = hash(this) - - this[_packument] = null - // memoized list of which versions are vulnerable - this[_versionVulnMemo] = new Map() - // memoized list of which dependency specs are vulnerable - this[_specVulnMemo] = new Map() - this[_updated] = false - } - - // true if we updated from what we had in cache - get updated () { - return this[_updated] - } - - get type () { - return this.dependency === this.name ? 'advisory' : 'metavuln' - } - - get packument () { - return this[_packument] - } - - // load up the data from a cache entry and a fetched packument - load (cached, packument) { - // basic data integrity gutcheck - if (!cached || typeof cached !== 'object') - throw new TypeError('invalid cached data, expected object') - - if (!packument || typeof packument !== 'object') - throw new TypeError('invalid packument data, expected object') - - if (cached.id && cached.id !== this.id) { - throw Object.assign(new Error('loading from incorrect cache entry'), { - expected: this.id, - actual: cached.id, - }) - } - if (packument.name !== this.name) { - throw Object.assign(new Error('loading from incorrect packument'), { - expected: this.name, - actual: packument.name, - }) - } - if (this[_packument]) - throw new Error('advisory object already loaded') - - // if we have a range from the initialization, and the cached - // data has a *different* range, then we know we have to recalc. 
- // just don't use the cached data, so we will definitely not match later - if (!this.range || cached.range && cached.range === this.range) - Object.assign(this, cached) - - this[_packument] = packument - - const pakuVersions = Object.keys(packument.versions) - const allVersions = new Set([...pakuVersions, ...this.versions]) - const versionsAdded = [] - const versionsRemoved = [] - for (const v of allVersions) { - if (!this.versions.includes(v)) { - versionsAdded.push(v) - this.versions.push(v) - } else if (!pakuVersions.includes(v)) - versionsRemoved.push(v) - } - - // strip out any removed versions from our lists, and sort by semver - this.versions = semver.sort(this.versions.filter(v => - !versionsRemoved.includes(v)), semverOpt) - - // if no changes, then just return what we got from cache - // versions added or removed always means we changed - // otherwise, advisories change if the range changes, and - // metavulns change if the source was updated - const unchanged = this.type === 'advisory' - ? this.range && this.range === cached.range - : !this[_source].updated - - // if the underlying source changed, by an advisory updating the - // range, or a source advisory being updated, then we have to re-check - // otherwise, only recheck the new ones. - this.vulnerableVersions = !unchanged ? [] - : semver.sort(this.vulnerableVersions.filter(v => - !versionsRemoved.includes(v)), semverOpt) - - if (unchanged && !versionsAdded.length && !versionsRemoved.length) { - // nothing added or removed, nothing to do here. use the cached copy. - return this - } - - this[_updated] = true - - // test any versions newly added - if (!unchanged || versionsAdded.length) - this[_testVersions](unchanged ? versionsAdded : this.versions) - this.vulnerableVersions = semver.sort(this.vulnerableVersions, semverOpt) - - // metavulns have to calculate their range, since cache is invalidated - // advisories just get their range from the advisory above - if (this.type === 'metavuln') - this[_calculateRange]() - - return this - } - - [_calculateRange] () { - // calling semver.simplifyRange with a massive list of versions, and those - // versions all concatenated with `||` is a geometric CPU explosion! - // we can try to be a *little* smarter up front by doing x-y for all - // contiguous version sets in the list - const ranges = [] - this.versions = semver.sort(this.versions) - this.vulnerableVersions = semver.sort(this.vulnerableVersions) - for (let v = 0, vulnVer = 0; v < this.versions.length; v++) { - // figure out the vulnerable subrange - const vr = [this.versions[v]] - while (v < this.versions.length) { - if (this.versions[v] !== this.vulnerableVersions[vulnVer]) { - // we don't test prerelease versions, so just skip past it - if (/-/.test(this.versions[v])) { - v++ - continue - } - break - } - if (vr.length > 1) - vr[1] = this.versions[v] - else - vr.push(this.versions[v]) - v++ - vulnVer++ - } - // it'll either be just the first version, which means no overlap, - // or the start and end versions, which might be the same version - if (vr.length > 1) { - const tail = this.versions[this.versions.length - 1] - ranges.push(vr[1] === tail ? `>=${vr[0]}` - : vr[0] === vr[1] ? vr[0] - : vr.join(' - ')) - } - } - const metavuln = ranges.join(' || ').trim() - this.range = !metavuln ? '<0.0.0-0' - : semver.simplifyRange(this.versions, metavuln, semverOpt) - } - - // returns true if marked as vulnerable, false if ok - // spec is a dependency specifier, for metavuln cases - // where the version might not be in the packument. 
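The geometric-blowup guard in [_calculateRange] above works by collapsing each contiguous run of vulnerable versions into a single "lo - hi" span (or ">=lo" when the run reaches the newest version) before calling semver.simplifyRange once on the much shorter expression. An illustrative run, assuming the semver package is installed:

const semver = require('semver')
const opt = { includePrerelease: true, loose: true }

const versions = ['1.0.0', '1.0.1', '1.0.2', '1.1.0', '2.0.0']
// vulnerable: 1.0.1, 1.0.2, 1.1.0 -- one contiguous run, so instead of
// '1.0.1 || 1.0.2 || 1.1.0' the calculator hands simplifyRange just:
console.log(semver.simplifyRange(versions, '1.0.1 - 1.1.0', opt))
// prints a range equivalent to '1.0.1 - 1.1.0'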
if - // we have the packument and spec is not provided, then - // we use the dependency version from the manifest. - testVersion (version, spec = null) { - const sv = String(version) - if (this[_versionVulnMemo].has(sv)) - return this[_versionVulnMemo].get(sv) - - const result = this[_testVersion](version, spec) - if (result) - this[_markVulnerable](version) - this[_versionVulnMemo].set(sv, !!result) - return result - } - - [_markVulnerable] (version) { - const sv = String(version) - if (!this.vulnerableVersions.includes(sv)) - this.vulnerableVersions.push(sv) - } - - [_testVersion] (version, spec) { - const sv = String(version) - if (this.vulnerableVersions.includes(sv)) - return true - - if (this.type === 'advisory') { - // advisory, just test range - return semver.satisfies(version, this.range, semverOpt) - } - - // check the dependency of this version on the vulnerable dep - // if we got a version that's not in the packument, fall back on - // the spec provided, if possible. - const mani = this[_packument].versions[version] || { - dependencies: { - [this.dependency]: spec, - }, - } - - if (!spec) - spec = getDepSpec(mani, this.dependency) - - // no dep, no vuln - if (spec === null) - return false - - if (!semver.validRange(spec, semverOpt)) { - // not a semver range, nothing we can hope to do about it - return true - } - - const bd = mani.bundleDependencies - const bundled = bd && bd.includes(this[_source].name) - // XXX if bundled, then semver.intersects() means vulnerable - // else, pick a manifest and see if it can't be avoided - // try to pick a version of the dep that isn't vulnerable - const avoid = this[_source].range - - if (bundled) - return semver.intersects(spec, avoid, semverOpt) - - return this[_source].testSpec(spec) - } - - testSpec (spec) { - // testing all the versions is a bit costly, and the spec tends to stay - // consistent across multiple versions, so memoize this as well, in case - // we're testing lots of versions. - const memo = this[_specVulnMemo] - if (memo.has(spec)) - return memo.get(spec) - - const res = this[_testSpec](spec) - memo.set(spec, res) - return res - } - - [_testSpec] (spec) { - for (const v of this.versions) { - const satisfies = semver.satisfies(v, spec) - if (!satisfies) - continue - if (!this.testVersion(v)) - return false - } - // either vulnerable, or not installable because nothing satisfied - // either way, best avoided. - return true - } - - [_testVersions] (versions) { - if (!versions.length) - return - - // set of lists of versions - const versionSets = new Set() - versions = semver.sort(versions.map(v => semver.parse(v, semverOpt))) - - // start out with the versions grouped by major and minor - let last = versions[0].major + '.' + versions[0].minor - let list = [] - versionSets.add(list) - for (const v of versions) { - const k = v.major + '.' + v.minor - if (k !== last) { - last = k - list = [] - versionSets.add(list) - } - list.push(v) - } - - for (const list of versionSets) { - // it's common to have version lists like: - // 1.0.0 - // 1.0.1-alpha.0 - // 1.0.1-alpha.1 - // ... - // 1.0.1-alpha.999 - // 1.0.1 - // 1.0.2-alpha.0 - // ... - // 1.0.2-alpha.99 - // 1.0.2 - // with a huge number of prerelease versions that are not installable - // anyway. 
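Setting aside the prerelease walking described just below, the core of [_testVersions] is a head/tail bisection: test both ends of a sorted group, assume everything between two vulnerable endpoints is vulnerable, and otherwise split the group and requeue the halves. A simplified recursive sketch of that idea (the real code uses a worklist and memoized testVersion results):

const markGroup = (list, isVuln, mark) => {
  if (list.length === 0) {
    return
  }
  const headVuln = isVuln(list[0])
  const tailVuln = isVuln(list[list.length - 1])
  if (headVuln && tailVuln) {
    // both ends vulnerable: assume the whole contiguous group is
    return list.forEach(mark)
  }
  if (list.length <= 2) {
    // both entries were already tested above; mark the vulnerable ones
    return list.filter(isVuln).forEach(mark)
  }
  const mid = Math.floor(list.length / 2)
  markGroup(list.slice(0, mid), isVuln, mark)
  markGroup(list.slice(mid), isVuln, mark)
}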
- // If mid has a prerelease tag, and list[0] does not, then walk it - // back until we hit a non-prerelease version - // If mid has a prerelease tag, and list[list.length-1] does not, - // then walk it forward until we hit a version without a prerelease tag - // Similarly, if the head/tail is a prerelease, but there is a non-pr - // version in the list, then start there instead. - let h = 0 - const origHeadVuln = this.testVersion(list[h]) - while (h < list.length && /-/.test(String(list[h]))) - h++ - - // don't filter out the whole list! they might all be pr's - if (h === list.length) - h = 0 - else if (origHeadVuln) { - // if the original was vulnerable, assume so are all of these - for (let hh = 0; hh < h; hh++) - this[_markVulnerable](list[hh]) - } - - let t = list.length - 1 - const origTailVuln = this.testVersion(list[t]) - while (t > h && /-/.test(String(list[t]))) - t-- - - // don't filter out the whole list! might all be pr's - if (t === h) - t = list.length - 1 - else if (origTailVuln) { - // if original tail was vulnerable, assume these are as well - for (let tt = list.length - 1; tt > t; tt--) - this[_markVulnerable](list[tt]) - } - - const headVuln = h === 0 ? origHeadVuln - : this.testVersion(list[h]) - - const tailVuln = t === list.length - 1 ? origTailVuln - : this.testVersion(list[t]) - - // if head and tail both vulnerable, whole list is thrown out - if (headVuln && tailVuln) { - for (let v = h; v < t; v++) - this[_markVulnerable](list[v]) - continue - } - - // if length is 2 or 1, then we marked them all already - if (t < h + 2) - continue - - const mid = Math.floor(list.length / 2) - const pre = list.slice(0, mid) - const post = list.slice(mid) - - // if the parent list wasn't prereleases, then drop pr tags - // from end of the pre list, and beginning of the post list, - // marking as vulnerable if the midpoint item we picked is. - if (!/-/.test(String(pre[0]))) { - const midVuln = this.testVersion(pre[pre.length - 1]) - while (/-/.test(String(pre[pre.length - 1]))) { - const v = pre.pop() - if (midVuln) - this[_markVulnerable](v) - } - } - - if (!/-/.test(String(post[post.length - 1]))) { - const midVuln = this.testVersion(post[0]) - while (/-/.test(String(post[0]))) { - const v = post.shift() - if (midVuln) - this[_markVulnerable](v) - } - } - - versionSets.add(pre) - versionSets.add(post) - } - } -} - -module.exports = Advisory diff --git a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js b/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js deleted file mode 100644 index 8d1d72b8c46eb..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js +++ /dev/null @@ -1,15 +0,0 @@ -module.exports = (mani, name) => { - // skip dev because that only matters at the root, - // where we aren't fetching a manifest from the registry - // with multiple versions anyway. - const { - dependencies: deps = {}, - optionalDependencies: optDeps = {}, - peerDependencies: peerDeps = {}, - } = mani - - return deps && typeof deps[name] === 'string' ? deps[name] - : optDeps && typeof optDeps[name] === 'string' ? optDeps[name] - : peerDeps && typeof peerDeps[name] === 'string' ? 
peerDeps[name] - : null -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/hash.js b/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/hash.js deleted file mode 100644 index 79c0678c7581a..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/hash.js +++ /dev/null @@ -1,5 +0,0 @@ -const {createHash} = require('crypto') - -module.exports = ({name, source}) => createHash('sha512') - .update(JSON.stringify([name, source])) - .digest('base64') diff --git a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/index.js b/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/index.js deleted file mode 100644 index 02c1ed018b8b5..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/lib/index.js +++ /dev/null @@ -1,124 +0,0 @@ -// this is the public class that is used by consumers. -// the Advisory class handles all the calculation, and this -// class handles all the IO with the registry and cache. -const pacote = require('pacote') -const cacache = require('cacache') -const Advisory = require('./advisory.js') -const {homedir} = require('os') -const jsonParse = require('json-parse-even-better-errors') - -const _packument = Symbol('packument') -const _cachePut = Symbol('cachePut') -const _cacheGet = Symbol('cacheGet') -const _cacheData = Symbol('cacheData') -const _packuments = Symbol('packuments') -const _cache = Symbol('cache') -const _options = Symbol('options') -const _advisories = Symbol('advisories') -const _calculate = Symbol('calculate') - -class Calculator { - constructor (options = {}) { - this[_options] = { ...options } - this[_cache] = this[_options].cache || (homedir() + '/.npm/_cacache') - this[_options].cache = this[_cache] - this[_packuments] = new Map() - this[_cacheData] = new Map() - this[_advisories] = new Map() - } - - get cache () { - return this[_cache] - } - - get options () { - return { ...this[_options] } - } - - async calculate (name, source) { - const k = `security-advisory:${name}:${source.id}` - if (this[_advisories].has(k)) - return this[_advisories].get(k) - - const p = this[_calculate](name, source) - this[_advisories].set(k, p) - return p - } - - async [_calculate] (name, source) { - const k = `security-advisory:${name}:${source.id}` - const t = `metavuln:calculate:${k}` - process.emit('time', t) - const advisory = new Advisory(name, source, this[_options]) - // load packument and cached advisory - const [cached, packument] = await Promise.all([ - this[_cacheGet](advisory), - this[_packument](name), - ]) - process.emit('time', `metavuln:load:${k}`) - advisory.load(cached, packument) - process.emit('timeEnd', `metavuln:load:${k}`) - if (advisory.updated) - await this[_cachePut](advisory) - this[_advisories].set(k, advisory) - process.emit('timeEnd', t) - return advisory - } - - async [_cachePut] (advisory) { - const { name, id } = advisory - const key = `security-advisory:${name}:${id}` - process.emit('time', `metavuln:cache:put:${key}`) - const data = JSON.stringify(advisory) - const options = { ...this[_options] } - this[_cacheData].set(key, jsonParse(data)) - await cacache.put(this[_cache], key, data, options).catch(() => {}) - process.emit('timeEnd', `metavuln:cache:put:${key}`) - } - - async [_cacheGet] (advisory) { - const { name, id } = advisory - const key = `security-advisory:${name}:${id}` - /* istanbul ignore if - should be impossible, since we memoize the - * advisory object itself using the same key, just 
being cautious */ - if (this[_cacheData].has(key)) - return this[_cacheData].get(key) - - process.emit('time', `metavuln:cache:get:${key}`) - const p = cacache.get(this[_cache], key, { ...this[_options] }) - .catch(() => ({ data: '{}' })) - .then(({ data }) => { - data = jsonParse(data) - process.emit('timeEnd', `metavuln:cache:get:${key}`) - this[_cacheData].set(key, data) - return data - }) - this[_cacheData].set(key, p) - return p - } - - async [_packument] (name) { - if (this[_packuments].has(name)) - return this[_packuments].get(name) - - process.emit('time', `metavuln:packument:${name}`) - const p = pacote.packument(name, { ...this[_options] }) - .catch((er) => { - // presumably not something from the registry. - // an empty packument will have an effective range of * - return { - name, - versions: {}, - } - }) - .then(paku => { - process.emit('timeEnd', `metavuln:packument:${name}`) - this[_packuments].set(name, paku) - return paku - }) - this[_packuments].set(name, p) - return p - } -} - -module.exports = Calculator diff --git a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/package.json deleted file mode 100644 index 4ad6193ae6aa8..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "@npmcli/metavuln-calculator", - "version": "1.1.1", - "main": "lib/index.js", - "files": [ - "lib" - ], - "description": "Calculate meta-vulnerabilities from package security advisories", - "repository": "https://github.com/npm/metavuln-calculator", - "author": "Isaac Z. Schlueter (https://izs.me)", - "license": "ISC", - "scripts": { - "test": "tap", - "posttest": "npm run lint", - "snap": "tap", - "postsnap": "npm run lint", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "eslint": "eslint", - "lint": "npm run eslint -- \"lib/**/*.js\" \"test/**/*.js\"", - "lintfix": "npm run lint -- --fix" - }, - "tap": { - "check-coverage": true, - "coverage-map": "map.js" - }, - "devDependencies": { - "eslint": "^7.20.0", - "eslint-plugin-import": "^2.22.1", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^4.3.1", - "eslint-plugin-standard": "^4.1.0", - "require-inject": "^1.4.4", - "tap": "^14.10.8" - }, - "dependencies": { - "cacache": "^15.0.5", - "pacote": "^11.1.11", - "semver": "^7.3.2" - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/LICENSE b/node_modules/libnpmexec/node_modules/@npmcli/run-script/LICENSE deleted file mode 100644 index 19cec97b18468..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
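A detail worth noting in the Calculator above: [_packument] and [_cacheGet] memoize the pending promise itself, not just the eventual value, so concurrent calculations for the same package share a single registry or cacache request instead of racing. A distilled sketch of that pattern, with a stand-in fetch:

const memo = new Map()
const fetchPackument = async name => ({ name, versions: {} }) // stand-in for pacote.packument

const getPackument = name => {
  if (memo.has(name)) {
    return memo.get(name) // the resolved value, or the still-in-flight promise
  }
  const p = fetchPackument(name).then(paku => {
    memo.set(name, paku) // swap the promise for the settled value
    return paku
  })
  memo.set(name, p)
  return p
}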
diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/is-server-package.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/is-server-package.js deleted file mode 100644 index d168623247527..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/is-server-package.js +++ /dev/null @@ -1,12 +0,0 @@ -const util = require('util') -const fs = require('fs') -const { stat } = fs.promises || { stat: util.promisify(fs.stat) } -const { resolve } = require('path') -module.exports = async path => { - try { - const st = await stat(resolve(path, 'server.js')) - return st.isFile() - } catch (er) { - return false - } -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/is-windows.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/is-windows.js deleted file mode 100644 index 651917e6ad27a..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/is-windows.js +++ /dev/null @@ -1,2 +0,0 @@ -const platform = process.env.__FAKE_TESTING_PLATFORM__ || process.platform -module.exports = platform === 'win32' diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/make-spawn-args.js deleted file mode 100644 index 8f299954a7a80..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/make-spawn-args.js +++ /dev/null @@ -1,40 +0,0 @@ -/* eslint camelcase: "off" */ -const isWindows = require('./is-windows.js') -const setPATH = require('./set-path.js') -const {resolve} = require('path') -const npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js') - -const makeSpawnArgs = options => { - const { - event, - path, - scriptShell = isWindows ? process.env.ComSpec || 'cmd' : 'sh', - env = {}, - stdio, - cmd, - stdioString = false, - } = options - - const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(scriptShell) - const args = isCmd ? ['/d', '/s', '/c', cmd] : ['-c', cmd] - - const spawnOpts = { - env: setPATH(path, { - // we need to at least save the PATH environment var - ...process.env, - ...env, - npm_package_json: resolve(path, 'package.json'), - npm_lifecycle_event: event, - npm_lifecycle_script: cmd, - npm_config_node_gyp, - }), - stdioString, - stdio, - cwd: path, - ...(isCmd ? 
{ windowsVerbatimArguments: true } : {}), - } - - return [scriptShell, args, spawnOpts] -} - -module.exports = makeSpawnArgs diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp deleted file mode 100755 index 5bec64d961a3a..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp +++ /dev/null @@ -1,2 +0,0 @@ -#!/usr/bin/env sh -node "$npm_config_node_gyp" "$@" diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd deleted file mode 100755 index 4c6987ac9868b..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd +++ /dev/null @@ -1 +0,0 @@ -@node "%npm_config_node_gyp%" %* diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/package-envs.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/package-envs.js deleted file mode 100644 index 47791fb991bd5..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/package-envs.js +++ /dev/null @@ -1,25 +0,0 @@ -// https://github.com/npm/rfcs/pull/183 - -const envVal = val => Array.isArray(val) ? val.map(v => envVal(v)).join('\n\n') - : val === null || val === false ? '' - : String(val) - -const packageEnvs = (env, vals, prefix) => { - for (const [key, val] of Object.entries(vals)) { - if (val === undefined) - continue - else if (val && !Array.isArray(val) && typeof val === 'object') - packageEnvs(env, val, `${prefix}${key}_`) - else - env[`${prefix}${key}`] = envVal(val) - } - return env -} - -module.exports = (env, pkg) => packageEnvs({ ...env }, { - name: pkg.name, - version: pkg.version, - config: pkg.config, - engines: pkg.engines, - bin: pkg.bin, -}, 'npm_package_') diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/run-script-pkg.js deleted file mode 100644 index ccde173e014f6..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/run-script-pkg.js +++ /dev/null @@ -1,88 +0,0 @@ -const makeSpawnArgs = require('./make-spawn-args.js') -const promiseSpawn = require('@npmcli/promise-spawn') -const packageEnvs = require('./package-envs.js') -const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp') -const signalManager = require('./signal-manager.js') -const isServerPackage = require('./is-server-package.js') - -// you wouldn't like me when I'm angry... -const bruce = (id, event, cmd) => - `\n> ${id ? id + ' ' : ''}${event}\n> ${cmd.trim().replace(/\n/g, '\n> ')}\n` - -const runScriptPkg = async options => { - const { - event, - path, - scriptShell, - env = {}, - stdio = 'pipe', - pkg, - args = [], - stdioString = false, - // note: only used when stdio:inherit - banner = true, - // how long to wait for a process.kill signal - // only exposed here so that we can make the test go a bit faster. - signalTimeout = 500, - } = options - - const {scripts = {}, gypfile} = pkg - let cmd = null - if (options.cmd) - cmd = options.cmd - else if (pkg.scripts && pkg.scripts[event]) - cmd = pkg.scripts[event] + args.map(a => ` ${JSON.stringify(a)}`).join('') - else if ( // If there is no preinstall or install script, default to rebuilding node-gyp packages. 
- event === 'install' && - !scripts.install && - !scripts.preinstall && - gypfile !== false && - await isNodeGypPackage(path) - ) - cmd = defaultGypInstallScript - else if (event === 'start' && await isServerPackage(path)) - cmd = 'node server.js' + args.map(a => ` ${JSON.stringify(a)}`).join('') - - if (!cmd) - return { code: 0, signal: null } - - if (stdio === 'inherit' && banner !== false) { - // we're dumping to the parent's stdout, so print the banner - console.log(bruce(pkg._id, event, cmd)) - } - - const p = promiseSpawn(...makeSpawnArgs({ - event, - path, - scriptShell, - env: packageEnvs(env, pkg), - stdio, - cmd, - stdioString, - }), { - event, - script: cmd, - pkgid: pkg._id, - path, - }) - - if (stdio === 'inherit') - signalManager.add(p.process) - - if (p.stdin) - p.stdin.end() - - return p.catch(er => { - const { signal } = er - if (stdio === 'inherit' && signal) { - process.kill(process.pid, signal) - // just in case we don't die, reject after 500ms - // this also keeps the node process open long enough to actually - // get the signal, rather than terminating gracefully. - return new Promise((res, rej) => setTimeout(() => rej(er), signalTimeout)) - } else - throw er - }) -} - -module.exports = runScriptPkg diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/run-script.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/run-script.js deleted file mode 100644 index af33d2113f9ef..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/run-script.js +++ /dev/null @@ -1,13 +0,0 @@ -const rpj = require('read-package-json-fast') -const runScriptPkg = require('./run-script-pkg.js') -const validateOptions = require('./validate-options.js') -const isServerPackage = require('./is-server-package.js') - -const runScript = options => { - validateOptions(options) - const {pkg, path} = options - return pkg ? runScriptPkg(options) - : rpj(path + '/package.json').then(pkg => runScriptPkg({...options, pkg})) -} - -module.exports = Object.assign(runScript, { isServerPackage }) diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/set-path.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/set-path.js deleted file mode 100644 index d7bd2c2886b35..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/set-path.js +++ /dev/null @@ -1,44 +0,0 @@ -const {resolve, dirname} = require('path') -const isWindows = require('./is-windows.js') -// the path here is relative, even though it does not need to be -// in order to make the posix tests pass in windows -const nodeGypPath = resolve(__dirname, '../lib/node-gyp-bin') - -// Windows typically calls its PATH environ 'Path', but this is not -// guaranteed, nor is it guaranteed to be the only one. Merge them -// all together in the order they appear in the object. -const setPATH = (projectPath, env) => { - // not require('path').delimiter, because we fake this for testing - const delimiter = isWindows ? ';' : ':' - const PATH = Object.keys(env).filter(p => /^path$/i.test(p) && env[p]) - .map(p => env[p].split(delimiter)) - .reduce((set, p) => set.concat(p.filter(p => !set.includes(p))), []) - .join(delimiter) - - const pathArr = [] - // unshift the ./node_modules/.bin from every folder - // walk up until dirname() does nothing, at the root - // XXX should we specify a cwd that we don't go above? 
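What setPATH() above is building: every ancestor of the project contributes its node_modules/.bin, nearest first, followed by the node-gyp-bin shim directory and the original PATH. The directory walk in isolation (posix paths for illustration):

const { resolve, dirname } = require('path')

const binPaths = projectPath => {
  const out = []
  let p = projectPath
  let pp
  do {
    out.push(resolve(p, 'node_modules', '.bin'))
    pp = p
    p = dirname(p) // dirname('/') === '/', which ends the walk
  } while (p !== pp)
  return out
}

console.log(binPaths('/home/user/project'))
// [ '/home/user/project/node_modules/.bin',
//   '/home/user/node_modules/.bin',
//   '/home/node_modules/.bin',
//   '/node_modules/.bin' ]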
- let p = projectPath - let pp - do { - pathArr.push(resolve(p, 'node_modules', '.bin')) - pp = p - p = dirname(p) - } while (p !== pp) - pathArr.push(nodeGypPath, PATH) - - const pathVal = pathArr.join(delimiter) - - // XXX include the node-gyp-bin path somehow? Probably better for - // npm or arborist or whoever to just provide that by putting it in - // the PATH environ, since that's preserved anyway. - for (const key of Object.keys(env)) { - if (/^path$/i.test(key)) - env[key] = pathVal - } - - return env -} - -module.exports = setPATH diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/signal-manager.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/signal-manager.js deleted file mode 100644 index 556e758c254f0..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/signal-manager.js +++ /dev/null @@ -1,46 +0,0 @@ -const runningProcs = new Set() -let handlersInstalled = false - -const forwardedSignals = [ - 'SIGINT', - 'SIGTERM' -] - -const handleSignal = signal => { - for (const proc of runningProcs) { - proc.kill(signal) - } -} - -const setupListeners = () => { - for (const signal of forwardedSignals) { - process.on(signal, handleSignal) - } - handlersInstalled = true -} - -const cleanupListeners = () => { - if (runningProcs.size === 0) { - for (const signal of forwardedSignals) { - process.removeListener(signal, handleSignal) - } - handlersInstalled = false - } -} - -const add = proc => { - runningProcs.add(proc) - if (!handlersInstalled) - setupListeners() - - proc.once('exit', () => { - runningProcs.delete(proc) - cleanupListeners() - }) -} - -module.exports = { - add, - handleSignal, - forwardedSignals -} diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/validate-options.js b/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/validate-options.js deleted file mode 100644 index 48ac5c5d9e399..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/lib/validate-options.js +++ /dev/null @@ -1,31 +0,0 @@ -const validateOptions = options => { - if (typeof options !== 'object' || !options) - throw new TypeError('invalid options object provided to runScript') - - const { - event, - path, - scriptShell, - env = {}, - stdio = 'pipe', - args = [], - cmd, - } = options - - if (!event || typeof event !== 'string') - throw new TypeError('valid event not provided to runScript') - if (!path || typeof path !== 'string') - throw new TypeError('valid path not provided to runScript') - if (scriptShell !== undefined && typeof scriptShell !== 'string') - throw new TypeError('invalid scriptShell option provided to runScript') - if (typeof env !== 'object' || !env) - throw new TypeError('invalid env option provided to runScript') - if (typeof stdio !== 'string' && !Array.isArray(stdio)) - throw new TypeError('invalid stdio option provided to runScript') - if (!Array.isArray(args) || args.some(a => typeof a !== 'string')) - throw new TypeError('invalid args option provided to runScript') - if (cmd !== undefined && typeof cmd !== 'string') - throw new TypeError('invalid cmd option provided to runScript') -} - -module.exports = validateOptions diff --git a/node_modules/libnpmexec/node_modules/@npmcli/run-script/package.json b/node_modules/libnpmexec/node_modules/@npmcli/run-script/package.json deleted file mode 100644 index 5a5d1a71d8f35..0000000000000 --- a/node_modules/libnpmexec/node_modules/@npmcli/run-script/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "@npmcli/run-script", - 
"version": "1.8.6", - "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", - "author": "Isaac Z. Schlueter (https://izs.me)", - "license": "ISC", - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "eslint": "eslint", - "lint": "npm run eslint -- \"lib/**/*.js\"", - "lintfix": "npm run lint -- --fix" - }, - "tap": { - "check-coverage": true, - "coverage-map": "map.js" - }, - "devDependencies": { - "eslint": "^7.19.0", - "eslint-plugin-import": "^2.22.1", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^4.2.1", - "eslint-plugin-standard": "^5.0.0", - "minipass": "^3.1.1", - "require-inject": "^1.4.4", - "tap": "^15.0.4" - }, - "dependencies": { - "@npmcli/node-gyp": "^1.0.2", - "@npmcli/promise-spawn": "^1.3.2", - "node-gyp": "^7.1.0", - "read-package-json-fast": "^2.0.1" - }, - "files": [ - "lib/**/*.js", - "lib/node-gyp-bin" - ], - "main": "lib/run-script.js", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/run-script.git" - } -} diff --git a/node_modules/libnpmexec/node_modules/aproba/LICENSE b/node_modules/libnpmexec/node_modules/aproba/LICENSE deleted file mode 100644 index f4be44d881b2d..0000000000000 --- a/node_modules/libnpmexec/node_modules/aproba/LICENSE +++ /dev/null @@ -1,14 +0,0 @@ -Copyright (c) 2015, Rebecca Turner - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- diff --git a/node_modules/libnpmexec/node_modules/aproba/index.js b/node_modules/libnpmexec/node_modules/aproba/index.js deleted file mode 100644 index 6f3f797c09a75..0000000000000 --- a/node_modules/libnpmexec/node_modules/aproba/index.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict' - -function isArguments (thingy) { - return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee') -} - -var types = { - '*': {label: 'any', check: function () { return true }}, - A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }}, - S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }}, - N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }}, - F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }}, - O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }}, - B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }}, - E: {label: 'error', check: function (thingy) { return thingy instanceof Error }}, - Z: {label: 'null', check: function (thingy) { return thingy == null }} -} - -function addSchema (schema, arity) { - var group = arity[schema.length] = arity[schema.length] || [] - if (group.indexOf(schema) === -1) group.push(schema) -} - -var validate = module.exports = function (rawSchemas, args) { - if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length) - if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas') - if (!args) throw missingRequiredArg(1, 'args') - if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas) - if (!types.A.check(args)) throw invalidType(1, ['array'], args) - var schemas = rawSchemas.split('|') - var arity = {} - - schemas.forEach(function (schema) { - for (var ii = 0; ii < schema.length; ++ii) { - var type = schema[ii] - if (!types[type]) throw unknownType(ii, type) - } - if (/E.*E/.test(schema)) throw moreThanOneError(schema) - addSchema(schema, arity) - if (/E/.test(schema)) { - addSchema(schema.replace(/E.*$/, 'E'), arity) - addSchema(schema.replace(/E/, 'Z'), arity) - if (schema.length === 1) addSchema('', arity) - } - }) - var matching = arity[args.length] - if (!matching) { - throw wrongNumberOfArgs(Object.keys(arity), args.length) - } - for (var ii = 0; ii < args.length; ++ii) { - var newMatching = matching.filter(function (schema) { - var type = schema[ii] - var typeCheck = types[type].check - return typeCheck(args[ii]) - }) - if (!newMatching.length) { - var labels = matching.map(function (schema) { - return types[schema[ii]].label - }).filter(function (schema) { return schema != null }) - throw invalidType(ii, labels, args[ii]) - } - matching = newMatching - } -} - -function missingRequiredArg (num) { - return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) -} - -function unknownType (num, type) { - return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) -} - -function invalidType (num, expectedTypes, value) { - var valueType - Object.keys(types).forEach(function (typeCode) { - if (types[typeCode].check(value)) valueType = types[typeCode].label - }) - return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + - englishList(expectedTypes) + ' but got ' + valueType) -} - -function englishList (list) { - return list.join(', ').replace(/, ([^,]+)$/, ' or $1') 
-} - -function wrongNumberOfArgs (expected, got) { - var english = englishList(expected) - var args = expected.every(function (ex) { return ex.length === 1 }) - ? 'argument' - : 'arguments' - return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) -} - -function moreThanOneError (schema) { - return newException('ETOOMANYERRORTYPES', - 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"') -} - -function newException (code, msg) { - var e = new Error(msg) - e.code = code - if (Error.captureStackTrace) Error.captureStackTrace(e, validate) - return e -} diff --git a/node_modules/libnpmexec/node_modules/aproba/package.json b/node_modules/libnpmexec/node_modules/aproba/package.json deleted file mode 100644 index f008787bc265e..0000000000000 --- a/node_modules/libnpmexec/node_modules/aproba/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "aproba", - "version": "1.2.0", - "description": "A ridiculously light-weight argument validator (now browser friendly)", - "main": "index.js", - "directories": { - "test": "test" - }, - "dependencies": {}, - "devDependencies": { - "standard": "^10.0.3", - "tap": "^10.0.2" - }, - "files": [ - "index.js" - ], - "scripts": { - "test": "standard && tap -j3 test/*.js" - }, - "repository": { - "type": "git", - "url": "https://github.com/iarna/aproba" - }, - "keywords": [ - "argument", - "validate" - ], - "author": "Rebecca Turner ", - "license": "ISC", - "bugs": { - "url": "https://github.com/iarna/aproba/issues" - }, - "homepage": "https://github.com/iarna/aproba" -} diff --git a/node_modules/libnpmexec/node_modules/gauge/LICENSE b/node_modules/libnpmexec/node_modules/gauge/LICENSE deleted file mode 100644 index e756052969b78..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2014, Rebecca Turner - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
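The aproba validator deleted above is driven entirely by single-character type codes (S string, N number, A array, O object, F function, B boolean, E error, Z null, * any), with '|' separating alternate signatures. A minimal caller sketch, grounded directly in the index.js shown above:

// Usage sketch for the removed aproba@1.2.0, based on the index.js above.
var validate = require('aproba')

function copy (from, to, cb) {
  // One schema string per acceptable signature; this one requires
  // exactly (string, string, function).
  validate('SSF', [from, to, cb])
}

copy('a.txt', 'b.txt', function () {}) // ok
// copy('a.txt', 42, function () {})
// throws EINVALIDTYPE: "Argument #2: Expected string but got number"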
diff --git a/node_modules/libnpmexec/node_modules/gauge/base-theme.js b/node_modules/libnpmexec/node_modules/gauge/base-theme.js deleted file mode 100644 index 0b67638e0211d..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/base-theme.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' -var spin = require('./spin.js') -var progressBar = require('./progress-bar.js') - -module.exports = { - activityIndicator: function (values, theme, width) { - if (values.spun == null) return - return spin(theme, values.spun) - }, - progressbar: function (values, theme, width) { - if (values.completed == null) return - return progressBar(theme, width, values.completed) - } -} diff --git a/node_modules/libnpmexec/node_modules/gauge/error.js b/node_modules/libnpmexec/node_modules/gauge/error.js deleted file mode 100644 index d9914ba5335d2..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/error.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict' -var util = require('util') - -var User = exports.User = function User (msg) { - var err = new Error(msg) - Error.captureStackTrace(err, User) - err.code = 'EGAUGE' - return err -} - -exports.MissingTemplateValue = function MissingTemplateValue (item, values) { - var err = new User(util.format('Missing template value "%s"', item.type)) - Error.captureStackTrace(err, MissingTemplateValue) - err.template = item - err.values = values - return err -} - -exports.Internal = function Internal (msg) { - var err = new Error(msg) - Error.captureStackTrace(err, Internal) - err.code = 'EGAUGEINTERNAL' - return err -} diff --git a/node_modules/libnpmexec/node_modules/gauge/has-color.js b/node_modules/libnpmexec/node_modules/gauge/has-color.js deleted file mode 100644 index e283a256f26b7..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/has-color.js +++ /dev/null @@ -1,12 +0,0 @@ -'use strict' - -module.exports = isWin32() || isColorTerm() - -function isWin32 () { - return process.platform === 'win32' -} - -function isColorTerm () { - var termHasColor = /^screen|^xterm|^vt100|color|ansi|cygwin|linux/i - return !!process.env.COLORTERM || termHasColor.test(process.env.TERM) -} diff --git a/node_modules/libnpmexec/node_modules/gauge/index.js b/node_modules/libnpmexec/node_modules/gauge/index.js deleted file mode 100644 index c55324008cbfa..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/index.js +++ /dev/null @@ -1,233 +0,0 @@ -'use strict' -var Plumbing = require('./plumbing.js') -var hasUnicode = require('has-unicode') -var hasColor = require('./has-color.js') -var onExit = require('signal-exit') -var defaultThemes = require('./themes') -var setInterval = require('./set-interval.js') -var process = require('./process.js') -var setImmediate = require('./set-immediate') - -module.exports = Gauge - -function callWith (obj, method) { - return function () { - return method.call(obj) - } -} - -function Gauge (arg1, arg2) { - var options, writeTo - if (arg1 && arg1.write) { - writeTo = arg1 - options = arg2 || {} - } else if (arg2 && arg2.write) { - writeTo = arg2 - options = arg1 || {} - } else { - writeTo = process.stderr - options = arg1 || arg2 || {} - } - - this._status = { - spun: 0, - section: '', - subsection: '' - } - this._paused = false // are we paused for back pressure? - this._disabled = true // are all progress bar updates disabled? - this._showing = false // do we WANT the progress bar on screen - this._onScreen = false // IS the progress bar on screen - this._needsRedraw = false // should we print something at next tick? 
- this._hideCursor = options.hideCursor == null ? true : options.hideCursor - this._fixedFramerate = options.fixedFramerate == null - ? !(/^v0\.8\./.test(process.version)) - : options.fixedFramerate - this._lastUpdateAt = null - this._updateInterval = options.updateInterval == null ? 50 : options.updateInterval - - this._themes = options.themes || defaultThemes - this._theme = options.theme - var theme = this._computeTheme(options.theme) - var template = options.template || [ - {type: 'progressbar', length: 20}, - {type: 'activityIndicator', kerning: 1, length: 1}, - {type: 'section', kerning: 1, default: ''}, - {type: 'subsection', kerning: 1, default: ''} - ] - this.setWriteTo(writeTo, options.tty) - var PlumbingClass = options.Plumbing || Plumbing - this._gauge = new PlumbingClass(theme, template, this.getWidth()) - - this._$$doRedraw = callWith(this, this._doRedraw) - this._$$handleSizeChange = callWith(this, this._handleSizeChange) - - this._cleanupOnExit = options.cleanupOnExit == null || options.cleanupOnExit - this._removeOnExit = null - - if (options.enabled || (options.enabled == null && this._tty && this._tty.isTTY)) { - this.enable() - } else { - this.disable() - } -} -Gauge.prototype = {} - -Gauge.prototype.isEnabled = function () { - return !this._disabled -} - -Gauge.prototype.setTemplate = function (template) { - this._gauge.setTemplate(template) - if (this._showing) this._requestRedraw() -} - -Gauge.prototype._computeTheme = function (theme) { - if (!theme) theme = {} - if (typeof theme === 'string') { - theme = this._themes.getTheme(theme) - } else if (theme && (Object.keys(theme).length === 0 || theme.hasUnicode != null || theme.hasColor != null)) { - var useUnicode = theme.hasUnicode == null ? hasUnicode() : theme.hasUnicode - var useColor = theme.hasColor == null ? 
hasColor : theme.hasColor - theme = this._themes.getDefault({hasUnicode: useUnicode, hasColor: useColor, platform: theme.platform}) - } - return theme -} - -Gauge.prototype.setThemeset = function (themes) { - this._themes = themes - this.setTheme(this._theme) -} - -Gauge.prototype.setTheme = function (theme) { - this._gauge.setTheme(this._computeTheme(theme)) - if (this._showing) this._requestRedraw() - this._theme = theme -} - -Gauge.prototype._requestRedraw = function () { - this._needsRedraw = true - if (!this._fixedFramerate) this._doRedraw() -} - -Gauge.prototype.getWidth = function () { - return ((this._tty && this._tty.columns) || 80) - 1 -} - -Gauge.prototype.setWriteTo = function (writeTo, tty) { - var enabled = !this._disabled - if (enabled) this.disable() - this._writeTo = writeTo - this._tty = tty || - (writeTo === process.stderr && process.stdout.isTTY && process.stdout) || - (writeTo.isTTY && writeTo) || - this._tty - if (this._gauge) this._gauge.setWidth(this.getWidth()) - if (enabled) this.enable() -} - -Gauge.prototype.enable = function () { - if (!this._disabled) return - this._disabled = false - if (this._tty) this._enableEvents() - if (this._showing) this.show() -} - -Gauge.prototype.disable = function () { - if (this._disabled) return - if (this._showing) { - this._lastUpdateAt = null - this._showing = false - this._doRedraw() - this._showing = true - } - this._disabled = true - if (this._tty) this._disableEvents() -} - -Gauge.prototype._enableEvents = function () { - if (this._cleanupOnExit) { - this._removeOnExit = onExit(callWith(this, this.disable)) - } - this._tty.on('resize', this._$$handleSizeChange) - if (this._fixedFramerate) { - this.redrawTracker = setInterval(this._$$doRedraw, this._updateInterval) - if (this.redrawTracker.unref) this.redrawTracker.unref() - } -} - -Gauge.prototype._disableEvents = function () { - this._tty.removeListener('resize', this._$$handleSizeChange) - if (this._fixedFramerate) clearInterval(this.redrawTracker) - if (this._removeOnExit) this._removeOnExit() -} - -Gauge.prototype.hide = function (cb) { - if (this._disabled) return cb && process.nextTick(cb) - if (!this._showing) return cb && process.nextTick(cb) - this._showing = false - this._doRedraw() - cb && setImmediate(cb) -} - -Gauge.prototype.show = function (section, completed) { - this._showing = true - if (typeof section === 'string') { - this._status.section = section - } else if (typeof section === 'object') { - var sectionKeys = Object.keys(section) - for (var ii = 0; ii < sectionKeys.length; ++ii) { - var key = sectionKeys[ii] - this._status[key] = section[key] - } - } - if (completed != null) this._status.completed = completed - if (this._disabled) return - this._requestRedraw() -} - -Gauge.prototype.pulse = function (subsection) { - this._status.subsection = subsection || '' - this._status.spun ++ - if (this._disabled) return - if (!this._showing) return - this._requestRedraw() -} - -Gauge.prototype._handleSizeChange = function () { - this._gauge.setWidth(this._tty.columns - 1) - this._requestRedraw() -} - -Gauge.prototype._doRedraw = function () { - if (this._disabled || this._paused) return - if (!this._fixedFramerate) { - var now = Date.now() - if (this._lastUpdateAt && now - this._lastUpdateAt < this._updateInterval) return - this._lastUpdateAt = now - } - if (!this._showing && this._onScreen) { - this._onScreen = false - var result = this._gauge.hide() - if (this._hideCursor) { - result += this._gauge.showCursor() - } - return this._writeTo.write(result) - } - if 
(!this._showing && !this._onScreen) return - if (this._showing && !this._onScreen) { - this._onScreen = true - this._needsRedraw = true - if (this._hideCursor) { - this._writeTo.write(this._gauge.hideCursor()) - } - } - if (!this._needsRedraw) return - if (!this._writeTo.write(this._gauge.show(this._status))) { - this._paused = true - this._writeTo.on('drain', callWith(this, function () { - this._paused = false - this._doRedraw() - })) - } -} diff --git a/node_modules/libnpmexec/node_modules/gauge/package.json b/node_modules/libnpmexec/node_modules/gauge/package.json deleted file mode 100644 index 4882cff8390d8..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/package.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "name": "gauge", - "version": "2.7.4", - "description": "A terminal based horizontal guage", - "main": "index.js", - "scripts": { - "test": "standard && tap test/*.js --coverage", - "prepublish": "rm -f *~" - }, - "repository": { - "type": "git", - "url": "https://github.com/iarna/gauge" - }, - "keywords": [ - "progressbar", - "progress", - "gauge" - ], - "author": "Rebecca Turner ", - "license": "ISC", - "bugs": { - "url": "https://github.com/iarna/gauge/issues" - }, - "homepage": "https://github.com/iarna/gauge", - "dependencies": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - }, - "devDependencies": { - "readable-stream": "^2.0.6", - "require-inject": "^1.4.0", - "standard": "^7.1.2", - "tap": "^5.7.2", - "through2": "^2.0.0" - }, - "files": [ - "base-theme.js", - "CHANGELOG.md", - "error.js", - "has-color.js", - "index.js", - "LICENSE", - "package.json", - "plumbing.js", - "process.js", - "progress-bar.js", - "README.md", - "render-template.js", - "set-immediate.js", - "set-interval.js", - "spin.js", - "template-item.js", - "theme-set.js", - "themes.js", - "wide-truncate.js" - ] -} diff --git a/node_modules/libnpmexec/node_modules/gauge/plumbing.js b/node_modules/libnpmexec/node_modules/gauge/plumbing.js deleted file mode 100644 index 1afb4af6d5017..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/plumbing.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict' -var consoleControl = require('console-control-strings') -var renderTemplate = require('./render-template.js') -var validate = require('aproba') - -var Plumbing = module.exports = function (theme, template, width) { - if (!width) width = 80 - validate('OAN', [theme, template, width]) - this.showing = false - this.theme = theme - this.width = width - this.template = template -} -Plumbing.prototype = {} - -Plumbing.prototype.setTheme = function (theme) { - validate('O', [theme]) - this.theme = theme -} - -Plumbing.prototype.setTemplate = function (template) { - validate('A', [template]) - this.template = template -} - -Plumbing.prototype.setWidth = function (width) { - validate('N', [width]) - this.width = width -} - -Plumbing.prototype.hide = function () { - return consoleControl.gotoSOL() + consoleControl.eraseLine() -} - -Plumbing.prototype.hideCursor = consoleControl.hideCursor - -Plumbing.prototype.showCursor = consoleControl.showCursor - -Plumbing.prototype.show = function (status) { - var values = Object.create(this.theme) - for (var key in status) { - values[key] = status[key] - } - - return renderTemplate(this.width, this.template, values).trim() + - consoleControl.color('reset') + - consoleControl.eraseLine() + 
consoleControl.gotoSOL() -} diff --git a/node_modules/libnpmexec/node_modules/gauge/process.js b/node_modules/libnpmexec/node_modules/gauge/process.js deleted file mode 100644 index 05e85694d755b..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/process.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' -// this exists so we can replace it during testing -module.exports = process diff --git a/node_modules/libnpmexec/node_modules/gauge/progress-bar.js b/node_modules/libnpmexec/node_modules/gauge/progress-bar.js deleted file mode 100644 index 7f8dd68be24cf..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/progress-bar.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict' -var validate = require('aproba') -var renderTemplate = require('./render-template.js') -var wideTruncate = require('./wide-truncate') -var stringWidth = require('string-width') - -module.exports = function (theme, width, completed) { - validate('ONN', [theme, width, completed]) - if (completed < 0) completed = 0 - if (completed > 1) completed = 1 - if (width <= 0) return '' - var sofar = Math.round(width * completed) - var rest = width - sofar - var template = [ - {type: 'complete', value: repeat(theme.complete, sofar), length: sofar}, - {type: 'remaining', value: repeat(theme.remaining, rest), length: rest} - ] - return renderTemplate(width, template, theme) -} - -// lodash's way of repeating -function repeat (string, width) { - var result = '' - var n = width - do { - if (n % 2) { - result += string - } - n = Math.floor(n / 2) - /*eslint no-self-assign: 0*/ - string += string - } while (n && stringWidth(result) < width) - - return wideTruncate(result, width) -} diff --git a/node_modules/libnpmexec/node_modules/gauge/render-template.js b/node_modules/libnpmexec/node_modules/gauge/render-template.js deleted file mode 100644 index 3261bfbe6f4be..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/render-template.js +++ /dev/null @@ -1,181 +0,0 @@ -'use strict' -var align = require('wide-align') -var validate = require('aproba') -var objectAssign = require('object-assign') -var wideTruncate = require('./wide-truncate') -var error = require('./error') -var TemplateItem = require('./template-item') - -function renderValueWithValues (values) { - return function (item) { - return renderValue(item, values) - } -} - -var renderTemplate = module.exports = function (width, template, values) { - var items = prepareItems(width, template, values) - var rendered = items.map(renderValueWithValues(values)).join('') - return align.left(wideTruncate(rendered, width), width) -} - -function preType (item) { - var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) - return 'pre' + cappedTypeName -} - -function postType (item) { - var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) - return 'post' + cappedTypeName -} - -function hasPreOrPost (item, values) { - if (!item.type) return - return values[preType(item)] || values[postType(item)] -} - -function generatePreAndPost (baseItem, parentValues) { - var item = objectAssign({}, baseItem) - var values = Object.create(parentValues) - var template = [] - var pre = preType(item) - var post = postType(item) - if (values[pre]) { - template.push({value: values[pre]}) - values[pre] = null - } - item.minLength = null - item.length = null - item.maxLength = null - template.push(item) - values[item.type] = values[item.type] - if (values[post]) { - template.push({value: values[post]}) - values[post] = null - } - return function ($1, $2, length) { - 
return renderTemplate(length, template, values) - } -} - -function prepareItems (width, template, values) { - function cloneAndObjectify (item, index, arr) { - var cloned = new TemplateItem(item, width) - var type = cloned.type - if (cloned.value == null) { - if (!(type in values)) { - if (cloned.default == null) { - throw new error.MissingTemplateValue(cloned, values) - } else { - cloned.value = cloned.default - } - } else { - cloned.value = values[type] - } - } - if (cloned.value == null || cloned.value === '') return null - cloned.index = index - cloned.first = index === 0 - cloned.last = index === arr.length - 1 - if (hasPreOrPost(cloned, values)) cloned.value = generatePreAndPost(cloned, values) - return cloned - } - - var output = template.map(cloneAndObjectify).filter(function (item) { return item != null }) - - var outputLength = 0 - var remainingSpace = width - var variableCount = output.length - - function consumeSpace (length) { - if (length > remainingSpace) length = remainingSpace - outputLength += length - remainingSpace -= length - } - - function finishSizing (item, length) { - if (item.finished) throw new error.Internal('Tried to finish template item that was already finished') - if (length === Infinity) throw new error.Internal('Length of template item cannot be infinity') - if (length != null) item.length = length - item.minLength = null - item.maxLength = null - --variableCount - item.finished = true - if (item.length == null) item.length = item.getBaseLength() - if (item.length == null) throw new error.Internal('Finished template items must have a length') - consumeSpace(item.getLength()) - } - - output.forEach(function (item) { - if (!item.kerning) return - var prevPadRight = item.first ? 0 : output[item.index - 1].padRight - if (!item.first && prevPadRight < item.kerning) item.padLeft = item.kerning - prevPadRight - if (!item.last) item.padRight = item.kerning - }) - - // Finish any that have a fixed (literal or intuited) length - output.forEach(function (item) { - if (item.getBaseLength() == null) return - finishSizing(item) - }) - - var resized = 0 - var resizing - var hunkSize - do { - resizing = false - hunkSize = Math.round(remainingSpace / variableCount) - output.forEach(function (item) { - if (item.finished) return - if (!item.maxLength) return - if (item.getMaxLength() < hunkSize) { - finishSizing(item, item.maxLength) - resizing = true - } - }) - } while (resizing && resized++ < output.length) - if (resizing) throw new error.Internal('Resize loop iterated too many times while determining maxLength') - - resized = 0 - do { - resizing = false - hunkSize = Math.round(remainingSpace / variableCount) - output.forEach(function (item) { - if (item.finished) return - if (!item.minLength) return - if (item.getMinLength() >= hunkSize) { - finishSizing(item, item.minLength) - resizing = true - } - }) - } while (resizing && resized++ < output.length) - if (resizing) throw new error.Internal('Resize loop iterated too many times while determining minLength') - - hunkSize = Math.round(remainingSpace / variableCount) - output.forEach(function (item) { - if (item.finished) return - finishSizing(item, hunkSize) - }) - - return output -} - -function renderFunction (item, values, length) { - validate('OON', arguments) - if (item.type) { - return item.value(values, values[item.type + 'Theme'] || {}, length) - } else { - return item.value(values, {}, length) - } -} - -function renderValue (item, values) { - var length = item.getBaseLength() - var value = typeof item.value === 
'function' ? renderFunction(item, values, length) : item.value - if (value == null || value === '') return '' - var alignWith = align[item.align] || align.left - var leftPadding = item.padLeft ? align.left('', item.padLeft) : '' - var rightPadding = item.padRight ? align.right('', item.padRight) : '' - var truncated = wideTruncate(String(value), length) - var aligned = alignWith(truncated, length) - return leftPadding + aligned + rightPadding -} diff --git a/node_modules/libnpmexec/node_modules/gauge/set-immediate.js b/node_modules/libnpmexec/node_modules/gauge/set-immediate.js deleted file mode 100644 index 6650a485c4993..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/set-immediate.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' -var process = require('./process') -try { - module.exports = setImmediate -} catch (ex) { - module.exports = process.nextTick -} diff --git a/node_modules/libnpmexec/node_modules/gauge/set-interval.js b/node_modules/libnpmexec/node_modules/gauge/set-interval.js deleted file mode 100644 index 576198793c550..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/set-interval.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' -// this exists so we can replace it during testing -module.exports = setInterval diff --git a/node_modules/libnpmexec/node_modules/gauge/spin.js b/node_modules/libnpmexec/node_modules/gauge/spin.js deleted file mode 100644 index 34142ee31acc7..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/spin.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict' - -module.exports = function spin (spinstr, spun) { - return spinstr[spun % spinstr.length] -} diff --git a/node_modules/libnpmexec/node_modules/gauge/template-item.js b/node_modules/libnpmexec/node_modules/gauge/template-item.js deleted file mode 100644 index e46f447c941d3..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/template-item.js +++ /dev/null @@ -1,73 +0,0 @@ -'use strict' -var stringWidth = require('string-width') - -module.exports = TemplateItem - -function isPercent (num) { - if (typeof num !== 'string') return false - return num.slice(-1) === '%' -} - -function percent (num) { - return Number(num.slice(0, -1)) / 100 -} - -function TemplateItem (values, outputLength) { - this.overallOutputLength = outputLength - this.finished = false - this.type = null - this.value = null - this.length = null - this.maxLength = null - this.minLength = null - this.kerning = null - this.align = 'left' - this.padLeft = 0 - this.padRight = 0 - this.index = null - this.first = null - this.last = null - if (typeof values === 'string') { - this.value = values - } else { - for (var prop in values) this[prop] = values[prop] - } - // Realize percents - if (isPercent(this.length)) { - this.length = Math.round(this.overallOutputLength * percent(this.length)) - } - if (isPercent(this.minLength)) { - this.minLength = Math.round(this.overallOutputLength * percent(this.minLength)) - } - if (isPercent(this.maxLength)) { - this.maxLength = Math.round(this.overallOutputLength * percent(this.maxLength)) - } - return this -} - -TemplateItem.prototype = {} - -TemplateItem.prototype.getBaseLength = function () { - var length = this.length - if (length == null && typeof this.value === 'string' && this.maxLength == null && this.minLength == null) { - length = stringWidth(this.value) - } - return length -} - -TemplateItem.prototype.getLength = function () { - var length = this.getBaseLength() - if (length == null) return null - return length + this.padLeft + this.padRight -} - 
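// Illustrative sketch (not part of the original file): TemplateItem above
// accepts percent strings for length/minLength/maxLength and realizes them
// against the overall output width at construction time.
var TemplateItem = require('./template-item.js')

var item = new TemplateItem({ type: 'section', length: '25%' }, 80)
console.log(item.length)          // 20 -- Math.round(80 * 0.25)
console.log(item.getBaseLength()) // 20
console.log(item.getLength())     // 20 -- padLeft/padRight default to 0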
-TemplateItem.prototype.getMaxLength = function () { - if (this.maxLength == null) return null - return this.maxLength + this.padLeft + this.padRight -} - -TemplateItem.prototype.getMinLength = function () { - if (this.minLength == null) return null - return this.minLength + this.padLeft + this.padRight -} - diff --git a/node_modules/libnpmexec/node_modules/gauge/theme-set.js b/node_modules/libnpmexec/node_modules/gauge/theme-set.js deleted file mode 100644 index 68971d5d231b0..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/theme-set.js +++ /dev/null @@ -1,115 +0,0 @@ -'use strict' -var objectAssign = require('object-assign') - -module.exports = function () { - return ThemeSetProto.newThemeSet() -} - -var ThemeSetProto = {} - -ThemeSetProto.baseTheme = require('./base-theme.js') - -ThemeSetProto.newTheme = function (parent, theme) { - if (!theme) { - theme = parent - parent = this.baseTheme - } - return objectAssign({}, parent, theme) -} - -ThemeSetProto.getThemeNames = function () { - return Object.keys(this.themes) -} - -ThemeSetProto.addTheme = function (name, parent, theme) { - this.themes[name] = this.newTheme(parent, theme) -} - -ThemeSetProto.addToAllThemes = function (theme) { - var themes = this.themes - Object.keys(themes).forEach(function (name) { - objectAssign(themes[name], theme) - }) - objectAssign(this.baseTheme, theme) -} - -ThemeSetProto.getTheme = function (name) { - if (!this.themes[name]) throw this.newMissingThemeError(name) - return this.themes[name] -} - -ThemeSetProto.setDefault = function (opts, name) { - if (name == null) { - name = opts - opts = {} - } - var platform = opts.platform == null ? 'fallback' : opts.platform - var hasUnicode = !!opts.hasUnicode - var hasColor = !!opts.hasColor - if (!this.defaults[platform]) this.defaults[platform] = {true: {}, false: {}} - this.defaults[platform][hasUnicode][hasColor] = name -} - -ThemeSetProto.getDefault = function (opts) { - if (!opts) opts = {} - var platformName = opts.platform || process.platform - var platform = this.defaults[platformName] || this.defaults.fallback - var hasUnicode = !!opts.hasUnicode - var hasColor = !!opts.hasColor - if (!platform) throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) - if (!platform[hasUnicode][hasColor]) { - if (hasUnicode && hasColor && platform[!hasUnicode][hasColor]) { - hasUnicode = false - } else if (hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { - hasColor = false - } else if (hasUnicode && hasColor && platform[!hasUnicode][!hasColor]) { - hasUnicode = false - hasColor = false - } else if (hasUnicode && !hasColor && platform[!hasUnicode][hasColor]) { - hasUnicode = false - } else if (!hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { - hasColor = false - } else if (platform === this.defaults.fallback) { - throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) - } - } - if (platform[hasUnicode][hasColor]) { - return this.getTheme(platform[hasUnicode][hasColor]) - } else { - return this.getDefault(objectAssign({}, opts, {platform: 'fallback'})) - } -} - -ThemeSetProto.newMissingThemeError = function newMissingThemeError (name) { - var err = new Error('Could not find a gauge theme named "' + name + '"') - Error.captureStackTrace.call(err, newMissingThemeError) - err.theme = name - err.code = 'EMISSINGTHEME' - return err -} - -ThemeSetProto.newMissingDefaultThemeError = function newMissingDefaultThemeError (platformName, hasUnicode, hasColor) { - var err = new Error( - 'Could not find a 
gauge theme for your platform/unicode/color use combo:\n' + - ' platform = ' + platformName + '\n' + - ' hasUnicode = ' + hasUnicode + '\n' + - ' hasColor = ' + hasColor) - Error.captureStackTrace.call(err, newMissingDefaultThemeError) - err.platform = platformName - err.hasUnicode = hasUnicode - err.hasColor = hasColor - err.code = 'EMISSINGTHEME' - return err -} - -ThemeSetProto.newThemeSet = function () { - var themeset = function (opts) { - return themeset.getDefault(opts) - } - return objectAssign(themeset, ThemeSetProto, { - themes: objectAssign({}, this.themes), - baseTheme: objectAssign({}, this.baseTheme), - defaults: JSON.parse(JSON.stringify(this.defaults || {})) - }) -} - diff --git a/node_modules/libnpmexec/node_modules/gauge/themes.js b/node_modules/libnpmexec/node_modules/gauge/themes.js deleted file mode 100644 index eb5a4f5b5e103..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/themes.js +++ /dev/null @@ -1,54 +0,0 @@ -'use strict' -var consoleControl = require('console-control-strings') -var ThemeSet = require('./theme-set.js') - -var themes = module.exports = new ThemeSet() - -themes.addTheme('ASCII', { - preProgressbar: '[', - postProgressbar: ']', - progressbarTheme: { - complete: '#', - remaining: '.' - }, - activityIndicatorTheme: '-\\|/', - preSubsection: '>' -}) - -themes.addTheme('colorASCII', themes.getTheme('ASCII'), { - progressbarTheme: { - preComplete: consoleControl.color('inverse'), - complete: ' ', - postComplete: consoleControl.color('stopInverse'), - preRemaining: consoleControl.color('brightBlack'), - remaining: '.', - postRemaining: consoleControl.color('reset') - } -}) - -themes.addTheme('brailleSpinner', { - preProgressbar: '⸨', - postProgressbar: '⸩', - progressbarTheme: { - complete: '░', - remaining: '⠂' - }, - activityIndicatorTheme: '⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏', - preSubsection: '>' -}) - -themes.addTheme('colorBrailleSpinner', themes.getTheme('brailleSpinner'), { - progressbarTheme: { - preComplete: consoleControl.color('inverse'), - complete: ' ', - postComplete: consoleControl.color('stopInverse'), - preRemaining: consoleControl.color('brightBlack'), - remaining: '░', - postRemaining: consoleControl.color('reset') - } -}) - -themes.setDefault({}, 'ASCII') -themes.setDefault({hasColor: true}, 'colorASCII') -themes.setDefault({platform: 'darwin', hasUnicode: true}, 'brailleSpinner') -themes.setDefault({platform: 'darwin', hasUnicode: true, hasColor: true}, 'colorBrailleSpinner') diff --git a/node_modules/libnpmexec/node_modules/gauge/wide-truncate.js b/node_modules/libnpmexec/node_modules/gauge/wide-truncate.js deleted file mode 100644 index c531bc491fbb5..0000000000000 --- a/node_modules/libnpmexec/node_modules/gauge/wide-truncate.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict' -var stringWidth = require('string-width') -var stripAnsi = require('strip-ansi') - -module.exports = wideTruncate - -function wideTruncate (str, target) { - if (stringWidth(str) === 0) return str - if (target <= 0) return '' - if (stringWidth(str) <= target) return str - - // We compute the number of bytes of ansi sequences here and add - // that to our initial truncation to ensure that we don't slice one - // that we want to keep in half. - var noAnsi = stripAnsi(str) - var ansiSize = str.length + noAnsi.length - var truncated = str.slice(0, target + ansiSize) - - // we have to shrink the result to account for our ansi sequence buffer - // (if an ansi sequence was truncated) and double width characters. 
- while (stringWidth(truncated) > target) { - truncated = truncated.slice(0, -1) - } - return truncated -} diff --git a/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/index.js b/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/index.js deleted file mode 100644 index a7d3e3855f1c2..0000000000000 --- a/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/index.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict'; -var numberIsNan = require('number-is-nan'); - -module.exports = function (x) { - if (numberIsNan(x)) { - return false; - } - - // https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1369 - - // code points are derived from: - // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt - if (x >= 0x1100 && ( - x <= 0x115f || // Hangul Jamo - 0x2329 === x || // LEFT-POINTING ANGLE BRACKET - 0x232a === x || // RIGHT-POINTING ANGLE BRACKET - // CJK Radicals Supplement .. Enclosed CJK Letters and Months - (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) || - // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A - 0x3250 <= x && x <= 0x4dbf || - // CJK Unified Ideographs .. Yi Radicals - 0x4e00 <= x && x <= 0xa4c6 || - // Hangul Jamo Extended-A - 0xa960 <= x && x <= 0xa97c || - // Hangul Syllables - 0xac00 <= x && x <= 0xd7a3 || - // CJK Compatibility Ideographs - 0xf900 <= x && x <= 0xfaff || - // Vertical Forms - 0xfe10 <= x && x <= 0xfe19 || - // CJK Compatibility Forms .. Small Form Variants - 0xfe30 <= x && x <= 0xfe6b || - // Halfwidth and Fullwidth Forms - 0xff01 <= x && x <= 0xff60 || - 0xffe0 <= x && x <= 0xffe6 || - // Kana Supplement - 0x1b000 <= x && x <= 0x1b001 || - // Enclosed Ideographic Supplement - 0x1f200 <= x && x <= 0x1f251 || - // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane - 0x20000 <= x && x <= 0x3fffd)) { - return true; - } - - return false; -} diff --git a/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/license b/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/license deleted file mode 100644 index 654d0bfe94343..0000000000000 --- a/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/license +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
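Taken together, the gauge files removed above form a small terminal progress-display library. A minimal sketch of its public surface, grounded in the Gauge constructor and methods from index.js and the theme names registered in themes.js:

// Usage sketch for the removed gauge@2.7.4, based on index.js/themes.js above.
var Gauge = require('gauge')

// Writes to stderr by default; enabled automatically only when attached
// to a TTY (see the constructor's `enabled` handling above).
var gauge = new Gauge({ theme: 'colorASCII', updateInterval: 50 })

gauge.show('fetch', 0.25)  // section name plus completion in [0, 1]
gauge.pulse('lodash')      // advance the activity indicator, set subsection
gauge.show({ section: 'link', subsection: '' }, 0.75)
gauge.hide(function () {
  console.log('done')
})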
diff --git a/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/package.json b/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/package.json deleted file mode 100644 index b678d40de728c..0000000000000 --- a/node_modules/libnpmexec/node_modules/is-fullwidth-code-point/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "is-fullwidth-code-point", - "version": "1.0.0", - "description": "Check if the character represented by a given Unicode code point is fullwidth", - "license": "MIT", - "repository": "sindresorhus/is-fullwidth-code-point", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "node test.js" - }, - "files": [ - "index.js" - ], - "keywords": [ - "fullwidth", - "full-width", - "full", - "width", - "unicode", - "character", - "char", - "string", - "str", - "codepoint", - "code", - "point", - "is", - "detect", - "check" - ], - "dependencies": { - "number-is-nan": "^1.0.0" - }, - "devDependencies": { - "ava": "0.0.4", - "code-point-at": "^1.0.0" - } -} diff --git a/node_modules/libnpmexec/node_modules/node-gyp/CONTRIBUTING.md b/node_modules/libnpmexec/node_modules/node-gyp/CONTRIBUTING.md deleted file mode 100644 index c1c50eab4e58b..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/CONTRIBUTING.md +++ /dev/null @@ -1,34 +0,0 @@ -# Contributing to node-gyp - -## Code of Conduct - -Please read the -[Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md) -which explains the minimum behavior expectations for node-gyp contributors. - - -## Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -* (a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - -* (b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - -* (c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - -* (d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. 
diff --git a/node_modules/libnpmexec/node_modules/node-gyp/LICENSE b/node_modules/libnpmexec/node_modules/node-gyp/LICENSE deleted file mode 100644 index 2ea4dc5efb872..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -(The MIT License) - -Copyright (c) 2012 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/libnpmexec/node_modules/node-gyp/addon.gypi b/node_modules/libnpmexec/node_modules/node-gyp/addon.gypi deleted file mode 100644 index 9327b0d722e74..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/addon.gypi +++ /dev/null @@ -1,185 +0,0 @@ -{ - 'variables' : { - 'node_engine_include_dir%': 'deps/v8/include', - 'node_host_binary%': 'node', - 'node_with_ltcg%': 'true', - }, - 'target_defaults': { - 'type': 'loadable_module', - 'win_delay_load_hook': 'true', - 'product_prefix': '', - - 'conditions': [ - [ 'node_engine=="chakracore"', { - 'variables': { - 'node_engine_include_dir%': 'deps/chakrashim/include' - }, - }] - ], - - 'include_dirs': [ - '<(node_root_dir)/include/node', - '<(node_root_dir)/src', - '<(node_root_dir)/deps/openssl/config', - '<(node_root_dir)/deps/openssl/openssl/include', - '<(node_root_dir)/deps/uv/include', - '<(node_root_dir)/deps/zlib', - '<(node_root_dir)/<(node_engine_include_dir)' - ], - 'defines!': [ - 'BUILDING_UV_SHARED=1', # Inherited from common.gypi. - 'BUILDING_V8_SHARED=1', # Inherited from common.gypi. - ], - 'defines': [ - 'NODE_GYP_MODULE_NAME=>(_target_name)', - 'USING_UV_SHARED=1', - 'USING_V8_SHARED=1', - # Warn when using deprecated V8 APIs. - 'V8_DEPRECATION_WARNINGS=1' - ], - - 'target_conditions': [ - ['_type=="loadable_module"', { - 'product_extension': 'node', - 'defines': [ - 'BUILDING_NODE_EXTENSION' - ], - 'xcode_settings': { - 'OTHER_LDFLAGS': [ - '-undefined dynamic_lookup' - ], - }, - }], - - ['_type=="static_library"', { - # set to `1` to *disable* the -T thin archive 'ld' flag. - # older linkers don't support this flag. - 'standalone_static_library': '<(standalone_static_library)' - }], - - ['_type!="executable"', { - 'conditions': [ - [ 'OS=="android"', { - 'cflags!': [ '-fPIE' ], - }] - ] - }], - - ['_win_delay_load_hook=="true"', { - # If the addon specifies `'win_delay_load_hook': 'true'` in its - # binding.gyp, link a delay-load hook into the DLL. This hook ensures - # that the addon will work regardless of whether the node/iojs binary - # is named node.exe, iojs.exe, or something else. 
- 'conditions': [ - [ 'OS=="win"', { - 'defines': [ 'HOST_BINARY=\"<(node_host_binary)<(EXECUTABLE_SUFFIX)\"', ], - 'sources': [ - '<(node_gyp_dir)/src/win_delay_load_hook.cc', - ], - 'msvs_settings': { - 'VCLinkerTool': { - 'DelayLoadDLLs': [ '<(node_host_binary)<(EXECUTABLE_SUFFIX)' ], - # Don't print a linker warning when no imports from either .exe - # are used. - 'AdditionalOptions': [ '/ignore:4199' ], - }, - }, - }], - ], - }], - ], - - 'conditions': [ - [ 'OS=="mac"', { - 'defines': [ - '_DARWIN_USE_64_BIT_INODE=1' - ], - 'xcode_settings': { - 'DYLIB_INSTALL_NAME_BASE': '@rpath' - }, - }], - [ 'OS=="aix"', { - 'ldflags': [ - '-Wl,-bimport:<(node_exp_file)' - ], - }], - [ 'OS=="zos"', { - 'cflags': [ - '-q64', - '-Wc,DLL', - '-qlonglong', - '-qenum=int', - '-qxclang=-fexec-charset=ISO8859-1' - ], - 'defines': [ - '_ALL_SOURCE=1', - 'MAP_FAILED=-1', - '_UNIX03_SOURCE=1' - ], - 'ldflags': [ - '-q64', - '<(node_exp_file)' - ], - }], - [ 'OS=="win"', { - 'conditions': [ - ['node_engine=="chakracore"', { - 'library_dirs': [ '<(node_root_dir)/$(ConfigurationName)' ], - 'libraries': [ '<@(node_engine_libs)' ], - }], - ['node_with_ltcg=="true"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - 'WholeProgramOptimization': 'true' # /GL, whole program optimization, needed for LTCG - }, - 'VCLibrarianTool': { - 'AdditionalOptions': [ - '/LTCG:INCREMENTAL', # incremental link-time code generation - ] - }, - 'VCLinkerTool': { - 'OptimizeReferences': 2, # /OPT:REF - 'EnableCOMDATFolding': 2, # /OPT:ICF - 'LinkIncremental': 1, # disable incremental linking - 'AdditionalOptions': [ - '/LTCG:INCREMENTAL', # incremental link-time code generation - ] - } - } - }] - ], - 'libraries': [ - '-lkernel32.lib', - '-luser32.lib', - '-lgdi32.lib', - '-lwinspool.lib', - '-lcomdlg32.lib', - '-ladvapi32.lib', - '-lshell32.lib', - '-lole32.lib', - '-loleaut32.lib', - '-luuid.lib', - '-lodbc32.lib', - '-lDelayImp.lib', - '-l"<(node_lib_file)"' - ], - 'msvs_disabled_warnings': [ - # warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent' - # needs to have dll-interface to be used by - # clients of class 'node::ObjectWrap' - 4251 - ], - }, { - # OS!="win" - 'defines': [ - '_LARGEFILE_SOURCE', - '_FILE_OFFSET_BITS=64' - ], - }], - [ 'OS in "freebsd openbsd netbsd solaris android" or \ - (OS=="linux" and target_arch!="ia32")', { - 'cflags': [ '-fPIC' ], - }], - ] - } -} diff --git a/node_modules/libnpmexec/node_modules/node-gyp/bin/node-gyp.js b/node_modules/libnpmexec/node_modules/node-gyp/bin/node-gyp.js deleted file mode 100755 index 8652ea21eceeb..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/bin/node-gyp.js +++ /dev/null @@ -1,140 +0,0 @@ -#!/usr/bin/env node - -'use strict' - -process.title = 'node-gyp' - -const envPaths = require('env-paths') -const gyp = require('../') -const log = require('npmlog') -const os = require('os') - -/** - * Process and execute the selected commands. - */ - -const prog = gyp() -var completed = false -prog.parseArgv(process.argv) -prog.devDir = prog.opts.devdir - -var homeDir = os.homedir() -if (prog.devDir) { - prog.devDir = prog.devDir.replace(/^~/, homeDir) -} else if (homeDir) { - prog.devDir = envPaths('node-gyp', { suffix: '' }).cache -} else { - throw new Error( - "node-gyp requires that the user's home directory is specified " + - 'in either of the environmental variables HOME or USERPROFILE. 
' + - 'Overide with: --devdir /path/to/.node-gyp') -} - -if (prog.todo.length === 0) { - if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) { - console.log('v%s', prog.version) - } else { - console.log('%s', prog.usage()) - } - process.exit(0) -} - -log.info('it worked if it ends with', 'ok') -log.verbose('cli', process.argv) -log.info('using', 'node-gyp@%s', prog.version) -log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch) - -/** - * Change dir if -C/--directory was passed. - */ - -var dir = prog.opts.directory -if (dir) { - var fs = require('fs') - try { - var stat = fs.statSync(dir) - if (stat.isDirectory()) { - log.info('chdir', dir) - process.chdir(dir) - } else { - log.warn('chdir', dir + ' is not a directory') - } - } catch (e) { - if (e.code === 'ENOENT') { - log.warn('chdir', dir + ' is not a directory') - } else { - log.warn('chdir', 'error during chdir() "%s"', e.message) - } - } -} - -function run () { - var command = prog.todo.shift() - if (!command) { - // done! - completed = true - log.info('ok') - return - } - - prog.commands[command.name](command.args, function (err) { - if (err) { - log.error(command.name + ' error') - log.error('stack', err.stack) - errorMessage() - log.error('not ok') - return process.exit(1) - } - if (command.name === 'list') { - var versions = arguments[1] - if (versions.length > 0) { - versions.forEach(function (version) { - console.log(version) - }) - } else { - console.log('No node development files installed. Use `node-gyp install` to install a version.') - } - } else if (arguments.length >= 2) { - console.log.apply(console, [].slice.call(arguments, 1)) - } - - // now run the next command in the queue - process.nextTick(run) - }) -} - -process.on('exit', function (code) { - if (!completed && !code) { - log.error('Completion callback never invoked!') - issueMessage() - process.exit(6) - } -}) - -process.on('uncaughtException', function (err) { - log.error('UNCAUGHT EXCEPTION') - log.error('stack', err.stack) - issueMessage() - process.exit(7) -}) - -function errorMessage () { - // copied from npm's lib/utils/error-handler.js - var os = require('os') - log.error('System', os.type() + ' ' + os.release()) - log.error('command', process.argv - .map(JSON.stringify).join(' ')) - log.error('cwd', process.cwd()) - log.error('node -v', process.version) - log.error('node-gyp -v', 'v' + prog.package.version) -} - -function issueMessage () { - errorMessage() - log.error('', ['Node-gyp failed to build your package.', - 'Try to update npm and/or node-gyp and if it does not help file an issue with the package author.' - ].join('\n')) -} - -// start running the given commands! -run() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/.flake8 b/node_modules/libnpmexec/node_modules/node-gyp/gyp/.flake8 deleted file mode 100644 index ea0c7680ef87b..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/.flake8 +++ /dev/null @@ -1,4 +0,0 @@ -[flake8] -max-complexity = 101 -max-line-length = 88 -extend-ignore = E203 # whitespace before ':' to agree with psf/black diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/AUTHORS b/node_modules/libnpmexec/node_modules/node-gyp/gyp/AUTHORS deleted file mode 100644 index f49a357b9ed10..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/AUTHORS +++ /dev/null @@ -1,16 +0,0 @@ -# Names should be added to this file like so: -# Name or Organization - -Google Inc. <*@google.com> -Bloomberg Finance L.P. 
<*@bloomberg.net> -IBM Inc. <*@*.ibm.com> -Yandex LLC <*@yandex-team.ru> - -Steven Knight -Ryan Norton -David J. Sankel -Eric N. Vander Weele -Tom Freudenberg -Julien Brianceau -Refael Ackermann -Ujjwal Sharma diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md b/node_modules/libnpmexec/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md deleted file mode 100644 index 4c211405596cb..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,4 +0,0 @@ -# Code of Conduct - -* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md) -* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/master/Moderation-Policy.md) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/CONTRIBUTING.md b/node_modules/libnpmexec/node_modules/node-gyp/gyp/CONTRIBUTING.md deleted file mode 100644 index f9dd574a47cd9..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/CONTRIBUTING.md +++ /dev/null @@ -1,32 +0,0 @@ -# Contributing to gyp-next - -## Code of Conduct - -This project is bound to the [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md). - - -## Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -* (a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - -* (b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - -* (c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - -* (d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/LICENSE b/node_modules/libnpmexec/node_modules/node-gyp/gyp/LICENSE deleted file mode 100644 index c6944c5e4ed48..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2020 Node.js contributors. All rights reserved. -Copyright (c) 2009 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc b/node_modules/libnpmexec/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc deleted file mode 100644 index 8bca510815e0a..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) 2013 Google Inc. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is -// then used during the final link for modules that have large PDBs. Otherwise, -// the linker will generate a pdb with a page size of 1KB, which imposes a limit -// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler -// (rather than the linker), this limit is avoided. With this in place PDBs may -// grow to 2GB. -// -// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py. diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp b/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp deleted file mode 100755 index 1b8b9bdfb05f5..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# Copyright 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -set -e -base=$(dirname "$0") -exec python "${base}/gyp_main.py" "$@" diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp.bat b/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp.bat deleted file mode 100755 index c0b4ca24e5df0..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp.bat +++ /dev/null @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python "%~dp0gyp_main.py" %* diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp_main.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp_main.py deleted file mode 100755 index da696cfc4b1c3..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/gyp_main.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import os -import sys -import subprocess - -PY3 = bytes != str - - -def IsCygwin(): - # Function copied from pylib/gyp/common.py - try: - out = subprocess.Popen( - "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - stdout, stderr = out.communicate() - if PY3: - stdout = stdout.decode("utf-8") - return "CYGWIN" in str(stdout) - except Exception: - return False - - -def UnixifyPath(path): - try: - if not IsCygwin(): - return path - out = subprocess.Popen( - ["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - stdout, _ = out.communicate() - if PY3: - stdout = stdout.decode("utf-8") - return str(stdout) - except Exception: - return path - - -# Make sure we're using the version of pylib in this repo, not one installed -# elsewhere on the system. Also convert to Unix style path on Cygwin systems, -# else the 'gyp' library will not be found -path = UnixifyPath(sys.argv[0]) -sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib")) -import gyp # noqa: E402 - -if __name__ == "__main__": - sys.exit(gyp.script_main()) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py deleted file mode 100644 index 04bbb3df71872..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py +++ /dev/null @@ -1,370 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""New implementation of Visual Studio project generation.""" - -import hashlib -import os -import random -from operator import attrgetter - -import gyp.common - -try: - cmp -except NameError: - - def cmp(x, y): - return (x > y) - (x < y) - - -# Initialize random number generator -random.seed() - -# GUIDs for project types -ENTRY_TYPE_GUIDS = { - "project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}", - "folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}", -} - -# ------------------------------------------------------------------------------ -# Helper functions - - -def MakeGuid(name, seed="msvs_new"): - """Returns a GUID for the specified target name. - - Args: - name: Target name. - seed: Seed for MD5 hash. - Returns: - A GUID-line string calculated from the name and seed. - - This generates something which looks like a GUID, but depends only on the - name and seed. This means the same name/seed will always generate the same - GUID, so that projects and solutions which refer to each other can explicitly - determine the GUID to refer to explicitly. It also means that the GUID will - not change when the project for a target is rebuilt. - """ - # Calculate a MD5 signature for the seed and name. - d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper() - # Convert most of the signature to GUID form (discard the rest) - guid = ( - "{" - + d[:8] - + "-" - + d[8:12] - + "-" - + d[12:16] - + "-" - + d[16:20] - + "-" - + d[20:32] - + "}" - ) - return guid - - -# ------------------------------------------------------------------------------ - - -class MSVSSolutionEntry(object): - def __cmp__(self, other): - # Sort by name then guid (so things are in order on vs2008). - return cmp((self.name, self.get_guid()), (other.name, other.get_guid())) - - -class MSVSFolder(MSVSSolutionEntry): - """Folder in a Visual Studio project or solution.""" - - def __init__(self, path, name=None, entries=None, guid=None, items=None): - """Initializes the folder. 
- - Args: - path: Full path to the folder. - name: Name of the folder. - entries: List of folder entries to nest inside this folder. May contain - Folder or Project objects. May be None, if the folder is empty. - guid: GUID to use for folder, if not None. - items: List of solution items to include in the folder project. May be - None, if the folder does not directly contain items. - """ - if name: - self.name = name - else: - # Use last layer. - self.name = os.path.basename(path) - - self.path = path - self.guid = guid - - # Copy passed lists (or set to empty lists) - self.entries = sorted(entries or [], key=attrgetter("path")) - self.items = list(items or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"] - - def get_guid(self): - if self.guid is None: - # Use consistent guids for folders (so things don't regenerate). - self.guid = MakeGuid(self.path, seed="msvs_folder") - return self.guid - - -# ------------------------------------------------------------------------------ - - -class MSVSProject(MSVSSolutionEntry): - """Visual Studio project.""" - - def __init__( - self, - path, - name=None, - dependencies=None, - guid=None, - spec=None, - build_file=None, - config_platform_overrides=None, - fixpath_prefix=None, - ): - """Initializes the project. - - Args: - path: Absolute path to the project file. - name: Name of project. If None, the name will be the same as the base - name of the project file. - dependencies: List of other Project objects this project is dependent - upon, if not None. - guid: GUID to use for project, if not None. - spec: Dictionary specifying how to build this project. - build_file: Filename of the .gyp file that the vcproj file comes from. - config_platform_overrides: optional dict of configuration platforms to - used in place of the default for this target. - fixpath_prefix: the path used to adjust the behavior of _fixpath - """ - self.path = path - self.guid = guid - self.spec = spec - self.build_file = build_file - # Use project filename if name not specified - self.name = name or os.path.splitext(os.path.basename(path))[0] - - # Copy passed lists (or set to empty lists) - self.dependencies = list(dependencies or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS["project"] - - if config_platform_overrides: - self.config_platform_overrides = config_platform_overrides - else: - self.config_platform_overrides = {} - self.fixpath_prefix = fixpath_prefix - self.msbuild_toolset = None - - def set_dependencies(self, dependencies): - self.dependencies = list(dependencies or []) - - def get_guid(self): - if self.guid is None: - # Set GUID from path - # TODO(rspangler): This is fragile. - # 1. We can't just use the project filename sans path, since there could - # be multiple projects with the same base name (for example, - # foo/unittest.vcproj and bar/unittest.vcproj). - # 2. The path needs to be relative to $SOURCE_ROOT, so that the project - # GUID is the same whether it's included from base/base.sln or - # foo/bar/baz/baz.sln. - # 3. The GUID needs to be the same each time this builder is invoked, so - # that we don't need to rebuild the solution when the project changes. - # 4. We should be able to handle pre-built project files by reading the - # GUID from the files. 
- self.guid = MakeGuid(self.name) - return self.guid - - def set_msbuild_toolset(self, msbuild_toolset): - self.msbuild_toolset = msbuild_toolset - - -# ------------------------------------------------------------------------------ - - -class MSVSSolution(object): - """Visual Studio solution.""" - - def __init__( - self, path, version, entries=None, variants=None, websiteProperties=True - ): - """Initializes the solution. - - Args: - path: Path to solution file. - version: Format version to emit. - entries: List of entries in solution. May contain Folder or Project - objects. May be None, if the folder is empty. - variants: List of build variant strings. If none, a default list will - be used. - websiteProperties: Flag to decide if the website properties section - is generated. - """ - self.path = path - self.websiteProperties = websiteProperties - self.version = version - - # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) - - if variants: - # Copy passed list - self.variants = variants[:] - else: - # Use default - self.variants = ["Debug|Win32", "Release|Win32"] - # TODO(rspangler): Need to be able to handle a mapping of solution config - # to project config. Should we be able to handle variants being a dict, - # or add a separate variant_map variable? If it's a dict, we can't - # guarantee the order of variants since dict keys aren't ordered. - - # TODO(rspangler): Automatically write to disk for now; should delay until - # node-evaluation time. - self.Write() - - def Write(self, writer=gyp.common.WriteOnDiff): - """Writes the solution file to disk. - - Raises: - IndexError: An entry appears multiple times. - """ - # Walk the entry tree and collect all the folders and projects. - all_entries = set() - entries_to_check = self.entries[:] - while entries_to_check: - e = entries_to_check.pop(0) - - # If this entry has been visited, nothing to do. - if e in all_entries: - continue - - all_entries.add(e) - - # If this is a folder, check its entries too. - if isinstance(e, MSVSFolder): - entries_to_check += e.entries - - all_entries = sorted(all_entries, key=attrgetter("path")) - - # Open file and print header - f = writer(self.path) - f.write( - "Microsoft Visual Studio Solution File, " - "Format Version %s\r\n" % self.version.SolutionVersion() - ) - f.write("# %s\r\n" % self.version.Description()) - - # Project entries - sln_root = os.path.split(self.path)[0] - for e in all_entries: - relative_path = gyp.common.RelativePath(e.path, sln_root) - # msbuild does not accept an empty folder_name. - # use '.' in case relative_path is empty. - folder_name = relative_path.replace("/", "\\") or "." 
- f.write( - 'Project("%s") = "%s", "%s", "%s"\r\n' - % ( - e.entry_type_guid, # Entry type GUID - e.name, # Folder name - folder_name, # Folder name (again) - e.get_guid(), # Entry GUID - ) - ) - - # TODO(rspangler): Need a way to configure this stuff - if self.websiteProperties: - f.write( - "\tProjectSection(WebsiteProperties) = preProject\r\n" - '\t\tDebug.AspNetCompiler.Debug = "True"\r\n' - '\t\tRelease.AspNetCompiler.Debug = "False"\r\n' - "\tEndProjectSection\r\n" - ) - - if isinstance(e, MSVSFolder): - if e.items: - f.write("\tProjectSection(SolutionItems) = preProject\r\n") - for i in e.items: - f.write("\t\t%s = %s\r\n" % (i, i)) - f.write("\tEndProjectSection\r\n") - - if isinstance(e, MSVSProject): - if e.dependencies: - f.write("\tProjectSection(ProjectDependencies) = postProject\r\n") - for d in e.dependencies: - f.write("\t\t%s = %s\r\n" % (d.get_guid(), d.get_guid())) - f.write("\tEndProjectSection\r\n") - - f.write("EndProject\r\n") - - # Global section - f.write("Global\r\n") - - # Configurations (variants) - f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n") - for v in self.variants: - f.write("\t\t%s = %s\r\n" % (v, v)) - f.write("\tEndGlobalSection\r\n") - - # Sort config guids for easier diffing of solution changes. - config_guids = [] - config_guids_overrides = {} - for e in all_entries: - if isinstance(e, MSVSProject): - config_guids.append(e.get_guid()) - config_guids_overrides[e.get_guid()] = e.config_platform_overrides - config_guids.sort() - - f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n") - for g in config_guids: - for v in self.variants: - nv = config_guids_overrides[g].get(v, v) - # Pick which project configuration to build for this solution - # configuration. - f.write( - "\t\t%s.%s.ActiveCfg = %s\r\n" - % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - ) - ) - - # Enable project in this solution configuration. - f.write( - "\t\t%s.%s.Build.0 = %s\r\n" - % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - ) - ) - f.write("\tEndGlobalSection\r\n") - - # TODO(rspangler): Should be able to configure this stuff too (though I've - # never seen this be any different) - f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n") - f.write("\t\tHideSolutionNode = FALSE\r\n") - f.write("\tEndGlobalSection\r\n") - - # Folder mappings - # Omit this section if there are no folders - if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]): - f.write("\tGlobalSection(NestedProjects) = preSolution\r\n") - for e in all_entries: - if not isinstance(e, MSVSFolder): - continue # Does not apply to projects, only folders - for subentry in e.entries: - f.write("\t\t%s = %s\r\n" % (subentry.get_guid(), e.get_guid())) - f.write("\tEndGlobalSection\r\n") - - f.write("EndGlobal\r\n") - - f.close() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py deleted file mode 100644 index f953d52cd03d3..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py +++ /dev/null @@ -1,206 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Visual Studio project reader/writer.""" - -import gyp.easy_xml as easy_xml - -# ------------------------------------------------------------------------------ - - -class Tool(object): - """Visual Studio tool.""" - - def __init__(self, name, attrs=None): - """Initializes the tool. - - Args: - name: Tool name. - attrs: Dict of tool attributes; may be None. - """ - self._attrs = attrs or {} - self._attrs["Name"] = name - - def _GetSpecification(self): - """Creates an element for the tool. - - Returns: - A new xml.dom.Element for the tool. - """ - return ["Tool", self._attrs] - - -class Filter(object): - """Visual Studio filter - that is, a virtual folder.""" - - def __init__(self, name, contents=None): - """Initializes the folder. - - Args: - name: Filter (folder) name. - contents: List of filenames and/or Filter objects contained. - """ - self.name = name - self.contents = list(contents or []) - - -# ------------------------------------------------------------------------------ - - -class Writer(object): - """Visual Studio XML project writer.""" - - def __init__(self, project_path, version, name, guid=None, platforms=None): - """Initializes the project. - - Args: - project_path: Path to the project file. - version: Format version to emit. - name: Name of the project. - guid: GUID to use for project, if not None. - platforms: Array of string, the supported platforms. If null, ['Win32'] - """ - self.project_path = project_path - self.version = version - self.name = name - self.guid = guid - - # Default to Win32 for platforms. - if not platforms: - platforms = ["Win32"] - - # Initialize the specifications of the various sections. - self.platform_section = ["Platforms"] - for platform in platforms: - self.platform_section.append(["Platform", {"Name": platform}]) - self.tool_files_section = ["ToolFiles"] - self.configurations_section = ["Configurations"] - self.files_section = ["Files"] - - # Keep a dict keyed on filename to speed up access. - self.files_dict = dict() - - def AddToolFile(self, path): - """Adds a tool file to the project. - - Args: - path: Relative path from project to tool file. - """ - self.tool_files_section.append(["ToolFile", {"RelativePath": path}]) - - def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools): - """Returns the specification for a configuration. - - Args: - config_type: Type of configuration node. - config_name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - Returns: - """ - # Handle defaults - if not attrs: - attrs = {} - if not tools: - tools = [] - - # Add configuration node and its attributes - node_attrs = attrs.copy() - node_attrs["Name"] = config_name - specification = [config_type, node_attrs] - - # Add tool nodes and their attributes - if tools: - for t in tools: - if isinstance(t, Tool): - specification.append(t._GetSpecification()) - else: - specification.append(Tool(t)._GetSpecification()) - return specification - - def AddConfig(self, name, attrs=None, tools=None): - """Adds a configuration to the project. - - Args: - name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools) - self.configurations_section.append(spec) - - def _AddFilesToNode(self, parent, files): - """Adds files and/or filters to the parent node. 
- - Args: - parent: Destination node - files: A list of Filter objects and/or relative paths to files. - - Will call itself recursively, if the files list contains Filter objects. - """ - for f in files: - if isinstance(f, Filter): - node = ["Filter", {"Name": f.name}] - self._AddFilesToNode(node, f.contents) - else: - node = ["File", {"RelativePath": f}] - self.files_dict[f] = node - parent.append(node) - - def AddFiles(self, files): - """Adds files to the project. - - Args: - files: A list of Filter objects and/or relative paths to files. - - This makes a copy of the file/filter tree at the time of this call. If you - later add files to a Filter object which was passed into a previous call - to AddFiles(), it will not be reflected in this project. - """ - self._AddFilesToNode(self.files_section, files) - # TODO(rspangler) This also doesn't handle adding files to an existing - # filter. That is, it doesn't merge the trees. - - def AddFileConfig(self, path, config, attrs=None, tools=None): - """Adds a configuration to a file. - - Args: - path: Relative path to the file. - config: Name of configuration to add. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - - Raises: - ValueError: Relative path does not match any file added via AddFiles(). - """ - # Find the file node with the right relative path - parent = self.files_dict.get(path) - if not parent: - raise ValueError('AddFileConfig: file "%s" not in project.' % path) - - # Add the config to the file node - spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools) - parent.append(spec) - - def WriteIfChanged(self): - """Writes the project file.""" - # First create XML content definition - content = [ - "VisualStudioProject", - { - "ProjectType": "Visual C++", - "Version": self.version.ProjectVersion(), - "Name": self.name, - "ProjectGUID": self.guid, - "RootNamespace": self.name, - "Keyword": "Win32Proj", - }, - self.platform_section, - self.tool_files_section, - self.configurations_section, - ["References"], # empty section - self.files_section, - ["Globals"], # empty section - ] - easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252") diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py deleted file mode 100644 index 6ef16f2a0b23e..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py +++ /dev/null @@ -1,1274 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -r"""Code to validate and convert settings of the Microsoft build tools. - -This file contains code to validate and convert settings of the Microsoft -build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), -and ValidateMSBuildSettings() are the entry points. - -This file was created by comparing the projects created by Visual Studio 2008 -and Visual Studio 2010 for all available settings through the user interface. -The MSBuild schemas were also considered. They are typically found in the -MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild -""" - -from __future__ import print_function - -from gyp import string_types - -import sys -import re - -# Dictionaries of settings validators. 
The key is the tool name, the value is -# a dictionary mapping setting names to validation functions. -_msvs_validators = {} -_msbuild_validators = {} - - -# A dictionary of settings converters. The key is the tool name, the value is -# a dictionary mapping setting names to conversion functions. -_msvs_to_msbuild_converters = {} - - -# Tool name mapping from MSVS to MSBuild. -_msbuild_name_of_tool = {} - - -class _Tool(object): - """Represents a tool used by MSVS or MSBuild. - - Attributes: - msvs_name: The name of the tool in MSVS. - msbuild_name: The name of the tool in MSBuild. - """ - - def __init__(self, msvs_name, msbuild_name): - self.msvs_name = msvs_name - self.msbuild_name = msbuild_name - - -def _AddTool(tool): - """Adds a tool to the four dictionaries used to process settings. - - This only defines the tool. Each setting also needs to be added. - - Args: - tool: The _Tool object to be added. - """ - _msvs_validators[tool.msvs_name] = {} - _msbuild_validators[tool.msbuild_name] = {} - _msvs_to_msbuild_converters[tool.msvs_name] = {} - _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name - - -def _GetMSBuildToolSettings(msbuild_settings, tool): - """Returns an MSBuild tool dictionary. Creates it if needed.""" - return msbuild_settings.setdefault(tool.msbuild_name, {}) - - -class _Type(object): - """Type of settings (Base class).""" - - def ValidateMSVS(self, value): - """Verifies that the value is legal for MSVS. - - Args: - value: the value to check for this type. - - Raises: - ValueError if value is not valid for MSVS. - """ - - def ValidateMSBuild(self, value): - """Verifies that the value is legal for MSBuild. - - Args: - value: the value to check for this type. - - Raises: - ValueError if value is not valid for MSBuild. - """ - - def ConvertToMSBuild(self, value): - """Returns the MSBuild equivalent of the MSVS value given. - - Args: - value: the MSVS value to convert. - - Returns: - the MSBuild equivalent. - - Raises: - ValueError if value is not valid. 
- """ - return value - - -class _String(_Type): - """A setting that's just a string.""" - - def ValidateMSVS(self, value): - if not isinstance(value, string_types): - raise ValueError("expected string; got %r" % value) - - def ValidateMSBuild(self, value): - if not isinstance(value, string_types): - raise ValueError("expected string; got %r" % value) - - def ConvertToMSBuild(self, value): - # Convert the macros - return ConvertVCMacrosToMSBuild(value) - - -class _StringList(_Type): - """A settings that's a list of strings.""" - - def ValidateMSVS(self, value): - if not isinstance(value, string_types) and not isinstance(value, list): - raise ValueError("expected string list; got %r" % value) - - def ValidateMSBuild(self, value): - if not isinstance(value, string_types) and not isinstance(value, list): - raise ValueError("expected string list; got %r" % value) - - def ConvertToMSBuild(self, value): - # Convert the macros - if isinstance(value, list): - return [ConvertVCMacrosToMSBuild(i) for i in value] - else: - return ConvertVCMacrosToMSBuild(value) - - -class _Boolean(_Type): - """Boolean settings, can have the values 'false' or 'true'.""" - - def _Validate(self, value): - if value != "true" and value != "false": - raise ValueError("expected bool; got %r" % value) - - def ValidateMSVS(self, value): - self._Validate(value) - - def ValidateMSBuild(self, value): - self._Validate(value) - - def ConvertToMSBuild(self, value): - self._Validate(value) - return value - - -class _Integer(_Type): - """Integer settings.""" - - def __init__(self, msbuild_base=10): - _Type.__init__(self) - self._msbuild_base = msbuild_base - - def ValidateMSVS(self, value): - # Try to convert, this will raise ValueError if invalid. - self.ConvertToMSBuild(value) - - def ValidateMSBuild(self, value): - # Try to convert, this will raise ValueError if invalid. - int(value, self._msbuild_base) - - def ConvertToMSBuild(self, value): - msbuild_format = (self._msbuild_base == 10) and "%d" or "0x%04x" - return msbuild_format % int(value) - - -class _Enumeration(_Type): - """Type of settings that is an enumeration. - - In MSVS, the values are indexes like '0', '1', and '2'. - MSBuild uses text labels that are more representative, like 'Win32'. - - Constructor args: - label_list: an array of MSBuild labels that correspond to the MSVS index. - In the rare cases where MSVS has skipped an index value, None is - used in the array to indicate the unused spot. - new: an array of labels that are new to MSBuild. - """ - - def __init__(self, label_list, new=None): - _Type.__init__(self) - self._label_list = label_list - self._msbuild_values = set(value for value in label_list if value is not None) - if new is not None: - self._msbuild_values.update(new) - - def ValidateMSVS(self, value): - # Try to convert. It will raise an exception if not valid. - self.ConvertToMSBuild(value) - - def ValidateMSBuild(self, value): - if value not in self._msbuild_values: - raise ValueError("unrecognized enumerated value %s" % value) - - def ConvertToMSBuild(self, value): - index = int(value) - if index < 0 or index >= len(self._label_list): - raise ValueError( - "index value (%d) not in expected range [0, %d)" - % (index, len(self._label_list)) - ) - label = self._label_list[index] - if label is None: - raise ValueError("converted value for %s not specified." % value) - return label - - -# Instantiate the various generic types. 
-_boolean = _Boolean() -_integer = _Integer() -# For now, we don't do any special validation on these types: -_string = _String() -_file_name = _String() -_folder_name = _String() -_file_list = _StringList() -_folder_list = _StringList() -_string_list = _StringList() -# Some boolean settings went from numerical values to boolean. The -# mapping is 0: default, 1: false, 2: true. -_newly_boolean = _Enumeration(["", "false", "true"]) - - -def _Same(tool, name, setting_type): - """Defines a setting that has the same name in MSVS and MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - _Renamed(tool, name, name, setting_type) - - -def _Renamed(tool, msvs_name, msbuild_name, setting_type): - """Defines a setting for which the name has changed. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_name: the name of the MSVS setting. - msbuild_name: the name of the MSBuild setting. - setting_type: the type of this setting. - """ - - def _Translate(value, msbuild_settings): - msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value) - - _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS - _msbuild_validators[tool.msbuild_name][msbuild_name] = setting_type.ValidateMSBuild - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -def _Moved(tool, settings_name, msbuild_tool_name, setting_type): - _MovedAndRenamed( - tool, settings_name, msbuild_tool_name, settings_name, setting_type - ) - - -def _MovedAndRenamed( - tool, msvs_settings_name, msbuild_tool_name, msbuild_settings_name, setting_type -): - """Defines a setting that may have moved to a new section. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_settings_name: the MSVS name of the setting. - msbuild_tool_name: the name of the MSBuild tool to place the setting under. - msbuild_settings_name: the MSBuild name of the setting. - setting_type: the type of this setting. - """ - - def _Translate(value, msbuild_settings): - tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {}) - tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value) - - _msvs_validators[tool.msvs_name][msvs_settings_name] = setting_type.ValidateMSVS - validator = setting_type.ValidateMSBuild - _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator - _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate - - -def _MSVSOnly(tool, name, setting_type): - """Defines a setting that is only found in MSVS. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - - def _Translate(unused_value, unused_msbuild_settings): - # Since this is for MSVS only settings, no translation will happen. - pass - - _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS - _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate - - -def _MSBuildOnly(tool, name, setting_type): - """Defines a setting that is only found in MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. 
- """ - - def _Translate(value, msbuild_settings): - # Let msbuild-only properties get translated as-is from msvs_settings. - tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {}) - tool_settings[name] = value - - _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild - _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate - - -def _ConvertedToAdditionalOption(tool, msvs_name, flag): - """Defines a setting that's handled via a command line option in MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_name: the name of the MSVS setting that if 'true' becomes a flag - flag: the flag to insert at the end of the AdditionalOptions - """ - - def _Translate(value, msbuild_settings): - if value == "true": - tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - if "AdditionalOptions" in tool_settings: - new_flags = "%s %s" % (tool_settings["AdditionalOptions"], flag) - else: - new_flags = flag - tool_settings["AdditionalOptions"] = new_flags - - _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -def _CustomGeneratePreprocessedFile(tool, msvs_name): - def _Translate(value, msbuild_settings): - tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - if value == "0": - tool_settings["PreprocessToFile"] = "false" - tool_settings["PreprocessSuppressLineNumbers"] = "false" - elif value == "1": # /P - tool_settings["PreprocessToFile"] = "true" - tool_settings["PreprocessSuppressLineNumbers"] = "false" - elif value == "2": # /EP /P - tool_settings["PreprocessToFile"] = "true" - tool_settings["PreprocessSuppressLineNumbers"] = "true" - else: - raise ValueError("value must be one of [0, 1, 2]; got %s" % value) - - # Create a bogus validator that looks for '0', '1', or '2' - msvs_validator = _Enumeration(["a", "b", "c"]).ValidateMSVS - _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator - msbuild_validator = _boolean.ValidateMSBuild - msbuild_tool_validators = _msbuild_validators[tool.msbuild_name] - msbuild_tool_validators["PreprocessToFile"] = msbuild_validator - msbuild_tool_validators["PreprocessSuppressLineNumbers"] = msbuild_validator - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -fix_vc_macro_slashes_regex_list = ("IntDir", "OutDir") -fix_vc_macro_slashes_regex = re.compile( - r"(\$\((?:%s)\))(?:[\\/]+)" % "|".join(fix_vc_macro_slashes_regex_list) -) - -# Regular expression to detect keys that were generated by exclusion lists -_EXCLUDED_SUFFIX_RE = re.compile("^(.*)_excluded$") - - -def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr): - """Verify that 'setting' is valid if it is generated from an exclusion list. - - If the setting appears to be generated from an exclusion list, the root name - is checked. - - Args: - setting: A string that is the setting name to validate - settings: A dictionary where the keys are valid settings - error_msg: The message to emit in the event of error - stderr: The stream receiving the error messages. - """ - # This may be unrecognized because it's an exclusion list. If the - # setting name has the _excluded suffix, then check the root name. - unrecognized = True - m = re.match(_EXCLUDED_SUFFIX_RE, setting) - if m: - root_setting = m.group(1) - unrecognized = root_setting not in settings - - if unrecognized: - # We don't know this setting. Give a warning. 
- print(error_msg, file=stderr) - - -def FixVCMacroSlashes(s): - """Replace macros which have excessive following slashes. - - These macros are known to have a built-in trailing slash. Furthermore, many - scripts hiccup on processing paths with extra slashes in the middle. - - This list is probably not exhaustive. Add as needed. - """ - if "$" in s: - s = fix_vc_macro_slashes_regex.sub(r"\1", s) - return s - - -def ConvertVCMacrosToMSBuild(s): - """Convert the MSVS macros found in the string to the MSBuild equivalent. - - This list is probably not exhaustive. Add as needed. - """ - if "$" in s: - replace_map = { - "$(ConfigurationName)": "$(Configuration)", - "$(InputDir)": "%(RelativeDir)", - "$(InputExt)": "%(Extension)", - "$(InputFileName)": "%(Filename)%(Extension)", - "$(InputName)": "%(Filename)", - "$(InputPath)": "%(Identity)", - "$(ParentName)": "$(ProjectFileName)", - "$(PlatformName)": "$(Platform)", - "$(SafeInputName)": "%(Filename)", - } - for old, new in replace_map.items(): - s = s.replace(old, new) - s = FixVCMacroSlashes(s) - return s - - -def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): - """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+). - - Args: - msvs_settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - - Returns: - A dictionary of MSBuild settings. The key is either the MSBuild tool name - or the empty string (for the global settings). The values are themselves - dictionaries of settings and their values. - """ - msbuild_settings = {} - for msvs_tool_name, msvs_tool_settings in msvs_settings.items(): - if msvs_tool_name in _msvs_to_msbuild_converters: - msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] - for msvs_setting, msvs_value in msvs_tool_settings.items(): - if msvs_setting in msvs_tool: - # Invoke the translation function. - try: - msvs_tool[msvs_setting](msvs_value, msbuild_settings) - except ValueError as e: - print( - "Warning: while converting %s/%s to MSBuild, " - "%s" % (msvs_tool_name, msvs_setting, e), - file=stderr, - ) - else: - _ValidateExclusionSetting( - msvs_setting, - msvs_tool, - ( - "Warning: unrecognized setting %s/%s " - "while converting to MSBuild." - % (msvs_tool_name, msvs_setting) - ), - stderr, - ) - else: - print( - "Warning: unrecognized tool %s while converting to " - "MSBuild." % msvs_tool_name, - file=stderr, - ) - return msbuild_settings - - -def ValidateMSVSSettings(settings, stderr=sys.stderr): - """Validates that the names of the settings are valid for MSVS. - - Args: - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - _ValidateSettings(_msvs_validators, settings, stderr) - - -def ValidateMSBuildSettings(settings, stderr=sys.stderr): - """Validates that the names of the settings are valid for MSBuild. - - Args: - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - _ValidateSettings(_msbuild_validators, settings, stderr) - - -def _ValidateSettings(validators, settings, stderr): - """Validates that the settings are valid for MSBuild or MSVS. - - We currently only validate the names of the settings, not their values. - - Args: - validators: A dictionary of tools and their validators. 
- settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - for tool_name in settings: - if tool_name in validators: - tool_validators = validators[tool_name] - for setting, value in settings[tool_name].items(): - if setting in tool_validators: - try: - tool_validators[setting](value) - except ValueError as e: - print( - "Warning: for %s/%s, %s" % (tool_name, setting, e), - file=stderr, - ) - else: - _ValidateExclusionSetting( - setting, - tool_validators, - ("Warning: unrecognized setting %s/%s" % (tool_name, setting)), - stderr, - ) - - else: - print("Warning: unrecognized tool %s" % (tool_name), file=stderr) - - -# MSVS and MBuild names of the tools. -_compile = _Tool("VCCLCompilerTool", "ClCompile") -_link = _Tool("VCLinkerTool", "Link") -_midl = _Tool("VCMIDLTool", "Midl") -_rc = _Tool("VCResourceCompilerTool", "ResourceCompile") -_lib = _Tool("VCLibrarianTool", "Lib") -_manifest = _Tool("VCManifestTool", "Manifest") -_masm = _Tool("MASM", "MASM") -_armasm = _Tool("ARMASM", "ARMASM") - - -_AddTool(_compile) -_AddTool(_link) -_AddTool(_midl) -_AddTool(_rc) -_AddTool(_lib) -_AddTool(_manifest) -_AddTool(_masm) -_AddTool(_armasm) -# Add sections only found in the MSBuild settings. -_msbuild_validators[""] = {} -_msbuild_validators["ProjectReference"] = {} -_msbuild_validators["ManifestResourceCompile"] = {} - -# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and -# ClCompile in MSBuild. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for -# the schema of the MSBuild ClCompile settings. - -# Options that have the same name in MSVS and MSBuild -_Same(_compile, "AdditionalIncludeDirectories", _folder_list) # /I -_Same(_compile, "AdditionalOptions", _string_list) -_Same(_compile, "AdditionalUsingDirectories", _folder_list) # /AI -_Same(_compile, "AssemblerListingLocation", _file_name) # /Fa -_Same(_compile, "BrowseInformationFile", _file_name) -_Same(_compile, "BufferSecurityCheck", _boolean) # /GS -_Same(_compile, "DisableLanguageExtensions", _boolean) # /Za -_Same(_compile, "DisableSpecificWarnings", _string_list) # /wd -_Same(_compile, "EnableFiberSafeOptimizations", _boolean) # /GT -_Same(_compile, "EnablePREfast", _boolean) # /analyze Visible='false' -_Same(_compile, "ExpandAttributedSource", _boolean) # /Fx -_Same(_compile, "FloatingPointExceptions", _boolean) # /fp:except -_Same(_compile, "ForceConformanceInForLoopScope", _boolean) # /Zc:forScope -_Same(_compile, "ForcedIncludeFiles", _file_list) # /FI -_Same(_compile, "ForcedUsingFiles", _file_list) # /FU -_Same(_compile, "GenerateXMLDocumentationFiles", _boolean) # /doc -_Same(_compile, "IgnoreStandardIncludePath", _boolean) # /X -_Same(_compile, "MinimalRebuild", _boolean) # /Gm -_Same(_compile, "OmitDefaultLibName", _boolean) # /Zl -_Same(_compile, "OmitFramePointers", _boolean) # /Oy -_Same(_compile, "PreprocessorDefinitions", _string_list) # /D -_Same(_compile, "ProgramDataBaseFileName", _file_name) # /Fd -_Same(_compile, "RuntimeTypeInfo", _boolean) # /GR -_Same(_compile, "ShowIncludes", _boolean) # /showIncludes -_Same(_compile, "SmallerTypeCheck", _boolean) # /RTCc -_Same(_compile, "StringPooling", _boolean) # /GF -_Same(_compile, "SuppressStartupBanner", _boolean) # /nologo -_Same(_compile, "TreatWChar_tAsBuiltInType", _boolean) # /Zc:wchar_t -_Same(_compile, "UndefineAllPreprocessorDefinitions", _boolean) # /u -_Same(_compile, "UndefinePreprocessorDefinitions", 
_string_list) # /U -_Same(_compile, "UseFullPaths", _boolean) # /FC -_Same(_compile, "WholeProgramOptimization", _boolean) # /GL -_Same(_compile, "XMLDocumentationFileName", _file_name) -_Same(_compile, "CompileAsWinRT", _boolean) # /ZW - -_Same( - _compile, - "AssemblerOutput", - _Enumeration( - [ - "NoListing", - "AssemblyCode", # /FA - "All", # /FAcs - "AssemblyAndMachineCode", # /FAc - "AssemblyAndSourceCode", - ] - ), -) # /FAs -_Same( - _compile, - "BasicRuntimeChecks", - _Enumeration( - [ - "Default", - "StackFrameRuntimeCheck", # /RTCs - "UninitializedLocalUsageCheck", # /RTCu - "EnableFastChecks", - ] - ), -) # /RTC1 -_Same( - _compile, "BrowseInformation", _Enumeration(["false", "true", "true"]) # /FR -) # /Fr -_Same( - _compile, - "CallingConvention", - _Enumeration(["Cdecl", "FastCall", "StdCall", "VectorCall"]), # /Gd # /Gr # /Gz -) # /Gv -_Same( - _compile, - "CompileAs", - _Enumeration(["Default", "CompileAsC", "CompileAsCpp"]), # /TC -) # /TP -_Same( - _compile, - "DebugInformationFormat", - _Enumeration( - [ - "", # Disabled - "OldStyle", # /Z7 - None, - "ProgramDatabase", # /Zi - "EditAndContinue", - ] - ), -) # /ZI -_Same( - _compile, - "EnableEnhancedInstructionSet", - _Enumeration( - [ - "NotSet", - "StreamingSIMDExtensions", # /arch:SSE - "StreamingSIMDExtensions2", # /arch:SSE2 - "AdvancedVectorExtensions", # /arch:AVX (vs2012+) - "NoExtensions", # /arch:IA32 (vs2012+) - # This one only exists in the new msbuild format. - "AdvancedVectorExtensions2", # /arch:AVX2 (vs2013r2+) - ] - ), -) -_Same( - _compile, - "ErrorReporting", - _Enumeration( - [ - "None", # /errorReport:none - "Prompt", # /errorReport:prompt - "Queue", - ], # /errorReport:queue - new=["Send"], - ), -) # /errorReport:send" -_Same( - _compile, - "ExceptionHandling", - _Enumeration(["false", "Sync", "Async"], new=["SyncCThrow"]), # /EHsc # /EHa -) # /EHs -_Same( - _compile, "FavorSizeOrSpeed", _Enumeration(["Neither", "Speed", "Size"]) # /Ot -) # /Os -_Same( - _compile, - "FloatingPointModel", - _Enumeration(["Precise", "Strict", "Fast"]), # /fp:precise # /fp:strict -) # /fp:fast -_Same( - _compile, - "InlineFunctionExpansion", - _Enumeration( - ["Default", "OnlyExplicitInline", "AnySuitable"], # /Ob1 # /Ob2 - new=["Disabled"], - ), -) # /Ob0 -_Same( - _compile, - "Optimization", - _Enumeration(["Disabled", "MinSpace", "MaxSpeed", "Full"]), # /Od # /O1 # /O2 -) # /Ox -_Same( - _compile, - "RuntimeLibrary", - _Enumeration( - [ - "MultiThreaded", # /MT - "MultiThreadedDebug", # /MTd - "MultiThreadedDLL", # /MD - "MultiThreadedDebugDLL", - ] - ), -) # /MDd -_Same( - _compile, - "StructMemberAlignment", - _Enumeration( - [ - "Default", - "1Byte", # /Zp1 - "2Bytes", # /Zp2 - "4Bytes", # /Zp4 - "8Bytes", # /Zp8 - "16Bytes", - ] - ), -) # /Zp16 -_Same( - _compile, - "WarningLevel", - _Enumeration( - [ - "TurnOffAllWarnings", # /W0 - "Level1", # /W1 - "Level2", # /W2 - "Level3", # /W3 - "Level4", - ], # /W4 - new=["EnableAllWarnings"], - ), -) # /Wall - -# Options found in MSVS that have been renamed in MSBuild. 
-_Renamed( - _compile, "EnableFunctionLevelLinking", "FunctionLevelLinking", _boolean -) # /Gy -_Renamed(_compile, "EnableIntrinsicFunctions", "IntrinsicFunctions", _boolean) # /Oi -_Renamed(_compile, "KeepComments", "PreprocessKeepComments", _boolean) # /C -_Renamed(_compile, "ObjectFile", "ObjectFileName", _file_name) # /Fo -_Renamed(_compile, "OpenMP", "OpenMPSupport", _boolean) # /openmp -_Renamed( - _compile, "PrecompiledHeaderThrough", "PrecompiledHeaderFile", _file_name -) # Used with /Yc and /Yu -_Renamed( - _compile, "PrecompiledHeaderFile", "PrecompiledHeaderOutputFile", _file_name -) # /Fp -_Renamed( - _compile, - "UsePrecompiledHeader", - "PrecompiledHeader", - _Enumeration( - ["NotUsing", "Create", "Use"] # VS recognized '' for this value too. # /Yc - ), -) # /Yu -_Renamed(_compile, "WarnAsError", "TreatWarningAsError", _boolean) # /WX - -_ConvertedToAdditionalOption(_compile, "DefaultCharIsUnsigned", "/J") - -# MSVS options not found in MSBuild. -_MSVSOnly(_compile, "Detect64BitPortabilityProblems", _boolean) -_MSVSOnly(_compile, "UseUnicodeResponseFiles", _boolean) - -# MSBuild options not found in MSVS. -_MSBuildOnly(_compile, "BuildingInIDE", _boolean) -_MSBuildOnly( - _compile, "CompileAsManaged", _Enumeration([], new=["false", "true"]) -) # /clr -_MSBuildOnly(_compile, "CreateHotpatchableImage", _boolean) # /hotpatch -_MSBuildOnly(_compile, "MultiProcessorCompilation", _boolean) # /MP -_MSBuildOnly(_compile, "PreprocessOutputPath", _string) # /Fi -_MSBuildOnly(_compile, "ProcessorNumber", _integer) # the number of processors -_MSBuildOnly(_compile, "TrackerLogDirectory", _folder_name) -_MSBuildOnly(_compile, "TreatSpecificWarningsAsErrors", _string_list) # /we -_MSBuildOnly(_compile, "UseUnicodeForAssemblerListing", _boolean) # /FAu - -# Defines a setting that needs very customized processing -_CustomGeneratePreprocessedFile(_compile, "GeneratePreprocessedFile") - - -# Directives for converting MSVS VCLinkerTool to MSBuild Link. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for -# the schema of the MSBuild Link settings. 
- -# Options that have the same name in MSVS and MSBuild -_Same(_link, "AdditionalDependencies", _file_list) -_Same(_link, "AdditionalLibraryDirectories", _folder_list) # /LIBPATH -# /MANIFESTDEPENDENCY: -_Same(_link, "AdditionalManifestDependencies", _file_list) -_Same(_link, "AdditionalOptions", _string_list) -_Same(_link, "AddModuleNamesToAssembly", _file_list) # /ASSEMBLYMODULE -_Same(_link, "AllowIsolation", _boolean) # /ALLOWISOLATION -_Same(_link, "AssemblyLinkResource", _file_list) # /ASSEMBLYLINKRESOURCE -_Same(_link, "BaseAddress", _string) # /BASE -_Same(_link, "CLRUnmanagedCodeCheck", _boolean) # /CLRUNMANAGEDCODECHECK -_Same(_link, "DelayLoadDLLs", _file_list) # /DELAYLOAD -_Same(_link, "DelaySign", _boolean) # /DELAYSIGN -_Same(_link, "EmbedManagedResourceFile", _file_list) # /ASSEMBLYRESOURCE -_Same(_link, "EnableUAC", _boolean) # /MANIFESTUAC -_Same(_link, "EntryPointSymbol", _string) # /ENTRY -_Same(_link, "ForceSymbolReferences", _file_list) # /INCLUDE -_Same(_link, "FunctionOrder", _file_name) # /ORDER -_Same(_link, "GenerateDebugInformation", _boolean) # /DEBUG -_Same(_link, "GenerateMapFile", _boolean) # /MAP -_Same(_link, "HeapCommitSize", _string) -_Same(_link, "HeapReserveSize", _string) # /HEAP -_Same(_link, "IgnoreAllDefaultLibraries", _boolean) # /NODEFAULTLIB -_Same(_link, "IgnoreEmbeddedIDL", _boolean) # /IGNOREIDL -_Same(_link, "ImportLibrary", _file_name) # /IMPLIB -_Same(_link, "KeyContainer", _file_name) # /KEYCONTAINER -_Same(_link, "KeyFile", _file_name) # /KEYFILE -_Same(_link, "ManifestFile", _file_name) # /ManifestFile -_Same(_link, "MapExports", _boolean) # /MAPINFO:EXPORTS -_Same(_link, "MapFileName", _file_name) -_Same(_link, "MergedIDLBaseFileName", _file_name) # /IDLOUT -_Same(_link, "MergeSections", _string) # /MERGE -_Same(_link, "MidlCommandFile", _file_name) # /MIDL -_Same(_link, "ModuleDefinitionFile", _file_name) # /DEF -_Same(_link, "OutputFile", _file_name) # /OUT -_Same(_link, "PerUserRedirection", _boolean) -_Same(_link, "Profile", _boolean) # /PROFILE -_Same(_link, "ProfileGuidedDatabase", _file_name) # /PGD -_Same(_link, "ProgramDatabaseFile", _file_name) # /PDB -_Same(_link, "RegisterOutput", _boolean) -_Same(_link, "SetChecksum", _boolean) # /RELEASE -_Same(_link, "StackCommitSize", _string) -_Same(_link, "StackReserveSize", _string) # /STACK -_Same(_link, "StripPrivateSymbols", _file_name) # /PDBSTRIPPED -_Same(_link, "SupportUnloadOfDelayLoadedDLL", _boolean) # /DELAY:UNLOAD -_Same(_link, "SuppressStartupBanner", _boolean) # /NOLOGO -_Same(_link, "SwapRunFromCD", _boolean) # /SWAPRUN:CD -_Same(_link, "TurnOffAssemblyGeneration", _boolean) # /NOASSEMBLY -_Same(_link, "TypeLibraryFile", _file_name) # /TLBOUT -_Same(_link, "TypeLibraryResourceID", _integer) # /TLBID -_Same(_link, "UACUIAccess", _boolean) # /uiAccess='true' -_Same(_link, "Version", _string) # /VERSION - -_Same(_link, "EnableCOMDATFolding", _newly_boolean) # /OPT:ICF -_Same(_link, "FixedBaseAddress", _newly_boolean) # /FIXED -_Same(_link, "LargeAddressAware", _newly_boolean) # /LARGEADDRESSAWARE -_Same(_link, "OptimizeReferences", _newly_boolean) # /OPT:REF -_Same(_link, "RandomizedBaseAddress", _newly_boolean) # /DYNAMICBASE -_Same(_link, "TerminalServerAware", _newly_boolean) # /TSAWARE - -_subsystem_enumeration = _Enumeration( - [ - "NotSet", - "Console", # /SUBSYSTEM:CONSOLE - "Windows", # /SUBSYSTEM:WINDOWS - "Native", # /SUBSYSTEM:NATIVE - "EFI Application", # /SUBSYSTEM:EFI_APPLICATION - "EFI Boot Service Driver", # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER - "EFI ROM", 
# /SUBSYSTEM:EFI_ROM - "EFI Runtime", # /SUBSYSTEM:EFI_RUNTIME_DRIVER - "WindowsCE", - ], # /SUBSYSTEM:WINDOWSCE - new=["POSIX"], -) # /SUBSYSTEM:POSIX - -_target_machine_enumeration = _Enumeration( - [ - "NotSet", - "MachineX86", # /MACHINE:X86 - None, - "MachineARM", # /MACHINE:ARM - "MachineEBC", # /MACHINE:EBC - "MachineIA64", # /MACHINE:IA64 - None, - "MachineMIPS", # /MACHINE:MIPS - "MachineMIPS16", # /MACHINE:MIPS16 - "MachineMIPSFPU", # /MACHINE:MIPSFPU - "MachineMIPSFPU16", # /MACHINE:MIPSFPU16 - None, - None, - None, - "MachineSH4", # /MACHINE:SH4 - None, - "MachineTHUMB", # /MACHINE:THUMB - "MachineX64", - ] -) # /MACHINE:X64 - -_Same( - _link, "AssemblyDebug", _Enumeration(["", "true", "false"]) # /ASSEMBLYDEBUG -) # /ASSEMBLYDEBUG:DISABLE -_Same( - _link, - "CLRImageType", - _Enumeration( - [ - "Default", - "ForceIJWImage", # /CLRIMAGETYPE:IJW - "ForcePureILImage", # /Switch="CLRIMAGETYPE:PURE - "ForceSafeILImage", - ] - ), -) # /Switch="CLRIMAGETYPE:SAFE -_Same( - _link, - "CLRThreadAttribute", - _Enumeration( - [ - "DefaultThreadingAttribute", # /CLRTHREADATTRIBUTE:NONE - "MTAThreadingAttribute", # /CLRTHREADATTRIBUTE:MTA - "STAThreadingAttribute", - ] - ), -) # /CLRTHREADATTRIBUTE:STA -_Same( - _link, - "DataExecutionPrevention", - _Enumeration(["", "false", "true"]), # /NXCOMPAT:NO -) # /NXCOMPAT -_Same( - _link, - "Driver", - _Enumeration(["NotSet", "Driver", "UpOnly", "WDM"]), # /Driver # /DRIVER:UPONLY -) # /DRIVER:WDM -_Same( - _link, - "LinkTimeCodeGeneration", - _Enumeration( - [ - "Default", - "UseLinkTimeCodeGeneration", # /LTCG - "PGInstrument", # /LTCG:PGInstrument - "PGOptimization", # /LTCG:PGOptimize - "PGUpdate", - ] - ), -) # /LTCG:PGUpdate -_Same( - _link, - "ShowProgress", - _Enumeration( - ["NotSet", "LinkVerbose", "LinkVerboseLib"], # /VERBOSE # /VERBOSE:Lib - new=[ - "LinkVerboseICF", # /VERBOSE:ICF - "LinkVerboseREF", # /VERBOSE:REF - "LinkVerboseSAFESEH", # /VERBOSE:SAFESEH - "LinkVerboseCLR", - ], - ), -) # /VERBOSE:CLR -_Same(_link, "SubSystem", _subsystem_enumeration) -_Same(_link, "TargetMachine", _target_machine_enumeration) -_Same( - _link, - "UACExecutionLevel", - _Enumeration( - [ - "AsInvoker", # /level='asInvoker' - "HighestAvailable", # /level='highestAvailable' - "RequireAdministrator", - ] - ), -) # /level='requireAdministrator' -_Same(_link, "MinimumRequiredVersion", _string) -_Same(_link, "TreatLinkerWarningAsErrors", _boolean) # /WX - - -# Options found in MSVS that have been renamed in MSBuild. -_Renamed( - _link, - "ErrorReporting", - "LinkErrorReporting", - _Enumeration( - [ - "NoErrorReport", # /ERRORREPORT:NONE - "PromptImmediately", # /ERRORREPORT:PROMPT - "QueueForNextLogin", - ], # /ERRORREPORT:QUEUE - new=["SendErrorReport"], - ), -) # /ERRORREPORT:SEND -_Renamed( - _link, "IgnoreDefaultLibraryNames", "IgnoreSpecificDefaultLibraries", _file_list -) # /NODEFAULTLIB -_Renamed(_link, "ResourceOnlyDLL", "NoEntryPoint", _boolean) # /NOENTRY -_Renamed(_link, "SwapRunFromNet", "SwapRunFromNET", _boolean) # /SWAPRUN:NET - -_Moved(_link, "GenerateManifest", "", _boolean) -_Moved(_link, "IgnoreImportLibrary", "", _boolean) -_Moved(_link, "LinkIncremental", "", _newly_boolean) -_Moved(_link, "LinkLibraryDependencies", "ProjectReference", _boolean) -_Moved(_link, "UseLibraryDependencyInputs", "ProjectReference", _boolean) - -# MSVS options not found in MSBuild. -_MSVSOnly(_link, "OptimizeForWindows98", _newly_boolean) -_MSVSOnly(_link, "UseUnicodeResponseFiles", _boolean) - -# MSBuild options not found in MSVS. 
-_MSBuildOnly(_link, "BuildingInIDE", _boolean) -_MSBuildOnly(_link, "ImageHasSafeExceptionHandlers", _boolean) # /SAFESEH -_MSBuildOnly(_link, "LinkDLL", _boolean) # /DLL Visible='false' -_MSBuildOnly(_link, "LinkStatus", _boolean) # /LTCG:STATUS -_MSBuildOnly(_link, "PreventDllBinding", _boolean) # /ALLOWBIND -_MSBuildOnly(_link, "SupportNobindOfDelayLoadedDLL", _boolean) # /DELAY:NOBIND -_MSBuildOnly(_link, "TrackerLogDirectory", _folder_name) -_MSBuildOnly(_link, "MSDOSStubFileName", _file_name) # /STUB Visible='false' -_MSBuildOnly(_link, "SectionAlignment", _integer) # /ALIGN -_MSBuildOnly(_link, "SpecifySectionAttributes", _string) # /SECTION -_MSBuildOnly( - _link, - "ForceFileOutput", - _Enumeration( - [], - new=[ - "Enabled", # /FORCE - # /FORCE:MULTIPLE - "MultiplyDefinedSymbolOnly", - "UndefinedSymbolOnly", - ], - ), -) # /FORCE:UNRESOLVED -_MSBuildOnly( - _link, - "CreateHotPatchableImage", - _Enumeration( - [], - new=[ - "Enabled", # /FUNCTIONPADMIN - "X86Image", # /FUNCTIONPADMIN:5 - "X64Image", # /FUNCTIONPADMIN:6 - "ItaniumImage", - ], - ), -) # /FUNCTIONPADMIN:16 -_MSBuildOnly( - _link, - "CLRSupportLastError", - _Enumeration( - [], - new=[ - "Enabled", # /CLRSupportLastError - "Disabled", # /CLRSupportLastError:NO - # /CLRSupportLastError:SYSTEMDLL - "SystemDlls", - ], - ), -) - - -# Directives for converting VCResourceCompilerTool to ResourceCompile. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for -# the schema of the MSBuild ResourceCompile settings. - -_Same(_rc, "AdditionalOptions", _string_list) -_Same(_rc, "AdditionalIncludeDirectories", _folder_list) # /I -_Same(_rc, "Culture", _Integer(msbuild_base=16)) -_Same(_rc, "IgnoreStandardIncludePath", _boolean) # /X -_Same(_rc, "PreprocessorDefinitions", _string_list) # /D -_Same(_rc, "ResourceOutputFileName", _string) # /fo -_Same(_rc, "ShowProgress", _boolean) # /v -# There is no UI in VisualStudio 2008 to set the following properties. -# However they are found in CL and other tools. Include them here for -# completeness, as they are very likely to have the same usage pattern. -_Same(_rc, "SuppressStartupBanner", _boolean) # /nologo -_Same(_rc, "UndefinePreprocessorDefinitions", _string_list) # /u - -# MSBuild options not found in MSVS. -_MSBuildOnly(_rc, "NullTerminateStrings", _boolean) # /n -_MSBuildOnly(_rc, "TrackerLogDirectory", _folder_name) - - -# Directives for converting VCMIDLTool to Midl. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for -# the schema of the MSBuild Midl settings. 
- -_Same(_midl, "AdditionalIncludeDirectories", _folder_list) # /I -_Same(_midl, "AdditionalOptions", _string_list) -_Same(_midl, "CPreprocessOptions", _string) # /cpp_opt -_Same(_midl, "ErrorCheckAllocations", _boolean) # /error allocation -_Same(_midl, "ErrorCheckBounds", _boolean) # /error bounds_check -_Same(_midl, "ErrorCheckEnumRange", _boolean) # /error enum -_Same(_midl, "ErrorCheckRefPointers", _boolean) # /error ref -_Same(_midl, "ErrorCheckStubData", _boolean) # /error stub_data -_Same(_midl, "GenerateStublessProxies", _boolean) # /Oicf -_Same(_midl, "GenerateTypeLibrary", _boolean) -_Same(_midl, "HeaderFileName", _file_name) # /h -_Same(_midl, "IgnoreStandardIncludePath", _boolean) # /no_def_idir -_Same(_midl, "InterfaceIdentifierFileName", _file_name) # /iid -_Same(_midl, "MkTypLibCompatible", _boolean) # /mktyplib203 -_Same(_midl, "OutputDirectory", _string) # /out -_Same(_midl, "PreprocessorDefinitions", _string_list) # /D -_Same(_midl, "ProxyFileName", _file_name) # /proxy -_Same(_midl, "RedirectOutputAndErrors", _file_name) # /o -_Same(_midl, "SuppressStartupBanner", _boolean) # /nologo -_Same(_midl, "TypeLibraryName", _file_name) # /tlb -_Same(_midl, "UndefinePreprocessorDefinitions", _string_list) # /U -_Same(_midl, "WarnAsError", _boolean) # /WX - -_Same( - _midl, - "DefaultCharType", - _Enumeration(["Unsigned", "Signed", "Ascii"]), # /char unsigned # /char signed -) # /char ascii7 -_Same( - _midl, - "TargetEnvironment", - _Enumeration( - [ - "NotSet", - "Win32", # /env win32 - "Itanium", # /env ia64 - "X64", # /env x64 - "ARM64", # /env arm64 - ] - ), -) -_Same( - _midl, - "EnableErrorChecks", - _Enumeration(["EnableCustom", "None", "All"]), # /error none -) # /error all -_Same( - _midl, - "StructMemberAlignment", - _Enumeration(["NotSet", "1", "2", "4", "8"]), # Zp1 # Zp2 # Zp4 -) # Zp8 -_Same( - _midl, - "WarningLevel", - _Enumeration(["0", "1", "2", "3", "4"]), # /W0 # /W1 # /W2 # /W3 -) # /W4 - -_Renamed(_midl, "DLLDataFileName", "DllDataFileName", _file_name) # /dlldata -_Renamed(_midl, "ValidateParameters", "ValidateAllParameters", _boolean) # /robust - -# MSBuild options not found in MSVS. -_MSBuildOnly(_midl, "ApplicationConfigurationMode", _boolean) # /app_config -_MSBuildOnly(_midl, "ClientStubFile", _file_name) # /cstub -_MSBuildOnly( - _midl, "GenerateClientFiles", _Enumeration([], new=["Stub", "None"]) # /client stub -) # /client none -_MSBuildOnly( - _midl, "GenerateServerFiles", _Enumeration([], new=["Stub", "None"]) # /client stub -) # /client none -_MSBuildOnly(_midl, "LocaleID", _integer) # /lcid DECIMAL -_MSBuildOnly(_midl, "ServerStubFile", _file_name) # /sstub -_MSBuildOnly(_midl, "SuppressCompilerWarnings", _boolean) # /no_warn -_MSBuildOnly(_midl, "TrackerLogDirectory", _folder_name) -_MSBuildOnly( - _midl, "TypeLibFormat", _Enumeration([], new=["NewFormat", "OldFormat"]) # /newtlb -) # /oldtlb - - -# Directives for converting VCLibrarianTool to Lib. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for -# the schema of the MSBuild Lib settings. 
- -_Same(_lib, "AdditionalDependencies", _file_list) -_Same(_lib, "AdditionalLibraryDirectories", _folder_list) # /LIBPATH -_Same(_lib, "AdditionalOptions", _string_list) -_Same(_lib, "ExportNamedFunctions", _string_list) # /EXPORT -_Same(_lib, "ForceSymbolReferences", _string) # /INCLUDE -_Same(_lib, "IgnoreAllDefaultLibraries", _boolean) # /NODEFAULTLIB -_Same(_lib, "IgnoreSpecificDefaultLibraries", _file_list) # /NODEFAULTLIB -_Same(_lib, "ModuleDefinitionFile", _file_name) # /DEF -_Same(_lib, "OutputFile", _file_name) # /OUT -_Same(_lib, "SuppressStartupBanner", _boolean) # /NOLOGO -_Same(_lib, "UseUnicodeResponseFiles", _boolean) -_Same(_lib, "LinkTimeCodeGeneration", _boolean) # /LTCG -_Same(_lib, "TargetMachine", _target_machine_enumeration) - -# TODO(jeanluc) _link defines the same value that gets moved to -# ProjectReference. We may want to validate that they are consistent. -_Moved(_lib, "LinkLibraryDependencies", "ProjectReference", _boolean) - -_MSBuildOnly(_lib, "DisplayLibrary", _string) # /LIST Visible='false' -_MSBuildOnly( - _lib, - "ErrorReporting", - _Enumeration( - [], - new=[ - "PromptImmediately", # /ERRORREPORT:PROMPT - "QueueForNextLogin", # /ERRORREPORT:QUEUE - "SendErrorReport", # /ERRORREPORT:SEND - "NoErrorReport", - ], - ), -) # /ERRORREPORT:NONE -_MSBuildOnly(_lib, "MinimumRequiredVersion", _string) -_MSBuildOnly(_lib, "Name", _file_name) # /NAME -_MSBuildOnly(_lib, "RemoveObjects", _file_list) # /REMOVE -_MSBuildOnly(_lib, "SubSystem", _subsystem_enumeration) -_MSBuildOnly(_lib, "TrackerLogDirectory", _folder_name) -_MSBuildOnly(_lib, "TreatLibWarningAsErrors", _boolean) # /WX -_MSBuildOnly(_lib, "Verbose", _boolean) - - -# Directives for converting VCManifestTool to Mt. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for -# the schema of the MSBuild Lib settings. - -# Options that have the same name in MSVS and MSBuild -_Same(_manifest, "AdditionalManifestFiles", _file_list) # /manifest -_Same(_manifest, "AdditionalOptions", _string_list) -_Same(_manifest, "AssemblyIdentity", _string) # /identity: -_Same(_manifest, "ComponentFileName", _file_name) # /dll -_Same(_manifest, "GenerateCatalogFiles", _boolean) # /makecdfs -_Same(_manifest, "InputResourceManifests", _string) # /inputresource -_Same(_manifest, "OutputManifestFile", _file_name) # /out -_Same(_manifest, "RegistrarScriptFile", _file_name) # /rgs -_Same(_manifest, "ReplacementsFile", _file_name) # /replacements -_Same(_manifest, "SuppressStartupBanner", _boolean) # /nologo -_Same(_manifest, "TypeLibraryFile", _file_name) # /tlb: -_Same(_manifest, "UpdateFileHashes", _boolean) # /hashupdate -_Same(_manifest, "UpdateFileHashesSearchPath", _file_name) -_Same(_manifest, "VerboseOutput", _boolean) # /verbose - -# Options that have moved location. -_MovedAndRenamed( - _manifest, - "ManifestResourceFile", - "ManifestResourceCompile", - "ResourceOutputFileName", - _file_name, -) -_Moved(_manifest, "EmbedManifest", "", _boolean) - -# MSVS options not found in MSBuild. -_MSVSOnly(_manifest, "DependencyInformationFile", _file_name) -_MSVSOnly(_manifest, "UseFAT32Workaround", _boolean) -_MSVSOnly(_manifest, "UseUnicodeResponseFiles", _boolean) - -# MSBuild options not found in MSVS. 
-_MSBuildOnly(_manifest, "EnableDPIAwareness", _boolean) -_MSBuildOnly(_manifest, "GenerateCategoryTags", _boolean) # /category -_MSBuildOnly( - _manifest, "ManifestFromManagedAssembly", _file_name -) # /managedassemblyname -_MSBuildOnly(_manifest, "OutputResourceManifests", _string) # /outputresource -_MSBuildOnly(_manifest, "SuppressDependencyElement", _boolean) # /nodependency -_MSBuildOnly(_manifest, "TrackerLogDirectory", _folder_name) - - -# Directives for MASM. -# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the -# MSBuild MASM settings. - -# Options that have the same name in MSVS and MSBuild. -_Same(_masm, "UseSafeExceptionHandlers", _boolean) # /safeseh diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py deleted file mode 100755 index 99860c880ec5d..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py +++ /dev/null @@ -1,1550 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Unit tests for the MSVSSettings.py file.""" - -import unittest -import gyp.MSVSSettings as MSVSSettings - -try: - from StringIO import StringIO # Python 2 -except ImportError: - from io import StringIO # Python 3 - - -class TestSequenceFunctions(unittest.TestCase): - def setUp(self): - self.stderr = StringIO() - - def _ExpectedWarnings(self, expected): - """Compares recorded lines to expected warnings.""" - self.stderr.seek(0) - actual = self.stderr.read().split("\n") - actual = [line for line in actual if line] - self.assertEqual(sorted(expected), sorted(actual)) - - def testValidateMSVSSettings_tool_names(self): - """Tests that only MSVS tool names are allowed.""" - MSVSSettings.ValidateMSVSSettings( - { - "VCCLCompilerTool": {}, - "VCLinkerTool": {}, - "VCMIDLTool": {}, - "foo": {}, - "VCResourceCompilerTool": {}, - "VCLibrarianTool": {}, - "VCManifestTool": {}, - "ClCompile": {}, - }, - self.stderr, - ) - self._ExpectedWarnings( - ["Warning: unrecognized tool foo", "Warning: unrecognized tool ClCompile"] - ) - - def testValidateMSVSSettings_settings(self): - """Tests that for invalid MSVS settings.""" - MSVSSettings.ValidateMSVSSettings( - { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": ["string1", "string2"], - "AdditionalUsingDirectories": "folder1;folder2", - "AssemblerListingLocation": "a_file_name", - "AssemblerOutput": "0", - "BasicRuntimeChecks": "5", - "BrowseInformation": "fdkslj", - "BrowseInformationFile": "a_file_name", - "BufferSecurityCheck": "true", - "CallingConvention": "-1", - "CompileAs": "1", - "DebugInformationFormat": "2", - "DefaultCharIsUnsigned": "true", - "Detect64BitPortabilityProblems": "true", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "string1;string2", - "EnableEnhancedInstructionSet": "1", - "EnableFiberSafeOptimizations": "true", - "EnableFunctionLevelLinking": "true", - "EnableIntrinsicFunctions": "true", - "EnablePREfast": "true", - "Enableprefast": "bogus", - "ErrorReporting": "1", - "ExceptionHandling": "1", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "1", - "FloatingPointExceptions": "true", - "FloatingPointModel": "1", - "ForceConformanceInForLoopScope": "true", - "ForcedIncludeFiles": "file1;file2", - "ForcedUsingFiles": "file1;file2", 
- "GeneratePreprocessedFile": "1", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "1", - "KeepComments": "true", - "MinimalRebuild": "true", - "ObjectFile": "a_file_name", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMP": "true", - "Optimization": "1", - "PrecompiledHeaderFile": "a_file_name", - "PrecompiledHeaderThrough": "a_file_name", - "PreprocessorDefinitions": "string1;string2", - "ProgramDataBaseFileName": "a_file_name", - "RuntimeLibrary": "1", - "RuntimeTypeInfo": "true", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "1", - "SuppressStartupBanner": "true", - "TreatWChar_tAsBuiltInType": "true", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "string1;string2", - "UseFullPaths": "true", - "UsePrecompiledHeader": "1", - "UseUnicodeResponseFiles": "true", - "WarnAsError": "true", - "WarningLevel": "1", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "a_file_name", - "ZZXYZ": "bogus", - }, - "VCLinkerTool": { - "AdditionalDependencies": "file1;file2", - "AdditionalDependencies_excluded": "file3", - "AdditionalLibraryDirectories": "folder1;folder2", - "AdditionalManifestDependencies": "file1;file2", - "AdditionalOptions": "a string1", - "AddModuleNamesToAssembly": "file1;file2", - "AllowIsolation": "true", - "AssemblyDebug": "2", - "AssemblyLinkResource": "file1;file2", - "BaseAddress": "a string1", - "CLRImageType": "2", - "CLRThreadAttribute": "2", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "2", - "DelayLoadDLLs": "file1;file2", - "DelaySign": "true", - "Driver": "2", - "EmbedManagedResourceFile": "file1;file2", - "EnableCOMDATFolding": "2", - "EnableUAC": "true", - "EntryPointSymbol": "a string1", - "ErrorReporting": "2", - "FixedBaseAddress": "2", - "ForceSymbolReferences": "file1;file2", - "FunctionOrder": "a_file_name", - "GenerateDebugInformation": "true", - "GenerateManifest": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "a string1", - "HeapReserveSize": "a string1", - "IgnoreAllDefaultLibraries": "true", - "IgnoreDefaultLibraryNames": "file1;file2", - "IgnoreEmbeddedIDL": "true", - "IgnoreImportLibrary": "true", - "ImportLibrary": "a_file_name", - "KeyContainer": "a_file_name", - "KeyFile": "a_file_name", - "LargeAddressAware": "2", - "LinkIncremental": "2", - "LinkLibraryDependencies": "true", - "LinkTimeCodeGeneration": "2", - "ManifestFile": "a_file_name", - "MapExports": "true", - "MapFileName": "a_file_name", - "MergedIDLBaseFileName": "a_file_name", - "MergeSections": "a string1", - "MidlCommandFile": "a_file_name", - "ModuleDefinitionFile": "a_file_name", - "OptimizeForWindows98": "1", - "OptimizeReferences": "2", - "OutputFile": "a_file_name", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": "a_file_name", - "ProgramDatabaseFile": "a_file_name", - "RandomizedBaseAddress": "2", - "RegisterOutput": "true", - "ResourceOnlyDLL": "true", - "SetChecksum": "true", - "ShowProgress": "2", - "StackCommitSize": "a string1", - "StackReserveSize": "a string1", - "StripPrivateSymbols": "a_file_name", - "SubSystem": "2", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "true", - "SwapRunFromCD": "true", - "SwapRunFromNet": "true", - "TargetMachine": "2", - "TerminalServerAware": "2", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "a_file_name", - "TypeLibraryResourceID": "33", - 
"UACExecutionLevel": "2", - "UACUIAccess": "true", - "UseLibraryDependencyInputs": "true", - "UseUnicodeResponseFiles": "true", - "Version": "a string1", - }, - "VCMIDLTool": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "CPreprocessOptions": "a string1", - "DefaultCharType": "1", - "DLLDataFileName": "a_file_name", - "EnableErrorChecks": "1", - "ErrorCheckAllocations": "true", - "ErrorCheckBounds": "true", - "ErrorCheckEnumRange": "true", - "ErrorCheckRefPointers": "true", - "ErrorCheckStubData": "true", - "GenerateStublessProxies": "true", - "GenerateTypeLibrary": "true", - "HeaderFileName": "a_file_name", - "IgnoreStandardIncludePath": "true", - "InterfaceIdentifierFileName": "a_file_name", - "MkTypLibCompatible": "true", - "notgood": "bogus", - "OutputDirectory": "a string1", - "PreprocessorDefinitions": "string1;string2", - "ProxyFileName": "a_file_name", - "RedirectOutputAndErrors": "a_file_name", - "StructMemberAlignment": "1", - "SuppressStartupBanner": "true", - "TargetEnvironment": "1", - "TypeLibraryName": "a_file_name", - "UndefinePreprocessorDefinitions": "string1;string2", - "ValidateParameters": "true", - "WarnAsError": "true", - "WarningLevel": "1", - }, - "VCResourceCompilerTool": { - "AdditionalOptions": "a string1", - "AdditionalIncludeDirectories": "folder1;folder2", - "Culture": "1003", - "IgnoreStandardIncludePath": "true", - "notgood2": "bogus", - "PreprocessorDefinitions": "string1;string2", - "ResourceOutputFileName": "a string1", - "ShowProgress": "true", - "SuppressStartupBanner": "true", - "UndefinePreprocessorDefinitions": "string1;string2", - }, - "VCLibrarianTool": { - "AdditionalDependencies": "file1;file2", - "AdditionalLibraryDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "ExportNamedFunctions": "string1;string2", - "ForceSymbolReferences": "a string1", - "IgnoreAllDefaultLibraries": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2", - "LinkLibraryDependencies": "true", - "ModuleDefinitionFile": "a_file_name", - "OutputFile": "a_file_name", - "SuppressStartupBanner": "true", - "UseUnicodeResponseFiles": "true", - }, - "VCManifestTool": { - "AdditionalManifestFiles": "file1;file2", - "AdditionalOptions": "a string1", - "AssemblyIdentity": "a string1", - "ComponentFileName": "a_file_name", - "DependencyInformationFile": "a_file_name", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "a string1", - "ManifestResourceFile": "a_file_name", - "OutputManifestFile": "a_file_name", - "RegistrarScriptFile": "a_file_name", - "ReplacementsFile": "a_file_name", - "SuppressStartupBanner": "true", - "TypeLibraryFile": "a_file_name", - "UpdateFileHashes": "truel", - "UpdateFileHashesSearchPath": "a_file_name", - "UseFAT32Workaround": "true", - "UseUnicodeResponseFiles": "true", - "VerboseOutput": "true", - }, - }, - self.stderr, - ) - self._ExpectedWarnings( - [ - "Warning: for VCCLCompilerTool/BasicRuntimeChecks, " - "index value (5) not in expected range [0, 4)", - "Warning: for VCCLCompilerTool/BrowseInformation, " - "invalid literal for int() with base 10: 'fdkslj'", - "Warning: for VCCLCompilerTool/CallingConvention, " - "index value (-1) not in expected range [0, 4)", - "Warning: for VCCLCompilerTool/DebugInformationFormat, " - "converted value for 2 not specified.", - "Warning: unrecognized setting VCCLCompilerTool/Enableprefast", - "Warning: unrecognized setting VCCLCompilerTool/ZZXYZ", - "Warning: for VCLinkerTool/TargetMachine, " - "converted value for 2 not specified.", - 
"Warning: unrecognized setting VCMIDLTool/notgood", - "Warning: unrecognized setting VCResourceCompilerTool/notgood2", - "Warning: for VCManifestTool/UpdateFileHashes, " - "expected bool; got 'truel'" - "", - ] - ) - - def testValidateMSBuildSettings_settings(self): - """Tests that for invalid MSBuild settings.""" - MSVSSettings.ValidateMSBuildSettings( - { - "ClCompile": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": ["string1", "string2"], - "AdditionalUsingDirectories": "folder1;folder2", - "AssemblerListingLocation": "a_file_name", - "AssemblerOutput": "NoListing", - "BasicRuntimeChecks": "StackFrameRuntimeCheck", - "BrowseInformation": "false", - "BrowseInformationFile": "a_file_name", - "BufferSecurityCheck": "true", - "BuildingInIDE": "true", - "CallingConvention": "Cdecl", - "CompileAs": "CompileAsC", - "CompileAsManaged": "true", - "CreateHotpatchableImage": "true", - "DebugInformationFormat": "ProgramDatabase", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "string1;string2", - "EnableEnhancedInstructionSet": "StreamingSIMDExtensions", - "EnableFiberSafeOptimizations": "true", - "EnablePREfast": "true", - "Enableprefast": "bogus", - "ErrorReporting": "Prompt", - "ExceptionHandling": "SyncCThrow", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "Neither", - "FloatingPointExceptions": "true", - "FloatingPointModel": "Precise", - "ForceConformanceInForLoopScope": "true", - "ForcedIncludeFiles": "file1;file2", - "ForcedUsingFiles": "file1;file2", - "FunctionLevelLinking": "false", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "OnlyExplicitInline", - "IntrinsicFunctions": "false", - "MinimalRebuild": "true", - "MultiProcessorCompilation": "true", - "ObjectFileName": "a_file_name", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMPSupport": "true", - "Optimization": "Disabled", - "PrecompiledHeader": "NotUsing", - "PrecompiledHeaderFile": "a_file_name", - "PrecompiledHeaderOutputFile": "a_file_name", - "PreprocessKeepComments": "true", - "PreprocessorDefinitions": "string1;string2", - "PreprocessOutputPath": "a string1", - "PreprocessSuppressLineNumbers": "false", - "PreprocessToFile": "false", - "ProcessorNumber": "33", - "ProgramDataBaseFileName": "a_file_name", - "RuntimeLibrary": "MultiThreaded", - "RuntimeTypeInfo": "true", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "1Byte", - "SuppressStartupBanner": "true", - "TrackerLogDirectory": "a_folder", - "TreatSpecificWarningsAsErrors": "string1;string2", - "TreatWarningAsError": "true", - "TreatWChar_tAsBuiltInType": "true", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "string1;string2", - "UseFullPaths": "true", - "UseUnicodeForAssemblerListing": "true", - "WarningLevel": "TurnOffAllWarnings", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "a_file_name", - "ZZXYZ": "bogus", - }, - "Link": { - "AdditionalDependencies": "file1;file2", - "AdditionalLibraryDirectories": "folder1;folder2", - "AdditionalManifestDependencies": "file1;file2", - "AdditionalOptions": "a string1", - "AddModuleNamesToAssembly": "file1;file2", - "AllowIsolation": "true", - "AssemblyDebug": "", - "AssemblyLinkResource": "file1;file2", - "BaseAddress": "a string1", - "BuildingInIDE": "true", - "CLRImageType": "ForceIJWImage", - "CLRSupportLastError": "Enabled", - "CLRThreadAttribute": 
"MTAThreadingAttribute", - "CLRUnmanagedCodeCheck": "true", - "CreateHotPatchableImage": "X86Image", - "DataExecutionPrevention": "false", - "DelayLoadDLLs": "file1;file2", - "DelaySign": "true", - "Driver": "NotSet", - "EmbedManagedResourceFile": "file1;file2", - "EnableCOMDATFolding": "false", - "EnableUAC": "true", - "EntryPointSymbol": "a string1", - "FixedBaseAddress": "false", - "ForceFileOutput": "Enabled", - "ForceSymbolReferences": "file1;file2", - "FunctionOrder": "a_file_name", - "GenerateDebugInformation": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "a string1", - "HeapReserveSize": "a string1", - "IgnoreAllDefaultLibraries": "true", - "IgnoreEmbeddedIDL": "true", - "IgnoreSpecificDefaultLibraries": "a_file_list", - "ImageHasSafeExceptionHandlers": "true", - "ImportLibrary": "a_file_name", - "KeyContainer": "a_file_name", - "KeyFile": "a_file_name", - "LargeAddressAware": "false", - "LinkDLL": "true", - "LinkErrorReporting": "SendErrorReport", - "LinkStatus": "true", - "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", - "ManifestFile": "a_file_name", - "MapExports": "true", - "MapFileName": "a_file_name", - "MergedIDLBaseFileName": "a_file_name", - "MergeSections": "a string1", - "MidlCommandFile": "a_file_name", - "MinimumRequiredVersion": "a string1", - "ModuleDefinitionFile": "a_file_name", - "MSDOSStubFileName": "a_file_name", - "NoEntryPoint": "true", - "OptimizeReferences": "false", - "OutputFile": "a_file_name", - "PerUserRedirection": "true", - "PreventDllBinding": "true", - "Profile": "true", - "ProfileGuidedDatabase": "a_file_name", - "ProgramDatabaseFile": "a_file_name", - "RandomizedBaseAddress": "false", - "RegisterOutput": "true", - "SectionAlignment": "33", - "SetChecksum": "true", - "ShowProgress": "LinkVerboseREF", - "SpecifySectionAttributes": "a string1", - "StackCommitSize": "a string1", - "StackReserveSize": "a string1", - "StripPrivateSymbols": "a_file_name", - "SubSystem": "Console", - "SupportNobindOfDelayLoadedDLL": "true", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "true", - "SwapRunFromCD": "true", - "SwapRunFromNET": "true", - "TargetMachine": "MachineX86", - "TerminalServerAware": "false", - "TrackerLogDirectory": "a_folder", - "TreatLinkerWarningAsErrors": "true", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "a_file_name", - "TypeLibraryResourceID": "33", - "UACExecutionLevel": "AsInvoker", - "UACUIAccess": "true", - "Version": "a string1", - }, - "ResourceCompile": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "Culture": "0x236", - "IgnoreStandardIncludePath": "true", - "NullTerminateStrings": "true", - "PreprocessorDefinitions": "string1;string2", - "ResourceOutputFileName": "a string1", - "ShowProgress": "true", - "SuppressStartupBanner": "true", - "TrackerLogDirectory": "a_folder", - "UndefinePreprocessorDefinitions": "string1;string2", - }, - "Midl": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "ApplicationConfigurationMode": "true", - "ClientStubFile": "a_file_name", - "CPreprocessOptions": "a string1", - "DefaultCharType": "Signed", - "DllDataFileName": "a_file_name", - "EnableErrorChecks": "EnableCustom", - "ErrorCheckAllocations": "true", - "ErrorCheckBounds": "true", - "ErrorCheckEnumRange": "true", - "ErrorCheckRefPointers": "true", - "ErrorCheckStubData": "true", - "GenerateClientFiles": "Stub", - "GenerateServerFiles": "None", - "GenerateStublessProxies": "true", - 
"GenerateTypeLibrary": "true", - "HeaderFileName": "a_file_name", - "IgnoreStandardIncludePath": "true", - "InterfaceIdentifierFileName": "a_file_name", - "LocaleID": "33", - "MkTypLibCompatible": "true", - "OutputDirectory": "a string1", - "PreprocessorDefinitions": "string1;string2", - "ProxyFileName": "a_file_name", - "RedirectOutputAndErrors": "a_file_name", - "ServerStubFile": "a_file_name", - "StructMemberAlignment": "NotSet", - "SuppressCompilerWarnings": "true", - "SuppressStartupBanner": "true", - "TargetEnvironment": "Itanium", - "TrackerLogDirectory": "a_folder", - "TypeLibFormat": "NewFormat", - "TypeLibraryName": "a_file_name", - "UndefinePreprocessorDefinitions": "string1;string2", - "ValidateAllParameters": "true", - "WarnAsError": "true", - "WarningLevel": "1", - }, - "Lib": { - "AdditionalDependencies": "file1;file2", - "AdditionalLibraryDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "DisplayLibrary": "a string1", - "ErrorReporting": "PromptImmediately", - "ExportNamedFunctions": "string1;string2", - "ForceSymbolReferences": "a string1", - "IgnoreAllDefaultLibraries": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2", - "LinkTimeCodeGeneration": "true", - "MinimumRequiredVersion": "a string1", - "ModuleDefinitionFile": "a_file_name", - "Name": "a_file_name", - "OutputFile": "a_file_name", - "RemoveObjects": "file1;file2", - "SubSystem": "Console", - "SuppressStartupBanner": "true", - "TargetMachine": "MachineX86i", - "TrackerLogDirectory": "a_folder", - "TreatLibWarningAsErrors": "true", - "UseUnicodeResponseFiles": "true", - "Verbose": "true", - }, - "Manifest": { - "AdditionalManifestFiles": "file1;file2", - "AdditionalOptions": "a string1", - "AssemblyIdentity": "a string1", - "ComponentFileName": "a_file_name", - "EnableDPIAwareness": "fal", - "GenerateCatalogFiles": "truel", - "GenerateCategoryTags": "true", - "InputResourceManifests": "a string1", - "ManifestFromManagedAssembly": "a_file_name", - "notgood3": "bogus", - "OutputManifestFile": "a_file_name", - "OutputResourceManifests": "a string1", - "RegistrarScriptFile": "a_file_name", - "ReplacementsFile": "a_file_name", - "SuppressDependencyElement": "true", - "SuppressStartupBanner": "true", - "TrackerLogDirectory": "a_folder", - "TypeLibraryFile": "a_file_name", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "a_file_name", - "VerboseOutput": "true", - }, - "ProjectReference": { - "LinkLibraryDependencies": "true", - "UseLibraryDependencyInputs": "true", - }, - "ManifestResourceCompile": {"ResourceOutputFileName": "a_file_name"}, - "": { - "EmbedManifest": "true", - "GenerateManifest": "true", - "IgnoreImportLibrary": "true", - "LinkIncremental": "false", - }, - }, - self.stderr, - ) - self._ExpectedWarnings( - [ - "Warning: unrecognized setting ClCompile/Enableprefast", - "Warning: unrecognized setting ClCompile/ZZXYZ", - "Warning: unrecognized setting Manifest/notgood3", - "Warning: for Manifest/GenerateCatalogFiles, " - "expected bool; got 'truel'", - "Warning: for Lib/TargetMachine, unrecognized enumerated value " - "MachineX86i", - "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'", - ] - ) - - def testConvertToMSBuildSettings_empty(self): - """Tests an empty conversion.""" - msvs_settings = {} - expected_msbuild_settings = {} - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def 
testConvertToMSBuildSettings_minimal(self): - """Tests a minimal conversion.""" - msvs_settings = { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "dir1", - "AdditionalOptions": "/foo", - "BasicRuntimeChecks": "0", - }, - "VCLinkerTool": { - "LinkTimeCodeGeneration": "1", - "ErrorReporting": "1", - "DataExecutionPrevention": "2", - }, - } - expected_msbuild_settings = { - "ClCompile": { - "AdditionalIncludeDirectories": "dir1", - "AdditionalOptions": "/foo", - "BasicRuntimeChecks": "Default", - }, - "Link": { - "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", - "LinkErrorReporting": "PromptImmediately", - "DataExecutionPrevention": "true", - }, - } - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def testConvertToMSBuildSettings_warnings(self): - """Tests conversion that generates warnings.""" - msvs_settings = { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "1", - "AdditionalOptions": "2", - # These are incorrect values: - "BasicRuntimeChecks": "12", - "BrowseInformation": "21", - "UsePrecompiledHeader": "13", - "GeneratePreprocessedFile": "14", - }, - "VCLinkerTool": { - # These are incorrect values: - "Driver": "10", - "LinkTimeCodeGeneration": "31", - "ErrorReporting": "21", - "FixedBaseAddress": "6", - }, - "VCResourceCompilerTool": { - # Custom - "Culture": "1003" - }, - } - expected_msbuild_settings = { - "ClCompile": { - "AdditionalIncludeDirectories": "1", - "AdditionalOptions": "2", - }, - "Link": {}, - "ResourceCompile": { - # Custom - "Culture": "0x03eb" - }, - } - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings( - [ - "Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to " - "MSBuild, index value (12) not in expected range [0, 4)", - "Warning: while converting VCCLCompilerTool/BrowseInformation to " - "MSBuild, index value (21) not in expected range [0, 3)", - "Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to " - "MSBuild, index value (13) not in expected range [0, 3)", - "Warning: while converting " - "VCCLCompilerTool/GeneratePreprocessedFile to " - "MSBuild, value must be one of [0, 1, 2]; got 14", - "Warning: while converting VCLinkerTool/Driver to " - "MSBuild, index value (10) not in expected range [0, 4)", - "Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to " - "MSBuild, index value (31) not in expected range [0, 5)", - "Warning: while converting VCLinkerTool/ErrorReporting to " - "MSBuild, index value (21) not in expected range [0, 3)", - "Warning: while converting VCLinkerTool/FixedBaseAddress to " - "MSBuild, index value (6) not in expected range [0, 3)", - ] - ) - - def testConvertToMSBuildSettings_full_synthetic(self): - """Tests conversion of all the MSBuild settings.""" - msvs_settings = { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "AdditionalUsingDirectories": "folder1;folder2;folder3", - "AssemblerListingLocation": "a_file_name", - "AssemblerOutput": "0", - "BasicRuntimeChecks": "1", - "BrowseInformation": "2", - "BrowseInformationFile": "a_file_name", - "BufferSecurityCheck": "true", - "CallingConvention": "0", - "CompileAs": "1", - "DebugInformationFormat": "4", - "DefaultCharIsUnsigned": "true", - 
"Detect64BitPortabilityProblems": "true", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "d1;d2;d3", - "EnableEnhancedInstructionSet": "0", - "EnableFiberSafeOptimizations": "true", - "EnableFunctionLevelLinking": "true", - "EnableIntrinsicFunctions": "true", - "EnablePREfast": "true", - "ErrorReporting": "1", - "ExceptionHandling": "2", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "0", - "FloatingPointExceptions": "true", - "FloatingPointModel": "1", - "ForceConformanceInForLoopScope": "true", - "ForcedIncludeFiles": "file1;file2;file3", - "ForcedUsingFiles": "file1;file2;file3", - "GeneratePreprocessedFile": "1", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "2", - "KeepComments": "true", - "MinimalRebuild": "true", - "ObjectFile": "a_file_name", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMP": "true", - "Optimization": "3", - "PrecompiledHeaderFile": "a_file_name", - "PrecompiledHeaderThrough": "a_file_name", - "PreprocessorDefinitions": "d1;d2;d3", - "ProgramDataBaseFileName": "a_file_name", - "RuntimeLibrary": "0", - "RuntimeTypeInfo": "true", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "1", - "SuppressStartupBanner": "true", - "TreatWChar_tAsBuiltInType": "true", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - "UseFullPaths": "true", - "UsePrecompiledHeader": "1", - "UseUnicodeResponseFiles": "true", - "WarnAsError": "true", - "WarningLevel": "2", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "a_file_name", - }, - "VCLinkerTool": { - "AdditionalDependencies": "file1;file2;file3", - "AdditionalLibraryDirectories": "folder1;folder2;folder3", - "AdditionalLibraryDirectories_excluded": "folder1;folder2;folder3", - "AdditionalManifestDependencies": "file1;file2;file3", - "AdditionalOptions": "a_string", - "AddModuleNamesToAssembly": "file1;file2;file3", - "AllowIsolation": "true", - "AssemblyDebug": "0", - "AssemblyLinkResource": "file1;file2;file3", - "BaseAddress": "a_string", - "CLRImageType": "1", - "CLRThreadAttribute": "2", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "0", - "DelayLoadDLLs": "file1;file2;file3", - "DelaySign": "true", - "Driver": "1", - "EmbedManagedResourceFile": "file1;file2;file3", - "EnableCOMDATFolding": "0", - "EnableUAC": "true", - "EntryPointSymbol": "a_string", - "ErrorReporting": "0", - "FixedBaseAddress": "1", - "ForceSymbolReferences": "file1;file2;file3", - "FunctionOrder": "a_file_name", - "GenerateDebugInformation": "true", - "GenerateManifest": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "a_string", - "HeapReserveSize": "a_string", - "IgnoreAllDefaultLibraries": "true", - "IgnoreDefaultLibraryNames": "file1;file2;file3", - "IgnoreEmbeddedIDL": "true", - "IgnoreImportLibrary": "true", - "ImportLibrary": "a_file_name", - "KeyContainer": "a_file_name", - "KeyFile": "a_file_name", - "LargeAddressAware": "2", - "LinkIncremental": "1", - "LinkLibraryDependencies": "true", - "LinkTimeCodeGeneration": "2", - "ManifestFile": "a_file_name", - "MapExports": "true", - "MapFileName": "a_file_name", - "MergedIDLBaseFileName": "a_file_name", - "MergeSections": "a_string", - "MidlCommandFile": "a_file_name", - "ModuleDefinitionFile": "a_file_name", - "OptimizeForWindows98": "1", - "OptimizeReferences": "0", - "OutputFile": "a_file_name", - "PerUserRedirection": "true", - 
"Profile": "true", - "ProfileGuidedDatabase": "a_file_name", - "ProgramDatabaseFile": "a_file_name", - "RandomizedBaseAddress": "1", - "RegisterOutput": "true", - "ResourceOnlyDLL": "true", - "SetChecksum": "true", - "ShowProgress": "0", - "StackCommitSize": "a_string", - "StackReserveSize": "a_string", - "StripPrivateSymbols": "a_file_name", - "SubSystem": "2", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "true", - "SwapRunFromCD": "true", - "SwapRunFromNet": "true", - "TargetMachine": "3", - "TerminalServerAware": "2", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "a_file_name", - "TypeLibraryResourceID": "33", - "UACExecutionLevel": "1", - "UACUIAccess": "true", - "UseLibraryDependencyInputs": "false", - "UseUnicodeResponseFiles": "true", - "Version": "a_string", - }, - "VCResourceCompilerTool": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "Culture": "1003", - "IgnoreStandardIncludePath": "true", - "PreprocessorDefinitions": "d1;d2;d3", - "ResourceOutputFileName": "a_string", - "ShowProgress": "true", - "SuppressStartupBanner": "true", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - }, - "VCMIDLTool": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "CPreprocessOptions": "a_string", - "DefaultCharType": "0", - "DLLDataFileName": "a_file_name", - "EnableErrorChecks": "2", - "ErrorCheckAllocations": "true", - "ErrorCheckBounds": "true", - "ErrorCheckEnumRange": "true", - "ErrorCheckRefPointers": "true", - "ErrorCheckStubData": "true", - "GenerateStublessProxies": "true", - "GenerateTypeLibrary": "true", - "HeaderFileName": "a_file_name", - "IgnoreStandardIncludePath": "true", - "InterfaceIdentifierFileName": "a_file_name", - "MkTypLibCompatible": "true", - "OutputDirectory": "a_string", - "PreprocessorDefinitions": "d1;d2;d3", - "ProxyFileName": "a_file_name", - "RedirectOutputAndErrors": "a_file_name", - "StructMemberAlignment": "3", - "SuppressStartupBanner": "true", - "TargetEnvironment": "1", - "TypeLibraryName": "a_file_name", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - "ValidateParameters": "true", - "WarnAsError": "true", - "WarningLevel": "4", - }, - "VCLibrarianTool": { - "AdditionalDependencies": "file1;file2;file3", - "AdditionalLibraryDirectories": "folder1;folder2;folder3", - "AdditionalLibraryDirectories_excluded": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "ExportNamedFunctions": "d1;d2;d3", - "ForceSymbolReferences": "a_string", - "IgnoreAllDefaultLibraries": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2;file3", - "LinkLibraryDependencies": "true", - "ModuleDefinitionFile": "a_file_name", - "OutputFile": "a_file_name", - "SuppressStartupBanner": "true", - "UseUnicodeResponseFiles": "true", - }, - "VCManifestTool": { - "AdditionalManifestFiles": "file1;file2;file3", - "AdditionalOptions": "a_string", - "AssemblyIdentity": "a_string", - "ComponentFileName": "a_file_name", - "DependencyInformationFile": "a_file_name", - "EmbedManifest": "true", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "a_string", - "ManifestResourceFile": "my_name", - "OutputManifestFile": "a_file_name", - "RegistrarScriptFile": "a_file_name", - "ReplacementsFile": "a_file_name", - "SuppressStartupBanner": "true", - "TypeLibraryFile": "a_file_name", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "a_file_name", - "UseFAT32Workaround": "true", - "UseUnicodeResponseFiles": "true", - 
"VerboseOutput": "true", - }, - } - expected_msbuild_settings = { - "ClCompile": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string /J", - "AdditionalUsingDirectories": "folder1;folder2;folder3", - "AssemblerListingLocation": "a_file_name", - "AssemblerOutput": "NoListing", - "BasicRuntimeChecks": "StackFrameRuntimeCheck", - "BrowseInformation": "true", - "BrowseInformationFile": "a_file_name", - "BufferSecurityCheck": "true", - "CallingConvention": "Cdecl", - "CompileAs": "CompileAsC", - "DebugInformationFormat": "EditAndContinue", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "d1;d2;d3", - "EnableEnhancedInstructionSet": "NotSet", - "EnableFiberSafeOptimizations": "true", - "EnablePREfast": "true", - "ErrorReporting": "Prompt", - "ExceptionHandling": "Async", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "Neither", - "FloatingPointExceptions": "true", - "FloatingPointModel": "Strict", - "ForceConformanceInForLoopScope": "true", - "ForcedIncludeFiles": "file1;file2;file3", - "ForcedUsingFiles": "file1;file2;file3", - "FunctionLevelLinking": "true", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "AnySuitable", - "IntrinsicFunctions": "true", - "MinimalRebuild": "true", - "ObjectFileName": "a_file_name", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMPSupport": "true", - "Optimization": "Full", - "PrecompiledHeader": "Create", - "PrecompiledHeaderFile": "a_file_name", - "PrecompiledHeaderOutputFile": "a_file_name", - "PreprocessKeepComments": "true", - "PreprocessorDefinitions": "d1;d2;d3", - "PreprocessSuppressLineNumbers": "false", - "PreprocessToFile": "true", - "ProgramDataBaseFileName": "a_file_name", - "RuntimeLibrary": "MultiThreaded", - "RuntimeTypeInfo": "true", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "1Byte", - "SuppressStartupBanner": "true", - "TreatWarningAsError": "true", - "TreatWChar_tAsBuiltInType": "true", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - "UseFullPaths": "true", - "WarningLevel": "Level2", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "a_file_name", - }, - "Link": { - "AdditionalDependencies": "file1;file2;file3", - "AdditionalLibraryDirectories": "folder1;folder2;folder3", - "AdditionalManifestDependencies": "file1;file2;file3", - "AdditionalOptions": "a_string", - "AddModuleNamesToAssembly": "file1;file2;file3", - "AllowIsolation": "true", - "AssemblyDebug": "", - "AssemblyLinkResource": "file1;file2;file3", - "BaseAddress": "a_string", - "CLRImageType": "ForceIJWImage", - "CLRThreadAttribute": "STAThreadingAttribute", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "", - "DelayLoadDLLs": "file1;file2;file3", - "DelaySign": "true", - "Driver": "Driver", - "EmbedManagedResourceFile": "file1;file2;file3", - "EnableCOMDATFolding": "", - "EnableUAC": "true", - "EntryPointSymbol": "a_string", - "FixedBaseAddress": "false", - "ForceSymbolReferences": "file1;file2;file3", - "FunctionOrder": "a_file_name", - "GenerateDebugInformation": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "a_string", - "HeapReserveSize": "a_string", - "IgnoreAllDefaultLibraries": "true", - "IgnoreEmbeddedIDL": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2;file3", - "ImportLibrary": "a_file_name", - "KeyContainer": "a_file_name", - "KeyFile": 
"a_file_name", - "LargeAddressAware": "true", - "LinkErrorReporting": "NoErrorReport", - "LinkTimeCodeGeneration": "PGInstrument", - "ManifestFile": "a_file_name", - "MapExports": "true", - "MapFileName": "a_file_name", - "MergedIDLBaseFileName": "a_file_name", - "MergeSections": "a_string", - "MidlCommandFile": "a_file_name", - "ModuleDefinitionFile": "a_file_name", - "NoEntryPoint": "true", - "OptimizeReferences": "", - "OutputFile": "a_file_name", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": "a_file_name", - "ProgramDatabaseFile": "a_file_name", - "RandomizedBaseAddress": "false", - "RegisterOutput": "true", - "SetChecksum": "true", - "ShowProgress": "NotSet", - "StackCommitSize": "a_string", - "StackReserveSize": "a_string", - "StripPrivateSymbols": "a_file_name", - "SubSystem": "Windows", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "true", - "SwapRunFromCD": "true", - "SwapRunFromNET": "true", - "TargetMachine": "MachineARM", - "TerminalServerAware": "true", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "a_file_name", - "TypeLibraryResourceID": "33", - "UACExecutionLevel": "HighestAvailable", - "UACUIAccess": "true", - "Version": "a_string", - }, - "ResourceCompile": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "Culture": "0x03eb", - "IgnoreStandardIncludePath": "true", - "PreprocessorDefinitions": "d1;d2;d3", - "ResourceOutputFileName": "a_string", - "ShowProgress": "true", - "SuppressStartupBanner": "true", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - }, - "Midl": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "CPreprocessOptions": "a_string", - "DefaultCharType": "Unsigned", - "DllDataFileName": "a_file_name", - "EnableErrorChecks": "All", - "ErrorCheckAllocations": "true", - "ErrorCheckBounds": "true", - "ErrorCheckEnumRange": "true", - "ErrorCheckRefPointers": "true", - "ErrorCheckStubData": "true", - "GenerateStublessProxies": "true", - "GenerateTypeLibrary": "true", - "HeaderFileName": "a_file_name", - "IgnoreStandardIncludePath": "true", - "InterfaceIdentifierFileName": "a_file_name", - "MkTypLibCompatible": "true", - "OutputDirectory": "a_string", - "PreprocessorDefinitions": "d1;d2;d3", - "ProxyFileName": "a_file_name", - "RedirectOutputAndErrors": "a_file_name", - "StructMemberAlignment": "4", - "SuppressStartupBanner": "true", - "TargetEnvironment": "Win32", - "TypeLibraryName": "a_file_name", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - "ValidateAllParameters": "true", - "WarnAsError": "true", - "WarningLevel": "4", - }, - "Lib": { - "AdditionalDependencies": "file1;file2;file3", - "AdditionalLibraryDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "ExportNamedFunctions": "d1;d2;d3", - "ForceSymbolReferences": "a_string", - "IgnoreAllDefaultLibraries": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2;file3", - "ModuleDefinitionFile": "a_file_name", - "OutputFile": "a_file_name", - "SuppressStartupBanner": "true", - "UseUnicodeResponseFiles": "true", - }, - "Manifest": { - "AdditionalManifestFiles": "file1;file2;file3", - "AdditionalOptions": "a_string", - "AssemblyIdentity": "a_string", - "ComponentFileName": "a_file_name", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "a_string", - "OutputManifestFile": "a_file_name", - "RegistrarScriptFile": "a_file_name", - "ReplacementsFile": "a_file_name", - "SuppressStartupBanner": "true", 
- "TypeLibraryFile": "a_file_name", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "a_file_name", - "VerboseOutput": "true", - }, - "ManifestResourceCompile": {"ResourceOutputFileName": "my_name"}, - "ProjectReference": { - "LinkLibraryDependencies": "true", - "UseLibraryDependencyInputs": "false", - }, - "": { - "EmbedManifest": "true", - "GenerateManifest": "true", - "IgnoreImportLibrary": "true", - "LinkIncremental": "false", - }, - } - self.maxDiff = 9999 # on failure display a long diff - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def testConvertToMSBuildSettings_actual(self): - """Tests the conversion of an actual project. - - A VS2008 project with most of the options defined was created through the - VS2008 IDE. It was then converted to VS2010. The tool settings found in - the .vcproj and .vcxproj files were converted to the two dictionaries - msvs_settings and expected_msbuild_settings. - - Note that for many settings, the VS2010 converter adds macros like - %(AdditionalIncludeDirectories) to make sure than inherited values are - included. Since the Gyp projects we generate do not use inheritance, - we removed these macros. They were: - ClCompile: - AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)' - AdditionalOptions: ' %(AdditionalOptions)' - AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)' - DisableSpecificWarnings: ';%(DisableSpecificWarnings)', - ForcedIncludeFiles: ';%(ForcedIncludeFiles)', - ForcedUsingFiles: ';%(ForcedUsingFiles)', - PreprocessorDefinitions: ';%(PreprocessorDefinitions)', - UndefinePreprocessorDefinitions: - ';%(UndefinePreprocessorDefinitions)', - Link: - AdditionalDependencies: ';%(AdditionalDependencies)', - AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)', - AdditionalManifestDependencies: - ';%(AdditionalManifestDependencies)', - AdditionalOptions: ' %(AdditionalOptions)', - AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)', - AssemblyLinkResource: ';%(AssemblyLinkResource)', - DelayLoadDLLs: ';%(DelayLoadDLLs)', - EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)', - ForceSymbolReferences: ';%(ForceSymbolReferences)', - IgnoreSpecificDefaultLibraries: - ';%(IgnoreSpecificDefaultLibraries)', - ResourceCompile: - AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)', - AdditionalOptions: ' %(AdditionalOptions)', - PreprocessorDefinitions: ';%(PreprocessorDefinitions)', - Manifest: - AdditionalManifestFiles: ';%(AdditionalManifestFiles)', - AdditionalOptions: ' %(AdditionalOptions)', - InputResourceManifests: ';%(InputResourceManifests)', - """ - msvs_settings = { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "dir1", - "AdditionalOptions": "/more", - "AdditionalUsingDirectories": "test", - "AssemblerListingLocation": "$(IntDir)\\a", - "AssemblerOutput": "1", - "BasicRuntimeChecks": "3", - "BrowseInformation": "1", - "BrowseInformationFile": "$(IntDir)\\e", - "BufferSecurityCheck": "false", - "CallingConvention": "1", - "CompileAs": "1", - "DebugInformationFormat": "4", - "DefaultCharIsUnsigned": "true", - "Detect64BitPortabilityProblems": "true", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "abc", - "EnableEnhancedInstructionSet": "1", - "EnableFiberSafeOptimizations": "true", - "EnableFunctionLevelLinking": "true", - "EnableIntrinsicFunctions": "true", - "EnablePREfast": "true", - 
"ErrorReporting": "2", - "ExceptionHandling": "2", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "2", - "FloatingPointExceptions": "true", - "FloatingPointModel": "1", - "ForceConformanceInForLoopScope": "false", - "ForcedIncludeFiles": "def", - "ForcedUsingFiles": "ge", - "GeneratePreprocessedFile": "2", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "1", - "KeepComments": "true", - "MinimalRebuild": "true", - "ObjectFile": "$(IntDir)\\b", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMP": "true", - "Optimization": "3", - "PrecompiledHeaderFile": "$(IntDir)\\$(TargetName).pche", - "PrecompiledHeaderThrough": "StdAfx.hd", - "PreprocessorDefinitions": "WIN32;_DEBUG;_CONSOLE", - "ProgramDataBaseFileName": "$(IntDir)\\vc90b.pdb", - "RuntimeLibrary": "3", - "RuntimeTypeInfo": "false", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "3", - "SuppressStartupBanner": "false", - "TreatWChar_tAsBuiltInType": "false", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "wer", - "UseFullPaths": "true", - "UsePrecompiledHeader": "0", - "UseUnicodeResponseFiles": "false", - "WarnAsError": "true", - "WarningLevel": "3", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "$(IntDir)\\c", - }, - "VCLinkerTool": { - "AdditionalDependencies": "zx", - "AdditionalLibraryDirectories": "asd", - "AdditionalManifestDependencies": "s2", - "AdditionalOptions": "/mor2", - "AddModuleNamesToAssembly": "d1", - "AllowIsolation": "false", - "AssemblyDebug": "1", - "AssemblyLinkResource": "d5", - "BaseAddress": "23423", - "CLRImageType": "3", - "CLRThreadAttribute": "1", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "0", - "DelayLoadDLLs": "d4", - "DelaySign": "true", - "Driver": "2", - "EmbedManagedResourceFile": "d2", - "EnableCOMDATFolding": "1", - "EnableUAC": "false", - "EntryPointSymbol": "f5", - "ErrorReporting": "2", - "FixedBaseAddress": "1", - "ForceSymbolReferences": "d3", - "FunctionOrder": "fssdfsd", - "GenerateDebugInformation": "true", - "GenerateManifest": "false", - "GenerateMapFile": "true", - "HeapCommitSize": "13", - "HeapReserveSize": "12", - "IgnoreAllDefaultLibraries": "true", - "IgnoreDefaultLibraryNames": "flob;flok", - "IgnoreEmbeddedIDL": "true", - "IgnoreImportLibrary": "true", - "ImportLibrary": "f4", - "KeyContainer": "f7", - "KeyFile": "f6", - "LargeAddressAware": "2", - "LinkIncremental": "0", - "LinkLibraryDependencies": "false", - "LinkTimeCodeGeneration": "1", - "ManifestFile": "$(IntDir)\\$(TargetFileName).2intermediate.manifest", - "MapExports": "true", - "MapFileName": "d5", - "MergedIDLBaseFileName": "f2", - "MergeSections": "f5", - "MidlCommandFile": "f1", - "ModuleDefinitionFile": "sdsd", - "OptimizeForWindows98": "2", - "OptimizeReferences": "2", - "OutputFile": "$(OutDir)\\$(ProjectName)2.exe", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": "$(TargetDir)$(TargetName).pgdd", - "ProgramDatabaseFile": "Flob.pdb", - "RandomizedBaseAddress": "1", - "RegisterOutput": "true", - "ResourceOnlyDLL": "true", - "SetChecksum": "false", - "ShowProgress": "1", - "StackCommitSize": "15", - "StackReserveSize": "14", - "StripPrivateSymbols": "d3", - "SubSystem": "1", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "false", - "SwapRunFromCD": "true", - "SwapRunFromNet": "true", - "TargetMachine": "1", - 
"TerminalServerAware": "1", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "f3", - "TypeLibraryResourceID": "12", - "UACExecutionLevel": "2", - "UACUIAccess": "true", - "UseLibraryDependencyInputs": "true", - "UseUnicodeResponseFiles": "false", - "Version": "333", - }, - "VCResourceCompilerTool": { - "AdditionalIncludeDirectories": "f3", - "AdditionalOptions": "/more3", - "Culture": "3084", - "IgnoreStandardIncludePath": "true", - "PreprocessorDefinitions": "_UNICODE;UNICODE2", - "ResourceOutputFileName": "$(IntDir)/$(InputName)3.res", - "ShowProgress": "true", - }, - "VCManifestTool": { - "AdditionalManifestFiles": "sfsdfsd", - "AdditionalOptions": "afdsdafsd", - "AssemblyIdentity": "sddfdsadfsa", - "ComponentFileName": "fsdfds", - "DependencyInformationFile": "$(IntDir)\\mt.depdfd", - "EmbedManifest": "false", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "asfsfdafs", - "ManifestResourceFile": - "$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf", - "OutputManifestFile": "$(TargetPath).manifestdfs", - "RegistrarScriptFile": "sdfsfd", - "ReplacementsFile": "sdffsd", - "SuppressStartupBanner": "false", - "TypeLibraryFile": "sfsd", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "sfsd", - "UseFAT32Workaround": "true", - "UseUnicodeResponseFiles": "false", - "VerboseOutput": "true", - }, - } - expected_msbuild_settings = { - "ClCompile": { - "AdditionalIncludeDirectories": "dir1", - "AdditionalOptions": "/more /J", - "AdditionalUsingDirectories": "test", - "AssemblerListingLocation": "$(IntDir)a", - "AssemblerOutput": "AssemblyCode", - "BasicRuntimeChecks": "EnableFastChecks", - "BrowseInformation": "true", - "BrowseInformationFile": "$(IntDir)e", - "BufferSecurityCheck": "false", - "CallingConvention": "FastCall", - "CompileAs": "CompileAsC", - "DebugInformationFormat": "EditAndContinue", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "abc", - "EnableEnhancedInstructionSet": "StreamingSIMDExtensions", - "EnableFiberSafeOptimizations": "true", - "EnablePREfast": "true", - "ErrorReporting": "Queue", - "ExceptionHandling": "Async", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "Size", - "FloatingPointExceptions": "true", - "FloatingPointModel": "Strict", - "ForceConformanceInForLoopScope": "false", - "ForcedIncludeFiles": "def", - "ForcedUsingFiles": "ge", - "FunctionLevelLinking": "true", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "OnlyExplicitInline", - "IntrinsicFunctions": "true", - "MinimalRebuild": "true", - "ObjectFileName": "$(IntDir)b", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMPSupport": "true", - "Optimization": "Full", - "PrecompiledHeader": "NotUsing", # Actual conversion gives '' - "PrecompiledHeaderFile": "StdAfx.hd", - "PrecompiledHeaderOutputFile": "$(IntDir)$(TargetName).pche", - "PreprocessKeepComments": "true", - "PreprocessorDefinitions": "WIN32;_DEBUG;_CONSOLE", - "PreprocessSuppressLineNumbers": "true", - "PreprocessToFile": "true", - "ProgramDataBaseFileName": "$(IntDir)vc90b.pdb", - "RuntimeLibrary": "MultiThreadedDebugDLL", - "RuntimeTypeInfo": "false", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "4Bytes", - "SuppressStartupBanner": "false", - "TreatWarningAsError": "true", - "TreatWChar_tAsBuiltInType": "false", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "wer", - "UseFullPaths": "true", - 
"WarningLevel": "Level3", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "$(IntDir)c", - }, - "Link": { - "AdditionalDependencies": "zx", - "AdditionalLibraryDirectories": "asd", - "AdditionalManifestDependencies": "s2", - "AdditionalOptions": "/mor2", - "AddModuleNamesToAssembly": "d1", - "AllowIsolation": "false", - "AssemblyDebug": "true", - "AssemblyLinkResource": "d5", - "BaseAddress": "23423", - "CLRImageType": "ForceSafeILImage", - "CLRThreadAttribute": "MTAThreadingAttribute", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "", - "DelayLoadDLLs": "d4", - "DelaySign": "true", - "Driver": "UpOnly", - "EmbedManagedResourceFile": "d2", - "EnableCOMDATFolding": "false", - "EnableUAC": "false", - "EntryPointSymbol": "f5", - "FixedBaseAddress": "false", - "ForceSymbolReferences": "d3", - "FunctionOrder": "fssdfsd", - "GenerateDebugInformation": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "13", - "HeapReserveSize": "12", - "IgnoreAllDefaultLibraries": "true", - "IgnoreEmbeddedIDL": "true", - "IgnoreSpecificDefaultLibraries": "flob;flok", - "ImportLibrary": "f4", - "KeyContainer": "f7", - "KeyFile": "f6", - "LargeAddressAware": "true", - "LinkErrorReporting": "QueueForNextLogin", - "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", - "ManifestFile": "$(IntDir)$(TargetFileName).2intermediate.manifest", - "MapExports": "true", - "MapFileName": "d5", - "MergedIDLBaseFileName": "f2", - "MergeSections": "f5", - "MidlCommandFile": "f1", - "ModuleDefinitionFile": "sdsd", - "NoEntryPoint": "true", - "OptimizeReferences": "true", - "OutputFile": "$(OutDir)$(ProjectName)2.exe", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": "$(TargetDir)$(TargetName).pgdd", - "ProgramDatabaseFile": "Flob.pdb", - "RandomizedBaseAddress": "false", - "RegisterOutput": "true", - "SetChecksum": "false", - "ShowProgress": "LinkVerbose", - "StackCommitSize": "15", - "StackReserveSize": "14", - "StripPrivateSymbols": "d3", - "SubSystem": "Console", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "false", - "SwapRunFromCD": "true", - "SwapRunFromNET": "true", - "TargetMachine": "MachineX86", - "TerminalServerAware": "false", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "f3", - "TypeLibraryResourceID": "12", - "UACExecutionLevel": "RequireAdministrator", - "UACUIAccess": "true", - "Version": "333", - }, - "ResourceCompile": { - "AdditionalIncludeDirectories": "f3", - "AdditionalOptions": "/more3", - "Culture": "0x0c0c", - "IgnoreStandardIncludePath": "true", - "PreprocessorDefinitions": "_UNICODE;UNICODE2", - "ResourceOutputFileName": "$(IntDir)%(Filename)3.res", - "ShowProgress": "true", - }, - "Manifest": { - "AdditionalManifestFiles": "sfsdfsd", - "AdditionalOptions": "afdsdafsd", - "AssemblyIdentity": "sddfdsadfsa", - "ComponentFileName": "fsdfds", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "asfsfdafs", - "OutputManifestFile": "$(TargetPath).manifestdfs", - "RegistrarScriptFile": "sdfsfd", - "ReplacementsFile": "sdffsd", - "SuppressStartupBanner": "false", - "TypeLibraryFile": "sfsd", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "sfsd", - "VerboseOutput": "true", - }, - "ProjectReference": { - "LinkLibraryDependencies": "false", - "UseLibraryDependencyInputs": "true", - }, - "": { - "EmbedManifest": "false", - "GenerateManifest": "false", - "IgnoreImportLibrary": "true", - "LinkIncremental": "", - }, - "ManifestResourceCompile": { - "ResourceOutputFileName": - 
"$(IntDir)$(TargetFileName).embed.manifest.resfdsf" - }, - } - self.maxDiff = 9999 # on failure display a long diff - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py deleted file mode 100644 index 2c08589e06dc6..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import gyp.easy_xml as easy_xml - - -class Writer(object): - """Visual Studio XML tool file writer.""" - - def __init__(self, tool_file_path, name): - """Initializes the tool file. - - Args: - tool_file_path: Path to the tool file. - name: Name of the tool file. - """ - self.tool_file_path = tool_file_path - self.name = name - self.rules_section = ["Rules"] - - def AddCustomBuildRule( - self, name, cmd, description, additional_dependencies, outputs, extensions - ): - """Adds a rule to the tool file. - - Args: - name: Name of the rule. - description: Description of the rule. - cmd: Command line of the rule. - additional_dependencies: other files which may trigger the rule. - outputs: outputs of the rule. - extensions: extensions handled by the rule. - """ - rule = [ - "CustomBuildRule", - { - "Name": name, - "ExecutionDescription": description, - "CommandLine": cmd, - "Outputs": ";".join(outputs), - "FileExtensions": ";".join(extensions), - "AdditionalDependencies": ";".join(additional_dependencies), - }, - ] - self.rules_section.append(rule) - - def WriteIfChanged(self): - """Writes the tool file.""" - content = [ - "VisualStudioToolFile", - {"Version": "8.00", "Name": self.name}, - self.rules_section, - ] - easy_xml.WriteXmlIfChanged( - content, self.tool_file_path, encoding="Windows-1252" - ) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py deleted file mode 100644 index de0896e693181..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio user preferences file writer.""" - -import os -import re -import socket # for gethostname - -import gyp.easy_xml as easy_xml - - -# ------------------------------------------------------------------------------ - - -def _FindCommandInPath(command): - """If there are no slashes in the command given, this function - searches the PATH env to find the given command, and converts it - to an absolute path. We have to do this because MSVS is looking - for an actual file to launch a debugger on, not just a command - line. Note that this happens at GYP time, so anything needing to - be built needs to have a full path.""" - if "/" in command or "\\" in command: - # If the command already has path elements (either relative or - # absolute), then assume it is constructed properly. 
- return command - else: - # Search through the path list and find an existing file that - # we can access. - paths = os.environ.get("PATH", "").split(os.pathsep) - for path in paths: - item = os.path.join(path, command) - if os.path.isfile(item) and os.access(item, os.X_OK): - return item - return command - - -def _QuoteWin32CommandLineArgs(args): - new_args = [] - for arg in args: - # Replace all double-quotes with double-double-quotes to escape - # them for cmd shell, and then quote the whole thing if there - # are any. - if arg.find('"') != -1: - arg = '""'.join(arg.split('"')) - arg = '"%s"' % arg - - # Otherwise, if there are any spaces, quote the whole arg. - elif re.search(r"[ \t\n]", arg): - arg = '"%s"' % arg - new_args.append(arg) - return new_args - - -class Writer(object): - """Visual Studio XML user user file writer.""" - - def __init__(self, user_file_path, version, name): - """Initializes the user file. - - Args: - user_file_path: Path to the user file. - version: Version info. - name: Name of the user file. - """ - self.user_file_path = user_file_path - self.version = version - self.name = name - self.configurations = {} - - def AddConfig(self, name): - """Adds a configuration to the project. - - Args: - name: Configuration name. - """ - self.configurations[name] = ["Configuration", {"Name": name}] - - def AddDebugSettings( - self, config_name, command, environment={}, working_directory="" - ): - """Adds a DebugSettings node to the user file for a particular config. - - Args: - command: command line to run. First element in the list is the - executable. All elements of the command will be quoted if - necessary. - working_directory: other files which may trigger the rule. (optional) - """ - command = _QuoteWin32CommandLineArgs(command) - - abs_command = _FindCommandInPath(command[0]) - - if environment and isinstance(environment, dict): - env_list = ['%s="%s"' % (key, val) for (key, val) in environment.items()] - environment = " ".join(env_list) - else: - environment = "" - - n_cmd = [ - "DebugSettings", - { - "Command": abs_command, - "WorkingDirectory": working_directory, - "CommandArguments": " ".join(command[1:]), - "RemoteMachine": socket.gethostname(), - "Environment": environment, - "EnvironmentMerge": "true", - # Currently these are all "dummy" values that we're just setting - # in the default manner that MSVS does it. We could use some of - # these to add additional capabilities, I suppose, but they might - # not have parity with other platforms then. - "Attach": "false", - "DebuggerType": "3", # 'auto' debugger - "Remote": "1", - "RemoteCommand": "", - "HttpUrl": "", - "PDBPath": "", - "SQLDebugging": "", - "DebuggerFlavor": "0", - "MPIRunCommand": "", - "MPIRunArguments": "", - "MPIRunWorkingDirectory": "", - "ApplicationCommand": "", - "ApplicationArguments": "", - "ShimCommand": "", - "MPIAcceptMode": "", - "MPIAcceptFilter": "", - }, - ] - - # Find the config, and add it if it doesn't exist. - if config_name not in self.configurations: - self.AddConfig(config_name) - - # Add the DebugSettings onto the appropriate config. 
- self.configurations[config_name].append(n_cmd) - - def WriteIfChanged(self): - """Writes the user file.""" - configs = ["Configurations"] - for config, spec in sorted(self.configurations.items()): - configs.append(spec) - - content = [ - "VisualStudioUserFile", - {"Version": self.version.ProjectVersion(), "Name": self.name}, - configs, - ] - easy_xml.WriteXmlIfChanged( - content, self.user_file_path, encoding="Windows-1252" - ) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py deleted file mode 100644 index 83a9c297ed6e1..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py +++ /dev/null @@ -1,271 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions shared amongst the Windows generators.""" - -import copy -import os - - -# A dictionary mapping supported target types to extensions. -TARGET_TYPE_EXT = { - "executable": "exe", - "loadable_module": "dll", - "shared_library": "dll", - "static_library": "lib", - "windows_driver": "sys", -} - - -def _GetLargePdbShimCcPath(): - """Returns the path of the large_pdb_shim.cc file.""" - this_dir = os.path.abspath(os.path.dirname(__file__)) - src_dir = os.path.abspath(os.path.join(this_dir, "..", "..")) - win_data_dir = os.path.join(src_dir, "data", "win") - large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc") - return large_pdb_shim_cc - - -def _DeepCopySomeKeys(in_dict, keys): - """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|. - - Arguments: - in_dict: The dictionary to copy. - keys: The keys to be copied. If a key is in this list and doesn't exist in - |in_dict| this is not an error. - Returns: - The partially deep-copied dictionary. - """ - d = {} - for key in keys: - if key not in in_dict: - continue - d[key] = copy.deepcopy(in_dict[key]) - return d - - -def _SuffixName(name, suffix): - """Add a suffix to the end of a target. - - Arguments: - name: name of the target (foo#target) - suffix: the suffix to be added - Returns: - Target name with suffix added (foo_suffix#target) - """ - parts = name.rsplit("#", 1) - parts[0] = "%s_%s" % (parts[0], suffix) - return "#".join(parts) - - -def _ShardName(name, number): - """Add a shard number to the end of a target. - - Arguments: - name: name of the target (foo#target) - number: shard number - Returns: - Target name with shard added (foo_1#target) - """ - return _SuffixName(name, str(number)) - - -def ShardTargets(target_list, target_dicts): - """Shard some targets apart to work around the linkers limits. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - Returns: - Tuple of the new sharded versions of the inputs. - """ - # Gather the targets to shard, and how many pieces. - targets_to_shard = {} - for t in target_dicts: - shards = int(target_dicts[t].get("msvs_shard", 0)) - if shards: - targets_to_shard[t] = shards - # Shard target_list. - new_target_list = [] - for t in target_list: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - new_target_list.append(_ShardName(t, i)) - else: - new_target_list.append(t) - # Shard target_dict. 
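(Editor's note: the _SuffixName/_ShardName helpers in the deleted MSVSUtil.py above splice text into the name portion of a qualified target while keeping the trailing "#toolset" qualifier intact. A small sketch of the behavior; `suffix_name` mirrors _SuffixName.)

```python
def suffix_name(name, suffix):
    # Split off the trailing "#toolset" (if any), suffix the name, rejoin.
    parts = name.rsplit("#", 1)
    parts[0] = "%s_%s" % (parts[0], suffix)
    return "#".join(parts)

print(suffix_name("base/base.gyp:base#target", "large_pdb_shim"))
# base/base.gyp:base_large_pdb_shim#target
print(suffix_name("foo#target", "1"))  # equivalent to _ShardName(name, 1)
# foo_1#target
```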
- new_target_dicts = {} - for t in target_dicts: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - name = _ShardName(t, i) - new_target_dicts[name] = copy.copy(target_dicts[t]) - new_target_dicts[name]["target_name"] = _ShardName( - new_target_dicts[name]["target_name"], i - ) - sources = new_target_dicts[name].get("sources", []) - new_sources = [] - for pos in range(i, len(sources), targets_to_shard[t]): - new_sources.append(sources[pos]) - new_target_dicts[name]["sources"] = new_sources - else: - new_target_dicts[t] = target_dicts[t] - # Shard dependencies. - for t in sorted(new_target_dicts): - for deptype in ("dependencies", "dependencies_original"): - dependencies = copy.copy(new_target_dicts[t].get(deptype, [])) - new_dependencies = [] - for d in dependencies: - if d in targets_to_shard: - for i in range(targets_to_shard[d]): - new_dependencies.append(_ShardName(d, i)) - else: - new_dependencies.append(d) - new_target_dicts[t][deptype] = new_dependencies - - return (new_target_list, new_target_dicts) - - -def _GetPdbPath(target_dict, config_name, vars): - """Returns the path to the PDB file that will be generated by a given - configuration. - - The lookup proceeds as follows: - - Look for an explicit path in the VCLinkerTool configuration block. - - Look for an 'msvs_large_pdb_path' variable. - - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is - specified. - - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'. - - Arguments: - target_dict: The target dictionary to be searched. - config_name: The name of the configuration of interest. - vars: A dictionary of common GYP variables with generator-specific values. - Returns: - The path of the corresponding PDB file. - """ - config = target_dict["configurations"][config_name] - msvs = config.setdefault("msvs_settings", {}) - - linker = msvs.get("VCLinkerTool", {}) - - pdb_path = linker.get("ProgramDatabaseFile") - if pdb_path: - return pdb_path - - variables = target_dict.get("variables", {}) - pdb_path = variables.get("msvs_large_pdb_path", None) - if pdb_path: - return pdb_path - - pdb_base = target_dict.get("product_name", target_dict["target_name"]) - pdb_base = "%s.%s.pdb" % (pdb_base, TARGET_TYPE_EXT[target_dict["type"]]) - pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base - - return pdb_path - - -def InsertLargePdbShims(target_list, target_dicts, vars): - """Insert a shim target that forces the linker to use 4KB pagesize PDBs. - - This is a workaround for targets with PDBs greater than 1GB in size, the - limit for the 1KB pagesize PDBs created by the linker by default. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - vars: A dictionary of common GYP variables with generator-specific values. - Returns: - Tuple of the shimmed version of the inputs. - """ - # Determine which targets need shimming. - targets_to_shim = [] - for t in target_dicts: - target_dict = target_dicts[t] - - # We only want to shim targets that have msvs_large_pdb enabled. - if not int(target_dict.get("msvs_large_pdb", 0)): - continue - # This is intended for executable, shared_library and loadable_module - # targets where every configuration is set up to produce a PDB output. - # If any of these conditions is not true then the shim logic will fail - # below. 
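(Editor's note: the sources loop in ShardTargets above deals sources out round-robin; shard i of N receives every N-th source starting at offset i, which is exactly `sources[i::N]`. For example:)

```python
# Round-robin split of a source list across 3 shards, as ShardTargets does
# with range(i, len(sources), shards).
sources = ["a.cc", "b.cc", "c.cc", "d.cc", "e.cc"]
shards = 3
for i in range(shards):
    print(i, sources[i::shards])
# 0 ['a.cc', 'd.cc']
# 1 ['b.cc', 'e.cc']
# 2 ['c.cc']
```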
- targets_to_shim.append(t) - - large_pdb_shim_cc = _GetLargePdbShimCcPath() - - for t in targets_to_shim: - target_dict = target_dicts[t] - target_name = target_dict.get("target_name") - - base_dict = _DeepCopySomeKeys( - target_dict, ["configurations", "default_configuration", "toolset"] - ) - - # This is the dict for copying the source file (part of the GYP tree) - # to the intermediate directory of the project. This is necessary because - # we can't always build a relative path to the shim source file (on Windows - # GYP and the project may be on different drives), and Ninja hates absolute - # paths (it ends up generating the .obj and .obj.d alongside the source - # file, polluting GYPs tree). - copy_suffix = "large_pdb_copy" - copy_target_name = target_name + "_" + copy_suffix - full_copy_target_name = _SuffixName(t, copy_suffix) - shim_cc_basename = os.path.basename(large_pdb_shim_cc) - shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name - shim_cc_path = shim_cc_dir + "/" + shim_cc_basename - copy_dict = copy.deepcopy(base_dict) - copy_dict["target_name"] = copy_target_name - copy_dict["type"] = "none" - copy_dict["sources"] = [large_pdb_shim_cc] - copy_dict["copies"] = [ - {"destination": shim_cc_dir, "files": [large_pdb_shim_cc]} - ] - - # This is the dict for the PDB generating shim target. It depends on the - # copy target. - shim_suffix = "large_pdb_shim" - shim_target_name = target_name + "_" + shim_suffix - full_shim_target_name = _SuffixName(t, shim_suffix) - shim_dict = copy.deepcopy(base_dict) - shim_dict["target_name"] = shim_target_name - shim_dict["type"] = "static_library" - shim_dict["sources"] = [shim_cc_path] - shim_dict["dependencies"] = [full_copy_target_name] - - # Set up the shim to output its PDB to the same location as the final linker - # target. - for config_name, config in shim_dict.get("configurations").items(): - pdb_path = _GetPdbPath(target_dict, config_name, vars) - - # A few keys that we don't want to propagate. - for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]: - config.pop(key, None) - - msvs = config.setdefault("msvs_settings", {}) - - # Update the compiler directives in the shim target. - compiler = msvs.setdefault("VCCLCompilerTool", {}) - compiler["DebugInformationFormat"] = "3" - compiler["ProgramDataBaseFileName"] = pdb_path - - # Set the explicit PDB path in the appropriate configuration of the - # original target. - config = target_dict["configurations"][config_name] - msvs = config.setdefault("msvs_settings", {}) - linker = msvs.setdefault("VCLinkerTool", {}) - linker["GenerateDebugInformation"] = "true" - linker["ProgramDatabaseFile"] = pdb_path - - # Add the new targets. They must go to the beginning of the list so that - # the dependency generation works as expected in ninja. - target_list.insert(0, full_copy_target_name) - target_list.insert(0, full_shim_target_name) - target_dicts[full_copy_target_name] = copy_dict - target_dicts[full_shim_target_name] = shim_dict - - # Update the original target to depend on the shim target. 
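(Editor's note: the net effect of InsertLargePdbShims is a three-node chain per shimmed target: the original target depends on a "_large_pdb_shim" static library, which depends on a "_large_pdb_copy" staging step. A hypothetical illustration with made-up target names; the real code builds these dicts with _SuffixName and _DeepCopySomeKeys, and the one-"#" rename below only mimics _SuffixName for simple names.)

```python
targets = {"app.gyp:app#target": {"target_name": "app", "dependencies": []}}
t = "app.gyp:app#target"
shim = t.replace("#", "_large_pdb_shim#")   # app.gyp:app_large_pdb_shim#target
copy_t = t.replace("#", "_large_pdb_copy#")
targets[shim] = {"target_name": "app_large_pdb_shim", "type": "static_library",
                 "dependencies": [copy_t]}
targets[copy_t] = {"target_name": "app_large_pdb_copy", "type": "none"}
targets[t]["dependencies"].append(shim)
print(targets[t]["dependencies"])  # ['app.gyp:app_large_pdb_shim#target']
```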
- target_dict.setdefault("dependencies", []).append(full_shim_target_name) - - return (target_list, target_dicts) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py deleted file mode 100644 index 36b006aaa9375..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py +++ /dev/null @@ -1,571 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Handle version information related to Visual Stuio.""" - -import errno -import os -import re -import subprocess -import sys -import glob - -PY3 = bytes != str - - -def JoinPath(*args): - return os.path.normpath(os.path.join(*args)) - - -class VisualStudioVersion(object): - """Information regarding a version of Visual Studio.""" - - def __init__( - self, - short_name, - description, - solution_version, - project_version, - flat_sln, - uses_vcxproj, - path, - sdk_based, - default_toolset=None, - compatible_sdks=None, - ): - self.short_name = short_name - self.description = description - self.solution_version = solution_version - self.project_version = project_version - self.flat_sln = flat_sln - self.uses_vcxproj = uses_vcxproj - self.path = path - self.sdk_based = sdk_based - self.default_toolset = default_toolset - compatible_sdks = compatible_sdks or [] - compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True) - self.compatible_sdks = compatible_sdks - - def ShortName(self): - return self.short_name - - def Description(self): - """Get the full description of the version.""" - return self.description - - def SolutionVersion(self): - """Get the version number of the sln files.""" - return self.solution_version - - def ProjectVersion(self): - """Get the version number of the vcproj or vcxproj files.""" - return self.project_version - - def FlatSolution(self): - return self.flat_sln - - def UsesVcxproj(self): - """Returns true if this version uses a vcxproj file.""" - return self.uses_vcxproj - - def ProjectExtension(self): - """Returns the file extension for the project.""" - return self.uses_vcxproj and ".vcxproj" or ".vcproj" - - def Path(self): - """Returns the path to Visual Studio installation.""" - return self.path - - def ToolPath(self, tool): - """Returns the path to a given compiler tool. """ - return os.path.normpath(os.path.join(self.path, "VC/bin", tool)) - - def DefaultToolset(self): - """Returns the msbuild toolset version that will be used in the absence - of a user override.""" - return self.default_toolset - - def _SetupScriptInternal(self, target_arch): - """Returns a command (with arguments) to be used to set up the - environment.""" - assert target_arch in ("x86", "x64"), "target_arch not supported" - # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the - # depot_tools build tools and should run SetEnv.Cmd to set up the - # environment. The check for WindowsSDKDir alone is not sufficient because - # this is set by running vcvarsall.bat. 
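(Editor's note: the VisualStudioVersion constructor above orders compatible_sdks by stripping the leading "v" and sorting numerically, newest first. A one-liner demonstration:)

```python
# Same key function as VisualStudioVersion.__init__ uses.
compatible_sdks = ["v8.1", "v10.0"]
compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True)
print(compatible_sdks)  # ['v10.0', 'v8.1']
```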
- sdk_dir = os.environ.get("WindowsSDKDir", "") - setup_path = JoinPath(sdk_dir, "Bin", "SetEnv.Cmd") - if self.sdk_based and sdk_dir and os.path.exists(setup_path): - return [setup_path, "/" + target_arch] - - is_host_arch_x64 = ( - os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64" - or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64" - ) - - # For VS2017 (and newer) it's fairly easy - if self.short_name >= "2017": - script_path = JoinPath( - self.path, "VC", "Auxiliary", "Build", "vcvarsall.bat" - ) - - # Always use a native executable, cross-compiling if necessary. - host_arch = "amd64" if is_host_arch_x64 else "x86" - msvc_target_arch = "amd64" if target_arch == "x64" else "x86" - arg = host_arch - if host_arch != msvc_target_arch: - arg += "_" + msvc_target_arch - - return [script_path, arg] - - # We try to find the best version of the env setup batch. - vcvarsall = JoinPath(self.path, "VC", "vcvarsall.bat") - if target_arch == "x86": - if ( - self.short_name >= "2013" - and self.short_name[-1] != "e" - and is_host_arch_x64 - ): - # VS2013 and later, non-Express have a x64-x86 cross that we want - # to prefer. - return [vcvarsall, "amd64_x86"] - else: - # Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat - # for x86 because vcvarsall calls vcvars32, which it can only find if - # VS??COMNTOOLS is set, which isn't guaranteed. - return [JoinPath(self.path, "Common7", "Tools", "vsvars32.bat")] - elif target_arch == "x64": - arg = "x86_amd64" - # Use the 64-on-64 compiler if we're not using an express edition and - # we're running on a 64bit OS. - if self.short_name[-1] != "e" and is_host_arch_x64: - arg = "amd64" - return [vcvarsall, arg] - - def SetupScript(self, target_arch): - script_data = self._SetupScriptInternal(target_arch) - script_path = script_data[0] - if not os.path.exists(script_path): - raise Exception( - "%s is missing - make sure VC++ tools are installed." % script_path - ) - return script_data - - -def _RegistryQueryBase(sysdir, key, value): - """Use reg.exe to read a particular key. - - While ideally we might use the win32 module, we would like gyp to be - python neutral, so for instance cygwin python lacks this module. - - Arguments: - sysdir: The system subdirectory to attempt to launch reg.exe from. - key: The registry key to read from. - value: The particular value to read. - Return: - stdout from reg.exe, or None for failure. - """ - # Skip if not on Windows or Python Win32 setup issue - if sys.platform not in ("win32", "cygwin"): - return None - # Setup params to pass to and attempt to launch reg.exe - cmd = [os.path.join(os.environ.get("WINDIR", ""), sysdir, "reg.exe"), "query", key] - if value: - cmd.extend(["/v", value]) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid - # Note that the error text may be in [1] in some cases - text = p.communicate()[0] - if PY3: - text = text.decode("utf-8") - # Check return code from reg.exe; officially 0==success and 1==error - if p.returncode: - return None - return text - - -def _RegistryQuery(key, value=None): - r"""Use reg.exe to read a particular key through _RegistryQueryBase. - - First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If - that fails, it falls back to System32. Sysnative is available on Vista and - up and available on Windows Server 2003 and XP through KB patch 942589. 
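(Editor's note: the VS2017+ branch of _SetupScriptInternal above picks the vcvarsall.bat argument from the host architecture, appending "_<target>" only when cross-compiling. A sketch of that selection; `vcvarsall_arg` is an illustrative name, not the module's API.)

```python
def vcvarsall_arg(target_arch, is_host_x64):
    host = "amd64" if is_host_x64 else "x86"
    target = "amd64" if target_arch == "x64" else "x86"
    # Native compile uses the bare host arch; cross-compile appends the target.
    return host if host == target else host + "_" + target

print(vcvarsall_arg("x64", True))    # amd64
print(vcvarsall_arg("x64", False))   # x86_amd64
print(vcvarsall_arg("x86", True))    # amd64_x86
```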
Note - that Sysnative will always fail if using 64-bit python due to it being a - virtual directory and System32 will work correctly in the first place. - - KB 942589 - http://support.microsoft.com/kb/942589/en-us. - - Arguments: - key: The registry key. - value: The particular registry value to read (optional). - Return: - stdout from reg.exe, or None for failure. - """ - text = None - try: - text = _RegistryQueryBase("Sysnative", key, value) - except OSError as e: - if e.errno == errno.ENOENT: - text = _RegistryQueryBase("System32", key, value) - else: - raise - return text - - -def _RegistryGetValueUsingWinReg(key, value): - """Use the _winreg module to obtain the value of a registry key. - - Args: - key: The registry key. - value: The particular registry value to read. - Return: - contents of the registry key's value, or None on failure. Throws - ImportError if _winreg is unavailable. - """ - try: - # Python 2 - from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx - except ImportError: - # Python 3 - from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx - - try: - root, subkey = key.split("\\", 1) - assert root == "HKLM" # Only need HKLM for now. - with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey: - return QueryValueEx(hkey, value)[0] - except WindowsError: - return None - - -def _RegistryGetValue(key, value): - """Use _winreg or reg.exe to obtain the value of a registry key. - - Using _winreg is preferable because it solves an issue on some corporate - environments where access to reg.exe is locked down. However, we still need - to fallback to reg.exe for the case where the _winreg module is not available - (for example in cygwin python). - - Args: - key: The registry key. - value: The particular registry value to read. - Return: - contents of the registry key's value, or None on failure. - """ - try: - return _RegistryGetValueUsingWinReg(key, value) - except ImportError: - pass - - # Fallback to reg.exe if we fail to import _winreg. - text = _RegistryQuery(key, value) - if not text: - return None - # Extract value. - match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text) - if not match: - return None - return match.group(1) - - -def _CreateVersion(name, path, sdk_based=False): - """Sets up MSVS project generation. - - Setup is based off the GYP_MSVS_VERSION environment variable or whatever is - autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is - passed in that doesn't match a value in versions python will throw a error. 
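(Editor's note: the regex in _RegistryGetValue above pulls the value out of reg.exe's textual output. A sketch of that extraction; the sample text is an assumed approximation of `reg.exe query` output, shown for illustration only.)

```python
import re

text = (
    "\r\nHKEY_LOCAL_MACHINE\\Software\\Microsoft\\VisualStudio\\15.0\r\n"
    "    InstallDir    REG_SZ    C:\\VS\\Common7\\IDE\\\r\n"
)
# Same pattern as _RegistryGetValue: type token, whitespace, value up to CRLF.
match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text)
print(match.group(1))  # C:\VS\Common7\IDE\
```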
- """ - if path: - path = os.path.normpath(path) - versions = { - "2019": VisualStudioVersion( - "2019", - "Visual Studio 2019", - solution_version="12.00", - project_version="16.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v142", - compatible_sdks=["v8.1", "v10.0"], - ), - "2017": VisualStudioVersion( - "2017", - "Visual Studio 2017", - solution_version="12.00", - project_version="15.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v141", - compatible_sdks=["v8.1", "v10.0"], - ), - "2015": VisualStudioVersion( - "2015", - "Visual Studio 2015", - solution_version="12.00", - project_version="14.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v140", - ), - "2013": VisualStudioVersion( - "2013", - "Visual Studio 2013", - solution_version="13.00", - project_version="12.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v120", - ), - "2013e": VisualStudioVersion( - "2013e", - "Visual Studio 2013", - solution_version="13.00", - project_version="12.0", - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v120", - ), - "2012": VisualStudioVersion( - "2012", - "Visual Studio 2012", - solution_version="12.00", - project_version="4.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v110", - ), - "2012e": VisualStudioVersion( - "2012e", - "Visual Studio 2012", - solution_version="12.00", - project_version="4.0", - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v110", - ), - "2010": VisualStudioVersion( - "2010", - "Visual Studio 2010", - solution_version="11.00", - project_version="4.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - ), - "2010e": VisualStudioVersion( - "2010e", - "Visual C++ Express 2010", - solution_version="11.00", - project_version="4.0", - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - ), - "2008": VisualStudioVersion( - "2008", - "Visual Studio 2008", - solution_version="10.00", - project_version="9.00", - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based, - ), - "2008e": VisualStudioVersion( - "2008e", - "Visual Studio 2008", - solution_version="10.00", - project_version="9.00", - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based, - ), - "2005": VisualStudioVersion( - "2005", - "Visual Studio 2005", - solution_version="9.00", - project_version="8.00", - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based, - ), - "2005e": VisualStudioVersion( - "2005e", - "Visual Studio 2005", - solution_version="9.00", - project_version="8.00", - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based, - ), - } - return versions[str(name)] - - -def _ConvertToCygpath(path): - """Convert to cygwin path if we are using cygwin.""" - if sys.platform == "cygwin": - p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE) - path = p.communicate()[0].strip() - if PY3: - path = path.decode("utf-8") - return path - - -def _DetectVisualStudioVersions(versions_to_check, force_express): - """Collect the list of installed visual studio versions. - - Returns: - A list of visual studio versions installed in descending order of - usage preference. - Base this on the registry and a quick check if devenv.exe exists. 
- Possibilities are: - 2005(e) - Visual Studio 2005 (8) - 2008(e) - Visual Studio 2008 (9) - 2010(e) - Visual Studio 2010 (10) - 2012(e) - Visual Studio 2012 (11) - 2013(e) - Visual Studio 2013 (12) - 2015 - Visual Studio 2015 (14) - 2017 - Visual Studio 2017 (15) - 2019 - Visual Studio 2019 (16) - Where (e) is e for express editions of MSVS and blank otherwise. - """ - version_to_year = { - "8.0": "2005", - "9.0": "2008", - "10.0": "2010", - "11.0": "2012", - "12.0": "2013", - "14.0": "2015", - "15.0": "2017", - "16.0": "2019", - } - versions = [] - for version in versions_to_check: - # Old method of searching for which VS version is installed - # We don't use the 2010-encouraged-way because we also want to get the - # path to the binaries, which it doesn't offer. - keys = [ - r"HKLM\Software\Microsoft\VisualStudio\%s" % version, - r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s" % version, - r"HKLM\Software\Microsoft\VCExpress\%s" % version, - r"HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s" % version, - ] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], "InstallDir") - if not path: - continue - path = _ConvertToCygpath(path) - # Check for full. - full_path = os.path.join(path, "devenv.exe") - express_path = os.path.join(path, "*express.exe") - if not force_express and os.path.exists(full_path): - # Add this one. - versions.append( - _CreateVersion( - version_to_year[version], os.path.join(path, "..", "..") - ) - ) - # Check for express. - elif glob.glob(express_path): - # Add this one. - versions.append( - _CreateVersion( - version_to_year[version] + "e", os.path.join(path, "..", "..") - ) - ) - - # The old method above does not work when only SDK is installed. - keys = [ - r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7", - r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7", - r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7", - r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7", - ] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], version) - if not path: - continue - path = _ConvertToCygpath(path) - if version == "15.0": - if os.path.exists(path): - versions.append(_CreateVersion("2017", path)) - elif version != "14.0": # There is no Express edition for 2015. - versions.append( - _CreateVersion( - version_to_year[version] + "e", - os.path.join(path, ".."), - sdk_based=True, - ) - ) - - return versions - - -def SelectVisualStudioVersion(version="auto", allow_fallback=True): - """Select which version of Visual Studio projects to generate. - - Arguments: - version: Hook to allow caller to force a particular version (vs auto). - Returns: - An object representing a visual studio project format version. - """ - # In auto mode, check environment variable for override. - if version == "auto": - version = os.environ.get("GYP_MSVS_VERSION", "auto") - version_map = { - "auto": ("16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"), - "2005": ("8.0",), - "2005e": ("8.0",), - "2008": ("9.0",), - "2008e": ("9.0",), - "2010": ("10.0",), - "2010e": ("10.0",), - "2012": ("11.0",), - "2012e": ("11.0",), - "2013": ("12.0",), - "2013e": ("12.0",), - "2015": ("14.0",), - "2017": ("15.0",), - "2019": ("16.0",), - } - override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH") - if override_path: - msvs_version = os.environ.get("GYP_MSVS_VERSION") - if not msvs_version: - raise ValueError( - "GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be " - "set to a particular version (e.g. 2010e)." 
- ) - return _CreateVersion(msvs_version, override_path, sdk_based=True) - version = str(version) - versions = _DetectVisualStudioVersions(version_map[version], "e" in version) - if not versions: - if not allow_fallback: - raise ValueError("Could not locate Visual Studio installation.") - if version == "auto": - # Default to 2005 if we couldn't find anything - return _CreateVersion("2005", None) - else: - return _CreateVersion(version, None) - return versions[0] diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/__init__.py deleted file mode 100755 index f6ea625d40a3c..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/__init__.py +++ /dev/null @@ -1,674 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from __future__ import print_function - -import copy -import gyp.input -import argparse -import os.path -import re -import shlex -import sys -import traceback -from gyp.common import GypError - -try: - # Python 2 - string_types = basestring -except NameError: - # Python 3 - string_types = str - -# Default debug modes for GYP -debug = {} - -# List of "official" debug modes, but you can use anything you like. -DEBUG_GENERAL = "general" -DEBUG_VARIABLES = "variables" -DEBUG_INCLUDES = "includes" - - -def DebugOutput(mode, message, *args): - if "all" in gyp.debug or mode in gyp.debug: - ctx = ("unknown", 0, "unknown") - try: - f = traceback.extract_stack(limit=2) - if f: - ctx = f[0][:3] - except Exception: - pass - if args: - message %= args - print( - "%s:%s:%d:%s %s" - % (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message) - ) - - -def FindBuildFiles(): - extension = ".gyp" - files = os.listdir(os.getcwd()) - build_files = [] - for file in files: - if file.endswith(extension): - build_files.append(file) - return build_files - - -def Load( - build_files, - format, - default_variables={}, - includes=[], - depth=".", - params=None, - check=False, - circular_check=True, -): - """ - Loads one or more specified build files. - default_variables and includes will be copied before use. - Returns the generator for the specified format and the - data returned by loading the specified build files. - """ - if params is None: - params = {} - - if "-" in format: - format, params["flavor"] = format.split("-", 1) - - default_variables = copy.copy(default_variables) - - # Default variables provided by this program and its modules should be - # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, - # avoiding collisions with user and automatic variables. - default_variables["GENERATOR"] = format - default_variables["GENERATOR_FLAVOR"] = params.get("flavor", "") - - # Format can be a custom python file, or by default the name of a module - # within gyp.generator. - if format.endswith(".py"): - generator_name = os.path.splitext(format)[0] - path, generator_name = os.path.split(generator_name) - - # Make sure the path to the custom generator is in sys.path - # Don't worry about removing it once we are done. Keeping the path - # to each generator that is used in sys.path is likely harmless and - # arguably a good idea. - path = os.path.abspath(path) - if path not in sys.path: - sys.path.insert(0, path) - else: - generator_name = "gyp.generator." 
+ format - - # These parameters are passed in order (as opposed to by key) - # because ActivePython cannot handle key parameters to __import__. - generator = __import__(generator_name, globals(), locals(), generator_name) - for (key, val) in generator.generator_default_variables.items(): - default_variables.setdefault(key, val) - - # Give the generator the opportunity to set additional variables based on - # the params it will receive in the output phase. - if getattr(generator, "CalculateVariables", None): - generator.CalculateVariables(default_variables, params) - - # Give the generator the opportunity to set generator_input_info based on - # the params it will receive in the output phase. - if getattr(generator, "CalculateGeneratorInputInfo", None): - generator.CalculateGeneratorInputInfo(params) - - # Fetch the generator specific info that gets fed to input, we use getattr - # so we can default things and the generators only have to provide what - # they need. - generator_input_info = { - "non_configuration_keys": getattr( - generator, "generator_additional_non_configuration_keys", [] - ), - "path_sections": getattr(generator, "generator_additional_path_sections", []), - "extra_sources_for_rules": getattr( - generator, "generator_extra_sources_for_rules", [] - ), - "generator_supports_multiple_toolsets": getattr( - generator, "generator_supports_multiple_toolsets", False - ), - "generator_wants_static_library_dependencies_adjusted": getattr( - generator, "generator_wants_static_library_dependencies_adjusted", True - ), - "generator_wants_sorted_dependencies": getattr( - generator, "generator_wants_sorted_dependencies", False - ), - "generator_filelist_paths": getattr( - generator, "generator_filelist_paths", None - ), - } - - # Process the input specific to this generator. - result = gyp.input.Load( - build_files, - default_variables, - includes[:], - depth, - generator_input_info, - check, - circular_check, - params["parallel"], - params["root_targets"], - ) - return [generator] + result - - -def NameValueListToDict(name_value_list): - """ - Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary - of the pairs. If a string is simply NAME, then the value in the dictionary - is set to True. If VALUE can be converted to an integer, it is. - """ - result = {} - for item in name_value_list: - tokens = item.split("=", 1) - if len(tokens) == 2: - # If we can make it an int, use that, otherwise, use the string. - try: - token_value = int(tokens[1]) - except ValueError: - token_value = tokens[1] - # Set the variable to the supplied value. - result[tokens[0]] = token_value - else: - # No value supplied, treat it as a boolean and set it. - result[tokens[0]] = True - return result - - -def ShlexEnv(env_name): - flags = os.environ.get(env_name, []) - if flags: - flags = shlex.split(flags) - return flags - - -def FormatOpt(opt, value): - if opt.startswith("--"): - return "%s=%s" % (opt, value) - return opt + value - - -def RegenerateAppendFlag(flag, values, predicate, env_name, options): - """Regenerate a list of command line flags, for an option of action='append'. - - The |env_name|, if given, is checked in the environment and used to generate - an initial list of options, then the options that were specified on the - command line (given in |values|) are appended. This matches the handling of - environment variables and command line flags where command line flags override - the environment, while not requiring the environment to be set when the flags - are used again. 
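(Editor's note: NameValueListToDict above is how -D defines and GYP_DEFINES become variables; integer-looking values are converted, and a bare NAME becomes True. A self-contained sketch with the same logic; `name_value_list_to_dict` is an illustrative rename.)

```python
def name_value_list_to_dict(items):
    result = {}
    for item in items:
        tokens = item.split("=", 1)
        if len(tokens) == 2:
            try:
                result[tokens[0]] = int(tokens[1])  # prefer int when possible
            except ValueError:
                result[tokens[0]] = tokens[1]
        else:
            result[tokens[0]] = True  # bare NAME acts as a boolean flag
    return result

print(name_value_list_to_dict(["OS=linux", "werror=0", "component"]))
# {'OS': 'linux', 'werror': 0, 'component': True}
```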
- """ - flags = [] - if options.use_environment and env_name: - for flag_value in ShlexEnv(env_name): - value = FormatOpt(flag, predicate(flag_value)) - if value in flags: - flags.remove(value) - flags.append(value) - if values: - for flag_value in values: - flags.append(FormatOpt(flag, predicate(flag_value))) - return flags - - -def RegenerateFlags(options): - """Given a parsed options object, and taking the environment variables into - account, returns a list of flags that should regenerate an equivalent options - object (even in the absence of the environment variables.) - - Any path options will be normalized relative to depth. - - The format flag is not included, as it is assumed the calling generator will - set that as appropriate. - """ - - def FixPath(path): - path = gyp.common.FixIfRelativePath(path, options.depth) - if not path: - return os.path.curdir - return path - - def Noop(value): - return value - - # We always want to ignore the environment when regenerating, to avoid - # duplicate or changed flags in the environment at the time of regeneration. - flags = ["--ignore-environment"] - for name, metadata in options._regeneration_metadata.items(): - opt = metadata["opt"] - value = getattr(options, name) - value_predicate = metadata["type"] == "path" and FixPath or Noop - action = metadata["action"] - env_name = metadata["env_name"] - if action == "append": - flags.extend( - RegenerateAppendFlag(opt, value, value_predicate, env_name, options) - ) - elif action in ("store", None): # None is a synonym for 'store'. - if value: - flags.append(FormatOpt(opt, value_predicate(value))) - elif options.use_environment and env_name and os.environ.get(env_name): - flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) - elif action in ("store_true", "store_false"): - if (action == "store_true" and value) or ( - action == "store_false" and not value - ): - flags.append(opt) - elif options.use_environment and env_name: - print( - "Warning: environment regeneration unimplemented " - "for %s flag %r env_name %r" % (action, opt, env_name), - file=sys.stderr, - ) - else: - print( - "Warning: regeneration unimplemented for action %r " - "flag %r" % (action, opt), - file=sys.stderr, - ) - - return flags - - -class RegeneratableOptionParser(argparse.ArgumentParser): - def __init__(self, usage): - self.__regeneratable_options = {} - argparse.ArgumentParser.__init__(self, usage=usage) - - def add_argument(self, *args, **kw): - """Add an option to the parser. - - This accepts the same arguments as ArgumentParser.add_argument, plus the - following: - regenerate: can be set to False to prevent this option from being included - in regeneration. - env_name: name of environment variable that additional values for this - option come from. - type: adds type='path', to tell the regenerator that the values of - this option need to be made relative to options.depth - """ - env_name = kw.pop("env_name", None) - if "dest" in kw and kw.pop("regenerate", True): - dest = kw["dest"] - - # The path type is needed for regenerating, for optparse we can just treat - # it as a string. 
- type = kw.get("type") - if type == "path": - kw["type"] = str - - self.__regeneratable_options[dest] = { - "action": kw.get("action"), - "type": type, - "env_name": env_name, - "opt": args[0], - } - - argparse.ArgumentParser.add_argument(self, *args, **kw) - - def parse_args(self, *args): - values, args = argparse.ArgumentParser.parse_known_args(self, *args) - values._regeneration_metadata = self.__regeneratable_options - return values, args - - -def gyp_main(args): - my_name = os.path.basename(sys.argv[0]) - usage = "usage: %(prog)s [options ...] [build_file ...]" - - parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s")) - parser.add_argument( - "--build", - dest="configs", - action="append", - help="configuration for build after project generation", - ) - parser.add_argument( - "--check", dest="check", action="store_true", help="check format of gyp files" - ) - parser.add_argument( - "--config-dir", - dest="config_dir", - action="store", - env_name="GYP_CONFIG_DIR", - default=None, - help="The location for configuration files like " "include.gypi.", - ) - parser.add_argument( - "-d", - "--debug", - dest="debug", - metavar="DEBUGMODE", - action="append", - default=[], - help="turn on a debugging " - 'mode for debugging GYP. Supported modes are "variables", ' - '"includes" and "general" or "all" for all of them.', - ) - parser.add_argument( - "-D", - dest="defines", - action="append", - metavar="VAR=VAL", - env_name="GYP_DEFINES", - help="sets variable VAR to value VAL", - ) - parser.add_argument( - "--depth", - dest="depth", - metavar="PATH", - type="path", - help="set DEPTH gyp variable to a relative path to PATH", - ) - parser.add_argument( - "-f", - "--format", - dest="formats", - action="append", - env_name="GYP_GENERATORS", - regenerate=False, - help="output formats to generate", - ) - parser.add_argument( - "-G", - dest="generator_flags", - action="append", - default=[], - metavar="FLAG=VAL", - env_name="GYP_GENERATOR_FLAGS", - help="sets generator flag FLAG to VAL", - ) - parser.add_argument( - "--generator-output", - dest="generator_output", - action="store", - default=None, - metavar="DIR", - type="path", - env_name="GYP_GENERATOR_OUTPUT", - help="puts generated build files under DIR", - ) - parser.add_argument( - "--ignore-environment", - dest="use_environment", - action="store_false", - default=True, - regenerate=False, - help="do not read options from environment variables", - ) - parser.add_argument( - "-I", - "--include", - dest="includes", - action="append", - metavar="INCLUDE", - type="path", - help="files to include in all loaded .gyp files", - ) - # --no-circular-check disables the check for circular relationships between - # .gyp files. These relationships should not exist, but they've only been - # observed to be harmful with the Xcode generator. Chromium's .gyp files - # currently have some circular relationships on non-Mac platforms, so this - # option allows the strict behavior to be used on Macs and the lenient - # behavior to be used elsewhere. - # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. 
- parser.add_argument( - "--no-circular-check", - dest="circular_check", - action="store_false", - default=True, - regenerate=False, - help="don't check for circular relationships between files", - ) - parser.add_argument( - "--no-parallel", - action="store_true", - default=False, - help="Disable multiprocessing", - ) - parser.add_argument( - "-S", - "--suffix", - dest="suffix", - default="", - help="suffix to add to generated files", - ) - parser.add_argument( - "--toplevel-dir", - dest="toplevel_dir", - action="store", - default=None, - metavar="DIR", - type="path", - help="directory to use as the root of the source tree", - ) - parser.add_argument( - "-R", - "--root-target", - dest="root_targets", - action="append", - metavar="TARGET", - help="include only TARGET and its deep dependencies", - ) - - options, build_files_arg = parser.parse_args(args) - build_files = build_files_arg - - # Set up the configuration directory (defaults to ~/.gyp) - if not options.config_dir: - home = None - home_dot_gyp = None - if options.use_environment: - home_dot_gyp = os.environ.get("GYP_CONFIG_DIR", None) - if home_dot_gyp: - home_dot_gyp = os.path.expanduser(home_dot_gyp) - - if not home_dot_gyp: - home_vars = ["HOME"] - if sys.platform in ("cygwin", "win32"): - home_vars.append("USERPROFILE") - for home_var in home_vars: - home = os.getenv(home_var) - if home: - home_dot_gyp = os.path.join(home, ".gyp") - if not os.path.exists(home_dot_gyp): - home_dot_gyp = None - else: - break - else: - home_dot_gyp = os.path.expanduser(options.config_dir) - - if home_dot_gyp and not os.path.exists(home_dot_gyp): - home_dot_gyp = None - - if not options.formats: - # If no format was given on the command line, then check the env variable. - generate_formats = [] - if options.use_environment: - generate_formats = os.environ.get("GYP_GENERATORS", []) - if generate_formats: - generate_formats = re.split(r"[\s,]", generate_formats) - if generate_formats: - options.formats = generate_formats - else: - # Nothing in the variable, default based on platform. - if sys.platform == "darwin": - options.formats = ["xcode"] - elif sys.platform in ("win32", "cygwin"): - options.formats = ["msvs"] - else: - options.formats = ["make"] - - if not options.generator_output and options.use_environment: - g_o = os.environ.get("GYP_GENERATOR_OUTPUT") - if g_o: - options.generator_output = g_o - - options.parallel = not options.no_parallel - - for mode in options.debug: - gyp.debug[mode] = 1 - - # Do an extra check to avoid work when we're not debugging. - if DEBUG_GENERAL in gyp.debug: - DebugOutput(DEBUG_GENERAL, "running with these options:") - for option, value in sorted(options.__dict__.items()): - if option[0] == "_": - continue - if isinstance(value, string_types): - DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) - else: - DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) - - if not build_files: - build_files = FindBuildFiles() - if not build_files: - raise GypError((usage + "\n\n%s: error: no build_file") % (my_name, my_name)) - - # TODO(mark): Chromium-specific hack! - # For Chromium, the gyp "depth" variable should always be a relative path - # to Chromium's top-level "src" directory. If no depth variable was set - # on the command line, try to find a "src" directory by looking at the - # absolute path to each build file's directory. The first "src" component - # found will be treated as though it were the path used for --depth. 
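(Editor's note: the GYP_GENERATORS fallback above treats the environment variable as a whitespace- or comma-separated list of output formats. A one-line demonstration of the split it performs:)

```python
import re

# Same pattern as the formats fallback in gyp_main.
print(re.split(r"[\s,]", "ninja,msvs xcode"))
# ['ninja', 'msvs', 'xcode']
```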
- if not options.depth: - for build_file in build_files: - build_file_dir = os.path.abspath(os.path.dirname(build_file)) - build_file_dir_components = build_file_dir.split(os.path.sep) - components_len = len(build_file_dir_components) - for index in range(components_len - 1, -1, -1): - if build_file_dir_components[index] == "src": - options.depth = os.path.sep.join(build_file_dir_components) - break - del build_file_dir_components[index] - - # If the inner loop found something, break without advancing to another - # build file. - if options.depth: - break - - if not options.depth: - raise GypError( - "Could not automatically locate src directory. This is" - "a temporary Chromium feature that will be removed. Use" - "--depth as a workaround." - ) - - # If toplevel-dir is not set, we assume that depth is the root of our source - # tree. - if not options.toplevel_dir: - options.toplevel_dir = options.depth - - # -D on the command line sets variable defaults - D isn't just for define, - # it's for default. Perhaps there should be a way to force (-F?) a - # variable's value so that it can't be overridden by anything else. - cmdline_default_variables = {} - defines = [] - if options.use_environment: - defines += ShlexEnv("GYP_DEFINES") - if options.defines: - defines += options.defines - cmdline_default_variables = NameValueListToDict(defines) - if DEBUG_GENERAL in gyp.debug: - DebugOutput( - DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables - ) - - # Set up includes. - includes = [] - - # If ~/.gyp/include.gypi exists, it'll be forcibly included into every - # .gyp file that's loaded, before anything else is included. - if home_dot_gyp: - default_include = os.path.join(home_dot_gyp, "include.gypi") - if os.path.exists(default_include): - print("Using overrides found in " + default_include) - includes.append(default_include) - - # Command-line --include files come after the default include. - if options.includes: - includes.extend(options.includes) - - # Generator flags should be prefixed with the target generator since they - # are global across all generator runs. - gen_flags = [] - if options.use_environment: - gen_flags += ShlexEnv("GYP_GENERATOR_FLAGS") - if options.generator_flags: - gen_flags += options.generator_flags - generator_flags = NameValueListToDict(gen_flags) - if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) - - # Generate all requested formats (use a set in case we got one format request - # twice) - for format in set(options.formats): - params = { - "options": options, - "build_files": build_files, - "generator_flags": generator_flags, - "cwd": os.getcwd(), - "build_files_arg": build_files_arg, - "gyp_binary": sys.argv[0], - "home_dot_gyp": home_dot_gyp, - "parallel": options.parallel, - "root_targets": options.root_targets, - "target_arch": cmdline_default_variables.get("target_arch", ""), - } - - # Start with the default variables from the command line. - [generator, flat_list, targets, data] = Load( - build_files, - format, - cmdline_default_variables, - includes, - options.depth, - params, - options.check, - options.circular_check, - ) - - # TODO(mark): Pass |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - # NOTE: flat_list is the flattened dependency graph specifying the order - # that targets may be built. 
Build systems that operate serially or that - # need to have dependencies defined before dependents reference them should - # generate targets in the order specified in flat_list. - generator.GenerateOutput(flat_list, targets, data, params) - - if options.configs: - valid_configs = targets[flat_list[0]]["configurations"] - for conf in options.configs: - if conf not in valid_configs: - raise GypError("Invalid config specified via --build: %s" % conf) - generator.PerformBuild(data, options.configs, params) - - # Done - return 0 - - -def main(args): - try: - return gyp_main(args) - except GypError as e: - sys.stderr.write("gyp: %s\n" % e) - return 1 - - -# NOTE: setuptools generated console_scripts calls function with no arguments -def script_main(): - return main(sys.argv[1:]) - - -if __name__ == "__main__": - sys.exit(script_main()) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/common.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/common.py deleted file mode 100644 index a915643867293..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/common.py +++ /dev/null @@ -1,661 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import errno -import filecmp -import os.path -import re -import tempfile -import sys -import subprocess - -try: - from collections.abc import MutableSet -except ImportError: - from collections import MutableSet - -PY3 = bytes != str - - -# A minimal memoizing decorator. It'll blow up if the args aren't immutable, -# among other "problems". -class memoize(object): - def __init__(self, func): - self.func = func - self.cache = {} - - def __call__(self, *args): - try: - return self.cache[args] - except KeyError: - result = self.func(*args) - self.cache[args] = result - return result - - -class GypError(Exception): - """Error class representing an error, which is to be presented - to the user. The main entry point will catch and display this. - """ - - pass - - -def ExceptionAppend(e, msg): - """Append a message to the given exception's message.""" - if not e.args: - e.args = (msg,) - elif len(e.args) == 1: - e.args = (str(e.args[0]) + " " + msg,) - else: - e.args = (str(e.args[0]) + " " + msg,) + e.args[1:] - - -def FindQualifiedTargets(target, qualified_list): - """ - Given a list of qualified targets, return the qualified targets for the - specified |target|. - """ - return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target] - - -def ParseQualifiedTarget(target): - # Splits a qualified target into a build file, target name and toolset. - - # NOTE: rsplit is used to disambiguate the Windows drive letter separator. - target_split = target.rsplit(":", 1) - if len(target_split) == 2: - [build_file, target] = target_split - else: - build_file = None - - target_split = target.rsplit("#", 1) - if len(target_split) == 2: - [target, toolset] = target_split - else: - toolset = None - - return [build_file, target, toolset] - - -def ResolveTarget(build_file, target, toolset): - # This function resolves a target into a canonical form: - # - a fully defined build file, either absolute or relative to the current - # directory - # - a target name - # - a toolset - # - # build_file is the file relative to which 'target' is defined. - # target is the qualified target. - # toolset is the default toolset for that target. 
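(Editor's note: ParseQualifiedTarget above uses rsplit so a Windows drive-letter colon stays inside the build-file path, and either the build file or the toolset may be absent. A self-contained sketch of the same parse; `parse_qualified_target` is an illustrative rename.)

```python
def parse_qualified_target(target):
    # rsplit keeps "C:" in the path; only the last colon separates the target.
    build_file = None
    if ":" in target:
        build_file, target = target.rsplit(":", 1)
    toolset = None
    if "#" in target:
        target, toolset = target.rsplit("#", 1)
    return [build_file, target, toolset]

print(parse_qualified_target(r"C:\src\base\base.gyp:base#host"))
# ['C:\\src\\base\\base.gyp', 'base', 'host']
print(parse_qualified_target("base"))
# [None, 'base', None]
```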
- [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target) - - if parsed_build_file: - if build_file: - # If a relative path, parsed_build_file is relative to the directory - # containing build_file. If build_file is not in the current directory, - # parsed_build_file is not a usable path as-is. Resolve it by - # interpreting it as relative to build_file. If parsed_build_file is - # absolute, it is usable as a path regardless of the current directory, - # and os.path.join will return it as-is. - build_file = os.path.normpath( - os.path.join(os.path.dirname(build_file), parsed_build_file) - ) - # Further (to handle cases like ../cwd), make it relative to cwd) - if not os.path.isabs(build_file): - build_file = RelativePath(build_file, ".") - else: - build_file = parsed_build_file - - if parsed_toolset: - toolset = parsed_toolset - - return [build_file, target, toolset] - - -def BuildFile(fully_qualified_target): - # Extracts the build file from the fully qualified target. - return ParseQualifiedTarget(fully_qualified_target)[0] - - -def GetEnvironFallback(var_list, default): - """Look up a key in the environment, with fallback to secondary keys - and finally falling back to a default value.""" - for var in var_list: - if var in os.environ: - return os.environ[var] - return default - - -def QualifiedTarget(build_file, target, toolset): - # "Qualified" means the file that a target was defined in and the target - # name, separated by a colon, suffixed by a # and the toolset name: - # /path/to/file.gyp:target_name#toolset - fully_qualified = build_file + ":" + target - if toolset: - fully_qualified = fully_qualified + "#" + toolset - return fully_qualified - - -@memoize -def RelativePath(path, relative_to, follow_path_symlink=True): - # Assuming both |path| and |relative_to| are relative to the current - # directory, returns a relative path that identifies path relative to - # relative_to. - # If |follow_symlink_path| is true (default) and |path| is a symlink, then - # this method returns a path to the real file represented by |path|. If it is - # false, this method returns a path to the symlink. If |path| is not a - # symlink, this option has no effect. - - # Convert to normalized (and therefore absolute paths). - if follow_path_symlink: - path = os.path.realpath(path) - else: - path = os.path.abspath(path) - relative_to = os.path.realpath(relative_to) - - # On Windows, we can't create a relative path to a different drive, so just - # use the absolute path. - if sys.platform == "win32": - if ( - os.path.splitdrive(path)[0].lower() - != os.path.splitdrive(relative_to)[0].lower() - ): - return path - - # Split the paths into components. - path_split = path.split(os.path.sep) - relative_to_split = relative_to.split(os.path.sep) - - # Determine how much of the prefix the two paths share. - prefix_len = len(os.path.commonprefix([path_split, relative_to_split])) - - # Put enough ".." components to back up out of relative_to to the common - # prefix, and then append the part of path_split after the common prefix. - relative_split = [os.path.pardir] * ( - len(relative_to_split) - prefix_len - ) + path_split[prefix_len:] - - if len(relative_split) == 0: - # The paths were the same. - return "" - - # Turn it back into a string and we're done. - return os.path.join(*relative_split) - - -@memoize -def InvertRelativePath(path, toplevel_dir=None): - """Given a path like foo/bar that is relative to toplevel_dir, return - the inverse relative path back to the toplevel_dir. - - E.g. 
os.path.normpath(os.path.join(path, InvertRelativePath(path))) - should always produce the empty string, unless the path contains symlinks. - """ - if not path: - return path - toplevel_dir = "." if toplevel_dir is None else toplevel_dir - return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path)) - - -def FixIfRelativePath(path, relative_to): - # Like RelativePath but returns |path| unchanged if it is absolute. - if os.path.isabs(path): - return path - return RelativePath(path, relative_to) - - -def UnrelativePath(path, relative_to): - # Assuming that |relative_to| is relative to the current directory, and |path| - # is a path relative to the dirname of |relative_to|, returns a path that - # identifies |path| relative to the current directory. - rel_dir = os.path.dirname(relative_to) - return os.path.normpath(os.path.join(rel_dir, path)) - - -# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at -# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02 -# and the documentation for various shells. - -# _quote is a pattern that should match any argument that needs to be quoted -# with double-quotes by EncodePOSIXShellArgument. It matches the following -# characters appearing anywhere in an argument: -# \t, \n, space parameter separators -# # comments -# $ expansions (quoted to always expand within one argument) -# % called out by IEEE 1003.1 XCU.2.2 -# & job control -# ' quoting -# (, ) subshell execution -# *, ?, [ pathname expansion -# ; command delimiter -# <, >, | redirection -# = assignment -# {, } brace expansion (bash) -# ~ tilde expansion -# It also matches the empty string, because "" (or '') is the only way to -# represent an empty string literal argument to a POSIX shell. -# -# This does not match the characters in _escape, because those need to be -# backslash-escaped regardless of whether they appear in a double-quoted -# string. -_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$") - -# _escape is a pattern that should match any character that needs to be -# escaped with a backslash, whether or not the argument matched the _quote -# pattern. _escape is used with re.sub to backslash anything in _escape's -# first match group, hence the (parentheses) in the regular expression. -# -# _escape matches the following characters appearing anywhere in an argument: -# " to prevent POSIX shells from interpreting this character for quoting -# \ to prevent POSIX shells from interpreting this character for escaping -# ` to prevent POSIX shells from interpreting this character for command -# substitution -# Missing from this list is $, because the desired behavior of -# EncodePOSIXShellArgument is to permit parameter (variable) expansion. -# -# Also missing from this list is !, which bash will interpret as the history -# expansion character when history is enabled. bash does not enable history -# by default in non-interactive shells, so this is not thought to be a problem. -# ! was omitted from this list because bash interprets "\!" as a literal string -# including the backslash character (avoiding history expansion but retaining -# the backslash), which would not be correct for argument encoding. Handling -# this case properly would also be problematic because bash allows the history -# character to be changed with the histchars shell variable. 
Fortunately, -# as history is not enabled in non-interactive shells and -# EncodePOSIXShellArgument is only expected to encode for non-interactive -# shells, there is no room for error here by ignoring !. -_escape = re.compile(r'(["\\`])') - - -def EncodePOSIXShellArgument(argument): - """Encodes |argument| suitably for consumption by POSIX shells. - - argument may be quoted and escaped as necessary to ensure that POSIX shells - treat the returned value as a literal representing the argument passed to - this function. Parameter (variable) expansions beginning with $ are allowed - to remain intact without escaping the $, to allow the argument to contain - references to variables to be expanded by the shell. - """ - - if not isinstance(argument, str): - argument = str(argument) - - if _quote.search(argument): - quote = '"' - else: - quote = "" - - encoded = quote + re.sub(_escape, r"\\\1", argument) + quote - - return encoded - - -def EncodePOSIXShellList(list): - """Encodes |list| suitably for consumption by POSIX shells. - - Returns EncodePOSIXShellArgument for each item in list, and joins them - together using the space character as an argument separator. - """ - - encoded_arguments = [] - for argument in list: - encoded_arguments.append(EncodePOSIXShellArgument(argument)) - return " ".join(encoded_arguments) - - -def DeepDependencyTargets(target_dicts, roots): - """Returns the recursive list of target dependencies.""" - dependencies = set() - pending = set(roots) - while pending: - # Pluck out one. - r = pending.pop() - # Skip if visited already. - if r in dependencies: - continue - # Add it. - dependencies.add(r) - # Add its children. - spec = target_dicts[r] - pending.update(set(spec.get("dependencies", []))) - pending.update(set(spec.get("dependencies_original", []))) - return list(dependencies - set(roots)) - - -def BuildFileTargets(target_list, build_file): - """From a target_list, returns the subset from the specified build_file. - """ - return [p for p in target_list if BuildFile(p) == build_file] - - -def AllTargets(target_list, target_dicts, build_file): - """Returns all targets (direct and dependencies) for the specified build_file. - """ - bftargets = BuildFileTargets(target_list, build_file) - deptargets = DeepDependencyTargets(target_dicts, bftargets) - return bftargets + deptargets - - -def WriteOnDiff(filename): - """Write to a file only if the new contents differ. - - Arguments: - filename: name of the file to potentially write to. - Returns: - A file like object which will write to temporary file and only overwrite - the target if it differs (on close). - """ - - class Writer(object): - """Wrapper around file which only covers the target if it differs.""" - - def __init__(self): - # On Cygwin remove the "dir" argument - # `C:` prefixed paths are treated as relative, - # consequently ending up with current dir "/cygdrive/c/..." - # being prefixed to those, which was - # obviously a non-existent path, - # for example: "/cygdrive/c//C:\". - # For more details see: - # https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp - base_temp_dir = "" if IsCygwin() else os.path.dirname(filename) - # Pick temporary file. - tmp_fd, self.tmp_path = tempfile.mkstemp( - suffix=".tmp", - prefix=os.path.split(filename)[1] + ".gyp.", - dir=base_temp_dir, - ) - try: - self.tmp_file = os.fdopen(tmp_fd, "wb") - except Exception: - # Don't leave turds behind. 
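(Editor's note: the _quote/_escape pair above splits the work of EncodePOSIXShellArgument: _quote decides whether the whole argument needs double quotes, and _escape backslashes `"`, `\` and `` ` `` while deliberately leaving `$` intact so parameter expansion still works. A self-contained sketch using the same two patterns:)

```python
import re

_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$")
_escape = re.compile(r'(["\\`])')

def encode_posix_shell_argument(argument):
    quote = '"' if _quote.search(argument) else ""
    return quote + _escape.sub(r"\\\1", argument) + quote

print(encode_posix_shell_argument("hello world"))  # "hello world"
print(encode_posix_shell_argument("$HOME/src"))    # "$HOME/src"  ($ survives)
print(encode_posix_shell_argument('say "hi"'))     # "say \"hi\""
```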
- os.unlink(self.tmp_path) - raise - - def __getattr__(self, attrname): - # Delegate everything else to self.tmp_file - return getattr(self.tmp_file, attrname) - - def close(self): - try: - # Close tmp file. - self.tmp_file.close() - # Determine if different. - same = False - try: - same = filecmp.cmp(self.tmp_path, filename, False) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(self.tmp_path) - else: - # The new file is different from the old one, - # or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, - # which means that an extra hoop is required - # to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(0o77) - os.umask(umask) - os.chmod(self.tmp_path, 0o666 & ~umask) - if sys.platform == "win32" and os.path.exists(filename): - # NOTE: on windows (but not cygwin) rename will not replace an - # existing file, so it must be preceded with a remove. - # Sadly there is no way to make the switch atomic. - os.remove(filename) - os.rename(self.tmp_path, filename) - except Exception: - # Don't leave turds behind. - os.unlink(self.tmp_path) - raise - - def write(self, s): - self.tmp_file.write(s.encode("utf-8")) - - return Writer() - - -def EnsureDirExists(path): - """Make sure the directory for |path| exists.""" - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass - - -def GetFlavor(params): - """Returns |params.flavor| if it's set, the system's default flavor else.""" - flavors = { - "cygwin": "win", - "win32": "win", - "darwin": "mac", - } - - if "flavor" in params: - return params["flavor"] - if sys.platform in flavors: - return flavors[sys.platform] - if sys.platform.startswith("sunos"): - return "solaris" - if sys.platform.startswith(("dragonfly", "freebsd")): - return "freebsd" - if sys.platform.startswith("openbsd"): - return "openbsd" - if sys.platform.startswith("netbsd"): - return "netbsd" - if sys.platform.startswith("aix"): - return "aix" - if sys.platform.startswith(("os390", "zos")): - return "zos" - - return "linux" - - -def CopyTool(flavor, out_path, generator_flags={}): - """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it - to |out_path|.""" - # aix and solaris just need flock emulation. mac and win use more complicated - # support scripts. - prefix = {"aix": "flock", "solaris": "flock", "mac": "mac", "win": "win"}.get( - flavor, None - ) - if not prefix: - return - - # Slurp input file. - source_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "%s_tool.py" % prefix - ) - with open(source_path) as source_file: - source = source_file.readlines() - - # Set custom header flags. - header = "# Generated by gyp. Do not edit.\n" - mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None) - if flavor == "mac" and mac_toolchain_dir: - header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir - - # Add header and write it out. - tool_path = os.path.join(out_path, "gyp-%s-tool" % prefix) - with open(tool_path, "w") as tool_file: - tool_file.write("".join([source[0], header] + source[1:])) - - # Make file executable. 
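For orientation, a minimal usage sketch of the WriteOnDiff helper above (the file name is hypothetical; this is not part of the diff):

# Hypothetical usage of WriteOnDiff (sketch only):
writer = WriteOnDiff("out/Default/example.mk")
writer.write("# This file is generated by gyp; do not edit.\n")
writer.close()  # leaves the old file (and its mtime) alone if the contents are identical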
- os.chmod(tool_path, 0o755) - - -# From Alex Martelli, -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# First comment, dated 2001/10/13. -# (Also in the printed Python Cookbook.) - - -def uniquer(seq, idfun=lambda x: x): - seen = {} - result = [] - for item in seq: - marker = idfun(item) - if marker in seen: - continue - seen[marker] = 1 - result.append(item) - return result - - -# Based on http://code.activestate.com/recipes/576694/. -class OrderedSet(MutableSet): - def __init__(self, iterable=None): - self.end = end = [] - end += [None, end, end] # sentinel node for doubly linked list - self.map = {} # key --> [key, prev, next] - if iterable is not None: - self |= iterable - - def __len__(self): - return len(self.map) - - def __contains__(self, key): - return key in self.map - - def add(self, key): - if key not in self.map: - end = self.end - curr = end[1] - curr[2] = end[1] = self.map[key] = [key, curr, end] - - def discard(self, key): - if key in self.map: - key, prev_item, next_item = self.map.pop(key) - prev_item[2] = next_item - next_item[1] = prev_item - - def __iter__(self): - end = self.end - curr = end[2] - while curr is not end: - yield curr[0] - curr = curr[2] - - def __reversed__(self): - end = self.end - curr = end[1] - while curr is not end: - yield curr[0] - curr = curr[1] - - # The second argument is an addition that causes a pylint warning. - def pop(self, last=True): # pylint: disable=W0221 - if not self: - raise KeyError("set is empty") - key = self.end[1][0] if last else self.end[2][0] - self.discard(key) - return key - - def __repr__(self): - if not self: - return "%s()" % (self.__class__.__name__,) - return "%s(%r)" % (self.__class__.__name__, list(self)) - - def __eq__(self, other): - if isinstance(other, OrderedSet): - return len(self) == len(other) and list(self) == list(other) - return set(self) == set(other) - - # Extensions to the recipe. - def update(self, iterable): - for i in iterable: - if i not in self: - self.add(i) - - -class CycleError(Exception): - """An exception raised when an unexpected cycle is detected.""" - - def __init__(self, nodes): - self.nodes = nodes - - def __str__(self): - return "CycleError: cycle involving: " + str(self.nodes) - - -def TopologicallySorted(graph, get_edges): - r"""Topologically sort based on a user provided edge definition. - - Args: - graph: A list of node names. - get_edges: A function mapping from node name to a hashable collection - of node names which this node has outgoing edges to. - Returns: - A list containing all of the node in graph in topological order. - It is assumed that calling get_edges once for each node and caching is - cheaper than repeatedly calling get_edges. - Raises: - CycleError in the event of a cycle. 
- Example: - graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'} - def GetEdges(node): - return re.findall(r'\$\(([^)]+)\)', graph[node]) - print(TopologicallySorted(graph.keys(), GetEdges)) - ==> - ['a', 'c', 'b'] - """ - get_edges = memoize(get_edges) - visited = set() - visiting = set() - ordered_nodes = [] - - def Visit(node): - if node in visiting: - raise CycleError(visiting) - if node in visited: - return - visited.add(node) - visiting.add(node) - for neighbor in get_edges(node): - Visit(neighbor) - visiting.remove(node) - ordered_nodes.insert(0, node) - - for node in sorted(graph): - Visit(node) - return ordered_nodes - - -def CrossCompileRequested(): - # TODO: figure out how to not build extra host objects in the - # non-cross-compile case when this is enabled, and enable unconditionally. - return ( - os.environ.get("GYP_CROSSCOMPILE") - or os.environ.get("AR_host") - or os.environ.get("CC_host") - or os.environ.get("CXX_host") - or os.environ.get("AR_target") - or os.environ.get("CC_target") - or os.environ.get("CXX_target") - ) - - -def IsCygwin(): - try: - out = subprocess.Popen( - "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - stdout, stderr = out.communicate() - if PY3: - stdout = stdout.decode("utf-8") - return "CYGWIN" in str(stdout) - except Exception: - return False diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/common_test.py deleted file mode 100755 index 0310fb266c4e4..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/common_test.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file.
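Restating the corrected TopologicallySorted docstring example as runnable code (a sketch that assumes gyp's pylib is on sys.path):

import re
import gyp.common

graph = {"a": "$(b) $(c)", "b": "hi", "c": "$(b)"}

def GetEdges(node):
    # Pull the $(...) references out of the node's value.
    return re.findall(r"\$\(([^)]+)\)", graph[node])

# Prints ['a', 'c', 'b']: 'b' has no outgoing edges, so it sorts last.
print(gyp.common.TopologicallySorted(graph.keys(), GetEdges))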
- -"""Unit tests for the common.py file.""" - -import gyp.common -import unittest -import sys - - -class TestTopologicallySorted(unittest.TestCase): - def test_Valid(self): - """Test that sorting works on a valid graph with one possible order.""" - graph = { - "a": ["b", "c"], - "b": [], - "c": ["d"], - "d": ["b"], - } - - def GetEdge(node): - return tuple(graph[node]) - - self.assertEqual( - gyp.common.TopologicallySorted(graph.keys(), GetEdge), ["a", "c", "d", "b"] - ) - - def test_Cycle(self): - """Test that an exception is thrown on a cyclic graph.""" - graph = { - "a": ["b"], - "b": ["c"], - "c": ["d"], - "d": ["a"], - } - - def GetEdge(node): - return tuple(graph[node]) - - self.assertRaises( - gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge - ) - - -class TestGetFlavor(unittest.TestCase): - """Test that gyp.common.GetFlavor works as intended""" - - original_platform = "" - - def setUp(self): - self.original_platform = sys.platform - - def tearDown(self): - sys.platform = self.original_platform - - def assertFlavor(self, expected, argument, param): - sys.platform = argument - self.assertEqual(expected, gyp.common.GetFlavor(param)) - - def test_platform_default(self): - self.assertFlavor("freebsd", "freebsd9", {}) - self.assertFlavor("freebsd", "freebsd10", {}) - self.assertFlavor("openbsd", "openbsd5", {}) - self.assertFlavor("solaris", "sunos5", {}) - self.assertFlavor("solaris", "sunos", {}) - self.assertFlavor("linux", "linux2", {}) - self.assertFlavor("linux", "linux3", {}) - self.assertFlavor("linux", "linux", {}) - - def test_param(self): - self.assertFlavor("foobar", "linux2", {"flavor": "foobar"}) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py deleted file mode 100644 index e0628ef4d8310..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import re -import os -import locale -from functools import reduce - - -def XmlToString(content, encoding="utf-8", pretty=False): - """ Writes the XML content to disk, touching the file only if it has changed. - - Visual Studio files have a lot of pre-defined structures. This function makes - it easy to represent these structures as Python data structures, instead of - having to create a lot of function calls. - - Each XML element of the content is represented as a list composed of: - 1. The name of the element, a string, - 2. The attributes of the element, a dictionary (optional), and - 3+. The content of the element, if any. Strings are simple text nodes and - lists are child elements. - - Example 1: - - becomes - ['test'] - - Example 2: - - This is - it! - - - becomes - ['myelement', {'a':'value1', 'b':'value2'}, - ['childtype', 'This is'], - ['childtype', 'it!'], - ] - - Args: - content: The structured content to be converted. - encoding: The encoding to report on the first XML line. - pretty: True if we want pretty printing with indents and new lines. - - Returns: - The XML content as a string. - """ - # We create a huge list of all the elements of the file. 
- xml_parts = ['' % encoding] - if pretty: - xml_parts.append("\n") - _ConstructContentList(xml_parts, content, pretty) - - # Convert it to a string - return "".join(xml_parts) - - -def _ConstructContentList(xml_parts, specification, pretty, level=0): - """ Appends the XML parts corresponding to the specification. - - Args: - xml_parts: A list of XML parts to be appended to. - specification: The specification of the element. See EasyXml docs. - pretty: True if we want pretty printing with indents and new lines. - level: Indentation level. - """ - # The first item in a specification is the name of the element. - if pretty: - indentation = " " * level - new_line = "\n" - else: - indentation = "" - new_line = "" - name = specification[0] - if not isinstance(name, str): - raise Exception( - "The first item of an EasyXml specification should be " - "a string. Specification was " + str(specification) - ) - xml_parts.append(indentation + "<" + name) - - # Optionally in second position is a dictionary of the attributes. - rest = specification[1:] - if rest and isinstance(rest[0], dict): - for at, val in sorted(rest[0].items()): - xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) - rest = rest[1:] - if rest: - xml_parts.append(">") - all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True) - multi_line = not all_strings - if multi_line and new_line: - xml_parts.append(new_line) - for child_spec in rest: - # If it's a string, append a text node. - # Otherwise recurse over that child definition - if isinstance(child_spec, str): - xml_parts.append(_XmlEscape(child_spec)) - else: - _ConstructContentList(xml_parts, child_spec, pretty, level + 1) - if multi_line and indentation: - xml_parts.append(indentation) - xml_parts.append("%s" % (name, new_line)) - else: - xml_parts.append("/>%s" % new_line) - - -def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, win32=False): - """ Writes the XML content to disk, touching the file only if it has changed. - - Args: - content: The structured content to be written. - path: Location of the file. - encoding: The encoding to report on the first line of the XML file. - pretty: True if we want pretty printing with indents and new lines. 
- """ - xml_string = XmlToString(content, encoding, pretty) - if win32 and os.linesep != "\r\n": - xml_string = xml_string.replace("\n", "\r\n") - - default_encoding = locale.getdefaultlocale()[1] - if default_encoding and default_encoding.upper() != encoding.upper(): - xml_string = xml_string.encode(encoding) - - # Get the old content - try: - with open(path, "r") as file: - existing = file.read() - except IOError: - existing = None - - # It has changed, write it - if existing != xml_string: - with open(path, "wb") as file: - file.write(xml_string) - - -_xml_escape_map = { - '"': """, - "'": "'", - "<": "<", - ">": ">", - "&": "&", - "\n": " ", - "\r": " ", -} - - -_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) - - -def _XmlEscape(value, attr=False): - """ Escape a string for inclusion in XML.""" - - def replace(match): - m = match.string[match.start() : match.end()] - # don't replace single quotes in attrs - if attr and m == "'": - return m - return _xml_escape_map[m] - - return _xml_escape_re.sub(replace, value) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py deleted file mode 100755 index 5bc795ddbe441..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py +++ /dev/null @@ -1,112 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the easy_xml.py file. """ - -import gyp.easy_xml as easy_xml -import unittest - -try: - from StringIO import StringIO # Python 2 -except ImportError: - from io import StringIO # Python 3 - - -class TestSequenceFunctions(unittest.TestCase): - def setUp(self): - self.stderr = StringIO() - - def test_EasyXml_simple(self): - self.assertEqual( - easy_xml.XmlToString(["test"]), - '', - ) - - self.assertEqual( - easy_xml.XmlToString(["test"], encoding="Windows-1252"), - '', - ) - - def test_EasyXml_simple_with_attributes(self): - self.assertEqual( - easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]), - '', - ) - - def test_EasyXml_escaping(self): - original = "'\"\r&\nfoo" - converted = "<test>'" & foo" - converted_apos = converted.replace("'", "'") - self.assertEqual( - easy_xml.XmlToString(["test3", {"a": original}, original]), - '%s' - % (converted, converted_apos), - ) - - def test_EasyXml_pretty(self): - self.assertEqual( - easy_xml.XmlToString( - ["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]], - pretty=True, - ), - '\n' - "\n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - "\n", - ) - - def test_EasyXml_complex(self): - # We want to create: - target = ( - '' - "" - '' - "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}" - "Win32Proj" - "automated_ui_tests" - "" - '' - "' - "Application" - "Unicode" - "" - "" - ) - - xml = easy_xml.XmlToString( - [ - "Project", - [ - "PropertyGroup", - {"Label": "Globals"}, - ["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"], - ["Keyword", "Win32Proj"], - ["RootNamespace", "automated_ui_tests"], - ], - ["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}], - [ - "PropertyGroup", - { - "Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'", - "Label": "Configuration", - }, - ["ConfigurationType", "Application"], - ["CharacterSet", "Unicode"], - ], - ] - ) - self.assertEqual(xml, target) - - -if __name__ == "__main__": - 
unittest.main() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py deleted file mode 100755 index f9f89e520ac68..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""These functions are executed via gyp-flock-tool when using the Makefile -generator. Used on systems that don't have a built-in flock.""" - -import fcntl -import os -import struct -import subprocess -import sys - - -def main(args): - executor = FlockTool() - executor.Dispatch(args) - - -class FlockTool(object): - """This class emulates the 'flock' command.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace("-", "") - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - # Note that the stock python on SunOS has a bug - # where fcntl.flock(fd, LOCK_EX) always fails - # with EBADF, that's why we use this F_SETLK - # hack instead. - fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666) - if sys.platform.startswith("aix"): - # Python on AIX is compiled with LARGEFILE support, which changes the - # struct size. - op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) - else: - op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) - fcntl.fcntl(fd, fcntl.F_SETLK, op) - return subprocess.call(cmd_list) - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py deleted file mode 100644 index 7a393c1f9393d..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py +++ /dev/null @@ -1,809 +0,0 @@ -# Copyright (c) 2014 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -This script is intended for use as a GYP_GENERATOR. It takes as input (by way of -the generator flag config_path) the path of a json file that dictates the files -and targets to search for. The following keys are supported: -files: list of paths (relative) of the files to search for. -test_targets: unqualified target names to search for. Any target in this list -that depends upon a file in |files| is output regardless of the type of target -or chain of dependencies. -additional_compile_targets: Unqualified targets to search for in addition to -test_targets. Targets in the combined list that depend upon a file in |files| -are not necessarily output. 
For example, if the target is of type none then the -target is not output (but one of the descendants of the target will be). - -The following is output: -error: only supplied if there is an error. -compile_targets: minimal set of targets that directly or indirectly (for - targets of type none) depend on the files in |files| and is one of the - supplied targets or a target that one of the supplied targets depends on. - The expectation is this set of targets is passed into a build step. This list - always contains the output of test_targets as well. -test_targets: set of targets from the supplied |test_targets| that either - directly or indirectly depend upon a file in |files|. This list is useful - if additional processing needs to be done for certain targets after the - build, such as running tests. -status: outputs one of three values: none of the supplied files were found, - one of the include files changed so that it should be assumed everything - changed (in this case test_targets and compile_targets are not output) or at - least one file was found. -invalid_targets: list of supplied targets that were not found. - -Example: -Consider a graph like the following: - A D - / \ -B C -A depends upon both B and C, A is of type none and B and C are executables. -D is an executable, has no dependencies and nothing depends on it. -If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and -files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then -the following is output: -|compile_targets| = ["B"] B must be built as it depends upon the changed file b.cc -and the supplied target A depends upon it. A is not output as a build_target -as it is of type none with no rules and actions. -|test_targets| = ["B"] B directly depends upon the changed file b.cc. - -Even though the file d.cc, which D depends upon, has changed, D is not output -as it was not supplied by way of |additional_compile_targets| or |test_targets|. - -If the generator flag analyzer_output_path is specified, output is written -there. Otherwise output is written to stdout. - -In Gyp the "all" target is shorthand for the root targets in the files passed -to gyp. For example, if file "a.gyp" contains targets "a1" and -"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency -on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2". -Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not -directly supplied to gyp. On the other hand, if both "a.gyp" and "b.gyp" are supplied to gyp -then the "all" target includes "b1" and "b2". -""" - -from __future__ import print_function - -import gyp.common -import json -import os -import posixpath - -debug = False - -found_dependency_string = "Found dependency" -no_dependency_string = "No dependencies" -# Status when it should be assumed that everything has changed. -all_changed_string = "Found dependency (all)" - -# MatchStatus is used to indicate if and how a target depends upon the supplied -# sources. -# The target's sources contain one of the supplied paths. -MATCH_STATUS_MATCHES = 1 -# The target has a dependency on another target that contains one of the -# supplied paths. -MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2 -# The target's sources weren't in the supplied paths and none of the target's -# dependencies depend upon a target that matched. -MATCH_STATUS_DOESNT_MATCH = 3 -# The target doesn't contain the source, but the dependent targets have not yet -# been visited to determine a more specific status.
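The contract described in the module docstring, as a concrete round trip (a sketch with hypothetical file and target names):

# Hypothetical JSON config passed via the config_path generator flag:
#   {"files": ["b.cc"], "test_targets": ["B"], "additional_compile_targets": ["A"]}
# and a typical result written to stdout (or to analyzer_output_path):
#   {"status": "Found dependency", "test_targets": ["B"], "compile_targets": ["B"]}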
-MATCH_STATUS_TBD = 4 - -generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() - -generator_wants_static_library_dependencies_adjusted = False - -generator_default_variables = {} -for dirname in [ - "INTERMEDIATE_DIR", - "SHARED_INTERMEDIATE_DIR", - "PRODUCT_DIR", - "LIB_DIR", - "SHARED_LIB_DIR", -]: - generator_default_variables[dirname] = "!!!" - -for unused in [ - "RULE_INPUT_PATH", - "RULE_INPUT_ROOT", - "RULE_INPUT_NAME", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "STATIC_LIB_PREFIX", - "STATIC_LIB_SUFFIX", - "SHARED_LIB_PREFIX", - "SHARED_LIB_SUFFIX", - "CONFIGURATION_NAME", -]: - generator_default_variables[unused] = "" - - -def _ToGypPath(path): - """Converts a path to the format used by gyp.""" - if os.sep == "\\" and os.altsep == "/": - return path.replace("\\", "/") - return path - - -def _ResolveParent(path, base_path_components): - """Resolves |path|, which starts with at least one '../'. Returns an empty - string if the path shouldn't be considered. See _AddSources() for a - description of |base_path_components|.""" - depth = 0 - while path.startswith("../"): - depth += 1 - path = path[3:] - # Relative includes may go outside the source tree. For example, an action may - # have inputs in /usr/include, which are not in the source tree. - if depth > len(base_path_components): - return "" - if depth == len(base_path_components): - return path - return ( - "/".join(base_path_components[0 : len(base_path_components) - depth]) - + "/" - + path - ) - - -def _AddSources(sources, base_path, base_path_components, result): - """Extracts valid sources from |sources| and adds them to |result|. Each - source file is relative to |base_path|, but may contain '..'. To make - resolving '..' easier |base_path_components| contains each of the - directories in |base_path|. Additionally each source may contain variables. - Such sources are ignored as it is assumed dependencies on them are expressed - and tracked in some other means.""" - # NOTE: gyp paths are always posix style. - for source in sources: - if not len(source) or source.startswith("!!!") or source.startswith("$"): - continue - # variable expansion may lead to //. - org_source = source - source = source[0] + source[1:].replace("//", "/") - if source.startswith("../"): - source = _ResolveParent(source, base_path_components) - if len(source): - result.append(source) - continue - result.append(base_path + source) - if debug: - print("AddSource", org_source, result[len(result) - 1]) - - -def _ExtractSourcesFromAction(action, base_path, base_path_components, results): - if "inputs" in action: - _AddSources(action["inputs"], base_path, base_path_components, results) - - -def _ToLocalPath(toplevel_dir, path): - """Converts |path| to a path relative to |toplevel_dir|.""" - if path == toplevel_dir: - return "" - if path.startswith(toplevel_dir + "/"): - return path[len(toplevel_dir) + len("/") :] - return path - - -def _ExtractSources(target, target_dict, toplevel_dir): - # |target| is either absolute or relative and in the format of the OS. Gyp - # source paths are always posix. Convert |target| to a posix path relative to - # |toplevel_dir_|. This is done to make it easy to build source paths. - base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target))) - base_path_components = base_path.split("/") - - # Add a trailing '/' so that _AddSources() can easily build paths. 
- if len(base_path): - base_path += "/" - - if debug: - print("ExtractSources", target, base_path) - - results = [] - if "sources" in target_dict: - _AddSources(target_dict["sources"], base_path, base_path_components, results) - # Include the inputs from any actions. Any changes to these affect the - # resulting output. - if "actions" in target_dict: - for action in target_dict["actions"]: - _ExtractSourcesFromAction(action, base_path, base_path_components, results) - if "rules" in target_dict: - for rule in target_dict["rules"]: - _ExtractSourcesFromAction(rule, base_path, base_path_components, results) - - return results - - -class Target(object): - """Holds information about a particular target: - deps: set of Targets this Target depends upon. This is not recursive, only the - direct dependent Targets. - match_status: one of the MatchStatus values. - back_deps: set of Targets that have a dependency on this Target. - visited: used during iteration to indicate whether we've visited this target. - This is used for two iterations, once in building the set of Targets and - again in _GetBuildTargets(). - name: fully qualified name of the target. - requires_build: True if the target type is such that it needs to be built. - See _DoesTargetTypeRequireBuild for details. - added_to_compile_targets: used when determining if the target was added to the - set of targets that needs to be built. - in_roots: true if this target is a descendant of one of the root nodes. - is_executable: true if the type of target is executable. - is_static_library: true if the type of target is static_library. - is_or_has_linked_ancestor: true if the target does a link (eg executable), or - if there is a target in back_deps that does a link.""" - - def __init__(self, name): - self.deps = set() - self.match_status = MATCH_STATUS_TBD - self.back_deps = set() - self.name = name - # TODO(sky): I don't like hanging this off Target. This state is specific - # to certain functions and should be isolated there. - self.visited = False - self.requires_build = False - self.added_to_compile_targets = False - self.in_roots = False - self.is_executable = False - self.is_static_library = False - self.is_or_has_linked_ancestor = False - - -class Config(object): - """Details what we're looking for - files: set of files to search for - targets: see file description for details.""" - - def __init__(self): - self.files = [] - self.targets = set() - self.additional_compile_target_names = set() - self.test_target_names = set() - - def Init(self, params): - """Initializes Config. 
This is a separate method as it raises an exception - if there is a parse error.""" - generator_flags = params.get("generator_flags", {}) - config_path = generator_flags.get("config_path", None) - if not config_path: - return - try: - f = open(config_path, "r") - config = json.load(f) - f.close() - except IOError: - raise Exception("Unable to open file " + config_path) - except ValueError as e: - raise Exception("Unable to parse config file " + config_path + str(e)) - if not isinstance(config, dict): - raise Exception("config_path must be a JSON file containing a dictionary") - self.files = config.get("files", []) - self.additional_compile_target_names = set( - config.get("additional_compile_targets", []) - ) - self.test_target_names = set(config.get("test_targets", [])) - - -def _WasBuildFileModified(build_file, data, files, toplevel_dir): - """Returns true if the build file |build_file| is either in |files| or - one of the files included by |build_file| is in |files|. |toplevel_dir| is - the root of the source tree.""" - if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files: - if debug: - print("gyp file modified", build_file) - return True - - # First element of included_files is the file itself. - if len(data[build_file]["included_files"]) <= 1: - return False - - for include_file in data[build_file]["included_files"][1:]: - # |included_files| are relative to the directory of the |build_file|. - rel_include_file = _ToGypPath( - gyp.common.UnrelativePath(include_file, build_file) - ) - if _ToLocalPath(toplevel_dir, rel_include_file) in files: - if debug: - print( - "included gyp file modified, gyp_file=", - build_file, - "included file=", - rel_include_file, - ) - return True - return False - - -def _GetOrCreateTargetByName(targets, target_name): - """Creates or returns the Target at targets[target_name]. If there is no - Target for |target_name| one is created. Returns a tuple of whether a new - Target was created and the Target.""" - if target_name in targets: - return False, targets[target_name] - target = Target(target_name) - targets[target_name] = target - return True, target - - -def _DoesTargetTypeRequireBuild(target_dict): - """Returns true if the target type is such that it needs to be built.""" - # If a 'none' target has rules or actions we assume it requires a build. - return bool( - target_dict["type"] != "none" - or target_dict.get("actions") - or target_dict.get("rules") - ) - - -def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files): - """Returns a tuple of the following: - . A dictionary mapping from fully qualified name to Target. - . A list of the targets that have a source file in |files|. - . Targets that constitute the 'all' target. See description at top of file - for details on the 'all' target. - This sets the |match_status| of the targets that contain any of the source - files in |files| to MATCH_STATUS_MATCHES. - |toplevel_dir| is the root of the source tree.""" - # Maps from target name to Target. - name_to_target = {} - - # Targets that matched. - matching_targets = [] - - # Queue of targets to visit. - targets_to_visit = target_list[:] - - # Maps from build file to a boolean indicating whether the build file is in - # |files|. - build_file_in_files = {} - - # Root targets across all files. - roots = set() - - # Set of Targets in |build_files|. 
- build_file_targets = set() - - while len(targets_to_visit) > 0: - target_name = targets_to_visit.pop() - created_target, target = _GetOrCreateTargetByName(name_to_target, target_name) - if created_target: - roots.add(target) - elif target.visited: - continue - - target.visited = True - target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name]) - target_type = target_dicts[target_name]["type"] - target.is_executable = target_type == "executable" - target.is_static_library = target_type == "static_library" - target.is_or_has_linked_ancestor = ( - target_type == "executable" or target_type == "shared_library" - ) - - build_file = gyp.common.ParseQualifiedTarget(target_name)[0] - if build_file not in build_file_in_files: - build_file_in_files[build_file] = _WasBuildFileModified( - build_file, data, files, toplevel_dir - ) - - if build_file in build_files: - build_file_targets.add(target) - - # If a build file (or any of its included files) is modified we assume all - # targets in the file are modified. - if build_file_in_files[build_file]: - print("matching target from modified build file", target_name) - target.match_status = MATCH_STATUS_MATCHES - matching_targets.append(target) - else: - sources = _ExtractSources( - target_name, target_dicts[target_name], toplevel_dir - ) - for source in sources: - if _ToGypPath(os.path.normpath(source)) in files: - print("target", target_name, "matches", source) - target.match_status = MATCH_STATUS_MATCHES - matching_targets.append(target) - break - - # Add dependencies to visit as well as updating back pointers for deps. - for dep in target_dicts[target_name].get("dependencies", []): - targets_to_visit.append(dep) - - created_dep_target, dep_target = _GetOrCreateTargetByName( - name_to_target, dep - ) - if not created_dep_target: - roots.discard(dep_target) - - target.deps.add(dep_target) - dep_target.back_deps.add(target) - - return name_to_target, matching_targets, roots & build_file_targets - - -def _GetUnqualifiedToTargetMapping(all_targets, to_find): - """Returns a tuple of the following: - . mapping (dictionary) from unqualified name to Target for all the - Targets in |to_find|. - . any target names not found. If this is empty all targets were found.""" - result = {} - if not to_find: - return {}, [] - to_find = set(to_find) - for target_name in all_targets.keys(): - extracted = gyp.common.ParseQualifiedTarget(target_name) - if len(extracted) > 1 and extracted[1] in to_find: - to_find.remove(extracted[1]) - result[extracted[1]] = all_targets[target_name] - if not to_find: - return result, [] - return result, [x for x in to_find] - - -def _DoesTargetDependOnMatchingTargets(target): - """Returns true if |target| or any of its dependencies is one of the - targets containing the files supplied as input to analyzer. This updates - the |match_status| of the Targets as it recurses.
- target: the Target to look for.""" - if target.match_status == MATCH_STATUS_DOESNT_MATCH: - return False - if ( - target.match_status == MATCH_STATUS_MATCHES - or target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY - ): - return True - for dep in target.deps: - if _DoesTargetDependOnMatchingTargets(dep): - target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY - print("\t", target.name, "matches by dep", dep.name) - return True - target.match_status = MATCH_STATUS_DOESNT_MATCH - return False - - -def _GetTargetsDependingOnMatchingTargets(possible_targets): - """Returns the list of Targets in |possible_targets| that depend (either - directly or indirectly) on at least one of the targets containing the files - supplied as input to analyzer. - possible_targets: targets to search from.""" - found = [] - print("Targets that matched by dependency:") - for target in possible_targets: - if _DoesTargetDependOnMatchingTargets(target): - found.append(target) - return found - - -def _AddCompileTargets(target, roots, add_if_no_ancestor, result): - """Recurses through all targets that depend on |target|, adding all targets - that need to be built (and are in |roots|) to |result|. - roots: set of root targets. - add_if_no_ancestor: If true and there are no ancestors of |target| then add - |target| to |result|. |target| must still be in |roots|. - result: targets that need to be built are added here.""" - if target.visited: - return - - target.visited = True - target.in_roots = target in roots - - for back_dep_target in target.back_deps: - _AddCompileTargets(back_dep_target, roots, False, result) - target.added_to_compile_targets |= back_dep_target.added_to_compile_targets - target.in_roots |= back_dep_target.in_roots - target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor - - # Always add 'executable' targets. Even though they may be built by other - # targets that depend upon them it makes detection of what is going to be - # built easier. - # And always add static_libraries that have no dependencies on them from - # linkables. This is necessary as the other dependencies on them may be - # static libraries themselves, which are not compile time dependencies. - if target.in_roots and ( - target.is_executable - or ( - not target.added_to_compile_targets - and (add_if_no_ancestor or target.requires_build) - ) - or ( - target.is_static_library - and add_if_no_ancestor - and not target.is_or_has_linked_ancestor - ) - ): - print( - "\t\tadding to compile targets", - target.name, - "executable", - target.is_executable, - "added_to_compile_targets", - target.added_to_compile_targets, - "add_if_no_ancestor", - add_if_no_ancestor, - "requires_build", - target.requires_build, - "is_static_library", - target.is_static_library, - "is_or_has_linked_ancestor", - target.is_or_has_linked_ancestor, - ) - result.add(target) - target.added_to_compile_targets = True - - -def _GetCompileTargets(matching_targets, supplied_targets): - """Returns the set of Targets that require a build. - matching_targets: targets that changed and need to be built.
- supplied_targets: set of targets supplied to analyzer to search from.""" - result = set() - for target in matching_targets: - print("finding compile targets for match", target.name) - _AddCompileTargets(target, supplied_targets, True, result) - return result - - -def _WriteOutput(params, **values): - """Writes the output, either to stdout or to a file if specified.""" - if "error" in values: - print("Error:", values["error"]) - if "status" in values: - print(values["status"]) - if "targets" in values: - values["targets"].sort() - print("Supplied targets that depend on changed files:") - for target in values["targets"]: - print("\t", target) - if "invalid_targets" in values: - values["invalid_targets"].sort() - print("The following targets were not found:") - for target in values["invalid_targets"]: - print("\t", target) - if "build_targets" in values: - values["build_targets"].sort() - print("Targets that require a build:") - for target in values["build_targets"]: - print("\t", target) - if "compile_targets" in values: - values["compile_targets"].sort() - print("Targets that need to be built:") - for target in values["compile_targets"]: - print("\t", target) - if "test_targets" in values: - values["test_targets"].sort() - print("Test targets:") - for target in values["test_targets"]: - print("\t", target) - - output_path = params.get("generator_flags", {}).get("analyzer_output_path", None) - if not output_path: - print(json.dumps(values)) - return - try: - f = open(output_path, "w") - f.write(json.dumps(values) + "\n") - f.close() - except IOError as e: - print("Error writing to output file", output_path, str(e)) - - -def _WasGypIncludeFileModified(params, files): - """Returns true if one of the files in |files| is in the set of included - files.""" - if params["options"].includes: - for include in params["options"].includes: - if _ToGypPath(os.path.normpath(include)) in files: - print("Include file modified, assuming all changed", include) - return True - return False - - -def _NamesNotIn(names, mapping): - """Returns a list of the values in |names| that are not in |mapping|.""" - return [name for name in names if name not in mapping] - - -def _LookupTargets(names, mapping): - """Returns a list of the mapping[name] for each value in |names| that is in - |mapping|.""" - return [mapping[name] for name in names if name in mapping] - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - flavor = gyp.common.GetFlavor(params) - if flavor == "mac": - default_variables.setdefault("OS", "mac") - elif flavor == "win": - default_variables.setdefault("OS", "win") - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - else: - operating_system = flavor - if flavor == "android": - operating_system = "linux" # Keep this legacy behavior for now.
- default_variables.setdefault("OS", operating_system) - - -class TargetCalculator(object): - """Calculates the matching test_targets and matching compile_targets.""" - - def __init__( - self, - files, - additional_compile_target_names, - test_target_names, - data, - target_list, - target_dicts, - toplevel_dir, - build_files, - ): - self._additional_compile_target_names = set(additional_compile_target_names) - self._test_target_names = set(test_target_names) - ( - self._name_to_target, - self._changed_targets, - self._root_targets, - ) = _GenerateTargets( - data, target_list, target_dicts, toplevel_dir, frozenset(files), build_files - ) - ( - self._unqualified_mapping, - self.invalid_targets, - ) = _GetUnqualifiedToTargetMapping( - self._name_to_target, self._supplied_target_names_no_all() - ) - - def _supplied_target_names(self): - return self._additional_compile_target_names | self._test_target_names - - def _supplied_target_names_no_all(self): - """Returns the supplied test targets without 'all'.""" - result = self._supplied_target_names() - result.discard("all") - return result - - def is_build_impacted(self): - """Returns true if the supplied files impact the build at all.""" - return self._changed_targets - - def find_matching_test_target_names(self): - """Returns the set of output test targets.""" - assert self.is_build_impacted() - # Find the test targets first. 'all' is special cased to mean all the - # root targets. To deal with 'all', the supplied |test_targets| are expanded - # to include the root targets during lookup. If any of the root targets - # match, we remove them and replace them with 'all'. - test_target_names_no_all = set(self._test_target_names) - test_target_names_no_all.discard("all") - test_targets_no_all = _LookupTargets( - test_target_names_no_all, self._unqualified_mapping - ) - test_target_names_contains_all = "all" in self._test_target_names - if test_target_names_contains_all: - test_targets = [ - x for x in (set(test_targets_no_all) | set(self._root_targets)) - ] - else: - test_targets = [x for x in test_targets_no_all] - print("supplied test_targets") - for target_name in self._test_target_names: - print("\t", target_name) - print("found test_targets") - for target in test_targets: - print("\t", target.name) - print("searching for matching test targets") - matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets) - matching_test_targets_contains_all = test_target_names_contains_all and set( - matching_test_targets - ) & set(self._root_targets) - if matching_test_targets_contains_all: - # Remove any of the targets for all that were not explicitly supplied, - # 'all' is subsequently added to the matching names below. - matching_test_targets = [ - x for x in (set(matching_test_targets) & set(test_targets_no_all)) - ] - print("matched test_targets") - for target in matching_test_targets: - print("\t", target.name) - matching_target_names = [ - gyp.common.ParseQualifiedTarget(target.name)[1] - for target in matching_test_targets - ] - if matching_test_targets_contains_all: - matching_target_names.append("all") - print("\tall") - return matching_target_names - - def find_matching_compile_target_names(self): - """Returns the set of output compile targets.""" - assert self.is_build_impacted() - # Compile targets are found by searching up from changed targets. - # Reset the visited status for _GetBuildTargets.
- for target in self._name_to_target.values(): - target.visited = False - - supplied_targets = _LookupTargets( - self._supplied_target_names_no_all(), self._unqualified_mapping - ) - if "all" in self._supplied_target_names(): - supplied_targets = [ - x for x in (set(supplied_targets) | set(self._root_targets)) - ] - print("Supplied test_targets & compile_targets") - for target in supplied_targets: - print("\t", target.name) - print("Finding compile targets") - compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets) - return [ - gyp.common.ParseQualifiedTarget(target.name)[1] - for target in compile_targets - ] - - -def GenerateOutput(target_list, target_dicts, data, params): - """Called by gyp as the final stage. Outputs results.""" - config = Config() - try: - config.Init(params) - - if not config.files: - raise Exception( - "Must specify files to analyze via config_path generator " "flag" - ) - - toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir)) - if debug: - print("toplevel_dir", toplevel_dir) - - if _WasGypIncludeFileModified(params, config.files): - result_dict = { - "status": all_changed_string, - "test_targets": list(config.test_target_names), - "compile_targets": list( - config.additional_compile_target_names | config.test_target_names - ), - } - _WriteOutput(params, **result_dict) - return - - calculator = TargetCalculator( - config.files, - config.additional_compile_target_names, - config.test_target_names, - data, - target_list, - target_dicts, - toplevel_dir, - params["build_files"], - ) - if not calculator.is_build_impacted(): - result_dict = { - "status": no_dependency_string, - "test_targets": [], - "compile_targets": [], - } - if calculator.invalid_targets: - result_dict["invalid_targets"] = calculator.invalid_targets - _WriteOutput(params, **result_dict) - return - - test_target_names = calculator.find_matching_test_target_names() - compile_target_names = calculator.find_matching_compile_target_names() - found_at_least_one_target = compile_target_names or test_target_names - result_dict = { - "test_targets": test_target_names, - "status": found_dependency_string - if found_at_least_one_target - else no_dependency_string, - "compile_targets": list(set(compile_target_names) | set(test_target_names)), - } - if calculator.invalid_targets: - result_dict["invalid_targets"] = calculator.invalid_targets - _WriteOutput(params, **result_dict) - - except Exception as e: - _WriteOutput(params, error=str(e)) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py deleted file mode 100644 index 16728847c5c9f..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py +++ /dev/null @@ -1,1174 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This generates makefiles suitable for inclusion into the Android build system -# via an Android.mk file. It is based on make.py, the standard makefile -# generator. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level GypAndroid.mk. This means that all -# variables in .mk-files clobber one another, and furthermore that any -# variables set potentially clash with other Android build system variables. -# Try to avoid setting global variables where possible. 
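For orientation before the writer class below: each gyp target becomes one makefile fragment, roughly shaped like this (abbreviated and illustrative; the module name shown is made up, and real names are mangled by ComputeAndroidModule):

# Illustrative shape of one generated per-target fragment (sketch only):
#   include $(CLEAR_VARS)
#   LOCAL_MODULE_CLASS := STATIC_LIBRARIES
#   LOCAL_MODULE := libexample_gyp
#   GYP_TARGET_DEPENDENCIES := ...
#   LOCAL_SRC_FILES := ...
#   (flags, includes, and the final include that registers the module follow)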
- -from __future__ import print_function - -import gyp -import gyp.common -import gyp.generator.make as make # Reuse global functions from make backend. -import os -import re -import subprocess - -generator_default_variables = { - "OS": "android", - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "SHARED_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "SHARED_LIB_SUFFIX": ".so", - "INTERMEDIATE_DIR": "$(gyp_intermediate_dir)", - "SHARED_INTERMEDIATE_DIR": "$(gyp_shared_intermediate_dir)", - "PRODUCT_DIR": "$(gyp_shared_intermediate_dir)", - "SHARED_LIB_DIR": "$(builddir)/lib.$(TOOLSET)", - "LIB_DIR": "$(obj).$(TOOLSET)", - "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", # This gets expanded by Python. - "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", # This gets expanded by Python. - "RULE_INPUT_PATH": "$(RULE_SOURCES)", - "RULE_INPUT_EXT": "$(suffix $<)", - "RULE_INPUT_NAME": "$(notdir $<)", - "CONFIGURATION_NAME": "$(GYP_CONFIGURATION)", -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - - -# Generator-specific gyp specs. -generator_additional_non_configuration_keys = [ - # Boolean to declare that this target does not want its name mangled. - "android_unmangled_name", - # Map of android build system variables to set. - "aosp_build_settings", -] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - - -ALL_MODULES_FOOTER = """\ -# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from -# all the included sub-makefiles. This is just here to clarify. -gyp_all_modules: -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -# Map gyp target types to Android module classes. -MODULE_CLASSES = { - "static_library": "STATIC_LIBRARIES", - "shared_library": "SHARED_LIBRARIES", - "executable": "EXECUTABLES", -} - - -def IsCPPExtension(ext): - return make.COMPILABLE_EXTENSIONS.get(ext) == "cxx" - - -def Sourceify(path): - """Convert a path to its source directory form. The Android backend does not - support options.generator_output, so this function is a noop.""" - return path - - -# Map from qualified target to path to output. -# For Android, the target of these maps is a tuple ('static', 'modulename'), -# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string, -# since we link by module. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class AndroidMkWriter(object): - """AndroidMkWriter packages up the writing of one target-specific Android.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def __init__(self, android_top_dir): - self.android_top_dir = android_top_dir - - def Write( - self, - qualified_target, - relative_target, - base_path, - output_filename, - spec, - configs, - part_of_all, - write_alias_target, - sdk_version, - ): - """The main entry point: writes a .mk file for a single target. 
- - Arguments: - qualified_target: target we're generating - relative_target: qualified target name relative to the root - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - write_alias_target: flag indicating whether to create short aliases for - this target - sdk_version: what to emit for LOCAL_SDK_VERSION in output - """ - gyp.common.EnsureDirExists(output_filename) - - self.fp = open(output_filename, "w") - - self.fp.write(header) - - self.qualified_target = qualified_target - self.relative_target = relative_target - self.path = base_path - self.target = spec["target_name"] - self.type = spec["type"] - self.toolset = spec["toolset"] - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - - self.android_class = MODULE_CLASSES.get(self.type, "GYP") - self.android_module = self.ComputeAndroidModule(spec) - (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) - self.output = self.output_binary = self.ComputeOutput(spec) - - # Standard header. - self.WriteLn("include $(CLEAR_VARS)\n") - - # Module class and name. - self.WriteLn("LOCAL_MODULE_CLASS := " + self.android_class) - self.WriteLn("LOCAL_MODULE := " + self.android_module) - # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE. - # The library module classes fail if the stem is set. ComputeOutputParts - # makes sure that stem == modulename in these cases. - if self.android_stem != self.android_module: - self.WriteLn("LOCAL_MODULE_STEM := " + self.android_stem) - self.WriteLn("LOCAL_MODULE_SUFFIX := " + self.android_suffix) - if self.toolset == "host": - self.WriteLn("LOCAL_IS_HOST_MODULE := true") - self.WriteLn("LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)") - elif sdk_version > 0: - self.WriteLn( - "LOCAL_MODULE_TARGET_ARCH := " "$(TARGET_$(GYP_VAR_PREFIX)ARCH)" - ) - self.WriteLn("LOCAL_SDK_VERSION := %s" % sdk_version) - - # Grab output directories; needed for Actions and Rules. - if self.toolset == "host": - self.WriteLn( - "gyp_intermediate_dir := " - "$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))" - ) - else: - self.WriteLn( - "gyp_intermediate_dir := " - "$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))" - ) - self.WriteLn( - "gyp_shared_intermediate_dir := " - "$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))" - ) - self.WriteLn() - - # List files this target depends on so that actions/rules/copies/sources - # can depend on the list. - # TODO: doesn't pull in things through transitive link deps; needed? - target_dependencies = [x[1] for x in deps if x[0] == "path"] - self.WriteLn("# Make sure our deps are built first.") - self.WriteList( - target_dependencies, "GYP_TARGET_DEPENDENCIES", local_pathify=True - ) - - # Actions must come first, since they can generate more OBJs for use below. - if "actions" in spec: - self.WriteActions(spec["actions"], extra_sources, extra_outputs) - - # Rules must be early like actions. - if "rules" in spec: - self.WriteRules(spec["rules"], extra_sources, extra_outputs) - - if "copies" in spec: - self.WriteCopies(spec["copies"], extra_outputs) - - # GYP generated outputs. 
- self.WriteList(extra_outputs, "GYP_GENERATED_OUTPUTS", local_pathify=True) - - # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend - # on both our dependency targets and our generated files. - self.WriteLn("# Make sure our deps and generated files are built first.") - self.WriteLn( - "LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) " - "$(GYP_GENERATED_OUTPUTS)" - ) - self.WriteLn() - - # Sources. - if spec.get("sources", []) or extra_sources: - self.WriteSources(spec, configs, extra_sources) - - self.WriteTarget( - spec, configs, deps, link_deps, part_of_all, write_alias_target - ) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = ("path", self.output_binary) - - # Update global list of link dependencies. - if self.type == "static_library": - target_link_deps[qualified_target] = ("static", self.android_module) - elif self.type == "shared_library": - target_link_deps[qualified_target] = ("shared", self.android_module) - - self.fp.close() - return self.android_module - - def WriteActions(self, actions, extra_sources, extra_outputs): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - """ - for action in actions: - name = make.StringToMakefileVariable( - "%s_%s" % (self.relative_target, action["action_name"]) - ) - self.WriteLn('### Rules for action "%s":' % action["action_name"]) - inputs = action["inputs"] - outputs = action["outputs"] - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - if not out.startswith("$"): - print( - 'WARNING: Action for target "%s" writes output to local path ' - '"%s".' % (self.target, out) - ) - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get("process_outputs_as_sources", False)): - extra_sources += outputs - - # Prepare the actual command. - command = gyp.common.EncodePOSIXShellList(action["action"]) - if "message" in action: - quiet_cmd = "Gyp action: %s ($@)" % action["message"] - else: - quiet_cmd = "Gyp action: %s ($@)" % name - if len(dirs) > 0: - command = "mkdir -p %s" % " ".join(dirs) + "; " + command - - cd_action = "cd $(gyp_local_path)/%s; " % self.path - command = cd_action + command - - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the gyp_* - # variables for the action rule with an absolute version so that the - # output goes in the right place. - # Only write the gyp_* rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - main_output = make.QuoteSpaces(self.LocalPathify(outputs[0])) - self.WriteLn("%s: gyp_local_path := $(LOCAL_PATH)" % main_output) - self.WriteLn("%s: gyp_var_prefix := $(GYP_VAR_PREFIX)" % main_output) - self.WriteLn( - "%s: gyp_intermediate_dir := " - "$(abspath $(gyp_intermediate_dir))" % main_output - ) - self.WriteLn( - "%s: gyp_shared_intermediate_dir := " - "$(abspath $(gyp_shared_intermediate_dir))" % main_output - ) - - # Android's envsetup.sh adds a number of directories to the path including - # the built host binary directory. 
This causes actions/rules invoked by - # gyp to sometimes use these instead of system versions, e.g. bison. - # The built host binaries may not be suitable, and can cause errors. - # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable - # set by envsetup. - self.WriteLn( - "%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))" - % main_output - ) - - # Don't allow spaces in input/output filenames, but make an exception for - # filenames which start with '$(' since it's okay for there to be spaces - # inside of make function/macro invocations. - for input in inputs: - if not input.startswith("$(") and " " in input: - raise gyp.common.GypError( - 'Action input filename "%s" in target %s contains a space' - % (input, self.target) - ) - for output in outputs: - if not output.startswith("$(") and " " in output: - raise gyp.common.GypError( - 'Action output filename "%s" in target %s contains a space' - % (output, self.target) - ) - - self.WriteLn( - "%s: %s $(GYP_TARGET_DEPENDENCIES)" - % (main_output, " ".join(map(self.LocalPathify, inputs))) - ) - self.WriteLn('\t@echo "%s"' % quiet_cmd) - self.WriteLn("\t$(hide)%s\n" % command) - for output in outputs[1:]: - # Make each output depend on the main output, with an empty command - # to force make to notice that the mtime has changed. - self.WriteLn("%s: %s ;" % (self.LocalPathify(output), main_output)) - - extra_outputs += outputs - self.WriteLn() - - self.WriteLn() - - def WriteRules(self, rules, extra_sources, extra_outputs): - """Write Makefile code for any 'rules' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - """ - if len(rules) == 0: - return - - for rule in rules: - if len(rule.get("rule_sources", [])) == 0: - continue - name = make.StringToMakefileVariable( - "%s_%s" % (self.relative_target, rule["rule_name"]) - ) - self.WriteLn('\n### Generated for rule "%s":' % name) - self.WriteLn('# "%s":' % rule) - - inputs = rule.get("inputs") - for rule_source in rule.get("rule_sources", []): - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = os.path.splitext( - rule_source_basename - ) - - outputs = [ - self.ExpandInputRoot(out, rule_source_root, rule_source_dirname) - for out in rule["outputs"] - ] - - dirs = set() - for out in outputs: - if not out.startswith("$"): - print( - "WARNING: Rule for target %s writes output to local path %s" - % (self.target, out) - ) - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - extra_outputs += outputs - if int(rule.get("process_outputs_as_sources", False)): - extra_sources.extend(outputs) - - components = [] - for component in rule["action"]: - component = self.ExpandInputRoot( - component, rule_source_root, rule_source_dirname - ) - if "$(RULE_SOURCES)" in component: - component = component.replace("$(RULE_SOURCES)", rule_source) - components.append(component) - - command = gyp.common.EncodePOSIXShellList(components) - cd_action = "cd $(gyp_local_path)/%s; " % self.path - command = cd_action + command - if dirs: - command = "mkdir -p %s" % " ".join(dirs) + "; " + command - - # We set up a rule to build the first output, and then set up - # a rule for each additional output to depend on the first. 
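- # For example (rule, input, and output names invented for illustration),
- # a rule with two outputs comes out in the generated makefile roughly as:
- #
- #   $(gyp_intermediate_dir)/foo.h: foo.idl $(GYP_TARGET_DEPENDENCIES)
- #       generate_foo foo.idl
- #   $(gyp_intermediate_dir)/foo.cc: $(gyp_intermediate_dir)/foo.h ;
- #
- # The empty recipe (the trailing ';') makes make re-check the extra
- # output's mtime rather than re-running the command.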
- outputs = list(map(self.LocalPathify, outputs))  # a real list: indexed and sliced below
- main_output = outputs[0]
- self.WriteLn("%s: gyp_local_path := $(LOCAL_PATH)" % main_output)
- self.WriteLn("%s: gyp_var_prefix := $(GYP_VAR_PREFIX)" % main_output)
- self.WriteLn(
- "%s: gyp_intermediate_dir := "
- "$(abspath $(gyp_intermediate_dir))" % main_output
- )
- self.WriteLn(
- "%s: gyp_shared_intermediate_dir := "
- "$(abspath $(gyp_shared_intermediate_dir))" % main_output
- )
-
- # See explanation in WriteActions.
- self.WriteLn(
- "%s: export PATH := "
- "$(subst $(ANDROID_BUILD_PATHS),,$(PATH))" % main_output
- )
-
- main_output_deps = self.LocalPathify(rule_source)
- if inputs:
- main_output_deps += " "
- main_output_deps += " ".join([self.LocalPathify(f) for f in inputs])
-
- self.WriteLn(
- "%s: %s $(GYP_TARGET_DEPENDENCIES)"
- % (main_output, main_output_deps)
- )
- self.WriteLn("\t%s\n" % command)
- for output in outputs[1:]:
- # Make each output depend on the main output, with an empty command
- # to force make to notice that the mtime has changed.
- self.WriteLn("%s: %s ;" % (output, main_output))
- self.WriteLn()
-
- self.WriteLn()
-
- def WriteCopies(self, copies, extra_outputs):
- """Write Makefile code for any 'copies' from the gyp input.
-
- extra_outputs: a list that will be filled in with any outputs of these
- copies (used to make other pieces dependent on them)
- """
- self.WriteLn("### Generated for copy rule.")
-
- variable = make.StringToMakefileVariable(self.relative_target + "_copies")
- outputs = []
- for copy in copies:
- for path in copy["files"]:
- # The Android build system does not allow generation of files into the
- # source tree. The destination should start with a variable, which will
- # typically be $(gyp_intermediate_dir) or
- # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
- # because some of the gyp tests depend on this.
- if not copy["destination"].startswith("$"):
- print(
- "WARNING: Copy rule for target %s writes output to "
- "local path %s" % (self.target, copy["destination"])
- )
-
- # LocalPathify() calls normpath, stripping trailing slashes.
- path = Sourceify(self.LocalPathify(path))
- filename = os.path.split(path)[1]
- output = Sourceify(
- self.LocalPathify(os.path.join(copy["destination"], filename))
- )
-
- self.WriteLn(
- "%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)" % (output, path)
- )
- self.WriteLn("\t@echo Copying: $@")
- self.WriteLn("\t$(hide) mkdir -p $(dir $@)")
- self.WriteLn("\t$(hide) $(ACP) -rpf $< $@")
- self.WriteLn()
- outputs.append(output)
- self.WriteLn("%s = %s" % (variable, " ".join(map(make.QuoteSpaces, outputs))))
- extra_outputs.append("$(%s)" % variable)
- self.WriteLn()
-
- def WriteSourceFlags(self, spec, configs):
- """Write out the flags and include paths used to compile source files for
- the current target.
-
- Args:
- spec, configs: input from gyp.
- """ - for configname, config in sorted(configs.items()): - extracted_includes = [] - - self.WriteLn("\n# Flags passed to both C and C++ files.") - cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( - config.get("cflags", []) + config.get("cflags_c", []) - ) - extracted_includes.extend(includes_from_cflags) - self.WriteList(cflags, "MY_CFLAGS_%s" % configname) - - self.WriteList( - config.get("defines"), - "MY_DEFS_%s" % configname, - prefix="-D", - quoter=make.EscapeCppDefine, - ) - - self.WriteLn("\n# Include paths placed before CFLAGS/CPPFLAGS") - includes = list(config.get("include_dirs", [])) - includes.extend(extracted_includes) - includes = map(Sourceify, map(self.LocalPathify, includes)) - includes = self.NormalizeIncludePaths(includes) - self.WriteList(includes, "LOCAL_C_INCLUDES_%s" % configname) - - self.WriteLn("\n# Flags passed to only C++ (and not C) files.") - self.WriteList(config.get("cflags_cc"), "LOCAL_CPPFLAGS_%s" % configname) - - self.WriteLn( - "\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) " - "$(MY_DEFS_$(GYP_CONFIGURATION))" - ) - # Undefine ANDROID for host modules - # TODO: the source code should not use macro ANDROID to tell if it's host - # or target module. - if self.toolset == "host": - self.WriteLn("# Undefine ANDROID for host modules") - self.WriteLn("LOCAL_CFLAGS += -UANDROID") - self.WriteLn( - "LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) " - "$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))" - ) - self.WriteLn("LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))") - # Android uses separate flags for assembly file invocations, but gyp expects - # the same CFLAGS to be applied: - self.WriteLn("LOCAL_ASFLAGS := $(LOCAL_CFLAGS)") - - def WriteSources(self, spec, configs, extra_sources): - """Write Makefile code for any 'sources' from the gyp input. - These are source files necessary to build the current target. - We need to handle shared_intermediate directory source files as - a special case by copying them to the intermediate directory and - treating them as a generated sources. Otherwise the Android build - rules won't pick them up. - - Args: - spec, configs: input from gyp. - extra_sources: Sources generated from Actions or Rules. - """ - sources = filter(make.Compilable, spec.get("sources", [])) - generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] - extra_sources = filter(make.Compilable, extra_sources) - - # Determine and output the C++ extension used by these sources. - # We simply find the first C++ file and use that extension. - all_sources = sources + extra_sources - local_cpp_extension = ".cpp" - for source in all_sources: - (root, ext) = os.path.splitext(source) - if IsCPPExtension(ext): - local_cpp_extension = ext - break - if local_cpp_extension != ".cpp": - self.WriteLn("LOCAL_CPP_EXTENSION := %s" % local_cpp_extension) - - # We need to move any non-generated sources that are coming from the - # shared intermediate directory out of LOCAL_SRC_FILES and put them - # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files - # that don't match our local_cpp_extension, since Android will only - # generate Makefile rules for a single LOCAL_CPP_EXTENSION. 
- local_files = [] - for source in sources: - (root, ext) = os.path.splitext(source) - if "$(gyp_shared_intermediate_dir)" in source: - extra_sources.append(source) - elif "$(gyp_intermediate_dir)" in source: - extra_sources.append(source) - elif IsCPPExtension(ext) and ext != local_cpp_extension: - extra_sources.append(source) - else: - local_files.append(os.path.normpath(os.path.join(self.path, source))) - - # For any generated source, if it is coming from the shared intermediate - # directory then we add a Make rule to copy them to the local intermediate - # directory first. This is because the Android LOCAL_GENERATED_SOURCES - # must be in the local module intermediate directory for the compile rules - # to work properly. If the file has the wrong C++ extension, then we add - # a rule to copy that to intermediates and use the new version. - final_generated_sources = [] - # If a source file gets copied, we still need to add the original source - # directory as header search path, for GCC searches headers in the - # directory that contains the source file by default. - origin_src_dirs = [] - for source in extra_sources: - local_file = source - if "$(gyp_intermediate_dir)/" not in local_file: - basename = os.path.basename(local_file) - local_file = "$(gyp_intermediate_dir)/" + basename - (root, ext) = os.path.splitext(local_file) - if IsCPPExtension(ext) and ext != local_cpp_extension: - local_file = root + local_cpp_extension - if local_file != source: - self.WriteLn("%s: %s" % (local_file, self.LocalPathify(source))) - self.WriteLn("\tmkdir -p $(@D); cp $< $@") - origin_src_dirs.append(os.path.dirname(source)) - final_generated_sources.append(local_file) - - # We add back in all of the non-compilable stuff to make sure that the - # make rules have dependencies on them. - final_generated_sources.extend(generated_not_sources) - self.WriteList(final_generated_sources, "LOCAL_GENERATED_SOURCES") - - origin_src_dirs = gyp.common.uniquer(origin_src_dirs) - origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) - self.WriteList(origin_src_dirs, "GYP_COPIED_SOURCE_ORIGIN_DIRS") - - self.WriteList(local_files, "LOCAL_SRC_FILES") - - # Write out the flags used to compile the source; this must be done last - # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. - self.WriteSourceFlags(spec, configs) - - def ComputeAndroidModule(self, spec): - """Return the Android module name used for a gyp spec. - - We use the complete qualified target name to avoid collisions between - duplicate targets in different directories. We also add a suffix to - distinguish gyp-generated module names. - """ - - if int(spec.get("android_unmangled_name", 0)): - assert self.type != "shared_library" or self.target.startswith("lib") - return self.target - - if self.type == "shared_library": - # For reasons of convention, the Android build system requires that all - # shared library modules are named 'libfoo' when generating -l flags. - prefix = "lib_" - else: - prefix = "" - - if spec["toolset"] == "host": - suffix = "_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp" - else: - suffix = "_gyp" - - if self.path: - middle = make.StringToMakefileVariable("%s_%s" % (self.path, self.target)) - else: - middle = make.StringToMakefileVariable(self.target) - - return "".join([prefix, middle, suffix]) - - def ComputeOutputParts(self, spec): - """Return the 'output basename' of a gyp spec, split into filename + ext. 
-
- Android libraries must be named the same thing as their module name,
- otherwise the linker can't find them, so product_name and so on must be
- ignored if we are building a library, and the "lib" prepending is
- not done for Android.
- """
- assert self.type != "loadable_module" # TODO: not supported?
-
- target = spec["target_name"]
- target_prefix = ""
- target_ext = ""
- if self.type == "static_library":
- target = self.ComputeAndroidModule(spec)
- target_ext = ".a"
- elif self.type == "shared_library":
- target = self.ComputeAndroidModule(spec)
- target_ext = ".so"
- elif self.type == "none":
- target_ext = ".stamp"
- elif self.type != "executable":
- print(
- "ERROR: What output file should be generated?",
- "type",
- self.type,
- "target",
- target,
- )
-
- if self.type != "static_library" and self.type != "shared_library":
- target_prefix = spec.get("product_prefix", target_prefix)
- target = spec.get("product_name", target)
- product_ext = spec.get("product_extension")
- if product_ext:
- target_ext = "." + product_ext
-
- target_stem = target_prefix + target
- return (target_stem, target_ext)
-
- def ComputeOutputBasename(self, spec):
- """Return the 'output basename' of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- 'libfoobar.so'
- """
- return "".join(self.ComputeOutputParts(spec))
-
- def ComputeOutput(self, spec):
- """Return the 'output' (full output path) of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- '$(obj)/baz/libfoobar.so'
- """
- if self.type == "executable":
- # We install host executables into shared_intermediate_dir so they can be
- # run by gyp rules that refer to PRODUCT_DIR.
- path = "$(gyp_shared_intermediate_dir)"
- elif self.type == "shared_library":
- if self.toolset == "host":
- path = "$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)"
- else:
- path = "$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)"
- else:
- # Other targets just get built into their intermediate dir.
- if self.toolset == "host":
- path = (
- "$(call intermediates-dir-for,%s,%s,true,,"
- "$(GYP_HOST_VAR_PREFIX))"
- % (self.android_class, self.android_module)
- )
- else:
- path = "$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))" % (
- self.android_class,
- self.android_module,
- )
-
- assert spec.get("product_dir") is None # TODO: not supported?
- return os.path.join(path, self.ComputeOutputBasename(spec))
-
- def NormalizeIncludePaths(self, include_paths):
- """Normalize include_paths.
- Convert absolute paths to relative to the Android top directory.
-
- Args:
- include_paths: A list of unprocessed include paths.
- Returns:
- A list of normalized include paths.
- """
- normalized = []
- for path in include_paths:
- if path[0] == "/":
- path = gyp.common.RelativePath(path, self.android_top_dir)
- normalized.append(path)
- return normalized
-
- def ExtractIncludesFromCFlags(self, cflags):
- """Extract include paths ("-I...") from cflags.
-
- Args:
- cflags: A list of compiler flags, which may be mixed with "-I.."
- Returns:
- A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
- """
- clean_cflags = []
- include_paths = []
- for flag in cflags:
- if flag.startswith("-I"):
- include_paths.append(flag[2:])
- else:
- clean_cflags.append(flag)
-
- return (clean_cflags, include_paths)
-
- def FilterLibraries(self, libraries):
- """Filter the 'libraries' key to separate things that shouldn't be ldflags.
- - Library entries that look like filenames should be converted to android - module names instead of being passed to the linker as flags. - - Args: - libraries: the value of spec.get('libraries') - Returns: - A tuple (static_lib_modules, dynamic_lib_modules, ldflags) - """ - static_lib_modules = [] - dynamic_lib_modules = [] - ldflags = [] - for libs in libraries: - # Libs can have multiple words. - for lib in libs.split(): - # Filter the system libraries, which are added by default by the Android - # build system. - if ( - lib == "-lc" - or lib == "-lstdc++" - or lib == "-lm" - or lib.endswith("libgcc.a") - ): - continue - match = re.search(r"([^/]+)\.a$", lib) - if match: - static_lib_modules.append(match.group(1)) - continue - match = re.search(r"([^/]+)\.so$", lib) - if match: - dynamic_lib_modules.append(match.group(1)) - continue - if lib.startswith("-l"): - ldflags.append(lib) - return (static_lib_modules, dynamic_lib_modules, ldflags) - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if "dependencies" in spec: - deps.extend( - [ - target_outputs[dep] - for dep in spec["dependencies"] - if target_outputs[dep] - ] - ) - for dep in spec["dependencies"]: - if dep in target_link_deps: - link_deps.append(target_link_deps[dep]) - deps.extend(link_deps) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - def WriteTargetFlags(self, spec, configs, link_deps): - """Write Makefile code to specify the link flags and library dependencies. - - spec, configs: input from gyp. - link_deps: link dependency list; see ComputeDeps() - """ - # Libraries (i.e. -lfoo) - # These must be included even for static libraries as some of them provide - # implicit include paths through the build system. - libraries = gyp.common.uniquer(spec.get("libraries", [])) - static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries) - - if self.type != "static_library": - for configname, config in sorted(configs.items()): - ldflags = list(config.get("ldflags", [])) - self.WriteLn("") - self.WriteList(ldflags, "LOCAL_LDFLAGS_%s" % configname) - self.WriteList(ldflags_libs, "LOCAL_GYP_LIBS") - self.WriteLn( - "LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) " - "$(LOCAL_GYP_LIBS)" - ) - - # Link dependencies (i.e. other gyp targets this target depends on) - # These need not be included for static libraries as within the gyp build - # we do not use the implicit include path mechanism. - if self.type != "static_library": - static_link_deps = [x[1] for x in link_deps if x[0] == "static"] - shared_link_deps = [x[1] for x in link_deps if x[0] == "shared"] - else: - static_link_deps = [] - shared_link_deps = [] - - # Only write the lists if they are non-empty. - if static_libs or static_link_deps: - self.WriteLn("") - self.WriteList(static_libs + static_link_deps, "LOCAL_STATIC_LIBRARIES") - self.WriteLn("# Enable grouping to fix circular references") - self.WriteLn("LOCAL_GROUP_STATIC_LIBRARIES := true") - if dynamic_libs or shared_link_deps: - self.WriteLn("") - self.WriteList(dynamic_libs + shared_link_deps, "LOCAL_SHARED_LIBRARIES") - - def WriteTarget( - self, spec, configs, deps, link_deps, part_of_all, write_alias_target - ): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. 
- deps, link_deps: dependency lists; see ComputeDeps() - part_of_all: flag indicating this target is part of 'all' - write_alias_target: flag indicating whether to create short aliases for this - target - """ - self.WriteLn("### Rules for final target.") - - if self.type != "none": - self.WriteTargetFlags(spec, configs, link_deps) - - settings = spec.get("aosp_build_settings", {}) - if settings: - self.WriteLn("### Set directly by aosp_build_settings.") - for k, v in settings.items(): - if isinstance(v, list): - self.WriteList(v, k) - else: - self.WriteLn("%s := %s" % (k, make.QuoteIfNecessary(v))) - self.WriteLn("") - - # Add to the set of targets which represent the gyp 'all' target. We use the - # name 'gyp_all_modules' as the Android build system doesn't allow the use - # of the Make target 'all' and because 'all_modules' is the equivalent of - # the Make target 'all' on Android. - if part_of_all and write_alias_target: - self.WriteLn('# Add target alias to "gyp_all_modules" target.') - self.WriteLn(".PHONY: gyp_all_modules") - self.WriteLn("gyp_all_modules: %s" % self.android_module) - self.WriteLn("") - - # Add an alias from the gyp target name to the Android module name. This - # simplifies manual builds of the target, and is required by the test - # framework. - if self.target != self.android_module and write_alias_target: - self.WriteLn("# Alias gyp target name.") - self.WriteLn(".PHONY: %s" % self.target) - self.WriteLn("%s: %s" % (self.target, self.android_module)) - self.WriteLn("") - - # Add the command to trigger build of the target type depending - # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY - # NOTE: This has to come last! - modifier = "" - if self.toolset == "host": - modifier = "HOST_" - if self.type == "static_library": - self.WriteLn("include $(BUILD_%sSTATIC_LIBRARY)" % modifier) - elif self.type == "shared_library": - self.WriteLn("LOCAL_PRELINK_MODULE := false") - self.WriteLn("include $(BUILD_%sSHARED_LIBRARY)" % modifier) - elif self.type == "executable": - self.WriteLn("LOCAL_CXX_STL := libc++_static") - # Executables are for build and test purposes only, so they're installed - # to a directory that doesn't get included in the system image. - self.WriteLn("LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)") - self.WriteLn("include $(BUILD_%sEXECUTABLE)" % modifier) - else: - self.WriteLn("LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp") - self.WriteLn("LOCAL_UNINSTALLABLE_MODULE := true") - if self.toolset == "target": - self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)") - else: - self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)") - self.WriteLn() - self.WriteLn("include $(BUILD_SYSTEM)/base_rules.mk") - self.WriteLn() - self.WriteLn("$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)") - self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"') - self.WriteLn("\t$(hide) mkdir -p $(dir $@)") - self.WriteLn("\t$(hide) touch $@") - self.WriteLn() - self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX :=") - - def WriteList( - self, - value_list, - variable=None, - prefix="", - quoter=make.QuoteIfNecessary, - local_pathify=False, - ): - """Write a variable definition that is a list of values. - - E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out - foo = blaha blahb - but in a pretty-printed style. 
- """ - values = "" - if value_list: - value_list = [quoter(prefix + value) for value in value_list] - if local_pathify: - value_list = [self.LocalPathify(value) for value in value_list] - values = " \\\n\t" + " \\\n\t".join(value_list) - self.fp.write("%s :=%s\n\n" % (variable, values)) - - def WriteLn(self, text=""): - self.fp.write(text + "\n") - - def LocalPathify(self, path): - """Convert a subdirectory-relative path into a normalized path which starts - with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). - Absolute paths, or paths that contain variables, are just normalized.""" - if "$(" in path or os.path.isabs(path): - # path is not a file in the project tree in this case, but calling - # normpath is still important for trimming trailing slashes. - return os.path.normpath(path) - local_path = os.path.join("$(LOCAL_PATH)", self.path, path) - local_path = os.path.normpath(local_path) - # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH) - # - i.e. that the resulting path is still inside the project tree. The - # path may legitimately have ended up containing just $(LOCAL_PATH), though, - # so we don't look for a slash. - assert local_path.startswith( - "$(LOCAL_PATH)" - ), "Path %s attempts to escape from gyp path %s !)" % (path, self.path) - return local_path - - def ExpandInputRoot(self, template, expansion, dirname): - if "%(INPUT_ROOT)s" not in template and "%(INPUT_DIRNAME)s" not in template: - return template - path = template % { - "INPUT_ROOT": expansion, - "INPUT_DIRNAME": dirname, - } - return os.path.normpath(path) - - -def PerformBuild(data, configurations, params): - # The android backend only supports the default configuration. - options = params["options"] - makefile = os.path.abspath(os.path.join(options.toplevel_dir, "GypAndroid.mk")) - env = dict(os.environ) - env["ONE_SHOT_MAKEFILE"] = makefile - arguments = ["make", "-C", os.environ["ANDROID_BUILD_TOP"], "gyp_all_modules"] - print("Building: %s" % arguments) - subprocess.check_call(arguments, env=env) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params["options"] - generator_flags = params.get("generator_flags", {}) - limit_to_target_all = generator_flags.get("limit_to_target_all", False) - write_alias_targets = generator_flags.get("write_alias_targets", True) - sdk_version = generator_flags.get("aosp_sdk_version", 0) - android_top_dir = os.environ.get("ANDROID_BUILD_TOP") - assert android_top_dir, "$ANDROID_BUILD_TOP not set; you need to run lunch." - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - assert ( - not options.generator_output - ), "The Android backend does not support options.generator_output." - base_path = gyp.common.RelativePath( - os.path.dirname(build_file), options.toplevel_dir - ) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. 
- default_configuration = None - for target in target_list: - spec = target_dicts[target] - if spec["default_configuration"] != "Default": - default_configuration = spec["default_configuration"] - break - if not default_configuration: - default_configuration = "Default" - - makefile_name = "GypAndroid" + options.suffix + ".mk" - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - assert ( - not options.generator_output - ), "The Android backend does not support options.generator_output." - gyp.common.EnsureDirExists(makefile_path) - root_makefile = open(makefile_path, "w") - - root_makefile.write(header) - - # We set LOCAL_PATH just once, here, to the top of the project tree. This - # allows all the other paths we use to be relative to the Android.mk file, - # as the Android build system expects. - root_makefile.write("\nLOCAL_PATH := $(call my-dir)\n") - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params["build_files"]: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - android_modules = {} - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target) - relative_build_file = gyp.common.RelativePath(build_file, options.toplevel_dir) - build_files.add(relative_build_file) - included_files = data[build_file]["included_files"] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir, - ) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if params["home_dot_gyp"] and abs_include_file.startswith( - params["home_dot_gyp"] - ): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath( - build_file, target + "." + toolset + options.suffix + ".mk" - ) - - spec = target_dicts[qualified_target] - configs = spec["configurations"] - - part_of_all = qualified_target in needed_targets - if limit_to_target_all and not part_of_all: - continue - - relative_target = gyp.common.QualifiedTarget( - relative_build_file, target, toolset - ) - writer = AndroidMkWriter(android_top_dir) - android_module = writer.Write( - qualified_target, - relative_target, - base_path, - output_file, - spec, - configs, - part_of_all=part_of_all, - write_alias_target=write_alias_targets, - sdk_version=sdk_version, - ) - if android_module in android_modules: - print( - "ERROR: Android module names must be unique. The following " - "targets both generate Android module name %s.\n %s\n %s" - % (android_module, android_modules[android_module], qualified_target) - ) - return - android_modules[android_module] = qualified_target - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. 
- mkfile_rel_path = gyp.common.RelativePath(
- output_file, os.path.dirname(makefile_path)
- )
- include_list.add(mkfile_rel_path)
-
- root_makefile.write("GYP_CONFIGURATION ?= %s\n" % default_configuration)
- root_makefile.write("GYP_VAR_PREFIX ?=\n")
- root_makefile.write("GYP_HOST_VAR_PREFIX ?=\n")
- root_makefile.write("GYP_HOST_MULTILIB ?= first\n")
-
- # Write out the sorted list of includes.
- root_makefile.write("\n")
- for include_file in sorted(include_list):
- root_makefile.write("include $(LOCAL_PATH)/" + include_file + "\n")
- root_makefile.write("\n")
-
- if write_alias_targets:
- root_makefile.write(ALL_MODULES_FOOTER)
-
- root_makefile.close()
diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
deleted file mode 100644
index f5ceacfca364c..0000000000000
--- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
+++ /dev/null
@@ -1,1327 +0,0 @@
- # Copyright (c) 2013 Google Inc. All rights reserved.
- # Use of this source code is governed by a BSD-style license that can be
- # found in the LICENSE file.
-
- """cmake output module
-
- This module is under development and should be considered experimental.
-
- This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
- created for each configuration.
-
- This module's original purpose was to support editing in IDEs like KDevelop,
- which use CMake for project management. It is also possible to use CMake to
- generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
- will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
- but build using CMake. As a result, the QtCreator editor is unaware of compiler
- defines. The generated CMakeLists.txt can also be used to build on Linux; there
- is currently no support for building on other platforms.
-
- The generated CMakeLists.txt should properly compile all projects. However,
- there is a mismatch between gyp and cmake with regard to linking. All attempts
- are made to work around this, but CMake sometimes sees -Wl,--start-group as a
- library and incorrectly repeats it. As a result the output of this generator
- should not be relied on for building.
-
- When using kdevelop, use version 4.4+. Previous versions of kdevelop will
- not be able to find the header file directories described in the generated
- CMakeLists.txt file.
- """
-
- from __future__ import print_function
-
- import multiprocessing
- import os
- import signal
- import string
- import subprocess
- import gyp.common
- import gyp.xcode_emulation
-
- try:
- # maketrans moved to str in python3.
- _maketrans = string.maketrans
- except (NameError, AttributeError): # Python 3 raises AttributeError here
- _maketrans = str.maketrans
-
- generator_default_variables = {
- "EXECUTABLE_PREFIX": "",
- "EXECUTABLE_SUFFIX": "",
- "STATIC_LIB_PREFIX": "lib",
- "STATIC_LIB_SUFFIX": ".a",
- "SHARED_LIB_PREFIX": "lib",
- "SHARED_LIB_SUFFIX": ".so",
- "SHARED_LIB_DIR": "${builddir}/lib.${TOOLSET}",
- "LIB_DIR": "${obj}.${TOOLSET}",
- "INTERMEDIATE_DIR": "${obj}.${TOOLSET}/${TARGET}/geni",
- "SHARED_INTERMEDIATE_DIR": "${obj}/gen",
- "PRODUCT_DIR": "${builddir}",
- "RULE_INPUT_PATH": "${RULE_INPUT_PATH}",
- "RULE_INPUT_DIRNAME": "${RULE_INPUT_DIRNAME}",
- "RULE_INPUT_NAME": "${RULE_INPUT_NAME}",
- "RULE_INPUT_ROOT": "${RULE_INPUT_ROOT}",
- "RULE_INPUT_EXT": "${RULE_INPUT_EXT}",
- "CONFIGURATION_NAME": "${configuration}",
- }
-
- FULL_PATH_VARS = ("${CMAKE_CURRENT_LIST_DIR}", "${builddir}", "${obj}")
-
- generator_supports_multiple_toolsets = True
- generator_wants_static_library_dependencies_adjusted = True
-
- COMPILABLE_EXTENSIONS = {
- ".c": "cc",
- ".cc": "cxx",
- ".cpp": "cxx",
- ".cxx": "cxx",
- ".s": "s", # cc
- ".S": "s", # cc
- }
-
-
- def RemovePrefix(a, prefix):
- """Returns 'a' without 'prefix' if it starts with 'prefix'."""
- return a[len(prefix) :] if a.startswith(prefix) else a
-
-
- def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- default_variables.setdefault("OS", gyp.common.GetFlavor(params))
-
-
- def Compilable(filename):
- """Return true if the file is compilable (should be in OBJS)."""
- return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
-
-
- def Linkable(filename):
- """Return true if the file is linkable (should be on the link line)."""
- return filename.endswith(".o")
-
-
- def NormjoinPathForceCMakeSource(base_path, rel_path):
- """Resolves rel_path against base_path and returns the result.
-
- If rel_path is an absolute path it is returned unchanged.
- Otherwise it is resolved against base_path and normalized.
- If the result is a relative path, it is forced to be relative to the
- CMakeLists.txt.
- """
- if os.path.isabs(rel_path):
- return rel_path
- if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
- return rel_path
- # TODO: do we need to check base_path for absolute variables as well?
- return os.path.join(
- "${CMAKE_CURRENT_LIST_DIR}", os.path.normpath(os.path.join(base_path, rel_path))
- )
-
-
- def NormjoinPath(base_path, rel_path):
- """Resolves rel_path against base_path and returns the result.
- TODO: what is this really used for?
- If rel_path begins with '$' it is returned unchanged.
- Otherwise it is resolved against base_path if relative, then normalized.
- """
- if rel_path.startswith("$") and not rel_path.startswith("${configuration}"):
- return rel_path
- return os.path.normpath(os.path.join(base_path, rel_path))
-
-
- def CMakeStringEscape(a):
- """Escapes the string 'a' for use inside a CMake string.
- - This means escaping - '\' otherwise it may be seen as modifying the next character - '"' otherwise it will end the string - ';' otherwise the string becomes a list - - The following do not need to be escaped - '#' when the lexer is in string state, this does not start a comment - - The following are yet unknown - '$' generator variables (like ${obj}) must not be escaped, - but text $ should be escaped - what is wanted is to know which $ come from generator variables - """ - return a.replace("\\", "\\\\").replace(";", "\\;").replace('"', '\\"') - - -def SetFileProperty(output, source_name, property_name, values, sep): - """Given a set of source file, sets the given property on them.""" - output.write("set_source_files_properties(") - output.write(source_name) - output.write(" PROPERTIES ") - output.write(property_name) - output.write(' "') - for value in values: - output.write(CMakeStringEscape(value)) - output.write(sep) - output.write('")\n') - - -def SetFilesProperty(output, variable, property_name, values, sep): - """Given a set of source files, sets the given property on them.""" - output.write("set_source_files_properties(") - WriteVariable(output, variable) - output.write(" PROPERTIES ") - output.write(property_name) - output.write(' "') - for value in values: - output.write(CMakeStringEscape(value)) - output.write(sep) - output.write('")\n') - - -def SetTargetProperty(output, target_name, property_name, values, sep=""): - """Given a target, sets the given property.""" - output.write("set_target_properties(") - output.write(target_name) - output.write(" PROPERTIES ") - output.write(property_name) - output.write(' "') - for value in values: - output.write(CMakeStringEscape(value)) - output.write(sep) - output.write('")\n') - - -def SetVariable(output, variable_name, value): - """Sets a CMake variable.""" - output.write("set(") - output.write(variable_name) - output.write(' "') - output.write(CMakeStringEscape(value)) - output.write('")\n') - - -def SetVariableList(output, variable_name, values): - """Sets a CMake variable to a list.""" - if not values: - return SetVariable(output, variable_name, "") - if len(values) == 1: - return SetVariable(output, variable_name, values[0]) - output.write("list(APPEND ") - output.write(variable_name) - output.write('\n "') - output.write('"\n "'.join([CMakeStringEscape(value) for value in values])) - output.write('")\n') - - -def UnsetVariable(output, variable_name): - """Unsets a CMake variable.""" - output.write("unset(") - output.write(variable_name) - output.write(")\n") - - -def WriteVariable(output, variable_name, prepend=None): - if prepend: - output.write(prepend) - output.write("${") - output.write(variable_name) - output.write("}") - - -class CMakeTargetType(object): - def __init__(self, command, modifier, property_modifier): - self.command = command - self.modifier = modifier - self.property_modifier = property_modifier - - -cmake_target_type_from_gyp_target_type = { - "executable": CMakeTargetType("add_executable", None, "RUNTIME"), - "static_library": CMakeTargetType("add_library", "STATIC", "ARCHIVE"), - "shared_library": CMakeTargetType("add_library", "SHARED", "LIBRARY"), - "loadable_module": CMakeTargetType("add_library", "MODULE", "LIBRARY"), - "none": CMakeTargetType("add_custom_target", "SOURCES", None), -} - - -def StringToCMakeTargetName(a): - """Converts the given string 'a' to a valid CMake target name. - - All invalid characters are replaced by '_'. 
- Invalid for cmake: ' ', '/', '(', ')', '"'
- Invalid for make: ':'
- Invalid for unknown reasons but cause failures: '.'
- """
- return a.translate(_maketrans(' /():."', "_______"))
-
-
- def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, output):
- """Write CMake for the 'actions' in the target.
-
- Args:
- target_name: the name of the CMake target being generated.
- actions: the Gyp 'actions' dict for this target.
- extra_sources: [(<cmake_src>, <gyp_src>)] to append with generated source files.
- extra_deps: [<cmake_target>] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
- for action in actions:
- action_name = StringToCMakeTargetName(action["action_name"])
- action_target_name = "%s__%s" % (target_name, action_name)
-
- inputs = action["inputs"]
- inputs_name = action_target_name + "__input"
- SetVariableList(
- output,
- inputs_name,
- [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs],
- )
-
- outputs = action["outputs"]
- cmake_outputs = [
- NormjoinPathForceCMakeSource(path_to_gyp, out) for out in outputs
- ]
- outputs_name = action_target_name + "__output"
- SetVariableList(output, outputs_name, cmake_outputs)
-
- # Build up a list of outputs.
- # Collect the output dirs we'll need.
- dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
-
- if int(action.get("process_outputs_as_sources", False)):
- extra_sources.extend(zip(cmake_outputs, outputs))
-
- # add_custom_command
- output.write("add_custom_command(OUTPUT ")
- WriteVariable(output, outputs_name)
- output.write("\n")
-
- if len(dirs) > 0:
- for directory in dirs:
- output.write(" COMMAND ${CMAKE_COMMAND} -E make_directory ")
- output.write(directory)
- output.write("\n")
-
- output.write(" COMMAND ")
- output.write(gyp.common.EncodePOSIXShellList(action["action"]))
- output.write("\n")
-
- output.write(" DEPENDS ")
- WriteVariable(output, inputs_name)
- output.write("\n")
-
- output.write(" WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/")
- output.write(path_to_gyp)
- output.write("\n")
-
- output.write(" COMMENT ")
- if "message" in action:
- output.write(action["message"])
- else:
- output.write(action_target_name)
- output.write("\n")
-
- output.write(" VERBATIM\n")
- output.write(")\n")
-
- # add_custom_target
- output.write("add_custom_target(")
- output.write(action_target_name)
- output.write("\n DEPENDS ")
- WriteVariable(output, outputs_name)
- output.write("\n SOURCES ")
- WriteVariable(output, inputs_name)
- output.write("\n)\n")
-
- extra_deps.append(action_target_name)
-
-
- def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
- if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")):
- if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
- return rel_path
- return NormjoinPathForceCMakeSource(base_path, rel_path)
-
-
- def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, output):
- """Write CMake for the 'rules' in the target.
-
- Args:
- target_name: the name of the CMake target being generated.
- rules: the Gyp 'rules' dict for this target.
- extra_sources: [(<cmake_src>, <gyp_src>)] to append with generated source files.
- extra_deps: [<cmake_target>] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """ - for rule in rules: - rule_name = StringToCMakeTargetName(target_name + "__" + rule["rule_name"]) - - inputs = rule.get("inputs", []) - inputs_name = rule_name + "__input" - SetVariableList( - output, - inputs_name, - [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs], - ) - outputs = rule["outputs"] - var_outputs = [] - - for count, rule_source in enumerate(rule.get("rule_sources", [])): - action_name = rule_name + "_" + str(count) - - rule_source_dirname, rule_source_basename = os.path.split(rule_source) - rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename) - - SetVariable(output, "RULE_INPUT_PATH", rule_source) - SetVariable(output, "RULE_INPUT_DIRNAME", rule_source_dirname) - SetVariable(output, "RULE_INPUT_NAME", rule_source_basename) - SetVariable(output, "RULE_INPUT_ROOT", rule_source_root) - SetVariable(output, "RULE_INPUT_EXT", rule_source_ext) - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir) - - # Create variables for the output, as 'local' variable will be unset. - these_outputs = [] - for output_index, out in enumerate(outputs): - output_name = action_name + "_" + str(output_index) - SetVariable( - output, - output_name, - NormjoinRulePathForceCMakeSource(path_to_gyp, out, rule_source), - ) - if int(rule.get("process_outputs_as_sources", False)): - extra_sources.append(("${" + output_name + "}", out)) - these_outputs.append("${" + output_name + "}") - var_outputs.append("${" + output_name + "}") - - # add_custom_command - output.write("add_custom_command(OUTPUT\n") - for out in these_outputs: - output.write(" ") - output.write(out) - output.write("\n") - - for directory in dirs: - output.write(" COMMAND ${CMAKE_COMMAND} -E make_directory ") - output.write(directory) - output.write("\n") - - output.write(" COMMAND ") - output.write(gyp.common.EncodePOSIXShellList(rule["action"])) - output.write("\n") - - output.write(" DEPENDS ") - WriteVariable(output, inputs_name) - output.write(" ") - output.write(NormjoinPath(path_to_gyp, rule_source)) - output.write("\n") - - # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives. - # The cwd is the current build directory. - output.write(" WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/") - output.write(path_to_gyp) - output.write("\n") - - output.write(" COMMENT ") - if "message" in rule: - output.write(rule["message"]) - else: - output.write(action_name) - output.write("\n") - - output.write(" VERBATIM\n") - output.write(")\n") - - UnsetVariable(output, "RULE_INPUT_PATH") - UnsetVariable(output, "RULE_INPUT_DIRNAME") - UnsetVariable(output, "RULE_INPUT_NAME") - UnsetVariable(output, "RULE_INPUT_ROOT") - UnsetVariable(output, "RULE_INPUT_EXT") - - # add_custom_target - output.write("add_custom_target(") - output.write(rule_name) - output.write(" DEPENDS\n") - for out in var_outputs: - output.write(" ") - output.write(out) - output.write("\n") - output.write("SOURCES ") - WriteVariable(output, inputs_name) - output.write("\n") - for rule_source in rule.get("rule_sources", []): - output.write(" ") - output.write(NormjoinPath(path_to_gyp, rule_source)) - output.write("\n") - output.write(")\n") - - extra_deps.append(rule_name) - - -def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output): - """Write CMake for the 'copies' in the target. - - Args: - target_name: the name of the CMake target being generated. - actions: the Gyp 'actions' dict for this target. 
- extra_deps: [<cmake_target>] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
- copy_name = target_name + "__copies"
-
- # CMake gets upset with custom targets whose OUTPUT specifies no output.
- have_copies = any(copy["files"] for copy in copies)
- if not have_copies:
- output.write("add_custom_target(")
- output.write(copy_name)
- output.write(")\n")
- extra_deps.append(copy_name)
- return
-
- class Copy(object):
- def __init__(self, ext, command):
- self.cmake_inputs = []
- self.cmake_outputs = []
- self.gyp_inputs = []
- self.gyp_outputs = []
- self.ext = ext
- self.inputs_name = None
- self.outputs_name = None
- self.command = command
-
- file_copy = Copy("", "copy")
- dir_copy = Copy("_dirs", "copy_directory")
-
- for copy in copies:
- files = copy["files"]
- destination = copy["destination"]
- for src in files:
- path = os.path.normpath(src)
- basename = os.path.split(path)[1]
- dst = os.path.join(destination, basename)
-
- # basename is '' for sources ending in '/', i.e. directory copies.
- copy = file_copy if os.path.basename(src) else dir_copy
-
- copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
- copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
- copy.gyp_inputs.append(src)
- copy.gyp_outputs.append(dst)
-
- for copy in (file_copy, dir_copy):
- if copy.cmake_inputs:
- copy.inputs_name = copy_name + "__input" + copy.ext
- SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
-
- copy.outputs_name = copy_name + "__output" + copy.ext
- SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
-
- # add_custom_command
- output.write("add_custom_command(\n")
-
- output.write("OUTPUT")
- for copy in (file_copy, dir_copy):
- if copy.outputs_name:
- WriteVariable(output, copy.outputs_name, " ")
- output.write("\n")
-
- for copy in (file_copy, dir_copy):
- for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
- # 'cmake -E copy src dst' will create the 'dst' directory if needed.
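- # For example (paths invented), this loop emits one line per copied
- # entry, of the form:
- #   COMMAND ${CMAKE_COMMAND} -E copy assets/icon.png res/icon.png
- #   COMMAND ${CMAKE_COMMAND} -E copy_directory assets/shaders res/shaders
- # A source ending in "/" has an empty os.path.basename(), which is what
- # routed it into the copy_directory bucket above.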
- output.write("COMMAND ${CMAKE_COMMAND} -E %s " % copy.command) - output.write(src) - output.write(" ") - output.write(dst) - output.write("\n") - - output.write("DEPENDS") - for copy in (file_copy, dir_copy): - if copy.inputs_name: - WriteVariable(output, copy.inputs_name, " ") - output.write("\n") - - output.write("WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/") - output.write(path_to_gyp) - output.write("\n") - - output.write("COMMENT Copying for ") - output.write(target_name) - output.write("\n") - - output.write("VERBATIM\n") - output.write(")\n") - - # add_custom_target - output.write("add_custom_target(") - output.write(copy_name) - output.write("\n DEPENDS") - for copy in (file_copy, dir_copy): - if copy.outputs_name: - WriteVariable(output, copy.outputs_name, " ") - output.write("\n SOURCES") - if file_copy.inputs_name: - WriteVariable(output, file_copy.inputs_name, " ") - output.write("\n)\n") - - extra_deps.append(copy_name) - - -def CreateCMakeTargetBaseName(qualified_target): - """This is the name we would like the target to have.""" - _, gyp_target_name, gyp_target_toolset = gyp.common.ParseQualifiedTarget( - qualified_target - ) - cmake_target_base_name = gyp_target_name - if gyp_target_toolset and gyp_target_toolset != "target": - cmake_target_base_name += "_" + gyp_target_toolset - return StringToCMakeTargetName(cmake_target_base_name) - - -def CreateCMakeTargetFullName(qualified_target): - """An unambiguous name for the target.""" - gyp_file, gyp_target_name, gyp_target_toolset = gyp.common.ParseQualifiedTarget( - qualified_target - ) - cmake_target_full_name = gyp_file + ":" + gyp_target_name - if gyp_target_toolset and gyp_target_toolset != "target": - cmake_target_full_name += "_" + gyp_target_toolset - return StringToCMakeTargetName(cmake_target_full_name) - - -class CMakeNamer(object): - """Converts Gyp target names into CMake target names. - - CMake requires that target names be globally unique. One way to ensure - this is to fully qualify the names of the targets. Unfortunately, this - ends up with all targets looking like "chrome_chrome_gyp_chrome" instead - of just "chrome". If this generator were only interested in building, it - would be possible to fully qualify all target names, then create - unqualified target names which depend on all qualified targets which - should have had that name. This is more or less what the 'make' generator - does with aliases. However, one goal of this generator is to create CMake - files for use with IDEs, and fully qualified names are not as user - friendly. - - Since target name collision is rare, we do the above only when required. - - Toolset variants are always qualified from the base, as this is required for - building. However, it also makes sense for an IDE, as it is possible for - defines to be different. 
- """ - - def __init__(self, target_list): - self.cmake_target_base_names_conficting = set() - - cmake_target_base_names_seen = set() - for qualified_target in target_list: - cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target) - - if cmake_target_base_name not in cmake_target_base_names_seen: - cmake_target_base_names_seen.add(cmake_target_base_name) - else: - self.cmake_target_base_names_conficting.add(cmake_target_base_name) - - def CreateCMakeTargetName(self, qualified_target): - base_name = CreateCMakeTargetBaseName(qualified_target) - if base_name in self.cmake_target_base_names_conficting: - return CreateCMakeTargetFullName(qualified_target) - return base_name - - -def WriteTarget( - namer, - qualified_target, - target_dicts, - build_dir, - config_to_use, - options, - generator_flags, - all_qualified_targets, - flavor, - output, -): - # The make generator does this always. - # TODO: It would be nice to be able to tell CMake all dependencies. - circular_libs = generator_flags.get("circular", True) - - if not generator_flags.get("standalone", False): - output.write("\n#") - output.write(qualified_target) - output.write("\n") - - gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) - rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir) - rel_gyp_dir = os.path.dirname(rel_gyp_file) - - # Relative path from build dir to top dir. - build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) - # Relative path from build dir to gyp dir. - build_to_gyp = os.path.join(build_to_top, rel_gyp_dir) - - path_from_cmakelists_to_gyp = build_to_gyp - - spec = target_dicts.get(qualified_target, {}) - config = spec.get("configurations", {}).get(config_to_use, {}) - - xcode_settings = None - if flavor == "mac": - xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - - target_name = spec.get("target_name", "") - target_type = spec.get("type", "") - target_toolset = spec.get("toolset") - - cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) - if cmake_target_type is None: - print( - "Target %s has unknown target type %s, skipping." - % (target_name, target_type) - ) - return - - SetVariable(output, "TARGET", target_name) - SetVariable(output, "TOOLSET", target_toolset) - - cmake_target_name = namer.CreateCMakeTargetName(qualified_target) - - extra_sources = [] - extra_deps = [] - - # Actions must come first, since they can generate more OBJs for use below. - if "actions" in spec: - WriteActions( - cmake_target_name, - spec["actions"], - extra_sources, - extra_deps, - path_from_cmakelists_to_gyp, - output, - ) - - # Rules must be early like actions. - if "rules" in spec: - WriteRules( - cmake_target_name, - spec["rules"], - extra_sources, - extra_deps, - path_from_cmakelists_to_gyp, - output, - ) - - # Copies - if "copies" in spec: - WriteCopies( - cmake_target_name, - spec["copies"], - extra_deps, - path_from_cmakelists_to_gyp, - output, - ) - - # Target and sources - srcs = spec.get("sources", []) - - # Gyp separates the sheep from the goats based on file extensions. - # A full separation is done here because of flag handing (see below). 
- s_sources = [] - c_sources = [] - cxx_sources = [] - linkable_sources = [] - other_sources = [] - for src in srcs: - _, ext = os.path.splitext(src) - src_type = COMPILABLE_EXTENSIONS.get(ext, None) - src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src) - - if src_type == "s": - s_sources.append(src_norm_path) - elif src_type == "cc": - c_sources.append(src_norm_path) - elif src_type == "cxx": - cxx_sources.append(src_norm_path) - elif Linkable(ext): - linkable_sources.append(src_norm_path) - else: - other_sources.append(src_norm_path) - - for extra_source in extra_sources: - src, real_source = extra_source - _, ext = os.path.splitext(real_source) - src_type = COMPILABLE_EXTENSIONS.get(ext, None) - - if src_type == "s": - s_sources.append(src) - elif src_type == "cc": - c_sources.append(src) - elif src_type == "cxx": - cxx_sources.append(src) - elif Linkable(ext): - linkable_sources.append(src) - else: - other_sources.append(src) - - s_sources_name = None - if s_sources: - s_sources_name = cmake_target_name + "__asm_srcs" - SetVariableList(output, s_sources_name, s_sources) - - c_sources_name = None - if c_sources: - c_sources_name = cmake_target_name + "__c_srcs" - SetVariableList(output, c_sources_name, c_sources) - - cxx_sources_name = None - if cxx_sources: - cxx_sources_name = cmake_target_name + "__cxx_srcs" - SetVariableList(output, cxx_sources_name, cxx_sources) - - linkable_sources_name = None - if linkable_sources: - linkable_sources_name = cmake_target_name + "__linkable_srcs" - SetVariableList(output, linkable_sources_name, linkable_sources) - - other_sources_name = None - if other_sources: - other_sources_name = cmake_target_name + "__other_srcs" - SetVariableList(output, other_sources_name, other_sources) - - # CMake gets upset when executable targets provide no sources. - # http://www.cmake.org/pipermail/cmake/2010-July/038461.html - dummy_sources_name = None - has_sources = ( - s_sources_name - or c_sources_name - or cxx_sources_name - or linkable_sources_name - or other_sources_name - ) - if target_type == "executable" and not has_sources: - dummy_sources_name = cmake_target_name + "__dummy_srcs" - SetVariable( - output, dummy_sources_name, "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c" - ) - output.write('if(NOT EXISTS "') - WriteVariable(output, dummy_sources_name) - output.write('")\n') - output.write(' file(WRITE "') - WriteVariable(output, dummy_sources_name) - output.write('" "")\n') - output.write("endif()\n") - - # CMake is opposed to setting linker directories and considers the practice - # of setting linker directories dangerous. Instead, it favors the use of - # find_library and passing absolute paths to target_link_libraries. - # However, CMake does provide the command link_directories, which adds - # link directories to targets defined after it is called. - # As a result, link_directories must come before the target definition. - # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES. 
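- # Concretely (directory and target names invented), the emitted output
- # takes roughly the shape:
- #   link_directories( ${CMAKE_CURRENT_LIST_DIR}/../prebuilt
- #   )
- #   add_executable(demo_tool ${demo_tool__c_srcs})
- # with link_directories() first, because it only affects targets that are
- # declared after it.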
- library_dirs = config.get("library_dirs") - if library_dirs is not None: - output.write("link_directories(") - for library_dir in library_dirs: - output.write(" ") - output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir)) - output.write("\n") - output.write(")\n") - - output.write(cmake_target_type.command) - output.write("(") - output.write(cmake_target_name) - - if cmake_target_type.modifier is not None: - output.write(" ") - output.write(cmake_target_type.modifier) - - if s_sources_name: - WriteVariable(output, s_sources_name, " ") - if c_sources_name: - WriteVariable(output, c_sources_name, " ") - if cxx_sources_name: - WriteVariable(output, cxx_sources_name, " ") - if linkable_sources_name: - WriteVariable(output, linkable_sources_name, " ") - if other_sources_name: - WriteVariable(output, other_sources_name, " ") - if dummy_sources_name: - WriteVariable(output, dummy_sources_name, " ") - - output.write(")\n") - - # Let CMake know if the 'all' target should depend on this target. - exclude_from_all = ( - "TRUE" if qualified_target not in all_qualified_targets else "FALSE" - ) - SetTargetProperty(output, cmake_target_name, "EXCLUDE_FROM_ALL", exclude_from_all) - for extra_target_name in extra_deps: - SetTargetProperty( - output, extra_target_name, "EXCLUDE_FROM_ALL", exclude_from_all - ) - - # Output name and location. - if target_type != "none": - # Link as 'C' if there are no other files - if not c_sources and not cxx_sources: - SetTargetProperty(output, cmake_target_name, "LINKER_LANGUAGE", ["C"]) - - # Mark uncompiled sources as uncompiled. - if other_sources_name: - output.write("set_source_files_properties(") - WriteVariable(output, other_sources_name, "") - output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n') - - # Mark object sources as linkable. 
- if linkable_sources_name: - output.write("set_source_files_properties(") - WriteVariable(output, linkable_sources_name, "") - output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n') - - # Output directory - target_output_directory = spec.get("product_dir") - if target_output_directory is None: - if target_type in ("executable", "loadable_module"): - target_output_directory = generator_default_variables["PRODUCT_DIR"] - elif target_type == "shared_library": - target_output_directory = "${builddir}/lib.${TOOLSET}" - elif spec.get("standalone_static_library", False): - target_output_directory = generator_default_variables["PRODUCT_DIR"] - else: - base_path = gyp.common.RelativePath( - os.path.dirname(gyp_file), options.toplevel_dir - ) - target_output_directory = "${obj}.${TOOLSET}" - target_output_directory = os.path.join( - target_output_directory, base_path - ) - - cmake_target_output_directory = NormjoinPathForceCMakeSource( - path_from_cmakelists_to_gyp, target_output_directory - ) - SetTargetProperty( - output, - cmake_target_name, - cmake_target_type.property_modifier + "_OUTPUT_DIRECTORY", - cmake_target_output_directory, - ) - - # Output name - default_product_prefix = "" - default_product_name = target_name - default_product_ext = "" - if target_type == "static_library": - static_library_prefix = generator_default_variables["STATIC_LIB_PREFIX"] - default_product_name = RemovePrefix( - default_product_name, static_library_prefix - ) - default_product_prefix = static_library_prefix - default_product_ext = generator_default_variables["STATIC_LIB_SUFFIX"] - - elif target_type in ("loadable_module", "shared_library"): - shared_library_prefix = generator_default_variables["SHARED_LIB_PREFIX"] - default_product_name = RemovePrefix( - default_product_name, shared_library_prefix - ) - default_product_prefix = shared_library_prefix - default_product_ext = generator_default_variables["SHARED_LIB_SUFFIX"] - - elif target_type != "executable": - print( - "ERROR: What output file should be generated?", - "type", - target_type, - "target", - target_name, - ) - - product_prefix = spec.get("product_prefix", default_product_prefix) - product_name = spec.get("product_name", default_product_name) - product_ext = spec.get("product_extension") - if product_ext: - product_ext = "." + product_ext - else: - product_ext = default_product_ext - - SetTargetProperty(output, cmake_target_name, "PREFIX", product_prefix) - SetTargetProperty( - output, - cmake_target_name, - cmake_target_type.property_modifier + "_OUTPUT_NAME", - product_name, - ) - SetTargetProperty(output, cmake_target_name, "SUFFIX", product_ext) - - # Make the output of this target referenceable as a source.
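A worked example of how the three properties just set recombine into the on-disk file name, assuming the usual Unix defaults (prefix "lib", suffixes ".so"/".a"); the code below performs exactly this concatenation:

    def output_basename(prefix, name, ext):
        # PREFIX + *_OUTPUT_NAME + SUFFIX, as CMake recombines the properties
        # when naming the produced file.
        return prefix + name + ext

    assert output_basename("lib", "foo", ".so") == "libfoo.so"  # shared_library
    assert output_basename("lib", "foo", ".a") == "libfoo.a"    # static_library
    assert output_basename("", "foo", "") == "foo"              # executable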
- cmake_target_output_basename = product_prefix + product_name + product_ext - cmake_target_output = os.path.join( - cmake_target_output_directory, cmake_target_output_basename - ) - SetFileProperty(output, cmake_target_output, "GENERATED", ["TRUE"], "") - - # Includes - includes = config.get("include_dirs") - if includes: - # This (target include directories) is what requires CMake 2.8.8 - includes_name = cmake_target_name + "__include_dirs" - SetVariableList( - output, - includes_name, - [ - NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include) - for include in includes - ], - ) - output.write("set_property(TARGET ") - output.write(cmake_target_name) - output.write(" APPEND PROPERTY INCLUDE_DIRECTORIES ") - WriteVariable(output, includes_name, "") - output.write(")\n") - - # Defines - defines = config.get("defines") - if defines is not None: - SetTargetProperty( - output, cmake_target_name, "COMPILE_DEFINITIONS", defines, ";" - ) - - # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493 - # CMake currently does not have target C and CXX flags. - # So, instead of doing... - - # cflags_c = config.get('cflags_c') - # if cflags_c is not None: - # SetTargetProperty(output, cmake_target_name, - # 'C_COMPILE_FLAGS', cflags_c, ' ') - - # cflags_cc = config.get('cflags_cc') - # if cflags_cc is not None: - # SetTargetProperty(output, cmake_target_name, - # 'CXX_COMPILE_FLAGS', cflags_cc, ' ') - - # Instead we must... - cflags = config.get("cflags", []) - cflags_c = config.get("cflags_c", []) - cflags_cxx = config.get("cflags_cc", []) - if xcode_settings: - cflags = xcode_settings.GetCflags(config_to_use) - cflags_c = xcode_settings.GetCflagsC(config_to_use) - cflags_cxx = xcode_settings.GetCflagsCC(config_to_use) - # cflags_objc = xcode_settings.GetCflagsObjC(config_to_use) - # cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use) - - if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources): - SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", cflags, " ") - - elif c_sources and not (s_sources or cxx_sources): - flags = [] - flags.extend(cflags) - flags.extend(cflags_c) - SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", flags, " ") - - elif cxx_sources and not (s_sources or c_sources): - flags = [] - flags.extend(cflags) - flags.extend(cflags_cxx) - SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", flags, " ") - - else: - # TODO: This is broken, one cannot generally set properties on files, - # as other targets may require different properties on the same files. 
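The TODO above can be made concrete with a small sketch: CMake source-file properties are keyed by file alone, not by (target, file), so when two targets share a source the last-written flags win. Target names here are hypothetical:

    # One property slot per file is what CMake offers ...
    per_file_flags = {}

    def set_files_property(path, flags):
        per_file_flags[path] = flags  # last writer wins

    # ... but gyp would need one slot per (target, file) pair.
    set_files_property("shared/common.c", ["-DFOR_TARGET_A"])
    set_files_property("shared/common.c", ["-DFOR_TARGET_B"])
    assert per_file_flags["shared/common.c"] == ["-DFOR_TARGET_B"]  # A's flags lost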
- if s_sources and cflags: - SetFilesProperty(output, s_sources_name, "COMPILE_FLAGS", cflags, " ") - - if c_sources and (cflags or cflags_c): - flags = [] - flags.extend(cflags) - flags.extend(cflags_c) - SetFilesProperty(output, c_sources_name, "COMPILE_FLAGS", flags, " ") - - if cxx_sources and (cflags or cflags_cxx): - flags = [] - flags.extend(cflags) - flags.extend(cflags_cxx) - SetFilesProperty(output, cxx_sources_name, "COMPILE_FLAGS", flags, " ") - - # Linker flags - ldflags = config.get("ldflags") - if ldflags is not None: - SetTargetProperty(output, cmake_target_name, "LINK_FLAGS", ldflags, " ") - - # XCode settings - xcode_settings = config.get("xcode_settings", {}) - for xcode_setting, xcode_value in xcode_settings.items(): - SetTargetProperty( - output, - cmake_target_name, - "XCODE_ATTRIBUTE_%s" % xcode_setting, - xcode_value, - "" if isinstance(xcode_value, str) else " ", - ) - - # Note on Dependencies and Libraries: - # CMake wants to handle link order, resolving the link line up front. - # Gyp does not retain or enforce specifying enough information to do so. - # So do as other gyp generators and use --start-group and --end-group. - # Give CMake as little information as possible so that it doesn't mess it up. - - # Dependencies - rawDeps = spec.get("dependencies", []) - - static_deps = [] - shared_deps = [] - other_deps = [] - for rawDep in rawDeps: - dep_cmake_name = namer.CreateCMakeTargetName(rawDep) - dep_spec = target_dicts.get(rawDep, {}) - dep_target_type = dep_spec.get("type", None) - - if dep_target_type == "static_library": - static_deps.append(dep_cmake_name) - elif dep_target_type == "shared_library": - shared_deps.append(dep_cmake_name) - else: - other_deps.append(dep_cmake_name) - - # ensure all external dependencies are complete before internal dependencies - # extra_deps currently only depend on their own deps, so otherwise run early - if static_deps or shared_deps or other_deps: - for extra_dep in extra_deps: - output.write("add_dependencies(") - output.write(extra_dep) - output.write("\n") - for deps in (static_deps, shared_deps, other_deps): - for dep in gyp.common.uniquer(deps): - output.write(" ") - output.write(dep) - output.write("\n") - output.write(")\n") - - linkable = target_type in ("executable", "loadable_module", "shared_library") - other_deps.extend(extra_deps) - if other_deps or (not linkable and (static_deps or shared_deps)): - output.write("add_dependencies(") - output.write(cmake_target_name) - output.write("\n") - for dep in gyp.common.uniquer(other_deps): - output.write(" ") - output.write(dep) - output.write("\n") - if not linkable: - for deps in (static_deps, shared_deps): - for lib_dep in gyp.common.uniquer(deps): - output.write(" ") - output.write(lib_dep) - output.write("\n") - output.write(")\n") - - # Libraries - if linkable: - external_libs = [lib for lib in spec.get("libraries", []) if len(lib) > 0] - if external_libs or static_deps or shared_deps: - output.write("target_link_libraries(") - output.write(cmake_target_name) - output.write("\n") - if static_deps: - write_group = circular_libs and len(static_deps) > 1 and flavor != "mac" - if write_group: - output.write("-Wl,--start-group\n") - for dep in gyp.common.uniquer(static_deps): - output.write(" ") - output.write(dep) - output.write("\n") - if write_group: - output.write("-Wl,--end-group\n") - if shared_deps: - for dep in gyp.common.uniquer(shared_deps): - output.write(" ") - output.write(dep) - output.write("\n") - if external_libs: - for lib in
gyp.common.uniquer(external_libs): - output.write(' "') - output.write(RemovePrefix(lib, "$(SDKROOT)")) - output.write('"\n') - - output.write(")\n") - - UnsetVariable(output, "TOOLSET") - UnsetVariable(output, "TARGET") - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, config_to_use): - options = params["options"] - generator_flags = params["generator_flags"] - flavor = gyp.common.GetFlavor(params) - - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to cmake easier, cmake doesn't put anything here. - # Each Gyp configuration creates a different CMakeLists.txt file - # to avoid incompatibilities between Gyp and CMake configurations. - generator_dir = os.path.relpath(options.generator_output or ".") - - # output_dir: relative path from generator_dir to the build directory. - output_dir = generator_flags.get("output_dir", "out") - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.normpath(os.path.join(generator_dir, output_dir, config_to_use)) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - - output_file = os.path.join(toplevel_build, "CMakeLists.txt") - gyp.common.EnsureDirExists(output_file) - - output = open(output_file, "w") - output.write("cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n") - output.write("cmake_policy(VERSION 2.8.8)\n") - - gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1]) - output.write("project(") - output.write(project_target) - output.write(")\n") - - SetVariable(output, "configuration", config_to_use) - - ar = None - cc = None - cxx = None - - make_global_settings = data[gyp_file].get("make_global_settings", []) - build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) - for key, value in make_global_settings: - if key == "AR": - ar = os.path.join(build_to_top, value) - if key == "CC": - cc = os.path.join(build_to_top, value) - if key == "CXX": - cxx = os.path.join(build_to_top, value) - - ar = gyp.common.GetEnvironFallback(["AR_target", "AR"], ar) - cc = gyp.common.GetEnvironFallback(["CC_target", "CC"], cc) - cxx = gyp.common.GetEnvironFallback(["CXX_target", "CXX"], cxx) - - if ar: - SetVariable(output, "CMAKE_AR", ar) - if cc: - SetVariable(output, "CMAKE_C_COMPILER", cc) - if cxx: - SetVariable(output, "CMAKE_CXX_COMPILER", cxx) - - # The following appears to be as-yet undocumented. - # http://public.kitware.com/Bug/view.php?id=8392 - output.write("enable_language(ASM)\n") - # ASM-ATT does not support .S files. - # output.write('enable_language(ASM-ATT)\n') - - if cc: - SetVariable(output, "CMAKE_ASM_COMPILER", cc) - - SetVariable(output, "builddir", "${CMAKE_CURRENT_BINARY_DIR}") - SetVariable(output, "obj", "${builddir}/obj") - output.write("\n") - - # TODO: Undocumented/unsupported (the CMake Java generator depends on it). - # CMake by default names the object resulting from foo.c to be foo.c.o. - # Gyp traditionally names the object resulting from foo.c foo.o. - # This should be irrelevant, but some targets extract .o files from .a - # and depend on the name of the extracted .o files. - output.write("set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n") - output.write("set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n") - output.write("\n") - - # Force ninja to use rsp files. Otherwise link and ar lines can get too long, - # resulting in 'Argument list too long' errors. - # However, rsp files don't work correctly on Mac. 
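For context on the workaround below: a response file moves arguments out of argv and into a file the tool reads back, dodging the kernel's argument-size limit (the failure surfaces as E2BIG, "Argument list too long"). A rough sketch, with the file name and invocation purely illustrative:

    # Instead of passing thousands of object files on one command line ...
    objs = ["obj/part%05d.o" % i for i in range(10000)]
    with open("archive.rsp", "w") as f:
        f.write("\n".join(objs))
    # ... the tool reads them back from the response file, e.g.:
    #   ar crs libbig.a @archive.rsp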
- if flavor != "mac": - output.write("set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n") - output.write("\n") - - namer = CMakeNamer(target_list) - - # The list of targets upon which the 'all' target should depend. - # CMake has it's own implicit 'all' target, one is not created explicitly. - all_qualified_targets = set() - for build_file in params["build_files"]: - for qualified_target in gyp.common.AllTargets( - target_list, target_dicts, os.path.normpath(build_file) - ): - all_qualified_targets.add(qualified_target) - - for qualified_target in target_list: - if flavor == "mac": - gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) - spec = target_dicts[qualified_target] - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec) - - WriteTarget( - namer, - qualified_target, - target_dicts, - build_dir, - config_to_use, - options, - generator_flags, - all_qualified_targets, - flavor, - output, - ) - - output.close() - - -def PerformBuild(data, configurations, params): - options = params["options"] - generator_flags = params["generator_flags"] - - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to cmake easier, cmake doesn't put anything here. - generator_dir = os.path.relpath(options.generator_output or ".") - - # output_dir: relative path from generator_dir to the build directory. - output_dir = generator_flags.get("output_dir", "out") - - for config_name in configurations: - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.normpath( - os.path.join(generator_dir, output_dir, config_name) - ) - arguments = ["cmake", "-G", "Ninja"] - print("Generating [%s]: %s" % (config_name, arguments)) - subprocess.check_call(arguments, cwd=build_dir) - - arguments = ["ninja", "-C", build_dir] - print("Building [%s]: %s" % (config_name, arguments)) - subprocess.check_call(arguments) - - -def CallGenerateOutputForConfig(arglist): - # Ignore the interrupt signal so that the parent process catches it and - # kills all multiprocessing children. - signal.signal(signal.SIGINT, signal.SIG_IGN) - - target_list, target_dicts, data, params, config_name = arglist - GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) - - -def GenerateOutput(target_list, target_dicts, data, params): - user_config = params.get("generator_flags", {}).get("config", None) - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) - else: - config_names = target_dicts[target_list[0]]["configurations"] - if params["parallel"]: - try: - pool = multiprocessing.Pool(len(config_names)) - arglists = [] - for config_name in config_names: - arglists.append( - (target_list, target_dicts, data, params, config_name) - ) - pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt as e: - pool.terminate() - raise e - else: - for config_name in config_names: - GenerateOutputForConfig( - target_list, target_dicts, data, params, config_name - ) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py deleted file mode 100644 index f330a04dea4c5..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (c) 2016 Ben Noordhuis . All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import gyp.common -import gyp.xcode_emulation -import json -import os - -generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] -generator_filelist_paths = None -generator_supports_multiple_toolsets = True -generator_wants_sorted_dependencies = False - -# Lifted from make.py. The actual values don't matter much. -generator_default_variables = { - "CONFIGURATION_NAME": "$(BUILDTYPE)", - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni", - "PRODUCT_DIR": "$(builddir)", - "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", - "RULE_INPUT_EXT": "$(suffix $<)", - "RULE_INPUT_NAME": "$(notdir $<)", - "RULE_INPUT_PATH": "$(abspath $<)", - "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", - "SHARED_INTERMEDIATE_DIR": "$(obj)/gen", - "SHARED_LIB_PREFIX": "lib", - "STATIC_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", -} - - -def IsMac(params): - return "mac" == gyp.common.GetFlavor(params) - - -def CalculateVariables(default_variables, params): - default_variables.setdefault("OS", gyp.common.GetFlavor(params)) - - -def AddCommandsForTarget(cwd, target, params, per_config_commands): - output_dir = params["generator_flags"].get("output_dir", "out") - for configuration_name, configuration in target["configurations"].items(): - if IsMac(params): - xcode_settings = gyp.xcode_emulation.XcodeSettings(target) - cflags = xcode_settings.GetCflags(configuration_name) - cflags_c = xcode_settings.GetCflagsC(configuration_name) - cflags_cc = xcode_settings.GetCflagsCC(configuration_name) - else: - cflags = configuration.get("cflags", []) - cflags_c = configuration.get("cflags_c", []) - cflags_cc = configuration.get("cflags_cc", []) - - cflags_c = cflags + cflags_c - cflags_cc = cflags + cflags_cc - - defines = configuration.get("defines", []) - defines = ["-D" + s for s in defines] - - # TODO(bnoordhuis) Handle generated source files. - extensions = (".c", ".cc", ".cpp", ".cxx") - sources = [s for s in target.get("sources", []) if s.endswith(extensions)] - - def resolve(filename): - return os.path.abspath(os.path.join(cwd, filename)) - - # TODO(bnoordhuis) Handle generated header files. 
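The records assembled below follow the clang JSON compilation database convention: one object per translation unit, carrying "directory", "command" and "file" keys. A representative entry might look like this (paths and flags invented for the example):

    import json

    entry = {
        "directory": "out",
        "command": "cc -DNDEBUG -I/abs/include -O2 -c /abs/src/foo.c",
        "file": "/abs/src/foo.c",
    }
    print(json.dumps([entry], indent=2))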
- include_dirs = configuration.get("include_dirs", []) - include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")] - includes = ["-I" + resolve(s) for s in include_dirs] - - defines = gyp.common.EncodePOSIXShellList(defines) - includes = gyp.common.EncodePOSIXShellList(includes) - cflags_c = gyp.common.EncodePOSIXShellList(cflags_c) - cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc) - - commands = per_config_commands.setdefault(configuration_name, []) - for source in sources: - file = resolve(source) - isc = source.endswith(".c") - cc = "cc" if isc else "c++" - cflags = cflags_c if isc else cflags_cc - command = " ".join( - ( - cc, - defines, - includes, - cflags, - "-c", - gyp.common.EncodePOSIXShellArgument(file), - ) - ) - commands.append(dict(command=command, directory=output_dir, file=file)) - - -def GenerateOutput(target_list, target_dicts, data, params): - per_config_commands = {} - for qualified_target, target in target_dicts.items(): - build_file, target_name, toolset = gyp.common.ParseQualifiedTarget( - qualified_target - ) - if IsMac(params): - settings = data[build_file] - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target) - cwd = os.path.dirname(build_file) - AddCommandsForTarget(cwd, target, params, per_config_commands) - - output_dir = params["generator_flags"].get("output_dir", "out") - for configuration_name, commands in per_config_commands.items(): - filename = os.path.join(output_dir, configuration_name, "compile_commands.json") - gyp.common.EnsureDirExists(filename) - fp = open(filename, "w") - json.dump(commands, fp=fp, indent=0, check_circular=False) - - -def PerformBuild(data, configurations, params): - pass diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py deleted file mode 100644 index 46f68e038418f..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from __future__ import print_function - -import os -import gyp -import gyp.common -import gyp.msvs_emulation -import json - -generator_supports_multiple_toolsets = True - -generator_wants_static_library_dependencies_adjusted = False - -generator_filelist_paths = {} - -generator_default_variables = {} -for dirname in [ - "INTERMEDIATE_DIR", - "SHARED_INTERMEDIATE_DIR", - "PRODUCT_DIR", - "LIB_DIR", - "SHARED_LIB_DIR", -]: - # Some gyp steps fail if these are empty(!). 
- generator_default_variables[dirname] = "dir" -for unused in [ - "RULE_INPUT_PATH", - "RULE_INPUT_ROOT", - "RULE_INPUT_NAME", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "STATIC_LIB_PREFIX", - "STATIC_LIB_SUFFIX", - "SHARED_LIB_PREFIX", - "SHARED_LIB_SUFFIX", - "CONFIGURATION_NAME", -]: - generator_default_variables[unused] = "" - - -def CalculateVariables(default_variables, params): - generator_flags = params.get("generator_flags", {}) - for key, val in generator_flags.items(): - default_variables.setdefault(key, val) - default_variables.setdefault("OS", gyp.common.GetFlavor(params)) - - flavor = gyp.common.GetFlavor(params) - if flavor == "win": - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get("generator_flags", {}) - if generator_flags.get("adjust_static_libraries", False): - global generator_wants_static_library_dependencies_adjusted - generator_wants_static_library_dependencies_adjusted = True - - toplevel = params["options"].toplevel_dir - generator_dir = os.path.relpath(params["options"].generator_output or ".") - # output_dir: relative path from generator_dir to the build directory. - output_dir = generator_flags.get("output_dir", "out") - qualified_out_dir = os.path.normpath( - os.path.join(toplevel, generator_dir, output_dir, "gypfiles") - ) - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": toplevel, - "qualified_out_dir": qualified_out_dir, - } - - -def GenerateOutput(target_list, target_dicts, data, params): - # Map of target -> list of targets it depends on. - edges = {} - - # Queue of targets to visit. - targets_to_visit = target_list[:] - - while len(targets_to_visit) > 0: - target = targets_to_visit.pop() - if target in edges: - continue - edges[target] = [] - - for dep in target_dicts[target].get("dependencies", []): - edges[target].append(dep) - targets_to_visit.append(dep) - - try: - filepath = params["generator_flags"]["output_dir"] - except KeyError: - filepath = "." - filename = os.path.join(filepath, "dump.json") - f = open(filename, "w") - json.dump(edges, f) - f.close() - print("Wrote json to %s." % filename) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py deleted file mode 100644 index 4bd49725dc1e8..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py +++ /dev/null @@ -1,470 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""GYP backend that generates Eclipse CDT settings files. - -This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML -files that can be imported into an Eclipse CDT project. The XML file contains a -list of include paths and symbols (i.e. defines). - -Because a full .cproject definition is not created by this generator, it's not -possible to properly define the include dirs and symbols for each file -individually. Instead, one set of includes/symbols is generated for the entire -project. This works fairly well (and is a vast improvement in general), but may -still result in a few indexer issues here and there. 
- -This generator has no automated tests, so expect it to be broken. -""" - -from xml.sax.saxutils import escape -import os.path -import subprocess -import gyp -import gyp.common -import gyp.msvs_emulation -import shlex -import xml.etree.cElementTree as ET - -PY3 = bytes != str - -generator_wants_static_library_dependencies_adjusted = False - -generator_default_variables = {} - -for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]: - # Some gyp steps fail if these are empty(!), so we convert them to variables - generator_default_variables[dirname] = "$" + dirname - -for unused in [ - "RULE_INPUT_PATH", - "RULE_INPUT_ROOT", - "RULE_INPUT_NAME", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "STATIC_LIB_PREFIX", - "STATIC_LIB_SUFFIX", - "SHARED_LIB_PREFIX", - "SHARED_LIB_SUFFIX", - "CONFIGURATION_NAME", -]: - generator_default_variables[unused] = "" - -# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as -# part of the path when dealing with generated headers. This value will be -# replaced dynamically for each configuration. -generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR" - - -def CalculateVariables(default_variables, params): - generator_flags = params.get("generator_flags", {}) - for key, val in generator_flags.items(): - default_variables.setdefault(key, val) - flavor = gyp.common.GetFlavor(params) - default_variables.setdefault("OS", flavor) - if flavor == "win": - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get("generator_flags", {}) - if generator_flags.get("adjust_static_libraries", False): - global generator_wants_static_library_dependencies_adjusted - generator_wants_static_library_dependencies_adjusted = True - - -def GetAllIncludeDirectories( - target_list, - target_dicts, - shared_intermediate_dirs, - config_name, - params, - compiler_path, -): - """Calculate the set of include directories to be used. - - Returns: - A list including all the include_dir's specified for every target followed - by any include directories that were added as cflag compiler options. - """ - - gyp_includes_set = set() - compiler_includes_list = [] - - # Find compiler's default include dirs. - if compiler_path: - command = shlex.split(compiler_path) - command.extend(["-E", "-xc++", "-v", "-"]) - proc = subprocess.Popen( - args=command, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - output = proc.communicate()[1] - if PY3: - output = output.decode("utf-8") - # Extract the list of include dirs from the output, which has this format: - # ... - # #include "..." search starts here: - # #include <...> search starts here: - # /usr/include/c++/4.6 - # /usr/local/include - # End of search list. - # ... 
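The same probing technique as the code below, as a self-contained sketch; it assumes a gcc/clang-compatible driver on PATH, and the parsing mirrors the output format quoted above:

    import shlex
    import subprocess

    def default_include_dirs(compiler="c++"):
        """Ask the compiler for its built-in header search dirs via -E -v."""
        cmd = shlex.split(compiler) + ["-E", "-xc++", "-v", "-"]
        proc = subprocess.run(cmd, input=b"", capture_output=True)
        dirs, collecting = [], False
        for line in proc.stderr.decode("utf-8", "replace").splitlines():
            if line.startswith("#include"):
                collecting = True  # entering the search-list section
            elif line.startswith("End of search list."):
                break
            elif collecting:
                dirs.append(line.strip())
        return dirs

    print(default_include_dirs())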
- in_include_list = False - for line in output.splitlines(): - if line.startswith("#include"): - in_include_list = True - continue - if line.startswith("End of search list."): - break - if in_include_list: - include_dir = line.strip() - if include_dir not in compiler_includes_list: - compiler_includes_list.append(include_dir) - - flavor = gyp.common.GetFlavor(params) - if flavor == "win": - generator_flags = params.get("generator_flags", {}) - for target_name in target_list: - target = target_dicts[target_name] - if config_name in target["configurations"]: - config = target["configurations"][config_name] - - # Look for any include dirs that were explicitly added via cflags. This - # may be done in gyp files to force certain includes to come at the end. - # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and - # remove this. - if flavor == "win": - msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) - cflags = msvs_settings.GetCflags(config_name) - else: - cflags = config["cflags"] - for cflag in cflags: - if cflag.startswith("-I"): - include_dir = cflag[2:] - if include_dir not in compiler_includes_list: - compiler_includes_list.append(include_dir) - - # Find standard gyp include dirs. - if "include_dirs" in config: - include_dirs = config["include_dirs"] - for shared_intermediate_dir in shared_intermediate_dirs: - for include_dir in include_dirs: - include_dir = include_dir.replace( - "$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir - ) - if not os.path.isabs(include_dir): - base_dir = os.path.dirname(target_name) - - include_dir = base_dir + "/" + include_dir - include_dir = os.path.abspath(include_dir) - - gyp_includes_set.add(include_dir) - - # Generate a list that has all the include dirs. - all_includes_list = list(gyp_includes_set) - all_includes_list.sort() - for compiler_include in compiler_includes_list: - if compiler_include not in gyp_includes_set: - all_includes_list.append(compiler_include) - - # All done. - return all_includes_list - - -def GetCompilerPath(target_list, data, options): - """Determine a command that can be used to invoke the compiler. - - Returns: - If this is a gyp project that has explicit make settings, try to determine - the compiler from that. Otherwise, see if a compiler was specified via the - CC_target environment variable. - """ - # First, see if the compiler is configured in make's settings. - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings_dict = data[build_file].get("make_global_settings", {}) - for key, value in make_global_settings_dict: - if key in ["CC", "CXX"]: - return os.path.join(options.toplevel_dir, value) - - # Check to see if the compiler was specified as an environment variable. - for key in ["CC_target", "CC", "CXX"]: - compiler = os.environ.get(key) - if compiler: - return compiler - - return "gcc" - - -def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path): - """Calculate the defines for a project. - - Returns: - A dict that includes explicit defines declared in gyp files along with all - of the default defines that the compiler uses. - """ - - # Get defines declared in the gyp files. 
- all_defines = {} - flavor = gyp.common.GetFlavor(params) - if flavor == "win": - generator_flags = params.get("generator_flags", {}) - for target_name in target_list: - target = target_dicts[target_name] - - if flavor == "win": - msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) - extra_defines = msvs_settings.GetComputedDefines(config_name) - else: - extra_defines = [] - if config_name in target["configurations"]: - config = target["configurations"][config_name] - target_defines = config["defines"] - else: - target_defines = [] - for define in target_defines + extra_defines: - split_define = define.split("=", 1) - if len(split_define) == 1: - split_define.append("1") - if split_define[0].strip() in all_defines: - # Already defined - continue - all_defines[split_define[0].strip()] = split_define[1].strip() - # Get default compiler defines (if possible). - if flavor == "win": - return all_defines # Default defines already processed in the loop above. - if compiler_path: - command = shlex.split(compiler_path) - command.extend(["-E", "-dM", "-"]) - cpp_proc = subprocess.Popen( - args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE - ) - cpp_output = cpp_proc.communicate()[0] - if PY3: - cpp_output = cpp_output.decode("utf-8") - cpp_lines = cpp_output.split("\n") - for cpp_line in cpp_lines: - if not cpp_line.strip(): - continue - cpp_line_parts = cpp_line.split(" ", 2) - key = cpp_line_parts[1] - if len(cpp_line_parts) >= 3: - val = cpp_line_parts[2] - else: - val = "1" - all_defines[key] = val - - return all_defines - - -def WriteIncludePaths(out, eclipse_langs, include_dirs): - """Write the includes section of a CDT settings export file.""" - - out.write( - '
<section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.IncludePaths">\n' - ) - out.write(' <language name="holder for library settings"></language>\n') - for lang in eclipse_langs: - out.write(' <language name="%s">\n' % lang) - for include_dir in include_dirs: - out.write( - ' <includepath workspace_path="false">%s</includepath>\n' - % include_dir - ) - out.write(" </language>\n") - out.write(" </section>
\n") - - -def WriteMacros(out, eclipse_langs, defines): - """Write the macros section of a CDT settings export file.""" - - out.write( - '
<section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">\n' - ) - out.write(' <language name="holder for library settings"></language>\n') - for lang in eclipse_langs: - out.write(' <language name="%s">\n' % lang) - for key in sorted(defines): - out.write( - " <macro><name>%s</name><value>%s</value></macro>\n" - % (escape(key), escape(defines[key])) - ) - out.write(" </language>\n") - out.write(" </section>
\n") - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): - options = params["options"] - generator_flags = params.get("generator_flags", {}) - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the - # SHARED_INTERMEDIATE_DIR. Include both possible locations. - shared_intermediate_dirs = [ - os.path.join(toplevel_build, "obj", "gen"), - os.path.join(toplevel_build, "gen"), - ] - - GenerateCdtSettingsFile( - target_list, - target_dicts, - data, - params, - config_name, - os.path.join(toplevel_build, "eclipse-cdt-settings.xml"), - options, - shared_intermediate_dirs, - ) - GenerateClasspathFile( - target_list, - target_dicts, - options.toplevel_dir, - toplevel_build, - os.path.join(toplevel_build, "eclipse-classpath.xml"), - ) - - -def GenerateCdtSettingsFile( - target_list, - target_dicts, - data, - params, - config_name, - out_name, - options, - shared_intermediate_dirs, -): - gyp.common.EnsureDirExists(out_name) - with open(out_name, "w") as out: - out.write('\n') - out.write("\n") - - eclipse_langs = [ - "C++ Source File", - "C Source File", - "Assembly Source File", - "GNU C++", - "GNU C", - "Assembly", - ] - compiler_path = GetCompilerPath(target_list, data, options) - include_dirs = GetAllIncludeDirectories( - target_list, - target_dicts, - shared_intermediate_dirs, - config_name, - params, - compiler_path, - ) - WriteIncludePaths(out, eclipse_langs, include_dirs) - defines = GetAllDefines( - target_list, target_dicts, data, config_name, params, compiler_path - ) - WriteMacros(out, eclipse_langs, defines) - - out.write("\n") - - -def GenerateClasspathFile( - target_list, target_dicts, toplevel_dir, toplevel_build, out_name -): - """Generates a classpath file suitable for symbol navigation and code - completion of Java code (such as in Android projects) by finding all - .java and .jar files used as action inputs.""" - gyp.common.EnsureDirExists(out_name) - result = ET.Element("classpath") - - def AddElements(kind, paths): - # First, we need to normalize the paths so they are all relative to the - # toplevel dir. - rel_paths = set() - for path in paths: - if os.path.isabs(path): - rel_paths.add(os.path.relpath(path, toplevel_dir)) - else: - rel_paths.add(path) - - for path in sorted(rel_paths): - entry_element = ET.SubElement(result, "classpathentry") - entry_element.set("kind", kind) - entry_element.set("path", path) - - AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir)) - AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir)) - # Include the standard JRE container and a dummy out folder - AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"]) - # Include a dummy out folder so that Eclipse doesn't use the default /bin - # folder in the root of the project. 
- AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")]) - - ET.ElementTree(result).write(out_name) - - -def GetJavaJars(target_list, target_dicts, toplevel_dir): - """Generates a sequence of all .jars used as inputs.""" - for target_name in target_list: - target = target_dicts[target_name] - for action in target.get("actions", []): - for input_ in action["inputs"]: - if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"): - if os.path.isabs(input_): - yield input_ - else: - yield os.path.join(os.path.dirname(target_name), input_) - - -def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir): - """Generates a sequence of all likely java package root directories.""" - for target_name in target_list: - target = target_dicts[target_name] - for action in target.get("actions", []): - for input_ in action["inputs"]: - if os.path.splitext(input_)[1] == ".java" and not input_.startswith( - "$" - ): - dir_ = os.path.dirname( - os.path.join(os.path.dirname(target_name), input_) - ) - # If there is a parent 'src' or 'java' folder, navigate up to it - - # these are canonical package root names in Chromium. This will - # break if 'src' or 'java' exists in the package structure. This - # could be further improved by inspecting the java file for the - # package name if this proves to be too fragile in practice. - parent_search = dir_ - while os.path.basename(parent_search) not in ["src", "java"]: - parent_search, _ = os.path.split(parent_search) - if not parent_search or parent_search == toplevel_dir: - # Didn't find a known root, just return the original path - yield dir_ - break - else: - yield parent_search - - -def GenerateOutput(target_list, target_dicts, data, params): - """Generate an XML settings file that can be imported into a CDT project.""" - - if params["options"].generator_output: - raise NotImplementedError("--generator_output not implemented for eclipse") - - user_config = params.get("generator_flags", {}).get("config", None) - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) - else: - config_names = target_dicts[target_list[0]]["configurations"] - for config_name in config_names: - GenerateOutputForConfig( - target_list, target_dicts, data, params, config_name - ) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py deleted file mode 100644 index 4171704c47a4b..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypd output module - -This module produces gyp input as its output. Output files are given the -.gypd extension to avoid overwriting the .gyp files that they are generated -from. Internal references to .gyp files (such as those found in -"dependencies" sections) are not adjusted to point to .gypd files instead; -unlike other paths, which are relative to the .gyp or .gypd file, such paths -are relative to the directory from which gyp was run to create the .gypd file. - -This generator module is intended to be a sample and a debugging aid, hence -the "d" for "debug" in .gypd. 
It is useful to inspect the results of the -various merges, expansions, and conditional evaluations performed by gyp -and to see a representation of what would be fed to a generator module. - -It's not advisable to rename .gypd files produced by this module to .gyp, -because they will have all merges, expansions, and evaluations already -performed and the relevant constructs not present in the output; paths to -dependencies may be wrong; and various sections that do not belong in .gyp -files such as "included_files" and "*_excluded" will be present. -Output will also be stripped of comments. This is not intended to be a -general-purpose gyp pretty-printer; for that, you probably just want to -run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip -comments but won't do all of the other things done to this module's output. - -The specific formatting of the output generated by this module is subject -to change. -""" - - -import gyp.common -import pprint - - -# These variables should just be spit back out as variable references. -_generator_identity_variables = [ - "CONFIGURATION_NAME", - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "INTERMEDIATE_DIR", - "LIB_DIR", - "PRODUCT_DIR", - "RULE_INPUT_ROOT", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "RULE_INPUT_NAME", - "RULE_INPUT_PATH", - "SHARED_INTERMEDIATE_DIR", - "SHARED_LIB_DIR", - "SHARED_LIB_PREFIX", - "SHARED_LIB_SUFFIX", - "STATIC_LIB_PREFIX", - "STATIC_LIB_SUFFIX", -] - -# gypd doesn't define a default value for OS like many other generator -# modules. Specify "-D OS=whatever" on the command line to provide a value. -generator_default_variables = {} - -# gypd supports multiple toolsets -generator_supports_multiple_toolsets = True - -# TODO(mark): This always uses <, which isn't right. The input module should -# notify the generator to tell it which phase it is operating in, and this -# module should use < for the early phase and then switch to > for the late -# phase. Bonus points for carrying @ back into the output too. -for v in _generator_identity_variables: - generator_default_variables[v] = "<(%s)" % v - - -def GenerateOutput(target_list, target_dicts, data, params): - output_files = {} - for qualified_target in target_list: - [input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2] - - if input_file[-4:] != ".gyp": - continue - input_file_stem = input_file[:-4] - output_file = input_file_stem + params["options"].suffix + ".gypd" - - output_files[output_file] = output_files.get(output_file, input_file) - - for output_file, input_file in output_files.items(): - output = open(output_file, "w") - pprint.pprint(data[input_file], output) - output.close() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py deleted file mode 100644 index 2d8aba5d1c19e..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypsh output module - -gypsh is a GYP shell. It's not really a generator per se. All it does is -fire up an interactive Python session with a few local variables set to the -variables passed to the generator.
Like gypd, it's intended as a debugging -aid, to facilitate the exploration of .gyp structures after being processed -by the input module. - -The expected usage is "gyp -f gypsh -D OS=desired_os". -""" - - -import code -import sys - - -# All of this stuff about generator variables was lovingly ripped from gypd.py. -# That module has a much better description of what's going on and why. -_generator_identity_variables = [ - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "INTERMEDIATE_DIR", - "PRODUCT_DIR", - "RULE_INPUT_ROOT", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "RULE_INPUT_NAME", - "RULE_INPUT_PATH", - "SHARED_INTERMEDIATE_DIR", -] - -generator_default_variables = {} - -for v in _generator_identity_variables: - generator_default_variables[v] = "<(%s)" % v - - -def GenerateOutput(target_list, target_dicts, data, params): - locals = { - "target_list": target_list, - "target_dicts": target_dicts, - "data": data, - } - - # Use a banner that looks like the stock Python one and like what - # code.interact uses by default, but tack on something to indicate what - # locals are available, and identify gypsh. - banner = "Python %s on %s\nlocals.keys() = %s\ngypsh" % ( - sys.version, - sys.platform, - repr(sorted(locals.keys())), - ) - - code.interact(banner, local=locals) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py deleted file mode 100644 index d163ae3135a7f..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py +++ /dev/null @@ -1,2514 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This is all roughly based on the Makefile system used by the Linux -# kernel, but is a non-recursive make -- we put the entire dependency -# graph in front of make and let it figure it out. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level Makefile. This means that all -# variables in .mk-files clobber one another. Be careful to use := -# where appropriate for immediate evaluation, and similarly to watch -# that you're not relying on a variable value to last between different -# .mk files. -# -# TODOs: -# -# Global settings and utility functions are currently stuffed in the -# toplevel Makefile. It may make sense to generate some .mk files on -# the side to keep the files readable. - -from __future__ import print_function - -import os -import re -import subprocess -import gyp -import gyp.common -import gyp.xcode_emulation -from gyp.common import GetEnvironFallback - -import hashlib - -generator_default_variables = { - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "SHARED_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni", - "SHARED_INTERMEDIATE_DIR": "$(obj)/gen", - "PRODUCT_DIR": "$(builddir)", - "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", # This gets expanded by Python. - "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", # This gets expanded by Python. - "RULE_INPUT_PATH": "$(abspath $<)", - "RULE_INPUT_EXT": "$(suffix $<)", - "RULE_INPUT_NAME": "$(notdir $<)", - "CONFIGURATION_NAME": "$(BUILDTYPE)", -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - -# Request sorted dependencies in the order from dependents to dependencies. 
-generator_wants_sorted_dependencies = False - -# Placates pylint. -generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] -generator_filelist_paths = None - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - flavor = gyp.common.GetFlavor(params) - if flavor == "mac": - default_variables.setdefault("OS", "mac") - default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib") - default_variables.setdefault( - "SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"] - ) - default_variables.setdefault( - "LIB_DIR", generator_default_variables["PRODUCT_DIR"] - ) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Make generator. - import gyp.generator.xcode as xcode_generator - - global generator_additional_non_configuration_keys - generator_additional_non_configuration_keys = getattr( - xcode_generator, "generator_additional_non_configuration_keys", [] - ) - global generator_additional_path_sections - generator_additional_path_sections = getattr( - xcode_generator, "generator_additional_path_sections", [] - ) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr( - xcode_generator, "generator_extra_sources_for_rules", [] - ) - COMPILABLE_EXTENSIONS.update({".m": "objc", ".mm": "objcxx"}) - else: - operating_system = flavor - if flavor == "android": - operating_system = "linux" # Keep this legacy behavior for now. - default_variables.setdefault("OS", operating_system) - if flavor == "aix": - default_variables.setdefault("SHARED_LIB_SUFFIX", ".a") - else: - default_variables.setdefault("SHARED_LIB_SUFFIX", ".so") - default_variables.setdefault("SHARED_LIB_DIR", "$(builddir)/lib.$(TOOLSET)") - default_variables.setdefault("LIB_DIR", "$(obj).$(TOOLSET)") - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get("generator_flags", {}) - android_ndk_version = generator_flags.get("android_ndk_version", None) - # Android NDK requires a strict link order. - if android_ndk_version: - global generator_wants_sorted_dependencies - generator_wants_sorted_dependencies = True - - output_dir = params["options"].generator_output or params["options"].toplevel_dir - builddir_name = generator_flags.get("output_dir", "out") - qualified_out_dir = os.path.normpath( - os.path.join(output_dir, builddir_name, "gypfiles") - ) - - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": params["options"].toplevel_dir, - "qualified_out_dir": qualified_out_dir, - } - - -# The .d checking code below uses these functions: -# wildcard, sort, foreach, shell, wordlist -# wildcard can handle spaces, the rest can't. -# Since I could find no way to make foreach work with spaces in filenames -# correctly, the .d files have spaces replaced with another character. The .d -# file for -# Chromium\ Framework.framework/foo -# is for example -# out/Release/.deps/out/Release/Chromium?Framework.framework/foo -# This is the replacement character. -SPACE_REPLACEMENT = "?" 
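A Python-side mirror of the make-level $(subst ...) helpers defined in SHARED_HEADER below, sketching the round trip (the helper names are mine, not the generator's):

    SPACE_REPLACEMENT = "?"

    def replace_spaces(path):
        return path.replace(" ", SPACE_REPLACEMENT)

    def unreplace_spaces(path):
        return path.replace(SPACE_REPLACEMENT, " ")

    p = "out/Release/Chromium Framework.framework/foo"
    assert replace_spaces(p) == "out/Release/Chromium?Framework.framework/foo"
    # Lossless as long as '?' never appears in real paths.
    assert unreplace_spaces(replace_spaces(p)) == p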
- - -LINK_COMMANDS_LINUX = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) -o $@ $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,--start-group $(LD_INPUTS) $(LIBS) -Wl,--end-group - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. -# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -o $@ -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -o $@ -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -""" # noqa: E501 - -LINK_COMMANDS_MAC = """\ -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" # noqa: E501 - -LINK_COMMANDS_ANDROID = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -quiet_cmd_link_host = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) -cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. 
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" # noqa: E501 - - -LINK_COMMANDS_AIX = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" # noqa: E501 - - -LINK_COMMANDS_OS390 = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) -Wl,DLL - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -Wl,DLL -""" # noqa: E501 - - -# Header of toplevel Makefile. -# This should go into the build tree, but it's easier to keep it here for now. -SHARED_HEADER = ( - """\ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := %(srcdir)s -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= %(builddir)s - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= %(default_configuration)s - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. 
-obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - -%(make_global_settings)s - -CC.target ?= %(CC.target)s -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= %(CXX.target)s -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= %(LINK.target)s -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= %(CC.host)s -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= %(CXX.host)s -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= %(LINK.host)s -LDFLAGS.host ?= -AR.host ?= %(AR.host)s - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),""" - + SPACE_REPLACEMENT - + """,$1) -unreplace_spaces = $(subst """ - + SPACE_REPLACEMENT - + """,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = %(makedep_args)s -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \\ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters.""" - r""" -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef -""" - """ -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. 
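# [Editor's aside: not part of the deleted file above.] A rough Python model
# of the two depfile fixups that the fixup_dep define above performs with sed
# (hypothetical helper; assumes a single-rule .d file, as in the comment):
def fixup_dep(raw, target_path):
    header, prereq_text = raw.split(":", 1)
    # (1) Restore the full path that ccache/distcc dropped from the .o rule.
    fixed = raw.replace(header, target_path, 1)
    # (2) Append an empty "DEP:" rule per prerequisite, so a deleted header
    # is treated as a phony target instead of breaking the build.
    prereqs = prereq_text.replace("\\\n", " ").split()
    return fixed + "".join(dep + ":\n" for dep in prereqs)

assert fixup_dep("foobar.o: DEP1 \\\nDEP2\n", "path/to/foobar.o") == (
    "path/to/foobar.o: DEP1 \\\nDEP2\nDEP1:\nDEP2:\n"
)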
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) -o $@ $< $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) -o $@ $< $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c
-%(extra_commands)s
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
-
-%(link_commands)s
-""" # noqa: E501
-    r"""
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
-"""
-    """
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-#   arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
-#               $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain """
-    + SPACE_REPLACEMENT
-    + """ instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
-                       $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-#   $? -- new prerequisites
-#   $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
-  @E=0;\\
-  for p in $(POSTBUILDS); do\\
-    eval $$p;\\
-    E=$$?;\\
-    if [ $$E -ne 0 ]; then\\
-      break;\\
-    fi;\\
-  done;\\
-  if [ $$E -ne 0 ]; then\\
-    rm -rf "$@";\\
-    exit $$E;\\
-  fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains """
-    + SPACE_REPLACEMENT
-    + """ for
-# spaces already and dirx strips the """
-    + SPACE_REPLACEMENT
-    + """ characters.
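# [Editor's aside: not part of the deleted file above.] command_changed above
# leans on a make idiom: since make has no string-equality function, two
# strings are considered equal iff substituting each into the other leaves
# the empty string both ways. The same test, sketched directly in Python:
def commands_match(old_cmd, new_cmd):
    return (old_cmd.replace(new_cmd, "") == ""
            and new_cmd.replace(old_cmd, "") == "")

assert commands_match("gcc -c foo.c", "gcc -c foo.c")
assert not commands_match("gcc -c foo.c", "gcc -O2 -c foo.c")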
-define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "%(default_target)s" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: %(default_target)s -%(default_target)s: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -""" # noqa: E501 -) - -SHARED_HEADER_MAC_COMMANDS = """ -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" -""" # noqa: E501 - - -def WriteRootHeaderSuffixRules(writer): - extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower) - - writer.write("# Suffix rules, putting all outputs into $(obj).\n") - for ext in extensions: - writer.write("$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n" % ext) - writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) - - writer.write("\n# Try building from generated source, too.\n") - for ext in extensions: - writer.write( - "$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n" % ext - ) - writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) - writer.write("\n") - for ext in extensions: - writer.write("$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n" % ext) - writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) - writer.write("\n") - - -SHARED_HEADER_SUFFIX_RULES_COMMENT1 = """\ -# Suffix rules, putting all outputs into $(obj). -""" - - -SHARED_HEADER_SUFFIX_RULES_COMMENT2 = """\ -# Try building from generated source, too. -""" - - -SHARED_FOOTER = """\ -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. 
$(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -# Maps every compilable file extension to the do_cmd that compiles it. -COMPILABLE_EXTENSIONS = { - ".c": "cc", - ".cc": "cxx", - ".cpp": "cxx", - ".cxx": "cxx", - ".s": "cc", - ".S": "cc", -} - - -def Compilable(filename): - """Return true if the file is compilable (should be in OBJS).""" - for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS): - if res: - return True - return False - - -def Linkable(filename): - """Return true if the file is linkable (should be on the link line).""" - return filename.endswith(".o") - - -def Target(filename): - """Translate a compilable filename to its .o target.""" - return os.path.splitext(filename)[0] + ".o" - - -def EscapeShellArgument(s): - """Quotes an argument so that it will be interpreted literally by a POSIX - shell. Taken from - http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python - """ - return "'" + s.replace("'", "'\\''") + "'" - - -def EscapeMakeVariableExpansion(s): - """Make has its own variable expansion syntax using $. We must escape it for - string to be interpreted literally.""" - return s.replace("$", "$$") - - -def EscapeCppDefine(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = EscapeShellArgument(s) - s = EscapeMakeVariableExpansion(s) - # '#' characters must be escaped even embedded in a string, else Make will - # treat it as the start of a comment. - return s.replace("#", r"\#") - - -def QuoteIfNecessary(string): - """TODO: Should this ideally be replaced with one or more of the above - functions?""" - if '"' in string: - string = '"' + string.replace('"', '\\"') + '"' - return string - - -def StringToMakefileVariable(string): - """Convert a string to a value that is acceptable as a make variable name.""" - return re.sub("[^a-zA-Z0-9_]", "_", string) - - -srcdir_prefix = "" - - -def Sourceify(path): - """Convert a path to its source directory form.""" - if "$(" in path: - return path - if os.path.isabs(path): - return path - return srcdir_prefix + path - - -def QuoteSpaces(s, quote=r"\ "): - return s.replace(" ", quote) - - -def SourceifyAndQuoteSpaces(path): - """Convert a path to its source directory form and quote spaces.""" - return QuoteSpaces(Sourceify(path)) - - -# Map from qualified target to path to output. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class MakefileWriter(object): - """MakefileWriter packages up the writing of one target-specific foobar.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def __init__(self, generator_flags, flavor): - self.generator_flags = generator_flags - self.flavor = flavor - - self.suffix_rules_srcdir = {} - self.suffix_rules_objdir1 = {} - self.suffix_rules_objdir2 = {} - - # Generate suffix rules for all compilable extensions. - for ext in COMPILABLE_EXTENSIONS.keys(): - # Suffix rules for source folder. 
- self.suffix_rules_srcdir.update( - { - ext: ( - """\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD -\t@$(call do_cmd,%s,1) -""" - % (ext, COMPILABLE_EXTENSIONS[ext]) - ) - } - ) - - # Suffix rules for generated source files. - self.suffix_rules_objdir1.update( - { - ext: ( - """\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD -\t@$(call do_cmd,%s,1) -""" - % (ext, COMPILABLE_EXTENSIONS[ext]) - ) - } - ) - self.suffix_rules_objdir2.update( - { - ext: ( - """\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD -\t@$(call do_cmd,%s,1) -""" - % (ext, COMPILABLE_EXTENSIONS[ext]) - ) - } - ) - - def Write( - self, qualified_target, base_path, output_filename, spec, configs, part_of_all - ): - """The main entry point: writes a .mk file for a single target. - - Arguments: - qualified_target: target we're generating - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - """ - gyp.common.EnsureDirExists(output_filename) - - self.fp = open(output_filename, "w") - - self.fp.write(header) - - self.qualified_target = qualified_target - self.path = base_path - self.target = spec["target_name"] - self.type = spec["type"] - self.toolset = spec["toolset"] - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - if self.flavor == "mac": - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - else: - self.xcode_settings = None - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - extra_link_deps = [] - extra_mac_bundle_resources = [] - mac_bundle_deps = [] - - if self.is_mac_bundle: - self.output = self.ComputeMacBundleOutput(spec) - self.output_binary = self.ComputeMacBundleBinaryOutput(spec) - else: - self.output = self.output_binary = self.ComputeOutput(spec) - - self.is_standalone_static_library = bool( - spec.get("standalone_static_library", 0) - ) - self._INSTALLABLE_TARGETS = ("executable", "loadable_module", "shared_library") - if self.is_standalone_static_library or self.type in self._INSTALLABLE_TARGETS: - self.alias = os.path.basename(self.output) - install_path = self._InstallableTargetInstallPath() - else: - self.alias = self.output - install_path = self.output - - self.WriteLn("TOOLSET := " + self.toolset) - self.WriteLn("TARGET := " + self.target) - - # Actions must come first, since they can generate more OBJs for use below. - if "actions" in spec: - self.WriteActions( - spec["actions"], - extra_sources, - extra_outputs, - extra_mac_bundle_resources, - part_of_all, - ) - - # Rules must be early like actions. - if "rules" in spec: - self.WriteRules( - spec["rules"], - extra_sources, - extra_outputs, - extra_mac_bundle_resources, - part_of_all, - ) - - if "copies" in spec: - self.WriteCopies(spec["copies"], extra_outputs, part_of_all) - - # Bundle resources. - if self.is_mac_bundle: - all_mac_bundle_resources = ( - spec.get("mac_bundle_resources", []) + extra_mac_bundle_resources - ) - self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps) - self.WriteMacInfoPlist(mac_bundle_deps) - - # Sources. 
- all_sources = spec.get("sources", []) + extra_sources - if all_sources: - self.WriteSources( - configs, - deps, - all_sources, - extra_outputs, - extra_link_deps, - part_of_all, - gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, - lambda p: Sourceify(self.Absolutify(p)), - self.Pchify, - ), - ) - sources = [x for x in all_sources if Compilable(x)] - if sources: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) - extensions = set([os.path.splitext(s)[1] for s in sources]) - for ext in extensions: - if ext in self.suffix_rules_srcdir: - self.WriteLn(self.suffix_rules_srcdir[ext]) - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2) - for ext in extensions: - if ext in self.suffix_rules_objdir1: - self.WriteLn(self.suffix_rules_objdir1[ext]) - for ext in extensions: - if ext in self.suffix_rules_objdir2: - self.WriteLn(self.suffix_rules_objdir2[ext]) - self.WriteLn("# End of this set of suffix rules") - - # Add dependency from bundle to bundle binary. - if self.is_mac_bundle: - mac_bundle_deps.append(self.output_binary) - - self.WriteTarget( - spec, - configs, - deps, - extra_link_deps + link_deps, - mac_bundle_deps, - extra_outputs, - part_of_all, - ) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = install_path - - # Update global list of link dependencies. - if self.type in ("static_library", "shared_library"): - target_link_deps[qualified_target] = self.output_binary - - # Currently any versions have the same effect, but in future the behavior - # could be different. - if self.generator_flags.get("android_ndk_version", None): - self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps) - - self.fp.close() - - def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): - """Write a "sub-project" Makefile. - - This is a small, wrapper Makefile that calls the top-level Makefile to build - the targets from a single gyp file (i.e. a sub-project). - - Arguments: - output_filename: sub-project Makefile name to write - makefile_path: path to the top-level Makefile - targets: list of "all" targets for this sub-project - build_dir: build output directory, relative to the sub-project - """ - gyp.common.EnsureDirExists(output_filename) - self.fp = open(output_filename, "w") - self.fp.write(header) - # For consistency with other builders, put sub-project build output in the - # sub-project dir (see test/subdirectory/gyptest-subdir-all.py). - self.WriteLn( - "export builddir_name ?= %s" - % os.path.join(os.path.dirname(output_filename), build_dir) - ) - self.WriteLn(".PHONY: all") - self.WriteLn("all:") - if makefile_path: - makefile_path = " -C " + makefile_path - self.WriteLn("\t$(MAKE)%s %s" % (makefile_path, " ".join(targets))) - self.fp.close() - - def WriteActions( - self, - actions, - extra_sources, - extra_outputs, - extra_mac_bundle_resources, - part_of_all, - ): - """Write Makefile code for any 'actions' from the gyp input. 
- - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - part_of_all: flag indicating this target is part of 'all' - """ - env = self.GetSortedXcodeEnv() - for action in actions: - name = StringToMakefileVariable( - "%s_%s" % (self.qualified_target, action["action_name"]) - ) - self.WriteLn('### Rules for action "%s":' % action["action_name"]) - inputs = action["inputs"] - outputs = action["outputs"] - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get("process_outputs_as_sources", False)): - extra_sources += outputs - if int(action.get("process_outputs_as_mac_bundle_resources", False)): - extra_mac_bundle_resources += outputs - - # Write the actual command. - action_commands = action["action"] - if self.flavor == "mac": - action_commands = [ - gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action_commands - ] - command = gyp.common.EncodePOSIXShellList(action_commands) - if "message" in action: - self.WriteLn("quiet_cmd_%s = ACTION %s $@" % (name, action["message"])) - else: - self.WriteLn("quiet_cmd_%s = ACTION %s $@" % (name, name)) - if len(dirs) > 0: - command = "mkdir -p %s" % " ".join(dirs) + "; " + command - - cd_action = "cd %s; " % Sourceify(self.path or ".") - - # command and cd_action get written to a toplevel variable called - # cmd_foo. Toplevel variables can't handle things that change per - # makefile like $(TARGET), so hardcode the target. - command = command.replace("$(TARGET)", self.target) - cd_action = cd_action.replace("$(TARGET)", self.target) - - # Set LD_LIBRARY_PATH in case the action runs an executable from this - # build which links to shared libs from this build. - # actions run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn( - "cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:" - "$(builddir)/lib.target:$$LD_LIBRARY_PATH; " - "export LD_LIBRARY_PATH; " - "%s%s" % (name, cd_action, command) - ) - self.WriteLn() - outputs = [self.Absolutify(o) for o in outputs] - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the obj - # variable for the action rule with an absolute version so that the output - # goes in the right place. - # Only write the 'obj' and 'builddir' rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - # Same for environment. - self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0])) - self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0])) - self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv()) - - for input in inputs: - assert " " not in input, ( - "Spaces in action input filenames not supported (%s)" % input - ) - for output in outputs: - assert " " not in output, ( - "Spaces in action output filenames not supported (%s)" % output - ) - - # See the comment in WriteCopies about expanding env vars. 
- outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] - inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - - self.WriteDoCmd( - outputs, - [Sourceify(self.Absolutify(i)) for i in inputs], - part_of_all=part_of_all, - command=name, - ) - - # Stuff the outputs in a variable so we can refer to them later. - outputs_variable = "action_%s_outputs" % name - self.WriteLn("%s := %s" % (outputs_variable, " ".join(outputs))) - extra_outputs.append("$(%s)" % outputs_variable) - self.WriteLn() - - self.WriteLn() - - def WriteRules( - self, - rules, - extra_sources, - extra_outputs, - extra_mac_bundle_resources, - part_of_all, - ): - """Write Makefile code for any 'rules' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - part_of_all: flag indicating this target is part of 'all' - """ - env = self.GetSortedXcodeEnv() - for rule in rules: - name = StringToMakefileVariable( - "%s_%s" % (self.qualified_target, rule["rule_name"]) - ) - count = 0 - self.WriteLn("### Generated for rule %s:" % name) - - all_outputs = [] - - for rule_source in rule.get("rule_sources", []): - dirs = set() - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = os.path.splitext( - rule_source_basename - ) - - outputs = [ - self.ExpandInputRoot(out, rule_source_root, rule_source_dirname) - for out in rule["outputs"] - ] - - for out in outputs: - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - if int(rule.get("process_outputs_as_sources", False)): - extra_sources += outputs - if int(rule.get("process_outputs_as_mac_bundle_resources", False)): - extra_mac_bundle_resources += outputs - inputs = [ - Sourceify(self.Absolutify(i)) - for i in [rule_source] + rule.get("inputs", []) - ] - actions = ["$(call do_cmd,%s_%d)" % (name, count)] - - if name == "resources_grit": - # HACK: This is ugly. Grit intentionally doesn't touch the - # timestamp of its output file when the file doesn't change, - # which is fine in hash-based dependency systems like scons - # and forge, but not kosher in the make world. After some - # discussion, hacking around it here seems like the least - # amount of pain. - actions += ["@touch --no-create $@"] - - # See the comment in WriteCopies about expanding env vars. - outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] - inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - - outputs = [self.Absolutify(o) for o in outputs] - all_outputs += outputs - # Only write the 'obj' and 'builddir' rules for the "primary" output - # (:1); it's superfluous for the "extra outputs", and this avoids - # accidentally writing duplicate dummy rules for those outputs. - self.WriteLn("%s: obj := $(abs_obj)" % outputs[0]) - self.WriteLn("%s: builddir := $(abs_builddir)" % outputs[0]) - self.WriteMakeRule( - outputs, inputs, actions, command="%s_%d" % (name, count) - ) - # Spaces in rule filenames are not supported, but rule variables have - # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)'). - # The spaces within the variables are valid, so remove the variables - # before checking. 
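# [Editor's aside: not part of the deleted file above.] Concretely: a rule
# output such as "out/$(abspath $<).h" only has a space inside the variable
# expansion, which the regex defined just below strips before the assertion:
#     re.sub(r"\$\([^ ]* \$<\)", "", "out/$(abspath $<).h")  # -> "out/.h"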
- variables_with_spaces = re.compile(r"\$\([^ ]* \$<\)") - for output in outputs: - output = re.sub(variables_with_spaces, "", output) - assert " " not in output, ( - "Spaces in rule filenames not yet supported (%s)" % output - ) - self.WriteLn("all_deps += %s" % " ".join(outputs)) - - action = [ - self.ExpandInputRoot(ac, rule_source_root, rule_source_dirname) - for ac in rule["action"] - ] - mkdirs = "" - if len(dirs) > 0: - mkdirs = "mkdir -p %s; " % " ".join(dirs) - cd_action = "cd %s; " % Sourceify(self.path or ".") - - # action, cd_action, and mkdirs get written to a toplevel variable - # called cmd_foo. Toplevel variables can't handle things that change - # per makefile like $(TARGET), so hardcode the target. - if self.flavor == "mac": - action = [ - gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action - ] - action = gyp.common.EncodePOSIXShellList(action) - action = action.replace("$(TARGET)", self.target) - cd_action = cd_action.replace("$(TARGET)", self.target) - mkdirs = mkdirs.replace("$(TARGET)", self.target) - - # Set LD_LIBRARY_PATH in case the rule runs an executable from this - # build which links to shared libs from this build. - # rules run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn( - "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH=" - "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; " - "export LD_LIBRARY_PATH; " - "%(cd_action)s%(mkdirs)s%(action)s" - % { - "action": action, - "cd_action": cd_action, - "count": count, - "mkdirs": mkdirs, - "name": name, - } - ) - self.WriteLn( - "quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@" - % {"count": count, "name": name} - ) - self.WriteLn() - count += 1 - - outputs_variable = "rule_%s_outputs" % name - self.WriteList(all_outputs, outputs_variable) - extra_outputs.append("$(%s)" % outputs_variable) - - self.WriteLn("### Finished generating for rule: %s" % name) - self.WriteLn() - self.WriteLn("### Finished generating for all rules") - self.WriteLn("") - - def WriteCopies(self, copies, extra_outputs, part_of_all): - """Write Makefile code for any 'copies' from the gyp input. - - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - part_of_all: flag indicating this target is part of 'all' - """ - self.WriteLn("### Generated for copy rule.") - - variable = StringToMakefileVariable(self.qualified_target + "_copies") - outputs = [] - for copy in copies: - for path in copy["files"]: - # Absolutify() may call normpath, and will strip trailing slashes. - path = Sourceify(self.Absolutify(path)) - filename = os.path.split(path)[1] - output = Sourceify( - self.Absolutify(os.path.join(copy["destination"], filename)) - ) - - # If the output path has variables in it, which happens in practice for - # 'copies', writing the environment as target-local doesn't work, - # because the variables are already needed for the target name. - # Copying the environment variables into global make variables doesn't - # work either, because then the .d files will potentially contain spaces - # after variable expansion, and .d file handling cannot handle spaces. - # As a workaround, manually expand variables at gyp time. Since 'copies' - # can't run scripts, there's no need to write the env then. - # WriteDoCmd() will escape spaces for .d files. 
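# [Editor's aside: not part of the deleted file above.] That is, a 'copies'
# destination like "$(BUILT_PRODUCTS_DIR)/res" is resolved once at gyp time.
# A sketch with a hypothetical env (ExpandEnvVars is the helper used below):
#     env = [("BUILT_PRODUCTS_DIR", "out/Release")]
#     gyp.xcode_emulation.ExpandEnvVars("$(BUILT_PRODUCTS_DIR)/res", env)
#     # -> "out/Release/res"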
-                env = self.GetSortedXcodeEnv()
-                output = gyp.xcode_emulation.ExpandEnvVars(output, env)
-                path = gyp.xcode_emulation.ExpandEnvVars(path, env)
-                self.WriteDoCmd([output], [path], "copy", part_of_all)
-                outputs.append(output)
-        self.WriteLn("%s = %s" % (variable, " ".join(QuoteSpaces(o) for o in outputs)))
-        extra_outputs.append("$(%s)" % variable)
-        self.WriteLn()
-
-    def WriteMacBundleResources(self, resources, bundle_deps):
-        """Writes Makefile code for 'mac_bundle_resources'."""
-        self.WriteLn("### Generated for mac_bundle_resources")
-
-        for output, res in gyp.xcode_emulation.GetMacBundleResources(
-            generator_default_variables["PRODUCT_DIR"],
-            self.xcode_settings,
-            [Sourceify(self.Absolutify(r)) for r in resources],
-        ):
-            _, ext = os.path.splitext(output)
-            if ext != ".xcassets":
-                # Make does not support '.xcassets' emulation.
-                self.WriteDoCmd(
-                    [output], [res], "mac_tool,,,copy-bundle-resource", part_of_all=True
-                )
-                bundle_deps.append(output)
-
-    def WriteMacInfoPlist(self, bundle_deps):
-        """Write Makefile code for bundle Info.plist files."""
-        info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
-            generator_default_variables["PRODUCT_DIR"],
-            self.xcode_settings,
-            lambda p: Sourceify(self.Absolutify(p)),
-        )
-        if not info_plist:
-            return
-        if defines:
-            # Create an intermediate file to store preprocessed results.
-            intermediate_plist = "$(obj).$(TOOLSET)/$(TARGET)/" + os.path.basename(
-                info_plist
-            )
-            self.WriteList(
-                defines,
-                intermediate_plist + ": INFOPLIST_DEFINES",
-                "-D",
-                quoter=EscapeCppDefine,
-            )
-            self.WriteMakeRule(
-                [intermediate_plist],
-                [info_plist],
-                [
-                    "$(call do_cmd,infoplist)",
-                    # "Convert" the plist so that any weird whitespace changes from the
-                    # preprocessor do not affect the XML parser in mac_tool.
-                    "@plutil -convert xml1 $@ $@",
-                ],
-            )
-            info_plist = intermediate_plist
-        # plists can contain envvars; substitute them into the file.
-        self.WriteSortedXcodeEnv(
-            out, self.GetSortedXcodeEnv(additional_settings=extra_env)
-        )
-        self.WriteDoCmd(
-            [out], [info_plist], "mac_tool,,,copy-info-plist", part_of_all=True
-        )
-        bundle_deps.append(out)
-
-    def WriteSources(
-        self,
-        configs,
-        deps,
-        sources,
-        extra_outputs,
-        extra_link_deps,
-        part_of_all,
-        precompiled_header,
-    ):
-        """Write Makefile code for any 'sources' from the gyp input.
-        These are source files necessary to build the current target.
-
-        configs, deps, sources: input from gyp.
-        extra_outputs: a list of extra outputs this action should be dependent on;
-                       used to serialize action/rules before compilation
-        extra_link_deps: a list that will be filled in with any outputs of
-                         compilation (to be used in link lines)
-        part_of_all: flag indicating this target is part of 'all'
-        """
-
-        # Write configuration-specific variables for CFLAGS, etc.
- for configname in sorted(configs.keys()): - config = configs[configname] - self.WriteList( - config.get("defines"), - "DEFS_%s" % configname, - prefix="-D", - quoter=EscapeCppDefine, - ) - - if self.flavor == "mac": - cflags = self.xcode_settings.GetCflags( - configname, - arch=config.get('xcode_configuration_platform') - ) - cflags_c = self.xcode_settings.GetCflagsC(configname) - cflags_cc = self.xcode_settings.GetCflagsCC(configname) - cflags_objc = self.xcode_settings.GetCflagsObjC(configname) - cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname) - else: - cflags = config.get("cflags") - cflags_c = config.get("cflags_c") - cflags_cc = config.get("cflags_cc") - - self.WriteLn("# Flags passed to all source files.") - self.WriteList(cflags, "CFLAGS_%s" % configname) - self.WriteLn("# Flags passed to only C files.") - self.WriteList(cflags_c, "CFLAGS_C_%s" % configname) - self.WriteLn("# Flags passed to only C++ files.") - self.WriteList(cflags_cc, "CFLAGS_CC_%s" % configname) - if self.flavor == "mac": - self.WriteLn("# Flags passed to only ObjC files.") - self.WriteList(cflags_objc, "CFLAGS_OBJC_%s" % configname) - self.WriteLn("# Flags passed to only ObjC++ files.") - self.WriteList(cflags_objcc, "CFLAGS_OBJCC_%s" % configname) - includes = config.get("include_dirs") - if includes: - includes = [Sourceify(self.Absolutify(i)) for i in includes] - self.WriteList(includes, "INCS_%s" % configname, prefix="-I") - - compilable = list(filter(Compilable, sources)) - objs = [self.Objectify(self.Absolutify(Target(c))) for c in compilable] - self.WriteList(objs, "OBJS") - - for obj in objs: - assert " " not in obj, "Spaces in object filenames not supported (%s)" % obj - self.WriteLn( - "# Add to the list of files we specially track " "dependencies for." - ) - self.WriteLn("all_deps += $(OBJS)") - self.WriteLn() - - # Make sure our dependencies are built first. - if deps: - self.WriteMakeRule( - ["$(OBJS)"], - deps, - comment="Make sure our dependencies are built " "before any of us.", - order_only=True, - ) - - # Make sure the actions and rules run first. - # If they generate any extra headers etc., the per-.o file dep tracking - # will catch the proper rebuilds, so order only is still ok here. - if extra_outputs: - self.WriteMakeRule( - ["$(OBJS)"], - extra_outputs, - comment="Make sure our actions/rules run " "before any of us.", - order_only=True, - ) - - pchdeps = precompiled_header.GetObjDependencies(compilable, objs) - if pchdeps: - self.WriteLn("# Dependencies from obj files to their precompiled headers") - for source, obj, gch in pchdeps: - self.WriteLn("%s: %s" % (obj, gch)) - self.WriteLn("# End precompiled header dependencies") - - if objs: - extra_link_deps.append("$(OBJS)") - self.WriteLn( - """\ -# CFLAGS et al overrides must be target-local. 
-# See "Target-specific Variable Values" in the GNU Make manual.""" - ) - self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)") - self.WriteLn( - "$(OBJS): GYP_CFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude("c") + "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE))" - ) - self.WriteLn( - "$(OBJS): GYP_CXXFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude("cc") + "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE))" - ) - if self.flavor == "mac": - self.WriteLn( - "$(OBJS): GYP_OBJCFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude("m") - + "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE)) " - "$(CFLAGS_OBJC_$(BUILDTYPE))" - ) - self.WriteLn( - "$(OBJS): GYP_OBJCXXFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude("mm") - + "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE)) " - "$(CFLAGS_OBJCC_$(BUILDTYPE))" - ) - - self.WritePchTargets(precompiled_header.GetPchBuildCommands()) - - # If there are any object files in our input file list, link them into our - # output. - extra_link_deps += [source for source in sources if Linkable(source)] - - self.WriteLn() - - def WritePchTargets(self, pch_commands): - """Writes make rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - extra_flags = { - "c": "$(CFLAGS_C_$(BUILDTYPE))", - "cc": "$(CFLAGS_CC_$(BUILDTYPE))", - "m": "$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))", - "mm": "$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))", - }[lang] - var_name = { - "c": "GYP_PCH_CFLAGS", - "cc": "GYP_PCH_CXXFLAGS", - "m": "GYP_PCH_OBJCFLAGS", - "mm": "GYP_PCH_OBJCXXFLAGS", - }[lang] - self.WriteLn( - "%s: %s := %s " % (gch, var_name, lang_flag) + "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "$(CFLAGS_$(BUILDTYPE)) " + extra_flags - ) - - self.WriteLn("%s: %s FORCE_DO_CMD" % (gch, input)) - self.WriteLn("\t@$(call do_cmd,pch_%s,1)" % lang) - self.WriteLn("") - assert " " not in gch, "Spaces in gch filenames not supported (%s)" % gch - self.WriteLn("all_deps += %s" % gch) - self.WriteLn("") - - def ComputeOutputBasename(self, spec): - """Return the 'output basename' of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - 'libfoobar.so' - """ - assert not self.is_mac_bundle - - if self.flavor == "mac" and self.type in ( - "static_library", - "executable", - "shared_library", - "loadable_module", - ): - return self.xcode_settings.GetExecutablePath() - - target = spec["target_name"] - target_prefix = "" - target_ext = "" - if self.type == "static_library": - if target[:3] == "lib": - target = target[3:] - target_prefix = "lib" - target_ext = ".a" - elif self.type in ("loadable_module", "shared_library"): - if target[:3] == "lib": - target = target[3:] - target_prefix = "lib" - if self.flavor == "aix": - target_ext = ".a" - else: - target_ext = ".so" - elif self.type == "none": - target = "%s.stamp" % target - elif self.type != "executable": - print( - "ERROR: What output file should be generated?", - "type", - self.type, - "target", - target, - ) - - target_prefix = spec.get("product_prefix", target_prefix) - target = spec.get("product_name", target) - product_ext = spec.get("product_extension") - if product_ext: - target_ext = "." 
+ product_ext - - return target_prefix + target + target_ext - - def _InstallImmediately(self): - return ( - self.toolset == "target" - and self.flavor == "mac" - and self.type - in ("static_library", "executable", "shared_library", "loadable_module") - ) - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - assert not self.is_mac_bundle - - path = os.path.join("$(obj)." + self.toolset, self.path) - if self.type == "executable" or self._InstallImmediately(): - path = "$(builddir)" - path = spec.get("product_dir", path) - return os.path.join(path, self.ComputeOutputBasename(spec)) - - def ComputeMacBundleOutput(self, spec): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = generator_default_variables["PRODUCT_DIR"] - return os.path.join(path, self.xcode_settings.GetWrapperName()) - - def ComputeMacBundleBinaryOutput(self, spec): - """Return the 'output' (full output path) to the binary in a bundle.""" - path = generator_default_variables["PRODUCT_DIR"] - return os.path.join(path, self.xcode_settings.GetExecutablePath()) - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if "dependencies" in spec: - deps.extend( - [ - target_outputs[dep] - for dep in spec["dependencies"] - if target_outputs[dep] - ] - ) - for dep in spec["dependencies"]: - if dep in target_link_deps: - link_deps.append(target_link_deps[dep]) - deps.extend(link_deps) - # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)? - # This hack makes it work: - # link_deps.extend(spec.get('libraries', [])) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - def WriteDependencyOnExtraOutputs(self, target, extra_outputs): - self.WriteMakeRule( - [self.output_binary], - extra_outputs, - comment="Build our special outputs first.", - order_only=True, - ) - - def WriteTarget( - self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all - ): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. - deps, link_deps: dependency lists; see ComputeDeps() - extra_outputs: any extra outputs that our target should depend on - part_of_all: flag indicating this target is part of 'all' - """ - - self.WriteLn("### Rules for final target.") - - if extra_outputs: - self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs) - self.WriteMakeRule( - extra_outputs, - deps, - comment=("Preserve order dependency of " "special output on deps."), - order_only=True, - ) - - target_postbuilds = {} - if self.type != "none": - for configname in sorted(configs.keys()): - config = configs[configname] - if self.flavor == "mac": - ldflags = self.xcode_settings.GetLdflags( - configname, - generator_default_variables["PRODUCT_DIR"], - lambda p: Sourceify(self.Absolutify(p)), - arch=config.get('xcode_configuration_platform') - ) - - # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. 
- gyp_to_build = gyp.common.InvertRelativePath(self.path) - target_postbuild = self.xcode_settings.AddImplicitPostbuilds( - configname, - QuoteSpaces( - os.path.normpath(os.path.join(gyp_to_build, self.output)) - ), - QuoteSpaces( - os.path.normpath( - os.path.join(gyp_to_build, self.output_binary) - ) - ), - ) - if target_postbuild: - target_postbuilds[configname] = target_postbuild - else: - ldflags = config.get("ldflags", []) - # Compute an rpath for this output if needed. - if any(dep.endswith(".so") or ".so." in dep for dep in deps): - # We want to get the literal string "$ORIGIN" - # into the link command, so we need lots of escaping. - ldflags.append(r"-Wl,-rpath=\$$ORIGIN/") - ldflags.append(r"-Wl,-rpath-link=\$(builddir)/") - library_dirs = config.get("library_dirs", []) - ldflags += [("-L%s" % library_dir) for library_dir in library_dirs] - self.WriteList(ldflags, "LDFLAGS_%s" % configname) - if self.flavor == "mac": - self.WriteList( - self.xcode_settings.GetLibtoolflags(configname), - "LIBTOOLFLAGS_%s" % configname, - ) - libraries = spec.get("libraries") - if libraries: - # Remove duplicate entries - libraries = gyp.common.uniquer(libraries) - if self.flavor == "mac": - libraries = self.xcode_settings.AdjustLibraries(libraries) - self.WriteList(libraries, "LIBS") - self.WriteLn( - "%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))" - % QuoteSpaces(self.output_binary) - ) - self.WriteLn("%s: LIBS := $(LIBS)" % QuoteSpaces(self.output_binary)) - - if self.flavor == "mac": - self.WriteLn( - "%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))" - % QuoteSpaces(self.output_binary) - ) - - # Postbuild actions. Like actions, but implicitly depend on the target's - # output. - postbuilds = [] - if self.flavor == "mac": - if target_postbuilds: - postbuilds.append("$(TARGET_POSTBUILDS_$(BUILDTYPE))") - postbuilds.extend(gyp.xcode_emulation.GetSpecPostbuildCommands(spec)) - - if postbuilds: - # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE), - # so we must output its definition first, since we declare variables - # using ":=". - self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv()) - - for configname in target_postbuilds: - self.WriteLn( - "%s: TARGET_POSTBUILDS_%s := %s" - % ( - QuoteSpaces(self.output), - configname, - gyp.common.EncodePOSIXShellList(target_postbuilds[configname]), - ) - ) - - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. - postbuilds.insert(0, gyp.common.EncodePOSIXShellList(["cd", self.path])) - for i, postbuild in enumerate(postbuilds): - if not postbuild.startswith("$"): - postbuilds[i] = EscapeShellArgument(postbuild) - self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(self.output)) - self.WriteLn( - "%s: POSTBUILDS := %s" - % (QuoteSpaces(self.output), " ".join(postbuilds)) - ) - - # A bundle directory depends on its dependencies such as bundle resources - # and bundle binary. When all dependencies have been built, the bundle - # needs to be packaged. - if self.is_mac_bundle: - # If the framework doesn't contain a binary, then nothing depends - # on the actions -- make the framework depend on them directly too. - self.WriteDependencyOnExtraOutputs(self.output, extra_outputs) - - # Bundle dependencies. Note that the code below adds actions to this - # target, so if you move these two lines, move the lines below as well. 
-            self.WriteList([QuoteSpaces(dep) for dep in bundle_deps], "BUNDLE_DEPS")
-            self.WriteLn("%s: $(BUNDLE_DEPS)" % QuoteSpaces(self.output))
-
-            # After the framework is built, package it. Needs to happen before
-            # postbuilds, since postbuilds depend on this.
-            if self.type in ("shared_library", "loadable_module"):
-                self.WriteLn(
-                    "\t@$(call do_cmd,mac_package_framework,,,%s)"
-                    % self.xcode_settings.GetFrameworkVersion()
-                )
-
-            # Bundle postbuilds can depend on the whole bundle, so run them after
-            # the bundle is packaged, not just after the bundle binary is done.
-            if postbuilds:
-                self.WriteLn("\t@$(call do_postbuilds)")
-                postbuilds = []  # Don't write postbuilds for target's output.
-
-            # Needed by test/mac/gyptest-rebuild.py.
-            self.WriteLn("\t@true # No-op, used by tests")
-
-            # Since this target depends on binary and resources which are in
-            # nested subfolders, the framework directory will be older than
-            # its dependencies usually. To prevent this rule from executing
-            # on every build (expensive, especially with postbuilds), explicitly
-            # update the time on the framework directory.
-            self.WriteLn("\t@touch -c %s" % QuoteSpaces(self.output))
-
-        if postbuilds:
-            assert not self.is_mac_bundle, (
-                "Postbuilds for bundles should be done "
-                "on the bundle, not the binary (target '%s')" % self.target
-            )
-            assert "product_dir" not in spec, (
-                "Postbuilds do not work with custom product_dir"
-            )
-
-        if self.type == "executable":
-            self.WriteLn(
-                "%s: LD_INPUTS := %s"
-                % (
-                    QuoteSpaces(self.output_binary),
-                    " ".join(QuoteSpaces(dep) for dep in link_deps),
-                )
-            )
-            if self.toolset == "host" and self.flavor == "android":
-                self.WriteDoCmd(
-                    [self.output_binary],
-                    link_deps,
-                    "link_host",
-                    part_of_all,
-                    postbuilds=postbuilds,
-                )
-            else:
-                self.WriteDoCmd(
-                    [self.output_binary],
-                    link_deps,
-                    "link",
-                    part_of_all,
-                    postbuilds=postbuilds,
-                )
-
-        elif self.type == "static_library":
-            for link_dep in link_deps:
-                assert " " not in link_dep, (
-                    "Spaces in alink input filenames not supported (%s)" % link_dep
-                )
-            if (
-                self.flavor not in ("mac", "openbsd", "netbsd", "win")
-                and not self.is_standalone_static_library
-            ):
-                self.WriteDoCmd(
-                    [self.output_binary],
-                    link_deps,
-                    "alink_thin",
-                    part_of_all,
-                    postbuilds=postbuilds,
-                )
-            else:
-                self.WriteDoCmd(
-                    [self.output_binary],
-                    link_deps,
-                    "alink",
-                    part_of_all,
-                    postbuilds=postbuilds,
-                )
-        elif self.type == "shared_library":
-            self.WriteLn(
-                "%s: LD_INPUTS := %s"
-                % (
-                    QuoteSpaces(self.output_binary),
-                    " ".join(QuoteSpaces(dep) for dep in link_deps),
-                )
-            )
-            self.WriteDoCmd(
-                [self.output_binary],
-                link_deps,
-                "solink",
-                part_of_all,
-                postbuilds=postbuilds,
-            )
-        elif self.type == "loadable_module":
-            for link_dep in link_deps:
-                assert " " not in link_dep, (
-                    "Spaces in module input filenames not supported (%s)" % link_dep
-                )
-            if self.toolset == "host" and self.flavor == "android":
-                self.WriteDoCmd(
-                    [self.output_binary],
-                    link_deps,
-                    "solink_module_host",
-                    part_of_all,
-                    postbuilds=postbuilds,
-                )
-            else:
-                self.WriteDoCmd(
-                    [self.output_binary],
-                    link_deps,
-                    "solink_module",
-                    part_of_all,
-                    postbuilds=postbuilds,
-                )
-        elif self.type == "none":
-            # Write a stamp line.
-            self.WriteDoCmd(
-                [self.output_binary], deps, "touch", part_of_all, postbuilds=postbuilds
-            )
-        else:
-            print("WARNING: no output for", self.type, self.target)
-
-        # Add an alias for each target (if there are any outputs).
-        # Installable target aliases are created below.
-        if (self.output and self.output != self.target) and (
-            self.type not in self._INSTALLABLE_TARGETS
-        ):
-            self.WriteMakeRule(
-                [self.target], [self.output], comment="Add target alias", phony=True
-            )
-            if part_of_all:
-                self.WriteMakeRule(
-                    ["all"],
-                    [self.target],
-                    comment='Add target alias to "all" target.',
-                    phony=True,
-                )
-
-        # Add special-case rules for our installable targets.
-        # 1) They need to install to the build dir or "product" dir.
-        # 2) They get shortcuts for building (e.g. "make chrome").
-        # 3) They are part of "make all".
-        if self.type in self._INSTALLABLE_TARGETS or self.is_standalone_static_library:
-            if self.type == "shared_library":
-                file_desc = "shared library"
-            elif self.type == "static_library":
-                file_desc = "static library"
-            else:
-                file_desc = "executable"
-            install_path = self._InstallableTargetInstallPath()
-            installable_deps = [self.output]
-            if (
-                self.flavor == "mac"
-                and "product_dir" not in spec
-                and self.toolset == "target"
-            ):
-                # On mac, products are created in install_path immediately.
-                assert install_path == self.output, "%s != %s" % (
-                    install_path,
-                    self.output,
-                )
-
-            # Point the target alias to the final binary output.
-            self.WriteMakeRule(
-                [self.target], [install_path], comment="Add target alias", phony=True
-            )
-            if install_path != self.output:
-                assert not self.is_mac_bundle  # See comment a few lines above.
-                self.WriteDoCmd(
-                    [install_path],
-                    [self.output],
-                    "copy",
-                    comment="Copy this to the %s output path." % file_desc,
-                    part_of_all=part_of_all,
-                )
-                installable_deps.append(install_path)
-            if self.output != self.alias and self.alias != self.target:
-                self.WriteMakeRule(
-                    [self.alias],
-                    installable_deps,
-                    comment="Short alias for building this %s." % file_desc,
-                    phony=True,
-                )
-            if part_of_all:
-                self.WriteMakeRule(
-                    ["all"],
-                    [install_path],
-                    comment='Add %s to "all" target.' % file_desc,
-                    phony=True,
-                )
-
-    def WriteList(self, value_list, variable=None, prefix="", quoter=QuoteIfNecessary):
-        """Write a variable definition that is a list of values.
-
-        E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
-        foo = blaha blahb
-        but in a pretty-printed style.
-        """
-        values = ""
-        if value_list:
-            value_list = [quoter(prefix + value) for value in value_list]
-            values = " \\\n\t" + " \\\n\t".join(value_list)
-        self.fp.write("%s :=%s\n\n" % (variable, values))
-
-    def WriteDoCmd(
-        self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False
-    ):
-        """Write a Makefile rule that uses do_cmd.
-
-        This makes the outputs dependent on the command line that was run,
-        as well as supporting the V= make command line flag.
-        """
-        suffix = ""
-        if postbuilds:
-            assert "," not in command
-            suffix = ",,1"  # Tell do_cmd to honor $POSTBUILDS
-        self.WriteMakeRule(
-            outputs,
-            inputs,
-            actions=["$(call do_cmd,%s%s)" % (command, suffix)],
-            comment=comment,
-            command=command,
-            force=True,
-        )
-        # Add our outputs to the list of targets we read depfiles from.
-        # all_deps is only used for deps file reading, and for deps files we replace
-        # spaces with ? because escaping doesn't work with make's $(sort) and
-        # other functions.
-        outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
-        self.WriteLn("all_deps += %s" % " ".join(outputs))
-
-    def WriteMakeRule(
-        self,
-        outputs,
-        inputs,
-        actions=None,
-        comment=None,
-        order_only=False,
-        force=False,
-        phony=False,
-        command=None,
-    ):
-        """Write a Makefile rule, with some extra tricks.
-
-        outputs: a list of outputs for the rule (note: this is not directly
-                 supported by make; see comments below)
-        inputs: a list of inputs for the rule
-        actions: a list of shell commands to run for the rule
-        comment: a comment to put in the Makefile above the rule (also useful
-                 for making this Python script's code self-documenting)
-        order_only: if true, makes the dependency order-only
-        force: if true, include FORCE_DO_CMD as an order-only dep
-        phony: if true, the rule does not actually generate the named output, the
-               output is just a name to run the rule
-        command: (optional) command name to generate unambiguous labels
-        """
-        outputs = [QuoteSpaces(o) for o in outputs]
-        inputs = [QuoteSpaces(i) for i in inputs]
-
-        if comment:
-            self.WriteLn("# " + comment)
-        if phony:
-            self.WriteLn(".PHONY: " + " ".join(outputs))
-        if actions:
-            self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
-        force_append = " FORCE_DO_CMD" if force else ""
-
-        if order_only:
-            # Order only rule: Just write a simple rule.
-            # TODO(evanm): just make order_only a list of deps instead of this hack.
-            self.WriteLn(
-                "%s: | %s%s" % (" ".join(outputs), " ".join(inputs), force_append)
-            )
-        elif len(outputs) == 1:
-            # Regular rule, one output: Just write a simple rule.
-            self.WriteLn("%s: %s%s" % (outputs[0], " ".join(inputs), force_append))
-        else:
-            # Regular rule, more than one output: Multiple outputs are tricky in
-            # make. We will write three rules:
-            # - All outputs depend on an intermediate file.
-            # - Make .INTERMEDIATE depend on the intermediate.
-            # - The intermediate file depends on the inputs and executes the
-            #   actual command.
-            # - The intermediate recipe will 'touch' the intermediate file.
-            # - The multi-output rule will have a do-nothing recipe.
-
-            # Hash the target name to avoid generating overlong filenames.
-            cmddigest = hashlib.sha1(
-                (command or self.target).encode("utf-8")
-            ).hexdigest()
-            intermediate = "%s.intermediate" % cmddigest
-            self.WriteLn("%s: %s" % (" ".join(outputs), intermediate))
-            self.WriteLn("\t%s" % "@:")
-            self.WriteLn("%s: %s" % (".INTERMEDIATE", intermediate))
-            self.WriteLn("%s: %s%s" % (intermediate, " ".join(inputs), force_append))
-            actions.insert(0, "$(call do_cmd,touch)")
-
-        if actions:
-            for action in actions:
-                self.WriteLn("\t%s" % action)
-        self.WriteLn()
-
-    def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
-        """Write a set of LOCAL_XXX definitions for Android NDK.
-
-        These variable definitions will be used by Android NDK but do nothing for
-        non-Android applications.
-
-        Arguments:
-          module_name: Android NDK module name, which must be unique among all
-                       module names.
-          all_sources: A list of source files (will be filtered by Compilable).
-          link_deps: A list of link dependencies, which must be sorted in
-                     the order from dependencies to dependents.
-        """
-        if self.type not in ("executable", "shared_library", "static_library"):
-            return
-
-        self.WriteLn("# Variable definitions for Android applications")
-        self.WriteLn("include $(CLEAR_VARS)")
-        self.WriteLn("LOCAL_MODULE := " + module_name)
-        self.WriteLn(
-            "LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) "
-            "$(DEFS_$(BUILDTYPE)) "
-            # LOCAL_CFLAGS is applied to both C and C++. There is
-            # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
-            # sources.
-            "$(CFLAGS_C_$(BUILDTYPE)) "
-            # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
-            # LOCAL_C_INCLUDES does not expect it. So put it in
-            # LOCAL_CFLAGS.
- "$(INCS_$(BUILDTYPE))" - ) - # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred. - self.WriteLn("LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))") - self.WriteLn("LOCAL_C_INCLUDES :=") - self.WriteLn("LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)") - - # Detect the C++ extension. - cpp_ext = {".cc": 0, ".cpp": 0, ".cxx": 0} - default_cpp_ext = ".cpp" - for filename in all_sources: - ext = os.path.splitext(filename)[1] - if ext in cpp_ext: - cpp_ext[ext] += 1 - if cpp_ext[ext] > cpp_ext[default_cpp_ext]: - default_cpp_ext = ext - self.WriteLn("LOCAL_CPP_EXTENSION := " + default_cpp_ext) - - self.WriteList( - list(map(self.Absolutify, filter(Compilable, all_sources))), - "LOCAL_SRC_FILES", - ) - - # Filter out those which do not match prefix and suffix and produce - # the resulting list without prefix and suffix. - def DepsToModules(deps, prefix, suffix): - modules = [] - for filepath in deps: - filename = os.path.basename(filepath) - if filename.startswith(prefix) and filename.endswith(suffix): - modules.append(filename[len(prefix) : -len(suffix)]) - return modules - - # Retrieve the default value of 'SHARED_LIB_SUFFIX' - params = {"flavor": "linux"} - default_variables = {} - CalculateVariables(default_variables, params) - - self.WriteList( - DepsToModules( - link_deps, - generator_default_variables["SHARED_LIB_PREFIX"], - default_variables["SHARED_LIB_SUFFIX"], - ), - "LOCAL_SHARED_LIBRARIES", - ) - self.WriteList( - DepsToModules( - link_deps, - generator_default_variables["STATIC_LIB_PREFIX"], - generator_default_variables["STATIC_LIB_SUFFIX"], - ), - "LOCAL_STATIC_LIBRARIES", - ) - - if self.type == "executable": - self.WriteLn("include $(BUILD_EXECUTABLE)") - elif self.type == "shared_library": - self.WriteLn("include $(BUILD_SHARED_LIBRARY)") - elif self.type == "static_library": - self.WriteLn("include $(BUILD_STATIC_LIBRARY)") - self.WriteLn() - - def WriteLn(self, text=""): - self.fp.write(text + "\n") - - def GetSortedXcodeEnv(self, additional_settings=None): - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, - "$(abs_builddir)", - os.path.join("$(abs_srcdir)", self.path), - "$(BUILDTYPE)", - additional_settings, - ) - - def GetSortedXcodePostbuildEnv(self): - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. - strip_save_file = self.xcode_settings.GetPerTargetSetting( - "CHROMIUM_STRIP_SAVE_FILE", "" - ) - # Even if strip_save_file is empty, explicitly write it. Else a postbuild - # might pick up an export from an earlier target. - return self.GetSortedXcodeEnv( - additional_settings={"CHROMIUM_STRIP_SAVE_FILE": strip_save_file} - ) - - def WriteSortedXcodeEnv(self, target, env): - for k, v in env: - # For - # foo := a\ b - # the escaped space does the right thing. For - # export foo := a\ b - # it does not -- the backslash is written to the env as literal character. - # So don't escape spaces in |env[k]|. 
- self.WriteLn("%s: export %s := %s" % (QuoteSpaces(target), k, v)) - - def Objectify(self, path): - """Convert a path to its output directory form.""" - if "$(" in path: - path = path.replace("$(obj)/", "$(obj).%s/$(TARGET)/" % self.toolset) - if "$(obj)" not in path: - path = "$(obj).%s/$(TARGET)/%s" % (self.toolset, path) - return path - - def Pchify(self, path, lang): - """Convert a prefix header path to its output directory form.""" - path = self.Absolutify(path) - if "$(" in path: - path = path.replace( - "$(obj)/", "$(obj).%s/$(TARGET)/pch-%s" % (self.toolset, lang) - ) - return path - return "$(obj).%s/$(TARGET)/pch-%s/%s" % (self.toolset, lang, path) - - def Absolutify(self, path): - """Convert a subdirectory-relative path into a base-relative path. - Skips over paths that contain variables.""" - if "$(" in path: - # Don't call normpath in this case, as it might collapse the - # path too aggressively if it features '..'. However it's still - # important to strip trailing slashes. - return path.rstrip("/") - return os.path.normpath(os.path.join(self.path, path)) - - def ExpandInputRoot(self, template, expansion, dirname): - if "%(INPUT_ROOT)s" not in template and "%(INPUT_DIRNAME)s" not in template: - return template - path = template % { - "INPUT_ROOT": expansion, - "INPUT_DIRNAME": dirname, - } - return path - - def _InstallableTargetInstallPath(self): - """Returns the location of the final output for an installable target.""" - # Functionality removed for all platforms to match Xcode and hoist - # shared libraries into PRODUCT_DIR for users: - # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files - # rely on this. Emulate this behavior for mac. - # if self.type == "shared_library" and ( - # self.flavor != "mac" or self.toolset != "target" - # ): - # # Install all shared libs into a common directory (per toolset) for - # # convenient access with LD_LIBRARY_PATH. 
- # return "$(builddir)/lib.%s/%s" % (self.toolset, self.alias) - return "$(builddir)/" + self.alias - - -def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files): - """Write the target to regenerate the Makefile.""" - options = params["options"] - build_files_args = [ - gyp.common.RelativePath(filename, options.toplevel_dir) - for filename in params["build_files_arg"] - ] - - gyp_binary = gyp.common.FixIfRelativePath( - params["gyp_binary"], options.toplevel_dir - ) - if not gyp_binary.startswith(os.sep): - gyp_binary = os.path.join(".", gyp_binary) - - root_makefile.write( - "quiet_cmd_regen_makefile = ACTION Regenerating $@\n" - "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n" - "%(makefile_name)s: %(deps)s\n" - "\t$(call do_cmd,regen_makefile)\n\n" - % { - "makefile_name": makefile_name, - "deps": " ".join(SourceifyAndQuoteSpaces(bf) for bf in build_files), - "cmd": gyp.common.EncodePOSIXShellList( - [gyp_binary, "-fmake"] + gyp.RegenerateFlags(options) + build_files_args - ), - } - ) - - -def PerformBuild(data, configurations, params): - options = params["options"] - for config in configurations: - arguments = ["make"] - if options.toplevel_dir and options.toplevel_dir != ".": - arguments += "-C", options.toplevel_dir - arguments.append("BUILDTYPE=" + config) - print("Building [%s]: %s" % (config, arguments)) - subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params["options"] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get("generator_flags", {}) - builddir_name = generator_flags.get("output_dir", "out") - android_ndk_version = generator_flags.get("android_ndk_version", None) - default_target = generator_flags.get("default_target", "all") - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - if options.generator_output: - output_file = os.path.join( - options.depth, options.generator_output, base_path, base_name - ) - base_path = gyp.common.RelativePath( - os.path.dirname(build_file), options.toplevel_dir - ) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]["toolset"] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec["default_configuration"] != "Default": - default_configuration = spec["default_configuration"] - break - if not default_configuration: - default_configuration = "Default" - - srcdir = "." 
- makefile_name = "Makefile" + options.suffix - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - if options.generator_output: - global srcdir_prefix - makefile_path = os.path.join( - options.toplevel_dir, options.generator_output, makefile_name - ) - srcdir = gyp.common.RelativePath(srcdir, options.generator_output) - srcdir_prefix = "$(srcdir)/" - - flock_command = "flock" - copy_archive_arguments = "-af" - makedep_arguments = "-MMD" - header_params = { - "default_target": default_target, - "builddir": builddir_name, - "default_configuration": default_configuration, - "flock": flock_command, - "flock_index": 1, - "link_commands": LINK_COMMANDS_LINUX, - "extra_commands": "", - "srcdir": srcdir, - "copy_archive_args": copy_archive_arguments, - "makedep_args": makedep_arguments, - "CC.target": GetEnvironFallback(("CC_target", "CC"), "$(CC)"), - "AR.target": GetEnvironFallback(("AR_target", "AR"), "$(AR)"), - "CXX.target": GetEnvironFallback(("CXX_target", "CXX"), "$(CXX)"), - "LINK.target": GetEnvironFallback(("LINK_target", "LINK"), "$(LINK)"), - "CC.host": GetEnvironFallback(("CC_host", "CC"), "gcc"), - "AR.host": GetEnvironFallback(("AR_host", "AR"), "ar"), - "CXX.host": GetEnvironFallback(("CXX_host", "CXX"), "g++"), - "LINK.host": GetEnvironFallback(("LINK_host", "LINK"), "$(CXX.host)"), - } - if flavor == "mac": - flock_command = "./gyp-mac-tool flock" - header_params.update( - { - "flock": flock_command, - "flock_index": 2, - "link_commands": LINK_COMMANDS_MAC, - "extra_commands": SHARED_HEADER_MAC_COMMANDS, - } - ) - elif flavor == "android": - header_params.update({"link_commands": LINK_COMMANDS_ANDROID}) - elif flavor == "zos": - copy_archive_arguments = "-fPR" - makedep_arguments = "-qmakedep=gcc" - header_params.update( - { - "copy_archive_args": copy_archive_arguments, - "makedep_args": makedep_arguments, - "link_commands": LINK_COMMANDS_OS390, - "CC.target": GetEnvironFallback(("CC_target", "CC"), "njsc"), - "CXX.target": GetEnvironFallback(("CXX_target", "CXX"), "njsc++"), - "CC.host": GetEnvironFallback(("CC_host", "CC"), "njsc"), - "CXX.host": GetEnvironFallback(("CXX_host", "CXX"), "njsc++"), - } - ) - elif flavor == "solaris": - copy_archive_arguments = "-pPRf@" - header_params.update( - { - "copy_archive_args": copy_archive_arguments, - "flock": "./gyp-flock-tool flock", - "flock_index": 2 - } - ) - elif flavor == "freebsd": - # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific. 
- header_params.update({"flock": "lockf"}) - elif flavor == "openbsd": - copy_archive_arguments = "-pPRf" - header_params.update({"copy_archive_args": copy_archive_arguments}) - elif flavor == "aix": - copy_archive_arguments = "-pPRf" - header_params.update( - { - "copy_archive_args": copy_archive_arguments, - "link_commands": LINK_COMMANDS_AIX, - "flock": "./gyp-flock-tool flock", - "flock_index": 2, - } - ) - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings_array = data[build_file].get("make_global_settings", []) - wrappers = {} - for key, value in make_global_settings_array: - if key.endswith("_wrapper"): - wrappers[key[: -len("_wrapper")]] = "$(abspath %s)" % value - make_global_settings = "" - for key, value in make_global_settings_array: - if re.match(".*_wrapper", key): - continue - if value[0] != "$": - value = "$(abspath %s)" % value - wrapper = wrappers.get(key) - if wrapper: - value = "%s %s" % (wrapper, value) - del wrappers[key] - if key in ("CC", "CC.host", "CXX", "CXX.host"): - make_global_settings += ( - "ifneq (,$(filter $(origin %s), undefined default))\n" % key - ) - # Let gyp-time envvars win over global settings. - env_key = key.replace(".", "_") # CC.host -> CC_host - if env_key in os.environ: - value = os.environ[env_key] - make_global_settings += " %s = %s\n" % (key, value) - make_global_settings += "endif\n" - else: - make_global_settings += "%s ?= %s\n" % (key, value) - # TODO(ukai): define cmd when only wrapper is specified in - # make_global_settings. - - header_params["make_global_settings"] = make_global_settings - - gyp.common.EnsureDirExists(makefile_path) - root_makefile = open(makefile_path, "w") - root_makefile.write(SHARED_HEADER % header_params) - # Currently any versions have the same effect, but in future the behavior - # could be different. - if android_ndk_version: - root_makefile.write( - "# Define LOCAL_PATH for build of Android applications.\n" - "LOCAL_PATH := $(call my-dir)\n" - "\n" - ) - for toolset in toolsets: - root_makefile.write("TOOLSET := %s\n" % toolset) - WriteRootHeaderSuffixRules(root_makefile) - - # Put build-time support tools next to the root Makefile. - dest_path = os.path.dirname(makefile_path) - gyp.common.CopyTool(flavor, dest_path) - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params["build_files"]: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target) - - this_make_global_settings = data[build_file].get("make_global_settings", []) - assert make_global_settings_array == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets. %s vs. %s" - % (this_make_global_settings, make_global_settings) - ) - - build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) - included_files = data[build_file]["included_files"] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. 
- relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir, - ) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if params["home_dot_gyp"] and abs_include_file.startswith( - params["home_dot_gyp"] - ): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath( - build_file, target + "." + toolset + options.suffix + ".mk" - ) - - spec = target_dicts[qualified_target] - configs = spec["configurations"] - - if flavor == "mac": - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - writer = MakefileWriter(generator_flags, flavor) - writer.Write( - qualified_target, - base_path, - output_file, - spec, - configs, - part_of_all=qualified_target in needed_targets, - ) - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath( - output_file, os.path.dirname(makefile_path) - ) - include_list.add(mkfile_rel_path) - - # Write out per-gyp (sub-project) Makefiles. - depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) - for build_file in build_files: - # The paths in build_files were relativized above, so undo that before - # testing against the non-relativized items in target_list and before - # calculating the Makefile path. - build_file = os.path.join(depth_rel_path, build_file) - gyp_targets = [ - target_dicts[qualified_target]["target_name"] - for qualified_target in target_list - if qualified_target.startswith(build_file) - and qualified_target in needed_targets - ] - # Only generate Makefiles for gyp files with targets. - if not gyp_targets: - continue - base_path, output_file = CalculateMakefilePath( - build_file, os.path.splitext(os.path.basename(build_file))[0] + ".Makefile" - ) - makefile_rel_path = gyp.common.RelativePath( - os.path.dirname(makefile_path), os.path.dirname(output_file) - ) - writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, builddir_name) - - # Write out the sorted list of includes. - root_makefile.write("\n") - for include_file in sorted(include_list): - # We wrap each .mk include in an if statement so users can tell make to - # not load a file by setting NO_LOAD. The below make code says, only - # load the .mk file if the .mk filename doesn't start with a token in - # NO_LOAD. - root_makefile.write( - "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n" - " $(findstring $(join ^,$(prefix)),\\\n" - " $(join ^," + include_file + ")))),)\n" - ) - root_makefile.write(" include " + include_file + "\n") - root_makefile.write("endif\n") - root_makefile.write("\n") - - if not generator_flags.get("standalone") and generator_flags.get( - "auto_regeneration", True - ): - WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files) - - root_makefile.write(SHARED_FOOTER) - - root_makefile.close() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py deleted file mode 100644 index 32bf4746a179c..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py +++ /dev/null @@ -1,3980 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from __future__ import print_function - -import ntpath -import os -import posixpath -import re -import subprocess -import sys - -from collections import OrderedDict - -import gyp.common -import gyp.easy_xml as easy_xml -import gyp.generator.ninja as ninja_generator -import gyp.MSVSNew as MSVSNew -import gyp.MSVSProject as MSVSProject -import gyp.MSVSSettings as MSVSSettings -import gyp.MSVSToolFile as MSVSToolFile -import gyp.MSVSUserFile as MSVSUserFile -import gyp.MSVSUtil as MSVSUtil -import gyp.MSVSVersion as MSVSVersion -from gyp.common import GypError -from gyp.common import OrderedSet - -PY3 = bytes != str - - -# Regular expression for validating Visual Studio GUIDs. If the GUID -# contains lowercase hex letters, MSVS will be fine. However, -# IncrediBuild BuildConsole will parse the solution file, but then -# silently skip building the target causing hard to track down errors. -# Note that this only happens with the BuildConsole, and does not occur -# if IncrediBuild is executed from inside Visual Studio. This regex -# validates that the string looks like a GUID with all uppercase hex -# letters. -VALID_MSVS_GUID_CHARS = re.compile(r"^[A-F0-9\-]+$") - -generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() - -generator_default_variables = { - "DRIVER_PREFIX": "", - "DRIVER_SUFFIX": ".sys", - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": ".exe", - "STATIC_LIB_PREFIX": "", - "SHARED_LIB_PREFIX": "", - "STATIC_LIB_SUFFIX": ".lib", - "SHARED_LIB_SUFFIX": ".dll", - "INTERMEDIATE_DIR": "$(IntDir)", - "SHARED_INTERMEDIATE_DIR": "$(OutDir)/obj/global_intermediate", - "OS": "win", - "PRODUCT_DIR": "$(OutDir)", - "LIB_DIR": "$(OutDir)lib", - "RULE_INPUT_ROOT": "$(InputName)", - "RULE_INPUT_DIRNAME": "$(InputDir)", - "RULE_INPUT_EXT": "$(InputExt)", - "RULE_INPUT_NAME": "$(InputFileName)", - "RULE_INPUT_PATH": "$(InputPath)", - "CONFIGURATION_NAME": "$(ConfigurationName)", -} - - -# The msvs specific sections that hold paths -generator_additional_path_sections = [ - "msvs_cygwin_dirs", - "msvs_props", -] - - -generator_additional_non_configuration_keys = [ - "msvs_cygwin_dirs", - "msvs_cygwin_shell", - "msvs_large_pdb", - "msvs_shard", - "msvs_external_builder", - "msvs_external_builder_out_dir", - "msvs_external_builder_build_cmd", - "msvs_external_builder_clean_cmd", - "msvs_external_builder_clcompile_cmd", - "msvs_enable_winrt", - "msvs_requires_importlibrary", - "msvs_enable_winphone", - "msvs_application_type_revision", - "msvs_target_platform_version", - "msvs_target_platform_minversion", -] - -generator_filelist_paths = None - -# List of precompiled header related keys. -precomp_keys = [ - "msvs_precompiled_header", - "msvs_precompiled_source", -] - - -cached_username = None - - -cached_domain = None - - -# TODO(gspencer): Switch the os.environ calls to be -# win32api.GetDomainName() and win32api.GetUserName() once the -# python version in depot_tools has been updated to work on Vista -# 64-bit. 
-def _GetDomainAndUserName():
- if sys.platform not in ("win32", "cygwin"):
- return ("DOMAIN", "USERNAME")
- global cached_username
- global cached_domain
- if not cached_domain or not cached_username:
- domain = os.environ.get("USERDOMAIN")
- username = os.environ.get("USERNAME")
- if not domain or not username:
- call = subprocess.Popen(
- ["net", "config", "Workstation"], stdout=subprocess.PIPE
- )
- config = call.communicate()[0]
- if PY3:
- config = config.decode("utf-8")
- username_re = re.compile(r"^User name\s+(\S+)", re.MULTILINE)
- username_match = username_re.search(config)
- if username_match:
- username = username_match.group(1)
- domain_re = re.compile(r"^Logon domain\s+(\S+)", re.MULTILINE)
- domain_match = domain_re.search(config)
- if domain_match:
- domain = domain_match.group(1)
- cached_domain = domain
- cached_username = username
- return (cached_domain, cached_username)
-
-
-fixpath_prefix = None
-
-
-def _NormalizedSource(source):
- """Normalize the path.
-
- But not if that gets rid of a variable, as this may expand to something
- larger than one directory.
-
- Arguments:
- source: The path to be normalized.
-
- Returns:
- The normalized path.
- """
- normalized = os.path.normpath(source)
- if source.count("$") == normalized.count("$"):
- source = normalized
- return source
-
-
-def _FixPath(path):
- """Convert paths to a form that will make sense in a vcproj file.
-
- Arguments:
- path: The path to convert, may contain / etc.
- Returns:
- The path with all slashes made into backslashes.
- """
- if (
- fixpath_prefix
- and path
- and not os.path.isabs(path)
- and not path[0] == "$"
- and not _IsWindowsAbsPath(path)
- ):
- path = os.path.join(fixpath_prefix, path)
- path = path.replace("/", "\\")
- path = _NormalizedSource(path)
- if path and path[-1] == "\\":
- path = path[:-1]
- return path
-
-
-def _IsWindowsAbsPath(path):
- """
- On Cygwin systems Python needs a little help determining if a path
- is an absolute Windows path or not, so that
- it does not treat those as relative, which results in bad paths like:
- '..\\C:\\\\some_source_code_file.cc'
- """
- return path.startswith("c:") or path.startswith("C:")
-
-
-def _FixPaths(paths):
- """Fix each of the paths of the list."""
- return [_FixPath(i) for i in paths]
-
-
-def _ConvertSourcesToFilterHierarchy(
- sources, prefix=None, excluded=None, list_excluded=True, msvs_version=None
-):
- """Converts a list of split source file paths into a vcproj folder hierarchy.
-
- Arguments:
- sources: A list of source file paths split.
- prefix: A list of source file path layers meant to apply to each of sources.
- excluded: A set of excluded files.
- msvs_version: A MSVSVersion object.
-
- Returns:
- A hierarchy of filenames and MSVSProject.Filter objects that matches the
- layout of the source tree.
- For example:
- _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
- prefix=['joe'])
- -->
- [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
- MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
- """
- if not prefix:
- prefix = []
- result = []
- excluded_result = []
- folders = OrderedDict()
- # Gather files into the final result, excluded, or folders.
- for s in sources:
- if len(s) == 1:
- filename = _NormalizedSource("\\".join(prefix + s))
- if filename in excluded:
- excluded_result.append(filename)
- else:
- result.append(filename)
- elif msvs_version and not msvs_version.UsesVcxproj():
- # For MSVS 2008 and earlier, we need to process all files before walking
- # the sub folders.
- if not folders.get(s[0]): - folders[s[0]] = [] - folders[s[0]].append(s[1:]) - else: - contents = _ConvertSourcesToFilterHierarchy( - [s[1:]], - prefix + [s[0]], - excluded=excluded, - list_excluded=list_excluded, - msvs_version=msvs_version, - ) - contents = MSVSProject.Filter(s[0], contents=contents) - result.append(contents) - # Add a folder for excluded files. - if excluded_result and list_excluded: - excluded_folder = MSVSProject.Filter( - "_excluded_files", contents=excluded_result - ) - result.append(excluded_folder) - - if msvs_version and msvs_version.UsesVcxproj(): - return result - - # Populate all the folders. - for f in folders: - contents = _ConvertSourcesToFilterHierarchy( - folders[f], - prefix=prefix + [f], - excluded=excluded, - list_excluded=list_excluded, - msvs_version=msvs_version, - ) - contents = MSVSProject.Filter(f, contents=contents) - result.append(contents) - return result - - -def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False): - if not value: - return - _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset) - - -def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): - # TODO(bradnelson): ugly hack, fix this more generally!!! - if "Directories" in setting or "Dependencies" in setting: - if type(value) == str: - value = value.replace("/", "\\") - else: - value = [i.replace("/", "\\") for i in value] - if not tools.get(tool_name): - tools[tool_name] = dict() - tool = tools[tool_name] - if "CompileAsWinRT" == setting: - return - if tool.get(setting): - if only_if_unset: - return - if type(tool[setting]) == list and type(value) == list: - tool[setting] += value - else: - raise TypeError( - 'Appending "%s" to a non-list setting "%s" for tool "%s" is ' - "not allowed, previous value: %s" - % (value, setting, tool_name, str(tool[setting])) - ) - else: - tool[setting] = value - - -def _ConfigTargetVersion(config_data): - return config_data.get("msvs_target_version", "Windows7") - - -def _ConfigPlatform(config_data): - return config_data.get("msvs_configuration_platform", "Win32") - - -def _ConfigBaseName(config_name, platform_name): - if config_name.endswith("_" + platform_name): - return config_name[0 : -len(platform_name) - 1] - else: - return config_name - - -def _ConfigFullName(config_name, config_data): - platform_name = _ConfigPlatform(config_data) - return "%s|%s" % (_ConfigBaseName(config_name, platform_name), platform_name) - - -def _ConfigWindowsTargetPlatformVersion(config_data, version): - target_ver = config_data.get("msvs_windows_target_platform_version") - if target_ver and re.match(r"^\d+", target_ver): - return target_ver - config_ver = config_data.get("msvs_windows_sdk_version") - vers = [config_ver] if config_ver else version.compatible_sdks - for ver in vers: - for key in [ - r"HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s", - r"HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s", - ]: - sdk_dir = MSVSVersion._RegistryGetValue(key % ver, "InstallationFolder") - if not sdk_dir: - continue - version = MSVSVersion._RegistryGetValue(key % ver, "ProductVersion") or "" - # Find a matching entry in sdk_dir\include. 
- expected_sdk_dir = r"%s\include" % sdk_dir - names = sorted( - [ - x - for x in ( - os.listdir(expected_sdk_dir) - if os.path.isdir(expected_sdk_dir) - else [] - ) - if x.startswith(version) - ], - reverse=True, - ) - if names: - return names[0] - else: - print( - "Warning: No include files found for detected " - "Windows SDK version %s" % (version), - file=sys.stdout, - ) - - -def _BuildCommandLineForRuleRaw( - spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env -): - - if [x for x in cmd if "$(InputDir)" in x]: - input_dir_preamble = ( - "set INPUTDIR=$(InputDir)\n" - "if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n" - "set INPUTDIR=%INPUTDIR:~0,-1%\n" - ) - else: - input_dir_preamble = "" - - if cygwin_shell: - # Find path to cygwin. - cygwin_dir = _FixPath(spec.get("msvs_cygwin_dirs", ["."])[0]) - # Prepare command. - direct_cmd = cmd - direct_cmd = [ - i.replace("$(IntDir)", '`cygpath -m "${INTDIR}"`') for i in direct_cmd - ] - direct_cmd = [ - i.replace("$(OutDir)", '`cygpath -m "${OUTDIR}"`') for i in direct_cmd - ] - direct_cmd = [ - i.replace("$(InputDir)", '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd - ] - if has_input_path: - direct_cmd = [ - i.replace("$(InputPath)", '`cygpath -m "${INPUTPATH}"`') - for i in direct_cmd - ] - direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd] - # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) - direct_cmd = " ".join(direct_cmd) - # TODO(quote): regularize quoting path names throughout the module - cmd = "" - if do_setup_env: - cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' - cmd += "set CYGWIN=nontsec&& " - if direct_cmd.find("NUMBER_OF_PROCESSORS") >= 0: - cmd += "set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& " - if direct_cmd.find("INTDIR") >= 0: - cmd += "set INTDIR=$(IntDir)&& " - if direct_cmd.find("OUTDIR") >= 0: - cmd += "set OUTDIR=$(OutDir)&& " - if has_input_path and direct_cmd.find("INPUTPATH") >= 0: - cmd += "set INPUTPATH=$(InputPath) && " - cmd += 'bash -c "%(cmd)s"' - cmd = cmd % {"cygwin_dir": cygwin_dir, "cmd": direct_cmd} - return input_dir_preamble + cmd - else: - # Convert cat --> type to mimic unix. - if cmd[0] == "cat": - command = ["type"] - else: - command = [cmd[0].replace("/", "\\")] - # Add call before command to ensure that commands can be tied together one - # after the other without aborting in Incredibuild, since IB makes a bat - # file out of the raw command string, and some commands (like python) are - # actually batch files themselves. - command.insert(0, "call") - # Fix the paths - # TODO(quote): This is a really ugly heuristic, and will miss path fixing - # for arguments like "--arg=path" or "/opt:path". - # If the argument starts with a slash or dash, it's probably a command line - # switch - arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]] - arguments = [i.replace("$(InputDir)", "%INPUTDIR%") for i in arguments] - arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments] - if quote_cmd: - # Support a mode for using cmd directly. - # Convert any paths to native form (first element is used directly). - # TODO(quote): regularize quoting path names throughout the module - arguments = ['"%s"' % i for i in arguments] - # Collapse into a single command. 
- return input_dir_preamble + " ".join(command + arguments) - - -def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env): - # Currently this weird argument munging is used to duplicate the way a - # python script would need to be run as part of the chrome tree. - # Eventually we should add some sort of rule_default option to set this - # per project. For now the behavior chrome needs is the default. - mcs = rule.get("msvs_cygwin_shell") - if mcs is None: - mcs = int(spec.get("msvs_cygwin_shell", 1)) - elif isinstance(mcs, str): - mcs = int(mcs) - quote_cmd = int(rule.get("msvs_quote_cmd", 1)) - return _BuildCommandLineForRuleRaw( - spec, rule["action"], mcs, has_input_path, quote_cmd, do_setup_env=do_setup_env - ) - - -def _AddActionStep(actions_dict, inputs, outputs, description, command): - """Merge action into an existing list of actions. - - Care must be taken so that actions which have overlapping inputs either don't - get assigned to the same input, or get collapsed into one. - - Arguments: - actions_dict: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - inputs: list of inputs - outputs: list of outputs - description: description of the action - command: command line to execute - """ - # Require there to be at least one input (call sites will ensure this). - assert inputs - - action = { - "inputs": inputs, - "outputs": outputs, - "description": description, - "command": command, - } - - # Pick where to stick this action. - # While less than optimal in terms of build time, attach them to the first - # input for now. - chosen_input = inputs[0] - - # Add it there. - if chosen_input not in actions_dict: - actions_dict[chosen_input] = [] - actions_dict[chosen_input].append(action) - - -def _AddCustomBuildToolForMSVS( - p, spec, primary_input, inputs, outputs, description, cmd -): - """Add a custom build tool to execute something. - - Arguments: - p: the target project - spec: the target project dict - primary_input: input file to attach the build tool to - inputs: list of inputs - outputs: list of outputs - description: description of the action - cmd: command line to execute - """ - inputs = _FixPaths(inputs) - outputs = _FixPaths(outputs) - tool = MSVSProject.Tool( - "VCCustomBuildTool", - { - "Description": description, - "AdditionalDependencies": ";".join(inputs), - "Outputs": ";".join(outputs), - "CommandLine": cmd, - }, - ) - # Add to the properties of primary input for each config. - for config_name, c_data in spec["configurations"].items(): - p.AddFileConfig( - _FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool] - ) - - -def _AddAccumulatedActionsToMSVS(p, spec, actions_dict): - """Add actions accumulated into an actions_dict, merging as needed. - - Arguments: - p: the target project - spec: the target project dict - actions_dict: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - """ - for primary_input in actions_dict: - inputs = OrderedSet() - outputs = OrderedSet() - descriptions = [] - commands = [] - for action in actions_dict[primary_input]: - inputs.update(OrderedSet(action["inputs"])) - outputs.update(OrderedSet(action["outputs"])) - descriptions.append(action["description"]) - commands.append(action["command"]) - # Add the custom build step for one input file. 
- description = ", and also ".join(descriptions) - command = "\r\n".join(commands) - _AddCustomBuildToolForMSVS( - p, - spec, - primary_input=primary_input, - inputs=inputs, - outputs=outputs, - description=description, - cmd=command, - ) - - -def _RuleExpandPath(path, input_file): - """Given the input file to which a rule applied, string substitute a path. - - Arguments: - path: a path to string expand - input_file: the file to which the rule applied. - Returns: - The string substituted path. - """ - path = path.replace( - "$(InputName)", os.path.splitext(os.path.split(input_file)[1])[0] - ) - path = path.replace("$(InputDir)", os.path.dirname(input_file)) - path = path.replace( - "$(InputExt)", os.path.splitext(os.path.split(input_file)[1])[1] - ) - path = path.replace("$(InputFileName)", os.path.split(input_file)[1]) - path = path.replace("$(InputPath)", input_file) - return path - - -def _FindRuleTriggerFiles(rule, sources): - """Find the list of files which a particular rule applies to. - - Arguments: - rule: the rule in question - sources: the set of all known source files for this project - Returns: - The list of sources that trigger a particular rule. - """ - return rule.get("rule_sources", []) - - -def _RuleInputsAndOutputs(rule, trigger_file): - """Find the inputs and outputs generated by a rule. - - Arguments: - rule: the rule in question. - trigger_file: the main trigger for this rule. - Returns: - The pair of (inputs, outputs) involved in this rule. - """ - raw_inputs = _FixPaths(rule.get("inputs", [])) - raw_outputs = _FixPaths(rule.get("outputs", [])) - inputs = OrderedSet() - outputs = OrderedSet() - inputs.add(trigger_file) - for i in raw_inputs: - inputs.add(_RuleExpandPath(i, trigger_file)) - for o in raw_outputs: - outputs.add(_RuleExpandPath(o, trigger_file)) - return (inputs, outputs) - - -def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options): - """Generate a native rules file. - - Arguments: - p: the target project - rules: the set of rules to include - output_dir: the directory in which the project/gyp resides - spec: the project dict - options: global generator options - """ - rules_filename = "%s%s.rules" % (spec["target_name"], options.suffix) - rules_file = MSVSToolFile.Writer( - os.path.join(output_dir, rules_filename), spec["target_name"] - ) - # Add each rule. - for r in rules: - rule_name = r["rule_name"] - rule_ext = r["extension"] - inputs = _FixPaths(r.get("inputs", [])) - outputs = _FixPaths(r.get("outputs", [])) - # Skip a rule with no action and no inputs. - if "action" not in r and not r.get("rule_sources", []): - continue - cmd = _BuildCommandLineForRule(spec, r, has_input_path=True, do_setup_env=True) - rules_file.AddCustomBuildRule( - name=rule_name, - description=r.get("message", rule_name), - extensions=[rule_ext], - additional_dependencies=inputs, - outputs=outputs, - cmd=cmd, - ) - # Write out rules file. - rules_file.WriteIfChanged() - - # Add rules file to project. - p.AddToolFile(rules_filename) - - -def _Cygwinify(path): - path = path.replace("$(OutDir)", "$(OutDirCygwin)") - path = path.replace("$(IntDir)", "$(IntDirCygwin)") - return path - - -def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to_add): - """Generate an external makefile to do a set of rules. 
- - Arguments: - rules: the list of rules to include - output_dir: path containing project and gyp files - spec: project specification data - sources: set of sources known - options: global generator options - actions_to_add: The list of actions we will add to. - """ - filename = "%s_rules%s.mk" % (spec["target_name"], options.suffix) - mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) - # Find cygwin style versions of some paths. - mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') - mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') - # Gather stuff needed to emit all: target. - all_inputs = OrderedSet() - all_outputs = OrderedSet() - all_output_dirs = OrderedSet() - first_outputs = [] - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - all_inputs.update(OrderedSet(inputs)) - all_outputs.update(OrderedSet(outputs)) - # Only use one target from each rule as the dependency for - # 'all' so we don't try to build each rule multiple times. - first_outputs.append(list(outputs)[0]) - # Get the unique output directories for this rule. - output_dirs = [os.path.split(i)[0] for i in outputs] - for od in output_dirs: - all_output_dirs.add(od) - first_outputs_cyg = [_Cygwinify(i) for i in first_outputs] - # Write out all: target, including mkdir for each output directory. - mk_file.write("all: %s\n" % " ".join(first_outputs_cyg)) - for od in all_output_dirs: - if od: - mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od) - mk_file.write("\n") - # Define how each output is generated. - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - # Get all the inputs and outputs for this rule for this trigger file. - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - inputs = [_Cygwinify(i) for i in inputs] - outputs = [_Cygwinify(i) for i in outputs] - # Prepare the command line for this rule. - cmd = [_RuleExpandPath(c, tf) for c in rule["action"]] - cmd = ['"%s"' % i for i in cmd] - cmd = " ".join(cmd) - # Add it to the makefile. - mk_file.write("%s: %s\n" % (" ".join(outputs), " ".join(inputs))) - mk_file.write("\t%s\n\n" % cmd) - # Close up the file. - mk_file.close() - - # Add makefile to list of sources. - sources.add(filename) - # Add a build action to call makefile. - cmd = [ - "make", - "OutDir=$(OutDir)", - "IntDir=$(IntDir)", - "-j", - "${NUMBER_OF_PROCESSORS_PLUS_1}", - "-f", - filename, - ] - cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True) - # Insert makefile as 0'th input, so it gets the action attached there, - # as this is easier to understand from in the IDE. - all_inputs = list(all_inputs) - all_inputs.insert(0, filename) - _AddActionStep( - actions_to_add, - inputs=_FixPaths(all_inputs), - outputs=_FixPaths(all_outputs), - description="Running external rules for %s" % spec["target_name"], - command=cmd, - ) - - -def _EscapeEnvironmentVariableExpansion(s): - """Escapes % characters. - - Escapes any % characters so that Windows-style environment variable - expansions will leave them alone. - See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile - to understand why we have to do this. - - Args: - s: The string to be escaped. - - Returns: - The escaped string. 
- """ # noqa: E731,E123,E501 - s = s.replace("%", "%%") - return s - - -quote_replacer_regex = re.compile(r'(\\*)"') - - -def _EscapeCommandLineArgumentForMSVS(s): - """Escapes a Windows command-line argument. - - So that the Win32 CommandLineToArgv function will turn the escaped result back - into the original string. - See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx - ("Parsing C++ Command-Line Arguments") to understand why we have to do - this. - - Args: - s: the string to be escaped. - Returns: - the escaped string. - """ - - def _Replace(match): - # For a literal quote, CommandLineToArgv requires an odd number of - # backslashes preceding it, and it produces half as many literal backslashes - # (rounded down). So we need to produce 2n+1 backslashes. - return 2 * match.group(1) + '\\"' - - # Escape all quotes so that they are interpreted literally. - s = quote_replacer_regex.sub(_Replace, s) - # Now add unescaped quotes so that any whitespace is interpreted literally. - s = '"' + s + '"' - return s - - -delimiters_replacer_regex = re.compile(r"(\\*)([,;]+)") - - -def _EscapeVCProjCommandLineArgListItem(s): - """Escapes command line arguments for MSVS. - - The VCProj format stores string lists in a single string using commas and - semi-colons as separators, which must be quoted if they are to be - interpreted literally. However, command-line arguments may already have - quotes, and the VCProj parser is ignorant of the backslash escaping - convention used by CommandLineToArgv, so the command-line quotes and the - VCProj quotes may not be the same quotes. So to store a general - command-line argument in a VCProj list, we need to parse the existing - quoting according to VCProj's convention and quote any delimiters that are - not already quoted by that convention. The quotes that we add will also be - seen by CommandLineToArgv, so if backslashes precede them then we also have - to escape those backslashes according to the CommandLineToArgv - convention. - - Args: - s: the string to be escaped. - Returns: - the escaped string. - """ - - def _Replace(match): - # For a non-literal quote, CommandLineToArgv requires an even number of - # backslashes preceding it, and it produces half as many literal - # backslashes. So we need to produce 2n backslashes. - return 2 * match.group(1) + '"' + match.group(2) + '"' - - segments = s.split('"') - # The unquoted segments are at the even-numbered indices. - for i in range(0, len(segments), 2): - segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i]) - # Concatenate back into a single string - s = '"'.join(segments) - if len(segments) % 2 == 0: - # String ends while still quoted according to VCProj's convention. This - # means the delimiter and the next list item that follow this one in the - # .vcproj file will be misinterpreted as part of this item. There is nothing - # we can do about this. Adding an extra quote would correct the problem in - # the VCProj but cause the same problem on the final command-line. Moving - # the item to the end of the list does works, but that's only possible if - # there's only one such item. Let's just warn the user. 
- print(
- "Warning: MSVS may misinterpret the odd number of " + "quotes in " + s,
- file=sys.stderr,
- )
- return s
-
-
-def _EscapeCppDefineForMSVS(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = _EscapeEnvironmentVariableExpansion(s)
- s = _EscapeCommandLineArgumentForMSVS(s)
- s = _EscapeVCProjCommandLineArgListItem(s)
- # cl.exe replaces literal # characters with = in preprocessor definitions for
- # some reason. Octal-encode to work around that.
- s = s.replace("#", "\\%03o" % ord("#"))
- return s
-
-
-quote_replacer_regex2 = re.compile(r'(\\+)"')
-
-
-def _EscapeCommandLineArgumentForMSBuild(s):
- """Escapes a Windows command-line argument for use by MSBuild."""
-
- def _Replace(match):
- # Use integer (floor) division here: on Python 3, "/" yields a float,
- # and multiplying a string by a float raises a TypeError.
- return (len(match.group(1)) // 2 * 4) * "\\" + '\\"'
-
- # Escape all quotes so that they are interpreted literally.
- s = quote_replacer_regex2.sub(_Replace, s)
- return s
-
-
-def _EscapeMSBuildSpecialCharacters(s):
- escape_dictionary = {
- "%": "%25",
- "$": "%24",
- "@": "%40",
- "'": "%27",
- ";": "%3B",
- "?": "%3F",
- "*": "%2A",
- }
- result = "".join([escape_dictionary.get(c, c) for c in s])
- return result
-
-
-def _EscapeCppDefineForMSBuild(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = _EscapeEnvironmentVariableExpansion(s)
- s = _EscapeCommandLineArgumentForMSBuild(s)
- s = _EscapeMSBuildSpecialCharacters(s)
- # cl.exe replaces literal # characters with = in preprocessor definitions for
- # some reason. Octal-encode to work around that.
- s = s.replace("#", "\\%03o" % ord("#"))
- return s
-
-
-def _GenerateRulesForMSVS(
- p, output_dir, options, spec, sources, excluded_sources, actions_to_add
-):
- """Generate all the rules for a particular project.
-
- Arguments:
- p: the project
- output_dir: directory to emit rules to
- options: global options passed to the generator
- spec: the specification for this project
- sources: the set of all known source files in this project
- excluded_sources: the set of sources excluded from normal processing
- actions_to_add: deferred list of actions to add in
- """
- rules = spec.get("rules", [])
- rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))]
- rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))]
-
- # Handle rules that use a native rules file.
- if rules_native:
- _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
-
- # Handle external rules (non-native rules).
- if rules_external:
- _GenerateExternalRules(
- rules_external, output_dir, spec, sources, options, actions_to_add
- )
- _AdjustSourcesForRules(rules, sources, excluded_sources, False)
-
-
-def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
- # Add outputs generated by each rule (if applicable).
- for rule in rules:
- # Add in the outputs from this rule.
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for trigger_file in trigger_files:
- # Remove trigger_file from excluded_sources to let the rule be triggered
- # (e.g. rule trigger ax_enums.idl is added to excluded_sources
- # because it's also in an action's inputs in the same project)
- excluded_sources.discard(_FixPath(trigger_file))
- # Done if not processing outputs as sources.
- if int(rule.get("process_outputs_as_sources", False)): - inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file) - inputs = OrderedSet(_FixPaths(inputs)) - outputs = OrderedSet(_FixPaths(outputs)) - inputs.remove(_FixPath(trigger_file)) - sources.update(inputs) - if not is_msbuild: - excluded_sources.update(inputs) - sources.update(outputs) - - -def _FilterActionsFromExcluded(excluded_sources, actions_to_add): - """Take inputs with actions attached out of the list of exclusions. - - Arguments: - excluded_sources: list of source files not to be built. - actions_to_add: dict of actions keyed on source file they're attached to. - Returns: - excluded_sources with files that have actions attached removed. - """ - must_keep = OrderedSet(_FixPaths(actions_to_add.keys())) - return [s for s in excluded_sources if s not in must_keep] - - -def _GetDefaultConfiguration(spec): - return spec["configurations"][spec["default_configuration"]] - - -def _GetGuidOfProject(proj_path, spec): - """Get the guid for the project. - - Arguments: - proj_path: Path of the vcproj or vcxproj file to generate. - spec: The target dictionary containing the properties of the target. - Returns: - the guid. - Raises: - ValueError: if the specified GUID is invalid. - """ - # Pluck out the default configuration. - default_config = _GetDefaultConfiguration(spec) - # Decide the guid of the project. - guid = default_config.get("msvs_guid") - if guid: - if VALID_MSVS_GUID_CHARS.match(guid) is None: - raise ValueError( - 'Invalid MSVS guid: "%s". Must match regex: "%s".' - % (guid, VALID_MSVS_GUID_CHARS.pattern) - ) - guid = "{%s}" % guid - guid = guid or MSVSNew.MakeGuid(proj_path) - return guid - - -def _GetMsbuildToolsetOfProject(proj_path, spec, version): - """Get the platform toolset for the project. - - Arguments: - proj_path: Path of the vcproj or vcxproj file to generate. - spec: The target dictionary containing the properties of the target. - version: The MSVSVersion object. - Returns: - the platform toolset string or None. - """ - # Pluck out the default configuration. - default_config = _GetDefaultConfiguration(spec) - toolset = default_config.get("msbuild_toolset") - if not toolset and version.DefaultToolset(): - toolset = version.DefaultToolset() - if spec["type"] == "windows_driver": - toolset = "WindowsKernelModeDriver10.0" - return toolset - - -def _GenerateProject(project, options, version, generator_flags, spec): - """Generates a vcproj file. - - Arguments: - project: the MSVSProject object. - options: global generator options. - version: the MSVSVersion object. - generator_flags: dict of generator-specific flags. - Returns: - A list of source files that cannot be found on disk. - """ - default_config = _GetDefaultConfiguration(project.spec) - - # Skip emitting anything if told to with msvs_existing_vcproj option. - if default_config.get("msvs_existing_vcproj"): - return [] - - if version.UsesVcxproj(): - return _GenerateMSBuildProject(project, options, version, generator_flags, spec) - else: - return _GenerateMSVSProject(project, options, version, generator_flags) - - -def _GenerateMSVSProject(project, options, version, generator_flags): - """Generates a .vcproj file. It may create .rules and .user files too. - - Arguments: - project: The project object we will generate the file for. - options: Global options passed to the generator. - version: The VisualStudioVersion object. - generator_flags: dict of generator-specific flags. 
- """ - spec = project.spec - gyp.common.EnsureDirExists(project.path) - - platforms = _GetUniquePlatforms(spec) - p = MSVSProject.Writer( - project.path, version, spec["target_name"], project.guid, platforms - ) - - # Get directory project file is in. - project_dir = os.path.split(project.path)[0] - gyp_path = _NormalizedSource(project.build_file) - relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) - - config_type = _GetMSVSConfigurationType(spec, project.build_file) - for config_name, config in spec["configurations"].items(): - _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config) - - # Prepare list of sources and excluded sources. - gyp_file = os.path.split(project.build_file)[1] - sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file) - - # Add rules. - actions_to_add = {} - _GenerateRulesForMSVS( - p, project_dir, options, spec, sources, excluded_sources, actions_to_add - ) - list_excluded = generator_flags.get("msvs_list_excluded_files", True) - sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, project_dir, sources, excluded_sources, list_excluded, version - ) - - # Add in files. - missing_sources = _VerifySourcesExist(sources, project_dir) - p.AddFiles(sources) - - _AddToolFilesToMSVS(p, spec) - _HandlePreCompiledHeaders(p, sources, spec) - _AddActions(actions_to_add, spec, relative_path_of_gyp_file) - _AddCopies(actions_to_add, spec) - _WriteMSVSUserFile(project.path, version, spec) - - # NOTE: this stanza must appear after all actions have been decided. - # Don't excluded sources with actions attached, or they won't run. - excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add) - _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded) - _AddAccumulatedActionsToMSVS(p, spec, actions_to_add) - - # Write it out. - p.WriteIfChanged() - - return missing_sources - - -def _GetUniquePlatforms(spec): - """Returns the list of unique platforms for this spec, e.g ['win32', ...]. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - The MSVSUserFile object created. - """ - # Gather list of unique platforms. - platforms = OrderedSet() - for configuration in spec["configurations"]: - platforms.add(_ConfigPlatform(spec["configurations"][configuration])) - platforms = list(platforms) - return platforms - - -def _CreateMSVSUserFile(proj_path, version, spec): - """Generates a .user file for the user running this Gyp program. - - Arguments: - proj_path: The path of the project file being created. The .user file - shares the same path (with an appropriate suffix). - version: The VisualStudioVersion object. - spec: The target dictionary containing the properties of the target. - Returns: - The MSVSUserFile object created. - """ - (domain, username) = _GetDomainAndUserName() - vcuser_filename = ".".join([proj_path, domain, username, "user"]) - user_file = MSVSUserFile.Writer(vcuser_filename, version, spec["target_name"]) - return user_file - - -def _GetMSVSConfigurationType(spec, build_file): - """Returns the configuration type for this project. - - It's a number defined by Microsoft. May raise an exception. - - Args: - spec: The target dictionary containing the properties of the target. - build_file: The path of the gyp file. - Returns: - An integer, the configuration type. 
- """ - try: - config_type = { - "executable": "1", # .exe - "shared_library": "2", # .dll - "loadable_module": "2", # .dll - "static_library": "4", # .lib - "windows_driver": "5", # .sys - "none": "10", # Utility type - }[spec["type"]] - except KeyError: - if spec.get("type"): - raise GypError( - "Target type %s is not a valid target type for " - "target %s in %s." % (spec["type"], spec["target_name"], build_file) - ) - else: - raise GypError( - "Missing type field for target %s in %s." - % (spec["target_name"], build_file) - ) - return config_type - - -def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config): - """Adds a configuration to the MSVS project. - - Many settings in a vcproj file are specific to a configuration. This - function the main part of the vcproj file that's configuration specific. - - Arguments: - p: The target project being generated. - spec: The target dictionary containing the properties of the target. - config_type: The configuration type, a number as defined by Microsoft. - config_name: The name of the configuration. - config: The dictionary that defines the special processing to be done - for this configuration. - """ - # Get the information for this configuration - include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(config) - libraries = _GetLibraries(spec) - library_dirs = _GetLibraryDirs(config) - out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False) - defines = _GetDefines(config) - defines = [_EscapeCppDefineForMSVS(d) for d in defines] - disabled_warnings = _GetDisabledWarnings(config) - prebuild = config.get("msvs_prebuild") - postbuild = config.get("msvs_postbuild") - def_file = _GetModuleDefinition(spec) - precompiled_header = config.get("msvs_precompiled_header") - - # Prepare the list of tools as a dictionary. - tools = dict() - # Add in user specified msvs_settings. - msvs_settings = config.get("msvs_settings", {}) - MSVSSettings.ValidateMSVSSettings(msvs_settings) - - # Prevent default library inheritance from the environment. - _ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", ["$(NOINHERIT)"]) - - for tool in msvs_settings: - settings = config["msvs_settings"][tool] - for setting in settings: - _ToolAppend(tools, tool, setting, settings[setting]) - # Add the information to the appropriate tool - _ToolAppend(tools, "VCCLCompilerTool", "AdditionalIncludeDirectories", include_dirs) - _ToolAppend(tools, "VCMIDLTool", "AdditionalIncludeDirectories", midl_include_dirs) - _ToolAppend( - tools, - "VCResourceCompilerTool", - "AdditionalIncludeDirectories", - resource_include_dirs, - ) - # Add in libraries. - _ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", libraries) - _ToolAppend(tools, "VCLinkerTool", "AdditionalLibraryDirectories", library_dirs) - if out_file: - _ToolAppend(tools, vc_tool, "OutputFile", out_file, only_if_unset=True) - # Add defines. - _ToolAppend(tools, "VCCLCompilerTool", "PreprocessorDefinitions", defines) - _ToolAppend(tools, "VCResourceCompilerTool", "PreprocessorDefinitions", defines) - # Change program database directory to prevent collisions. - _ToolAppend( - tools, - "VCCLCompilerTool", - "ProgramDataBaseFileName", - "$(IntDir)$(ProjectName)\\vc80.pdb", - only_if_unset=True, - ) - # Add disabled warnings. - _ToolAppend(tools, "VCCLCompilerTool", "DisableSpecificWarnings", disabled_warnings) - # Add Pre-build. - _ToolAppend(tools, "VCPreBuildEventTool", "CommandLine", prebuild) - # Add Post-build. 
- _ToolAppend(tools, "VCPostBuildEventTool", "CommandLine", postbuild) - # Turn on precompiled headers if appropriate. - if precompiled_header: - precompiled_header = os.path.split(precompiled_header)[1] - _ToolAppend(tools, "VCCLCompilerTool", "UsePrecompiledHeader", "2") - _ToolAppend( - tools, "VCCLCompilerTool", "PrecompiledHeaderThrough", precompiled_header - ) - _ToolAppend(tools, "VCCLCompilerTool", "ForcedIncludeFiles", precompiled_header) - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec["type"] == "loadable_module": - _ToolAppend(tools, "VCLinkerTool", "IgnoreImportLibrary", "true") - # Set the module definition file if any. - if def_file: - _ToolAppend(tools, "VCLinkerTool", "ModuleDefinitionFile", def_file) - - _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name) - - -def _GetIncludeDirs(config): - """Returns the list of directories to be used for #include directives. - - Arguments: - config: The dictionary that defines the special processing to be done - for this configuration. - Returns: - The list of directory paths. - """ - # TODO(bradnelson): include_dirs should really be flexible enough not to - # require this sort of thing. - include_dirs = config.get("include_dirs", []) + config.get( - "msvs_system_include_dirs", [] - ) - midl_include_dirs = config.get("midl_include_dirs", []) + config.get( - "msvs_system_include_dirs", [] - ) - resource_include_dirs = config.get("resource_include_dirs", include_dirs) - include_dirs = _FixPaths(include_dirs) - midl_include_dirs = _FixPaths(midl_include_dirs) - resource_include_dirs = _FixPaths(resource_include_dirs) - return include_dirs, midl_include_dirs, resource_include_dirs - - -def _GetLibraryDirs(config): - """Returns the list of directories to be used for library search paths. - - Arguments: - config: The dictionary that defines the special processing to be done - for this configuration. - Returns: - The list of directory paths. - """ - - library_dirs = config.get("library_dirs", []) - library_dirs = _FixPaths(library_dirs) - return library_dirs - - -def _GetLibraries(spec): - """Returns the list of libraries for this configuration. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - The list of directory paths. - """ - libraries = spec.get("libraries", []) - # Strip out -l, as it is not used on windows (but is needed so we can pass - # in libraries that are assumed to be in the default library path). - # Also remove duplicate entries, leaving only the last duplicate, while - # preserving order. - found = OrderedSet() - unique_libraries_list = [] - for entry in reversed(libraries): - library = re.sub(r"^\-l", "", entry) - if not os.path.splitext(library)[1]: - library += ".lib" - if library not in found: - found.add(library) - unique_libraries_list.append(library) - unique_libraries_list.reverse() - return unique_libraries_list - - -def _GetOutputFilePathAndTool(spec, msbuild): - """Returns the path and tool to use for this target. - - Figures out the path of the file this spec will create and the name of - the VC tool that will create it. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - A triple of (file path, name of the vc tool, name of the msbuild tool) - """ - # Select a name for the output file. 
- out_file = "" - vc_tool = "" - msbuild_tool = "" - output_file_map = { - "executable": ("VCLinkerTool", "Link", "$(OutDir)", ".exe"), - "shared_library": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"), - "loadable_module": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"), - "windows_driver": ("VCLinkerTool", "Link", "$(OutDir)", ".sys"), - "static_library": ("VCLibrarianTool", "Lib", "$(OutDir)lib\\", ".lib"), - } - output_file_props = output_file_map.get(spec["type"]) - if output_file_props and int(spec.get("msvs_auto_output_file", 1)): - vc_tool, msbuild_tool, out_dir, suffix = output_file_props - if spec.get("standalone_static_library", 0): - out_dir = "$(OutDir)" - out_dir = spec.get("product_dir", out_dir) - product_extension = spec.get("product_extension") - if product_extension: - suffix = "." + product_extension - elif msbuild: - suffix = "$(TargetExt)" - prefix = spec.get("product_prefix", "") - product_name = spec.get("product_name", "$(ProjectName)") - out_file = ntpath.join(out_dir, prefix + product_name + suffix) - return out_file, vc_tool, msbuild_tool - - -def _GetOutputTargetExt(spec): - """Returns the extension for this target, including the dot - - If product_extension is specified, set target_extension to this to avoid - MSB8012, returns None otherwise. Ignores any target_extension settings in - the input files. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - A string with the extension, or None - """ - target_extension = spec.get("product_extension") - if target_extension: - return "." + target_extension - return None - - -def _GetDefines(config): - """Returns the list of preprocessor definitions for this configuration. - - Arguments: - config: The dictionary that defines the special processing to be done - for this configuration. - Returns: - The list of preprocessor definitions. - """ - defines = [] - for d in config.get("defines", []): - if type(d) == list: - fd = "=".join([str(dpart) for dpart in d]) - else: - fd = str(d) - defines.append(fd) - return defines - - -def _GetDisabledWarnings(config): - return [str(i) for i in config.get("msvs_disabled_warnings", [])] - - -def _GetModuleDefinition(spec): - def_file = "" - if spec["type"] in [ - "shared_library", - "loadable_module", - "executable", - "windows_driver", - ]: - def_files = [s for s in spec.get("sources", []) if s.endswith(".def")] - if len(def_files) == 1: - def_file = _FixPath(def_files[0]) - elif def_files: - raise ValueError( - "Multiple module definition files in one target, target %s lists " - "multiple .def files: %s" % (spec["target_name"], " ".join(def_files)) - ) - return def_file - - -def _ConvertToolsToExpectedForm(tools): - """Convert tools to a form expected by Visual Studio. - - Arguments: - tools: A dictionary of settings; the tool name is the key. - Returns: - A list of Tool objects. - """ - tool_list = [] - for tool, settings in tools.items(): - # Collapse settings with lists. - settings_fixed = {} - for setting, value in settings.items(): - if type(value) == list: - if ( - tool == "VCLinkerTool" and setting == "AdditionalDependencies" - ) or setting == "AdditionalOptions": - settings_fixed[setting] = " ".join(value) - else: - settings_fixed[setting] = ";".join(value) - else: - settings_fixed[setting] = value - # Add in this tool. 
- tool_list.append(MSVSProject.Tool(tool, settings_fixed)) - return tool_list - - -def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name): - """Add to the project file the configuration specified by config. - - Arguments: - p: The target project being generated. - spec: the target project dict. - tools: A dictionary of settings; the tool name is the key. - config: The dictionary that defines the special processing to be done - for this configuration. - config_type: The configuration type, a number as defined by Microsoft. - config_name: The name of the configuration. - """ - attributes = _GetMSVSAttributes(spec, config, config_type) - # Add in this configuration. - tool_list = _ConvertToolsToExpectedForm(tools) - p.AddConfig(_ConfigFullName(config_name, config), attrs=attributes, tools=tool_list) - - -def _GetMSVSAttributes(spec, config, config_type): - # Prepare configuration attributes. - prepared_attrs = {} - source_attrs = config.get("msvs_configuration_attributes", {}) - for a in source_attrs: - prepared_attrs[a] = source_attrs[a] - # Add props files. - vsprops_dirs = config.get("msvs_props", []) - vsprops_dirs = _FixPaths(vsprops_dirs) - if vsprops_dirs: - prepared_attrs["InheritedPropertySheets"] = ";".join(vsprops_dirs) - # Set configuration type. - prepared_attrs["ConfigurationType"] = config_type - output_dir = prepared_attrs.get( - "OutputDirectory", "$(SolutionDir)$(ConfigurationName)" - ) - prepared_attrs["OutputDirectory"] = _FixPath(output_dir) + "\\" - if "IntermediateDirectory" not in prepared_attrs: - intermediate = "$(ConfigurationName)\\obj\\$(ProjectName)" - prepared_attrs["IntermediateDirectory"] = _FixPath(intermediate) + "\\" - else: - intermediate = _FixPath(prepared_attrs["IntermediateDirectory"]) + "\\" - intermediate = MSVSSettings.FixVCMacroSlashes(intermediate) - prepared_attrs["IntermediateDirectory"] = intermediate - return prepared_attrs - - -def _AddNormalizedSources(sources_set, sources_array): - sources_set.update(_NormalizedSource(s) for s in sources_array) - - -def _PrepareListOfSources(spec, generator_flags, gyp_file): - """Prepare list of sources and excluded sources. - - Besides the sources specified directly in the spec, adds the gyp file so - that a change to it will cause a re-compile. Also adds appropriate sources - for actions and copies. Assumes later stage will un-exclude files which - have custom build steps attached. - - Arguments: - spec: The target dictionary containing the properties of the target. - gyp_file: The name of the gyp file. - Returns: - A pair of (list of sources, list of excluded sources). - The sources will be relative to the gyp file. - """ - sources = OrderedSet() - _AddNormalizedSources(sources, spec.get("sources", [])) - excluded_sources = OrderedSet() - # Add in the gyp file. - if not generator_flags.get("standalone"): - sources.add(gyp_file) - - # Add in 'action' inputs and outputs. - for a in spec.get("actions", []): - inputs = a["inputs"] - inputs = [_NormalizedSource(i) for i in inputs] - # Add all inputs to sources and excluded sources. - inputs = OrderedSet(inputs) - sources.update(inputs) - if not spec.get("msvs_external_builder"): - excluded_sources.update(inputs) - if int(a.get("process_outputs_as_sources", False)): - _AddNormalizedSources(sources, a.get("outputs", [])) - # Add in 'copies' inputs and outputs. 
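The join rule at the heart of _ConvertToolsToExpectedForm is the detail most easily gotten wrong: linker AdditionalDependencies and any AdditionalOptions are space-separated, every other list is semicolon-separated. A sketch of just that rule, under assumed tool and setting names:

def collapse_setting(tool, setting, value):
    # Mirror the list-collapsing rule used when converting tool settings.
    if not isinstance(value, list):
        return value
    if ((tool == "VCLinkerTool" and setting == "AdditionalDependencies")
            or setting == "AdditionalOptions"):
        return " ".join(value)
    return ";".join(value)

print(collapse_setting("VCLinkerTool", "AdditionalDependencies", ["a.lib", "b.lib"]))
# -> 'a.lib b.lib'
print(collapse_setting("VCCLCompilerTool", "DisableSpecificWarnings", ["4100", "4127"]))
# -> '4100;4127'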
- for cpy in spec.get("copies", []): - _AddNormalizedSources(sources, cpy.get("files", [])) - return (sources, excluded_sources) - - -def _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, gyp_dir, sources, excluded_sources, list_excluded, version -): - """Adjusts the list of sources and excluded sources. - - Also converts the sets to lists. - - Arguments: - spec: The target dictionary containing the properties of the target. - options: Global generator options. - gyp_dir: The path to the gyp file being processed. - sources: A set of sources to be included for this project. - excluded_sources: A set of sources to be excluded for this project. - version: A MSVSVersion object. - Returns: - A trio of (list of sources, list of excluded sources, - path of excluded IDL file) - """ - # Exclude excluded sources coming into the generator. - excluded_sources.update(OrderedSet(spec.get("sources_excluded", []))) - # Add excluded sources into sources for good measure. - sources.update(excluded_sources) - # Convert to proper windows form. - # NOTE: sources goes from being a set to a list here. - # NOTE: excluded_sources goes from being a set to a list here. - sources = _FixPaths(sources) - # Convert to proper windows form. - excluded_sources = _FixPaths(excluded_sources) - - excluded_idl = _IdlFilesHandledNonNatively(spec, sources) - - precompiled_related = _GetPrecompileRelatedFiles(spec) - # Find the excluded ones, minus the precompiled header related ones. - fully_excluded = [i for i in excluded_sources if i not in precompiled_related] - - # Convert to folders and the right slashes. - sources = [i.split("\\") for i in sources] - sources = _ConvertSourcesToFilterHierarchy( - sources, - excluded=fully_excluded, - list_excluded=list_excluded, - msvs_version=version, - ) - - # Prune filters with a single child to flatten ugly directory structures - # such as ../../src/modules/module1 etc. - if version.UsesVcxproj(): - while ( - all([isinstance(s, MSVSProject.Filter) for s in sources]) - and len(set([s.name for s in sources])) == 1 - ): - assert all([len(s.contents) == 1 for s in sources]) - sources = [s.contents[0] for s in sources] - else: - while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter): - sources = sources[0].contents - - return sources, excluded_sources, excluded_idl - - -def _IdlFilesHandledNonNatively(spec, sources): - # If any non-native rules use 'idl' as an extension exclude idl files. - # Gather a list here to use later. - using_idl = False - for rule in spec.get("rules", []): - if rule["extension"] == "idl" and int(rule.get("msvs_external_rule", 0)): - using_idl = True - break - if using_idl: - excluded_idl = [i for i in sources if i.endswith(".idl")] - else: - excluded_idl = [] - return excluded_idl - - -def _GetPrecompileRelatedFiles(spec): - # Gather a list of precompiled header related sources. - precompiled_related = [] - for _, config in spec["configurations"].items(): - for k in precomp_keys: - f = config.get(k) - if f: - precompiled_related.append(_FixPath(f)) - return precompiled_related - - -def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded): - exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - for file_name, excluded_configs in exclusions.items(): - if not list_excluded and len(excluded_configs) == len(spec["configurations"]): - # If we're not listing excluded files, then they won't appear in the - # project, so don't try to configure them to be excluded. 
- pass - else: - for config_name, config in excluded_configs: - p.AddFileConfig( - file_name, - _ConfigFullName(config_name, config), - {"ExcludedFromBuild": "true"}, - ) - - -def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): - exclusions = {} - # Exclude excluded sources from being built. - for f in excluded_sources: - excluded_configs = [] - for config_name, config in spec["configurations"].items(): - precomped = [_FixPath(config.get(i, "")) for i in precomp_keys] - # Don't do this for ones that are precompiled header related. - if f not in precomped: - excluded_configs.append((config_name, config)) - exclusions[f] = excluded_configs - # If any non-native rules use 'idl' as an extension exclude idl files. - # Exclude them now. - for f in excluded_idl: - excluded_configs = [] - for config_name, config in spec["configurations"].items(): - excluded_configs.append((config_name, config)) - exclusions[f] = excluded_configs - return exclusions - - -def _AddToolFilesToMSVS(p, spec): - # Add in tool files (rules). - tool_files = OrderedSet() - for _, config in spec["configurations"].items(): - for f in config.get("msvs_tool_files", []): - tool_files.add(f) - for f in tool_files: - p.AddToolFile(f) - - -def _HandlePreCompiledHeaders(p, sources, spec): - # Pre-compiled header source stubs need a different compiler flag - # (generate precompiled header) and any source file not of the same - # kind (i.e. C vs. C++) as the precompiled header source stub needs - # to have use of precompiled headers disabled. - extensions_excluded_from_precompile = [] - for config_name, config in spec["configurations"].items(): - source = config.get("msvs_precompiled_source") - if source: - source = _FixPath(source) - # UsePrecompiledHeader=1 for if using precompiled headers. - tool = MSVSProject.Tool("VCCLCompilerTool", {"UsePrecompiledHeader": "1"}) - p.AddFileConfig( - source, _ConfigFullName(config_name, config), {}, tools=[tool] - ) - basename, extension = os.path.splitext(source) - if extension == ".c": - extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"] - else: - extensions_excluded_from_precompile = [".c"] - - def DisableForSourceTree(source_tree): - for source in source_tree: - if isinstance(source, MSVSProject.Filter): - DisableForSourceTree(source.contents) - else: - basename, extension = os.path.splitext(source) - if extension in extensions_excluded_from_precompile: - for config_name, config in spec["configurations"].items(): - tool = MSVSProject.Tool( - "VCCLCompilerTool", - { - "UsePrecompiledHeader": "0", - "ForcedIncludeFiles": "$(NOINHERIT)", - }, - ) - p.AddFileConfig( - _FixPath(source), - _ConfigFullName(config_name, config), - {}, - tools=[tool], - ) - - # Do nothing if there was no precompiled source. - if extensions_excluded_from_precompile: - DisableForSourceTree(sources) - - -def _AddActions(actions_to_add, spec, relative_path_of_gyp_file): - # Add actions. - actions = spec.get("actions", []) - # Don't setup_env every time. When all the actions are run together in one - # batch file in VS, the PATH will grow too long. - # Membership in this set means that the cygwin environment has been set up, - # and does not need to be set up again. - have_setup_env = set() - for a in actions: - # Attach actions to the gyp file if nothing else is there. 
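A condensed sketch of the C-vs-C++ decision _HandlePreCompiledHeaders makes above: the precompiled stub's own extension determines which other extensions must opt out of the precompiled header (the file name here is hypothetical):

import os

def extensions_to_disable(precompiled_source):
    # A .c stub means C++ sources cannot share the PCH, and vice versa.
    _, ext = os.path.splitext(precompiled_source)
    if ext == ".c":
        return [".cc", ".cpp", ".cxx"]
    return [".c"]

print(extensions_to_disable("src/precompile.cc"))  # -> ['.c']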
- inputs = a.get("inputs") or [relative_path_of_gyp_file] - attached_to = inputs[0] - need_setup_env = attached_to not in have_setup_env - cmd = _BuildCommandLineForRule( - spec, a, has_input_path=False, do_setup_env=need_setup_env - ) - have_setup_env.add(attached_to) - # Add the action. - _AddActionStep( - actions_to_add, - inputs=inputs, - outputs=a.get("outputs", []), - description=a.get("message", a["action_name"]), - command=cmd, - ) - - -def _WriteMSVSUserFile(project_path, version, spec): - # Add run_as and test targets. - if "run_as" in spec: - run_as = spec["run_as"] - action = run_as.get("action", []) - environment = run_as.get("environment", []) - working_directory = run_as.get("working_directory", ".") - elif int(spec.get("test", 0)): - action = ["$(TargetPath)", "--gtest_print_time"] - environment = [] - working_directory = "." - else: - return # Nothing to add - # Write out the user file. - user_file = _CreateMSVSUserFile(project_path, version, spec) - for config_name, c_data in spec["configurations"].items(): - user_file.AddDebugSettings( - _ConfigFullName(config_name, c_data), action, environment, working_directory - ) - user_file.WriteIfChanged() - - -def _AddCopies(actions_to_add, spec): - copies = _GetCopies(spec) - for inputs, outputs, cmd, description in copies: - _AddActionStep( - actions_to_add, - inputs=inputs, - outputs=outputs, - description=description, - command=cmd, - ) - - -def _GetCopies(spec): - copies = [] - # Add copies. - for cpy in spec.get("copies", []): - for src in cpy.get("files", []): - dst = os.path.join(cpy["destination"], os.path.basename(src)) - # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and - # outputs, so do the same for our generated command line. - if src.endswith("/"): - src_bare = src[:-1] - base_dir = posixpath.split(src_bare)[0] - outer_dir = posixpath.split(src_bare)[1] - fixed_dst = _FixPath(dst) - full_dst = '"%s\\%s\\"' % (fixed_dst, outer_dir) - cmd = 'mkdir %s 2>nul & cd "%s" && xcopy /e /f /y "%s" %s' % ( - full_dst, - _FixPath(base_dir), - outer_dir, - full_dst, - ) - copies.append( - ( - [src], - ["dummy_copies", dst], - cmd, - "Copying %s to %s" % (src, fixed_dst), - ) - ) - else: - fix_dst = _FixPath(cpy["destination"]) - cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % ( - fix_dst, - _FixPath(src), - _FixPath(dst), - ) - copies.append(([src], [dst], cmd, "Copying %s to %s" % (src, fix_dst))) - return copies - - -def _GetPathDict(root, path): - # |path| will eventually be empty (in the recursive calls) if it was initially - # relative; otherwise it will eventually end up as '\', 'D:\', etc. - if not path or path.endswith(os.sep): - return root - parent, folder = os.path.split(path) - parent_dict = _GetPathDict(root, parent) - if folder not in parent_dict: - parent_dict[folder] = dict() - return parent_dict[folder] - - -def _DictsToFolders(base_path, bucket, flat): - # Convert to folders recursively. 
- children = [] - for folder, contents in bucket.items(): - if type(contents) == dict: - folder_children = _DictsToFolders( - os.path.join(base_path, folder), contents, flat - ) - if flat: - children += folder_children - else: - folder_children = MSVSNew.MSVSFolder( - os.path.join(base_path, folder), - name="(" + folder + ")", - entries=folder_children, - ) - children.append(folder_children) - else: - children.append(contents) - return children - - -def _CollapseSingles(parent, node): - # Recursively explore the tree of dicts looking for projects which are - # the sole item in a folder which has the same name as the project. Bring - # such projects up one level. - if type(node) == dict and len(node) == 1 and next(iter(node)) == parent + ".vcproj": - return node[next(iter(node))] - if type(node) != dict: - return node - for child in node: - node[child] = _CollapseSingles(child, node[child]) - return node - - -def _GatherSolutionFolders(sln_projects, project_objects, flat): - root = {} - # Convert into a tree of dicts on path. - for p in sln_projects: - gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2] - if p.endswith("#host"): - target += "_host" - gyp_dir = os.path.dirname(gyp_file) - path_dict = _GetPathDict(root, gyp_dir) - path_dict[target + ".vcproj"] = project_objects[p] - # Walk down from the top until we hit a folder that has more than one entry. - # In practice, this strips the top-level "src/" dir from the hierarchy in - # the solution. - while len(root) == 1 and type(root[next(iter(root))]) == dict: - root = root[next(iter(root))] - # Collapse singles. - root = _CollapseSingles("", root) - # Merge buckets until everything is a root entry. - return _DictsToFolders("", root, flat) - - -def _GetPathOfProject(qualified_target, spec, options, msvs_version): - default_config = _GetDefaultConfiguration(spec) - proj_filename = default_config.get("msvs_existing_vcproj") - if not proj_filename: - proj_filename = spec["target_name"] - if spec["toolset"] == "host": - proj_filename += "_host" - proj_filename = proj_filename + options.suffix + msvs_version.ProjectExtension() - - build_file = gyp.common.BuildFile(qualified_target) - proj_path = os.path.join(os.path.dirname(build_file), proj_filename) - fix_prefix = None - if options.generator_output: - project_dir_path = os.path.dirname(os.path.abspath(proj_path)) - proj_path = os.path.join(options.generator_output, proj_path) - fix_prefix = gyp.common.RelativePath( - project_dir_path, os.path.dirname(proj_path) - ) - return proj_path, fix_prefix - - -def _GetPlatformOverridesOfProject(spec): - # Prepare a dict indicating which project configurations are used for which - # solution configurations for this target. - config_platform_overrides = {} - for config_name, c in spec["configurations"].items(): - config_fullname = _ConfigFullName(config_name, c) - platform = c.get("msvs_target_platform", _ConfigPlatform(c)) - fixed_config_fullname = "%s|%s" % ( - _ConfigBaseName(config_name, _ConfigPlatform(c)), - platform, - ) - if spec["toolset"] == "host" and generator_supports_multiple_toolsets: - fixed_config_fullname = "%s|x64" % (config_name,) - config_platform_overrides[config_fullname] = fixed_config_fullname - return config_platform_overrides - - -def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): - """Create a MSVSProject object for the targets found in target list. - - Arguments: - target_list: the list of targets to generate project objects for. - target_dicts: the dictionary of specifications.
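How _GetPathDict and _CollapseSingles cooperate to shape the solution-folder tree can be shown with plain dicts. A simplified sketch (iterative where gyp recurses; project names are hypothetical):

def get_path_dict(root, path):
    # Walk/extend the nested dict for each folder component of the path.
    node = root
    for part in [p for p in path.split("/") if p]:
        node = node.setdefault(part, {})
    return node

def collapse_singles(parent, node):
    # Lift a project that is the sole entry of a folder with the same name.
    if (isinstance(node, dict) and len(node) == 1
            and next(iter(node)) == parent + ".vcproj"):
        return node[next(iter(node))]
    if not isinstance(node, dict):
        return node
    for child in node:
        node[child] = collapse_singles(child, node[child])
    return node

root = {}
get_path_dict(root, "src/base")["base.vcproj"] = "<base project>"
print(collapse_singles("", root))
# -> {'src': {'base': '<base project>'}}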
- options: global generator options. - msvs_version: the MSVSVersion object. - Returns: - A dictionary of created projects, keyed by qualified target. - """ - global fixpath_prefix - # Generate each project. - projects = {} - for qualified_target in target_list: - spec = target_dicts[qualified_target] - proj_path, fixpath_prefix = _GetPathOfProject( - qualified_target, spec, options, msvs_version - ) - guid = _GetGuidOfProject(proj_path, spec) - overrides = _GetPlatformOverridesOfProject(spec) - build_file = gyp.common.BuildFile(qualified_target) - # Create object for this project. - target_name = spec["target_name"] - if spec["toolset"] == "host": - target_name += "_host" - obj = MSVSNew.MSVSProject( - proj_path, - name=target_name, - guid=guid, - spec=spec, - build_file=build_file, - config_platform_overrides=overrides, - fixpath_prefix=fixpath_prefix, - ) - # Set project toolset if any (MS build only) - if msvs_version.UsesVcxproj(): - obj.set_msbuild_toolset( - _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version) - ) - projects[qualified_target] = obj - # Set all the dependencies, but not if we are using an external builder like - # ninja. - for project in projects.values(): - if not project.spec.get("msvs_external_builder"): - deps = project.spec.get("dependencies", []) - deps = [projects[d] for d in deps] - project.set_dependencies(deps) - return projects - - -def _InitNinjaFlavor(params, target_list, target_dicts): - """Initialize targets for the ninja flavor. - - This sets up the necessary variables in the targets to generate msvs projects - that use ninja as an external builder. The variables in the spec are only set - if they have not been set. This allows individual specs to override the - default values initialized here. - Arguments: - params: Params provided to the generator. - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - """ - for qualified_target in target_list: - spec = target_dicts[qualified_target] - if spec.get("msvs_external_builder"): - # The spec explicitly defined an external builder, so don't change it. - continue - - path_to_ninja = spec.get("msvs_path_to_ninja", "ninja.exe") - - spec["msvs_external_builder"] = "ninja" - if not spec.get("msvs_external_builder_out_dir"): - gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) - gyp_dir = os.path.dirname(gyp_file) - configuration = "$(Configuration)" - if params.get("target_arch") == "x64": - configuration += "_x64" - if params.get("target_arch") == "arm64": - configuration += "_arm64" - spec["msvs_external_builder_out_dir"] = os.path.join( - gyp.common.RelativePath(params["options"].toplevel_dir, gyp_dir), - ninja_generator.ComputeOutputDir(params), - configuration, - ) - if not spec.get("msvs_external_builder_build_cmd"): - spec["msvs_external_builder_build_cmd"] = [ - path_to_ninja, - "-C", - "$(OutDir)", - "$(ProjectName)", - ] - if not spec.get("msvs_external_builder_clean_cmd"): - spec["msvs_external_builder_clean_cmd"] = [ - path_to_ninja, - "-C", - "$(OutDir)", - "-tclean", - "$(ProjectName)", - ] - - -def CalculateVariables(default_variables, params): - """Generate variables that require params to be known.""" - - generator_flags = params.get("generator_flags", {}) - - # Select project file format version (if unset, default to auto detecting). - msvs_version = MSVSVersion.SelectVisualStudioVersion( - generator_flags.get("msvs_version", "auto") - ) - # Stash msvs_version for later (so we don't have to probe the system twice).
- params["msvs_version"] = msvs_version - - # Set a variable so conditions can be based on msvs_version. - default_variables["MSVS_VERSION"] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if ( - os.environ.get("PROCESSOR_ARCHITECTURE", "").find("64") >= 0 - or os.environ.get("PROCESSOR_ARCHITEW6432", "").find("64") >= 0 - ): - default_variables["MSVS_OS_BITS"] = 64 - else: - default_variables["MSVS_OS_BITS"] = 32 - - if gyp.common.GetFlavor(params) == "ninja": - default_variables["SHARED_INTERMEDIATE_DIR"] = "$(OutDir)gen" - - -def PerformBuild(data, configurations, params): - options = params["options"] - msvs_version = params["msvs_version"] - devenv = os.path.join(msvs_version.path, "Common7", "IDE", "devenv.com") - - for build_file, build_file_dict in data.items(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != ".gyp": - continue - sln_path = build_file_root + options.suffix + ".sln" - if options.generator_output: - sln_path = os.path.join(options.generator_output, sln_path) - - for config in configurations: - arguments = [devenv, sln_path, "/Build", config] - print("Building [%s]: %s" % (config, arguments)) - subprocess.check_call(arguments) - - -def CalculateGeneratorInputInfo(params): - if params.get("flavor") == "ninja": - toplevel = params["options"].toplevel_dir - qualified_out_dir = os.path.normpath( - os.path.join( - toplevel, - ninja_generator.ComputeOutputDir(params), - "gypfiles-msvs-ninja", - ) - ) - - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": toplevel, - "qualified_out_dir": qualified_out_dir, - } - - -def GenerateOutput(target_list, target_dicts, data, params): - """Generate .sln and .vcproj files. - - This is the entry point for this generator. - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - data: Dictionary containing per .gyp data. - """ - global fixpath_prefix - - options = params["options"] - - # Get the project file format version back out of where we stashed it in - # GeneratorCalculatedVariables. - msvs_version = params["msvs_version"] - - generator_flags = params.get("generator_flags", {}) - - # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT. - (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts) - - # Optionally use the large PDB workaround for targets marked with - # 'msvs_large_pdb': 1. - (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims( - target_list, target_dicts, generator_default_variables - ) - - # Optionally configure each spec to use ninja as the external builder. - if params.get("flavor") == "ninja": - _InitNinjaFlavor(params, target_list, target_dicts) - - # Prepare the set of configurations. 
- configs = set() - for qualified_target in target_list: - spec = target_dicts[qualified_target] - for config_name, config in spec["configurations"].items(): - config_name = _ConfigFullName(config_name, config) - configs.add(config_name) - if config_name == "Release|arm64": - configs.add("Release|x64") - configs = list(configs) - - # Figure out all the projects that will be generated and their guids - project_objects = _CreateProjectObjects( - target_list, target_dicts, options, msvs_version - ) - - # Generate each project. - missing_sources = [] - for project in project_objects.values(): - fixpath_prefix = project.fixpath_prefix - missing_sources.extend( - _GenerateProject(project, options, msvs_version, generator_flags, spec) - ) - fixpath_prefix = None - - for build_file in data: - # Validate build_file extension - target_only_configs = configs - if generator_supports_multiple_toolsets: - target_only_configs = [i for i in configs if i.endswith("arm64")] - if not build_file.endswith(".gyp"): - continue - sln_path = os.path.splitext(build_file)[0] + options.suffix + ".sln" - if options.generator_output: - sln_path = os.path.join(options.generator_output, sln_path) - # Get projects in the solution, and their dependents. - sln_projects = gyp.common.BuildFileTargets(target_list, build_file) - sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects) - # Create folder hierarchy. - root_entries = _GatherSolutionFolders( - sln_projects, project_objects, flat=msvs_version.FlatSolution() - ) - # Create solution. - sln = MSVSNew.MSVSSolution( - sln_path, - entries=root_entries, - variants=target_only_configs, - websiteProperties=False, - version=msvs_version, - ) - sln.Write() - - if missing_sources: - error_message = "Missing input files:\n" + "\n".join(set(missing_sources)) - if generator_flags.get("msvs_error_on_missing_sources", False): - raise GypError(error_message) - else: - print("Warning: " + error_message, file=sys.stdout) - - -def _GenerateMSBuildFiltersFile( - filters_path, - source_files, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, -): - """Generate the filters file. - - This file is used by Visual Studio to organize the presentation of source - files into folders. - - Arguments: - filters_path: The path of the file to be created. - source_files: The hierarchical structure of all the sources. - extension_to_rule_name: A dictionary mapping file extensions to rules. - """ - filter_group = [] - source_group = [] - _AppendFiltersForMSBuild( - "", - source_files, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, - filter_group, - source_group, - ) - if filter_group: - content = [ - "Project", - { - "ToolsVersion": "4.0", - "xmlns": "http://schemas.microsoft.com/developer/msbuild/2003", - }, - ["ItemGroup"] + filter_group, - ["ItemGroup"] + source_group, - ] - easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True) - elif os.path.exists(filters_path): - # We don't need this filter anymore. Delete the old filter file. - os.unlink(filters_path) - - -def _AppendFiltersForMSBuild( - parent_filter_name, - sources, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, - filter_group, - source_group, -): - """Creates the list of filters and sources to be added in the filter file. - - Args: - parent_filter_name: The name of the filter under which the sources are - found. - sources: The hierarchy of filters and sources to process. 
- extension_to_rule_name: A dictionary mapping file extensions to rules. - filter_group: The list to which filter entries will be appended. - source_group: The list to which source entries will be appended. - """ - for source in sources: - if isinstance(source, MSVSProject.Filter): - # We have a sub-filter. Create the name of that sub-filter. - if not parent_filter_name: - filter_name = source.name - else: - filter_name = "%s\\%s" % (parent_filter_name, source.name) - # Add the filter to the group. - filter_group.append( - [ - "Filter", - {"Include": filter_name}, - ["UniqueIdentifier", MSVSNew.MakeGuid(source.name)], - ] - ) - # Recurse and add its dependents. - _AppendFiltersForMSBuild( - filter_name, - source.contents, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, - filter_group, - source_group, - ) - else: - # It's a source. Create a source entry. - _, element = _MapFileToMsBuildSourceType( - source, rule_dependencies, extension_to_rule_name, platforms, toolset - ) - source_entry = [element, {"Include": source}] - # Specify the filter it is part of, if any. - if parent_filter_name: - source_entry.append(["Filter", parent_filter_name]) - source_group.append(source_entry) - - -def _MapFileToMsBuildSourceType( - source, rule_dependencies, extension_to_rule_name, platforms, toolset -): - """Returns the group and element type of the source file. - - Arguments: - source: The source file name. - extension_to_rule_name: A dictionary mapping file extensions to rules. - - Returns: - A pair of (group this file should be part of, the label of element) - """ - _, ext = os.path.splitext(source) - ext = ext.lower() - if ext in extension_to_rule_name: - group = "rule" - element = extension_to_rule_name[ext] - elif ext in [".cc", ".cpp", ".c", ".cxx", ".mm"]: - group = "compile" - element = "ClCompile" - elif ext in [".h", ".hxx"]: - group = "include" - element = "ClInclude" - elif ext == ".rc": - group = "resource" - element = "ResourceCompile" - elif ext in [".s", ".asm"]: - group = "masm" - element = "MASM" - if "arm64" in platforms and toolset == "target": - element = "MARMASM" - elif ext == ".idl": - group = "midl" - element = "Midl" - elif source in rule_dependencies: - group = "rule_dependency" - element = "CustomBuild" - else: - group = "none" - element = "None" - return (group, element) - - -def _GenerateRulesForMSBuild( - output_dir, - options, - spec, - sources, - excluded_sources, - props_files_of_rules, - targets_files_of_rules, - actions_to_add, - rule_dependencies, - extension_to_rule_name, -): - # MSBuild rules are implemented using three files: an XML file, a .targets - # file and a .props file. - # For more details see: - # https://devblogs.microsoft.com/cppblog/quick-help-on-vs2010-custom-build-rule/ - rules = spec.get("rules", []) - rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))] - rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))] - - msbuild_rules = [] - for rule in rules_native: - # Skip a rule with no action and no inputs. 
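A condensed sketch of the extension dispatch in _MapFileToMsBuildSourceType above (custom-rule extensions and the arm64 MARMASM special case are omitted here):

import os

def msbuild_element_for(source):
    # Map a source file to the MSBuild item element that processes it.
    ext = os.path.splitext(source)[1].lower()
    if ext in (".cc", ".cpp", ".c", ".cxx", ".mm"):
        return "ClCompile"
    if ext in (".h", ".hxx"):
        return "ClInclude"
    if ext == ".rc":
        return "ResourceCompile"
    if ext in (".s", ".asm"):
        return "MASM"
    if ext == ".idl":
        return "Midl"
    return "None"

print(msbuild_element_for("foo.cpp"))  # -> ClCompile
print(msbuild_element_for("app.rc"))   # -> ResourceCompile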
- if "action" not in rule and not rule.get("rule_sources", []): - continue - msbuild_rule = MSBuildRule(rule, spec) - msbuild_rules.append(msbuild_rule) - rule_dependencies.update(msbuild_rule.additional_dependencies.split(";")) - extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name - if msbuild_rules: - base = spec["target_name"] + options.suffix - props_name = base + ".props" - targets_name = base + ".targets" - xml_name = base + ".xml" - - props_files_of_rules.add(props_name) - targets_files_of_rules.add(targets_name) - - props_path = os.path.join(output_dir, props_name) - targets_path = os.path.join(output_dir, targets_name) - xml_path = os.path.join(output_dir, xml_name) - - _GenerateMSBuildRulePropsFile(props_path, msbuild_rules) - _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules) - _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules) - - if rules_external: - _GenerateExternalRules( - rules_external, output_dir, spec, sources, options, actions_to_add - ) - _AdjustSourcesForRules(rules, sources, excluded_sources, True) - - -class MSBuildRule(object): - """Used to store information used to generate an MSBuild rule. - - Attributes: - rule_name: The rule name, sanitized to use in XML. - target_name: The name of the target. - after_targets: The name of the AfterTargets element. - before_targets: The name of the BeforeTargets element. - depends_on: The name of the DependsOn element. - compute_output: The name of the ComputeOutput element. - dirs_to_make: The name of the DirsToMake element. - inputs: The name of the _inputs element. - tlog: The name of the _tlog element. - extension: The extension this rule applies to. - description: The message displayed when this rule is invoked. - additional_dependencies: A string listing additional dependencies. - outputs: The outputs of this rule. - command: The command used to run the rule. - """ - - def __init__(self, rule, spec): - self.display_name = rule["rule_name"] - # Assure that the rule name is only characters and numbers - self.rule_name = re.sub(r"\W", "_", self.display_name) - # Create the various element names, following the example set by the - # Visual Studio 2008 to 2010 conversion. I don't know if VS2010 - # is sensitive to the exact names. - self.target_name = "_" + self.rule_name - self.after_targets = self.rule_name + "AfterTargets" - self.before_targets = self.rule_name + "BeforeTargets" - self.depends_on = self.rule_name + "DependsOn" - self.compute_output = "Compute%sOutput" % self.rule_name - self.dirs_to_make = self.rule_name + "DirsToMake" - self.inputs = self.rule_name + "_inputs" - self.tlog = self.rule_name + "_tlog" - self.extension = rule["extension"] - if not self.extension.startswith("."): - self.extension = "." 
+ self.extension - - self.description = MSVSSettings.ConvertVCMacrosToMSBuild( - rule.get("message", self.rule_name) - ) - old_additional_dependencies = _FixPaths(rule.get("inputs", [])) - self.additional_dependencies = ";".join( - [ - MSVSSettings.ConvertVCMacrosToMSBuild(i) - for i in old_additional_dependencies - ] - ) - old_outputs = _FixPaths(rule.get("outputs", [])) - self.outputs = ";".join( - [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in old_outputs] - ) - old_command = _BuildCommandLineForRule( - spec, rule, has_input_path=True, do_setup_env=True - ) - self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command) - - -def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules): - """Generate the .props file.""" - content = [ - "Project", - {"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"}, - ] - for rule in msbuild_rules: - content.extend( - [ - [ - "PropertyGroup", - { - "Condition": "'$(%s)' == '' and '$(%s)' == '' and " - "'$(ConfigurationType)' != 'Makefile'" - % (rule.before_targets, rule.after_targets) - }, - [rule.before_targets, "Midl"], - [rule.after_targets, "CustomBuild"], - ], - [ - "PropertyGroup", - [ - rule.depends_on, - {"Condition": "'$(ConfigurationType)' != 'Makefile'"}, - "_SelectedFiles;$(%s)" % rule.depends_on, - ], - ], - [ - "ItemDefinitionGroup", - [ - rule.rule_name, - ["CommandLineTemplate", rule.command], - ["Outputs", rule.outputs], - ["ExecutionDescription", rule.description], - ["AdditionalDependencies", rule.additional_dependencies], - ], - ], - ] - ) - easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True) - - -def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules): - """Generate the .targets file.""" - content = [ - "Project", - {"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"}, - ] - item_group = [ - "ItemGroup", - [ - "PropertyPageSchema", - {"Include": "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"}, - ], - ] - for rule in msbuild_rules: - item_group.append( - [ - "AvailableItemName", - {"Include": rule.rule_name}, - ["Targets", rule.target_name], - ] - ) - content.append(item_group) - - for rule in msbuild_rules: - content.append( - [ - "UsingTask", - { - "TaskName": rule.rule_name, - "TaskFactory": "XamlTaskFactory", - "AssemblyName": "Microsoft.Build.Tasks.v4.0", - }, - ["Task", "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"], - ] - ) - for rule in msbuild_rules: - rule_name = rule.rule_name - target_outputs = "%%(%s.Outputs)" % rule_name - target_inputs = ( - "%%(%s.Identity);%%(%s.AdditionalDependencies);" "$(MSBuildProjectFile)" - ) % (rule_name, rule_name) - rule_inputs = "%%(%s.Identity)" % rule_name - extension_condition = ( - "'%(Extension)'=='.obj' or " - "'%(Extension)'=='.res' or " - "'%(Extension)'=='.rsc' or " - "'%(Extension)'=='.lib'" - ) - remove_section = [ - "ItemGroup", - {"Condition": "'@(SelectedFiles)' != ''"}, - [ - rule_name, - { - "Remove": "@(%s)" % rule_name, - "Condition": "'%(Identity)' != '@(SelectedFiles)'", - }, - ], - ] - inputs_section = [ - "ItemGroup", - [rule.inputs, {"Include": "%%(%s.AdditionalDependencies)" % rule_name}], - ] - logging_section = [ - "ItemGroup", - [ - rule.tlog, - { - "Include": "%%(%s.Outputs)" % rule_name, - "Condition": ( - "'%%(%s.Outputs)' != '' and " - "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name) - ), - }, - ["Source", "@(%s, '|')" % rule_name], - ["Inputs", "@(%s -> '%%(Fullpath)', ';')" % rule.inputs], - ], - ] - message_section = [ - "Message", - {"Importance": "High", 
"Text": "%%(%s.ExecutionDescription)" % rule_name}, - ] - write_tlog_section = [ - "WriteLinesToFile", - { - "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule.tlog, rule.tlog), - "File": "$(IntDir)$(ProjectName).write.1.tlog", - "Lines": "^%%(%s.Source);@(%s->'%%(Fullpath)')" - % (rule.tlog, rule.tlog), - }, - ] - read_tlog_section = [ - "WriteLinesToFile", - { - "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule.tlog, rule.tlog), - "File": "$(IntDir)$(ProjectName).read.1.tlog", - "Lines": "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog), - }, - ] - command_and_input_section = [ - rule_name, - { - "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule_name, rule_name), - "EchoOff": "true", - "StandardOutputImportance": "High", - "StandardErrorImportance": "High", - "CommandLineTemplate": "%%(%s.CommandLineTemplate)" % rule_name, - "AdditionalOptions": "%%(%s.AdditionalOptions)" % rule_name, - "Inputs": rule_inputs, - }, - ] - content.extend( - [ - [ - "Target", - { - "Name": rule.target_name, - "BeforeTargets": "$(%s)" % rule.before_targets, - "AfterTargets": "$(%s)" % rule.after_targets, - "Condition": "'@(%s)' != ''" % rule_name, - "DependsOnTargets": "$(%s);%s" - % (rule.depends_on, rule.compute_output), - "Outputs": target_outputs, - "Inputs": target_inputs, - }, - remove_section, - inputs_section, - logging_section, - message_section, - write_tlog_section, - read_tlog_section, - command_and_input_section, - ], - [ - "PropertyGroup", - [ - "ComputeLinkInputsTargets", - "$(ComputeLinkInputsTargets);", - "%s;" % rule.compute_output, - ], - [ - "ComputeLibInputsTargets", - "$(ComputeLibInputsTargets);", - "%s;" % rule.compute_output, - ], - ], - [ - "Target", - { - "Name": rule.compute_output, - "Condition": "'@(%s)' != ''" % rule_name, - }, - [ - "ItemGroup", - [ - rule.dirs_to_make, - { - "Condition": "'@(%s)' != '' and " - "'%%(%s.ExcludedFromBuild)' != 'true'" - % (rule_name, rule_name), - "Include": "%%(%s.Outputs)" % rule_name, - }, - ], - [ - "Link", - { - "Include": "%%(%s.Identity)" % rule.dirs_to_make, - "Condition": extension_condition, - }, - ], - [ - "Lib", - { - "Include": "%%(%s.Identity)" % rule.dirs_to_make, - "Condition": extension_condition, - }, - ], - [ - "ImpLib", - { - "Include": "%%(%s.Identity)" % rule.dirs_to_make, - "Condition": extension_condition, - }, - ], - ], - [ - "MakeDir", - { - "Directories": ( - "@(%s->'%%(RootDir)%%(Directory)')" % rule.dirs_to_make - ) - }, - ], - ], - ] - ) - easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True) - - -def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules): - # Generate the .xml file - content = [ - "ProjectSchemaDefinitions", - { - "xmlns": ( - "clr-namespace:Microsoft.Build.Framework.XamlTypes;" - "assembly=Microsoft.Build.Framework" - ), - "xmlns:x": "http://schemas.microsoft.com/winfx/2006/xaml", - "xmlns:sys": "clr-namespace:System;assembly=mscorlib", - "xmlns:transformCallback": "Microsoft.Cpp.Dev10.ConvertPropertyCallback", - }, - ] - for rule in msbuild_rules: - content.extend( - [ - [ - "Rule", - { - "Name": rule.rule_name, - "PageTemplate": "tool", - "DisplayName": rule.display_name, - "Order": "200", - }, - [ - "Rule.DataSource", - [ - "DataSource", - {"Persistence": "ProjectFile", "ItemType": rule.rule_name}, - ], - ], - [ - "Rule.Categories", - [ - "Category", - {"Name": "General"}, - ["Category.DisplayName", ["sys:String", "General"]], - ], - [ - "Category", - {"Name": "Command 
Line", "Subtype": "CommandLine"}, - ["Category.DisplayName", ["sys:String", "Command Line"]], - ], - ], - [ - "StringListProperty", - { - "Name": "Inputs", - "Category": "Command Line", - "IsRequired": "true", - "Switch": " ", - }, - [ - "StringListProperty.DataSource", - [ - "DataSource", - { - "Persistence": "ProjectFile", - "ItemType": rule.rule_name, - "SourceType": "Item", - }, - ], - ], - ], - [ - "StringProperty", - { - "Name": "CommandLineTemplate", - "DisplayName": "Command Line", - "Visible": "False", - "IncludeInCommandLine": "False", - }, - ], - [ - "DynamicEnumProperty", - { - "Name": rule.before_targets, - "Category": "General", - "EnumProvider": "Targets", - "IncludeInCommandLine": "False", - }, - [ - "DynamicEnumProperty.DisplayName", - ["sys:String", "Execute Before"], - ], - [ - "DynamicEnumProperty.Description", - [ - "sys:String", - "Specifies the targets for the build customization" - " to run before.", - ], - ], - [ - "DynamicEnumProperty.ProviderSettings", - [ - "NameValuePair", - { - "Name": "Exclude", - "Value": "^%s|^Compute" % rule.before_targets, - }, - ], - ], - [ - "DynamicEnumProperty.DataSource", - [ - "DataSource", - { - "Persistence": "ProjectFile", - "HasConfigurationCondition": "true", - }, - ], - ], - ], - [ - "DynamicEnumProperty", - { - "Name": rule.after_targets, - "Category": "General", - "EnumProvider": "Targets", - "IncludeInCommandLine": "False", - }, - [ - "DynamicEnumProperty.DisplayName", - ["sys:String", "Execute After"], - ], - [ - "DynamicEnumProperty.Description", - [ - "sys:String", - ( - "Specifies the targets for the build customization" - " to run after." - ), - ], - ], - [ - "DynamicEnumProperty.ProviderSettings", - [ - "NameValuePair", - { - "Name": "Exclude", - "Value": "^%s|^Compute" % rule.after_targets, - }, - ], - ], - [ - "DynamicEnumProperty.DataSource", - [ - "DataSource", - { - "Persistence": "ProjectFile", - "ItemType": "", - "HasConfigurationCondition": "true", - }, - ], - ], - ], - [ - "StringListProperty", - { - "Name": "Outputs", - "DisplayName": "Outputs", - "Visible": "False", - "IncludeInCommandLine": "False", - }, - ], - [ - "StringProperty", - { - "Name": "ExecutionDescription", - "DisplayName": "Execution Description", - "Visible": "False", - "IncludeInCommandLine": "False", - }, - ], - [ - "StringListProperty", - { - "Name": "AdditionalDependencies", - "DisplayName": "Additional Dependencies", - "IncludeInCommandLine": "False", - "Visible": "false", - }, - ], - [ - "StringProperty", - { - "Subtype": "AdditionalOptions", - "Name": "AdditionalOptions", - "Category": "Command Line", - }, - [ - "StringProperty.DisplayName", - ["sys:String", "Additional Options"], - ], - [ - "StringProperty.Description", - ["sys:String", "Additional Options"], - ], - ], - ], - [ - "ItemType", - {"Name": rule.rule_name, "DisplayName": rule.display_name}, - ], - [ - "FileExtension", - {"Name": "*" + rule.extension, "ContentType": rule.rule_name}, - ], - [ - "ContentType", - { - "Name": rule.rule_name, - "DisplayName": "", - "ItemType": rule.rule_name, - }, - ], - ] - ) - easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True) - - -def _GetConfigurationAndPlatform(name, settings, spec): - configuration = name.rsplit("_", 1)[0] - platform = settings.get("msvs_configuration_platform", "Win32") - if spec["toolset"] == "host" and platform == "arm64": - platform = "x64" # Host-only tools are always built for x64 - return (configuration, platform) - - -def _GetConfigurationCondition(name, settings, spec): - return 
r"'$(Configuration)|$(Platform)'=='%s|%s'" % _GetConfigurationAndPlatform( - name, settings, spec - ) - - -def _GetMSBuildProjectConfigurations(configurations, spec): - group = ["ItemGroup", {"Label": "ProjectConfigurations"}] - for (name, settings) in sorted(configurations.items()): - configuration, platform = _GetConfigurationAndPlatform(name, settings, spec) - designation = "%s|%s" % (configuration, platform) - group.append( - [ - "ProjectConfiguration", - {"Include": designation}, - ["Configuration", configuration], - ["Platform", platform], - ] - ) - return [group] - - -def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name): - namespace = os.path.splitext(gyp_file_name)[0] - properties = [ - [ - "PropertyGroup", - {"Label": "Globals"}, - ["ProjectGuid", guid], - ["Keyword", "Win32Proj"], - ["RootNamespace", namespace], - ["IgnoreWarnCompileDuplicatedFilename", "true"], - ] - ] - - if ( - os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64" - or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64" - ): - properties[0].append(["PreferredToolArchitecture", "x64"]) - - if spec.get("msvs_target_platform_version"): - target_platform_version = spec.get("msvs_target_platform_version") - properties[0].append(["WindowsTargetPlatformVersion", target_platform_version]) - if spec.get("msvs_target_platform_minversion"): - target_platform_minversion = spec.get("msvs_target_platform_minversion") - properties[0].append( - ["WindowsTargetPlatformMinVersion", target_platform_minversion] - ) - else: - properties[0].append( - ["WindowsTargetPlatformMinVersion", target_platform_version] - ) - - if spec.get("msvs_enable_winrt"): - properties[0].append(["DefaultLanguage", "en-US"]) - properties[0].append(["AppContainerApplication", "true"]) - if spec.get("msvs_application_type_revision"): - app_type_revision = spec.get("msvs_application_type_revision") - properties[0].append(["ApplicationTypeRevision", app_type_revision]) - else: - properties[0].append(["ApplicationTypeRevision", "8.1"]) - if spec.get("msvs_enable_winphone"): - properties[0].append(["ApplicationType", "Windows Phone"]) - else: - properties[0].append(["ApplicationType", "Windows Store"]) - - platform_name = None - msvs_windows_sdk_version = None - for configuration in spec["configurations"].values(): - platform_name = platform_name or _ConfigPlatform(configuration) - msvs_windows_sdk_version = ( - msvs_windows_sdk_version - or _ConfigWindowsTargetPlatformVersion(configuration, version) - ) - if platform_name and msvs_windows_sdk_version: - break - if msvs_windows_sdk_version: - properties[0].append( - ["WindowsTargetPlatformVersion", str(msvs_windows_sdk_version)] - ) - elif version.compatible_sdks: - raise GypError( - "%s requires any SDK of %s version, but none were found" - % (version.description, version.compatible_sdks) - ) - - if platform_name == "ARM": - properties[0].append(["WindowsSDKDesktopARMSupport", "true"]) - - return properties - - -def _GetMSBuildConfigurationDetails(spec, build_file): - properties = {} - for name, settings in spec["configurations"].items(): - msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) - condition = _GetConfigurationCondition(name, settings, spec) - character_set = msbuild_attributes.get("CharacterSet") - config_type = msbuild_attributes.get("ConfigurationType") - _AddConditionalProperty(properties, condition, "ConfigurationType", config_type) - if config_type == "Driver": - _AddConditionalProperty(properties, condition, "DriverType", "WDM") - _AddConditionalProperty( - 
properties, condition, "TargetVersion", _ConfigTargetVersion(settings) - ) - if character_set: - if "msvs_enable_winrt" not in spec: - _AddConditionalProperty( - properties, condition, "CharacterSet", character_set - ) - return _GetMSBuildPropertyGroup(spec, "Configuration", properties) - - -def _GetMSBuildLocalProperties(msbuild_toolset): - # Currently the only local property we support is PlatformToolset - properties = {} - if msbuild_toolset: - properties = [ - [ - "PropertyGroup", - {"Label": "Locals"}, - ["PlatformToolset", msbuild_toolset], - ] - ] - return properties - - -def _GetMSBuildPropertySheets(configurations, spec): - user_props = r"$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" - additional_props = {} - props_specified = False - for name, settings in sorted(configurations.items()): - configuration = _GetConfigurationCondition(name, settings, spec) - if "msbuild_props" in settings: - additional_props[configuration] = _FixPaths(settings["msbuild_props"]) - props_specified = True - else: - additional_props[configuration] = "" - - if not props_specified: - return [ - [ - "ImportGroup", - {"Label": "PropertySheets"}, - [ - "Import", - { - "Project": user_props, - "Condition": "exists('%s')" % user_props, - "Label": "LocalAppDataPlatform", - }, - ], - ] - ] - else: - sheets = [] - for condition, props in additional_props.items(): - import_group = [ - "ImportGroup", - {"Label": "PropertySheets", "Condition": condition}, - [ - "Import", - { - "Project": user_props, - "Condition": "exists('%s')" % user_props, - "Label": "LocalAppDataPlatform", - }, - ], - ] - for props_file in props: - import_group.append(["Import", {"Project": props_file}]) - sheets.append(import_group) - return sheets - - -def _ConvertMSVSBuildAttributes(spec, config, build_file): - config_type = _GetMSVSConfigurationType(spec, build_file) - msvs_attributes = _GetMSVSAttributes(spec, config, config_type) - msbuild_attributes = {} - for a in msvs_attributes: - if a in ["IntermediateDirectory", "OutputDirectory"]: - directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a]) - if not directory.endswith("\\"): - directory += "\\" - msbuild_attributes[a] = directory - elif a == "CharacterSet": - msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a]) - elif a == "ConfigurationType": - msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a]) - else: - print("Warning: Do not know how to convert MSVS attribute " + a) - return msbuild_attributes - - -def _ConvertMSVSCharacterSet(char_set): - if char_set.isdigit(): - char_set = {"0": "MultiByte", "1": "Unicode", "2": "MultiByte"}[char_set] - return char_set - - -def _ConvertMSVSConfigurationType(config_type): - if config_type.isdigit(): - config_type = { - "1": "Application", - "2": "DynamicLibrary", - "4": "StaticLibrary", - "5": "Driver", - "10": "Utility", - }[config_type] - return config_type - - -def _GetMSBuildAttributes(spec, config, build_file): - if "msbuild_configuration_attributes" not in config: - msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file) - - else: - config_type = _GetMSVSConfigurationType(spec, build_file) - config_type = _ConvertMSVSConfigurationType(config_type) - msbuild_attributes = config.get("msbuild_configuration_attributes", {}) - msbuild_attributes.setdefault("ConfigurationType", config_type) - output_dir = msbuild_attributes.get( - "OutputDirectory", "$(SolutionDir)$(Configuration)" - ) - msbuild_attributes["OutputDirectory"] = _FixPath(output_dir) + "\\" - if 
"IntermediateDirectory" not in msbuild_attributes: - intermediate = _FixPath("$(Configuration)") + "\\" - msbuild_attributes["IntermediateDirectory"] = intermediate - if "CharacterSet" in msbuild_attributes: - msbuild_attributes["CharacterSet"] = _ConvertMSVSCharacterSet( - msbuild_attributes["CharacterSet"] - ) - if "TargetName" not in msbuild_attributes: - prefix = spec.get("product_prefix", "") - product_name = spec.get("product_name", "$(ProjectName)") - target_name = prefix + product_name - msbuild_attributes["TargetName"] = target_name - if "TargetExt" not in msbuild_attributes and "product_extension" in spec: - ext = spec.get("product_extension") - msbuild_attributes["TargetExt"] = "." + ext - - if spec.get("msvs_external_builder"): - external_out_dir = spec.get("msvs_external_builder_out_dir", ".") - msbuild_attributes["OutputDirectory"] = _FixPath(external_out_dir) + "\\" - - # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile' - # (depending on the tool used) to avoid MSB8012 warning. - msbuild_tool_map = { - "executable": "Link", - "shared_library": "Link", - "loadable_module": "Link", - "windows_driver": "Link", - "static_library": "Lib", - } - msbuild_tool = msbuild_tool_map.get(spec["type"]) - if msbuild_tool: - msbuild_settings = config["finalized_msbuild_settings"] - out_file = msbuild_settings[msbuild_tool].get("OutputFile") - if out_file: - msbuild_attributes["TargetPath"] = _FixPath(out_file) - target_ext = msbuild_settings[msbuild_tool].get("TargetExt") - if target_ext: - msbuild_attributes["TargetExt"] = target_ext - - return msbuild_attributes - - -def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): - # TODO(jeanluc) We could optimize out the following and do it only if - # there are actions. - # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'. - new_paths = [] - cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])[0] - if cygwin_dirs: - cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % _FixPath(cygwin_dirs) - new_paths.append(cyg_path) - # TODO(jeanluc) Change the convention to have both a cygwin_dir and a - # python_dir. 
- python_path = cyg_path.replace("cygwin\\bin", "python_26") - new_paths.append(python_path) - if new_paths: - new_paths = "$(ExecutablePath);" + ";".join(new_paths) - - properties = {} - for (name, configuration) in sorted(configurations.items()): - condition = _GetConfigurationCondition(name, configuration, spec) - attributes = _GetMSBuildAttributes(spec, configuration, build_file) - msbuild_settings = configuration["finalized_msbuild_settings"] - _AddConditionalProperty( - properties, condition, "IntDir", attributes["IntermediateDirectory"] - ) - _AddConditionalProperty( - properties, condition, "OutDir", attributes["OutputDirectory"] - ) - _AddConditionalProperty( - properties, condition, "TargetName", attributes["TargetName"] - ) - if "TargetExt" in attributes: - _AddConditionalProperty( - properties, condition, "TargetExt", attributes["TargetExt"] - ) - - if attributes.get("TargetPath"): - _AddConditionalProperty( - properties, condition, "TargetPath", attributes["TargetPath"] - ) - if attributes.get("TargetExt"): - _AddConditionalProperty( - properties, condition, "TargetExt", attributes["TargetExt"] - ) - - if new_paths: - _AddConditionalProperty(properties, condition, "ExecutablePath", new_paths) - tool_settings = msbuild_settings.get("", {}) - for name, value in sorted(tool_settings.items()): - formatted_value = _GetValueFormattedForMSBuild("", name, value) - _AddConditionalProperty(properties, condition, name, formatted_value) - return _GetMSBuildPropertyGroup(spec, None, properties) - - -def _AddConditionalProperty(properties, condition, name, value): - """Adds a property / conditional value pair to a dictionary. - - Arguments: - properties: The dictionary to be modified. The key is the name of the - property. The value is itself a dictionary; its key is the value and - the value a list of conditions for which this value is true. - condition: The condition under which the named property has the value. - name: The name of the property. - value: The value of the property. - """ - if name not in properties: - properties[name] = {} - values = properties[name] - if value not in values: - values[value] = [] - conditions = values[value] - conditions.append(condition) - - -# Regex for msvs variable references ( i.e. $(FOO) ). -MSVS_VARIABLE_REFERENCE = re.compile(r"\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)") - - -def _GetMSBuildPropertyGroup(spec, label, properties): - """Returns a PropertyGroup definition for the specified properties. - - Arguments: - spec: The target project dict. - label: An optional label for the PropertyGroup. - properties: The dictionary to be converted. The key is the name of the - property. The value is itself a dictionary; its key is the value and - the value a list of conditions for which this value is true. - """ - group = ["PropertyGroup"] - if label: - group.append({"Label": label}) - num_configurations = len(spec["configurations"]) - - def GetEdges(node): - # Use a definition of edges such that user_of_variable -> used_variable. - # This happens to be easier in this case, since a variable's - # definition contains all variables it references in a single string. - edges = set() - for value in sorted(properties[node].keys()): - # Add to edges all $(...) references to variables. - # - # Variable references that refer to names not in properties are excluded. - # These can exist for instance to refer to built-in definitions like - # $(SolutionDir). - # - # Self references are ignored. Self reference is used in a few places to - # append to the default value. I.e. 
PATH=$(PATH);other_path - edges.update( - set( - [ - v - for v in MSVS_VARIABLE_REFERENCE.findall(value) - if v in properties and v != node - ] - ) - ) - return edges - - properties_ordered = gyp.common.TopologicallySorted(properties.keys(), GetEdges) - # Walk properties in the reverse of a topological sort on - # user_of_variable -> used_variable as this ensures variables are - # defined before they are used. - # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) - for name in reversed(properties_ordered): - values = properties[name] - for value, conditions in sorted(values.items()): - if len(conditions) == num_configurations: - # If the value is the same for all configurations, - # just add one unconditional entry. - group.append([name, value]) - else: - for condition in conditions: - group.append([name, {"Condition": condition}, value]) - return [group] - - -def _GetMSBuildToolSettingsSections(spec, configurations): - groups = [] - for (name, configuration) in sorted(configurations.items()): - msbuild_settings = configuration["finalized_msbuild_settings"] - group = [ - "ItemDefinitionGroup", - {"Condition": _GetConfigurationCondition(name, configuration, spec)}, - ] - for tool_name, tool_settings in sorted(msbuild_settings.items()): - # Skip the tool named '' which is a holder of global settings handled - # by _GetMSBuildConfigurationGlobalProperties. - if tool_name: - if tool_settings: - tool = [tool_name] - for name, value in sorted(tool_settings.items()): - formatted_value = _GetValueFormattedForMSBuild( - tool_name, name, value - ) - tool.append([name, formatted_value]) - group.append(tool) - groups.append(group) - return groups - - -def _FinalizeMSBuildSettings(spec, configuration): - if "msbuild_settings" in configuration: - converted = False - msbuild_settings = configuration["msbuild_settings"] - MSVSSettings.ValidateMSBuildSettings(msbuild_settings) - else: - converted = True - msvs_settings = configuration.get("msvs_settings", {}) - msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings) - include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs( - configuration - ) - libraries = _GetLibraries(spec) - library_dirs = _GetLibraryDirs(configuration) - out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True) - target_ext = _GetOutputTargetExt(spec) - defines = _GetDefines(configuration) - if converted: - # Visual Studio 2010 has TR1 - defines = [d for d in defines if d != "_HAS_TR1=0"] - # Warn of ignored settings - ignored_settings = ["msvs_tool_files"] - for ignored_setting in ignored_settings: - value = configuration.get(ignored_setting) - if value: - print( - "Warning: The automatic conversion to MSBuild does not handle " - "%s. Ignoring setting of %s" % (ignored_setting, str(value)) - ) - - defines = [_EscapeCppDefineForMSBuild(d) for d in defines] - disabled_warnings = _GetDisabledWarnings(configuration) - prebuild = configuration.get("msvs_prebuild") - postbuild = configuration.get("msvs_postbuild") - def_file = _GetModuleDefinition(spec) - precompiled_header = configuration.get("msvs_precompiled_header") - - # Add the information to the appropriate tool - # TODO(jeanluc) We could optimize and generate these settings only if - # the corresponding files are found, e.g. don't generate ResourceCompile - # if you don't have any resources.
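(For illustration only, a hypothetical two-configuration `properties` dictionary of the shape `_AddConditionalProperty` builds, name -> value -> [conditions]:)

    properties = {
        "OutDir": {
            "Debug\\": ["'$(Configuration)|$(Platform)'=='Debug|Win32'"],
            "Release\\": ["'$(Configuration)|$(Platform)'=='Release|Win32'"],
        },
        "TargetName": {
            "my_lib": [
                "'$(Configuration)|$(Platform)'=='Debug|Win32'",
                "'$(Configuration)|$(Platform)'=='Release|Win32'",
            ],
        },
    }
    # With num_configurations == 2, "my_lib" is shared by every configuration,
    # so it is emitted once without a Condition attribute; each OutDir value
    # keeps its per-configuration Condition.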
- _ToolAppend( - msbuild_settings, "ClCompile", "AdditionalIncludeDirectories", include_dirs - ) - _ToolAppend( - msbuild_settings, "Midl", "AdditionalIncludeDirectories", midl_include_dirs - ) - _ToolAppend( - msbuild_settings, - "ResourceCompile", - "AdditionalIncludeDirectories", - resource_include_dirs, - ) - # Add in libraries, note that even for empty libraries, we want this - # set, to prevent inheriting default libraries from the environment. - _ToolSetOrAppend(msbuild_settings, "Link", "AdditionalDependencies", libraries) - _ToolAppend(msbuild_settings, "Link", "AdditionalLibraryDirectories", library_dirs) - if out_file: - _ToolAppend( - msbuild_settings, msbuild_tool, "OutputFile", out_file, only_if_unset=True - ) - if target_ext: - _ToolAppend( - msbuild_settings, msbuild_tool, "TargetExt", target_ext, only_if_unset=True - ) - # Add defines. - _ToolAppend(msbuild_settings, "ClCompile", "PreprocessorDefinitions", defines) - _ToolAppend(msbuild_settings, "ResourceCompile", "PreprocessorDefinitions", defines) - # Add disabled warnings. - _ToolAppend( - msbuild_settings, "ClCompile", "DisableSpecificWarnings", disabled_warnings - ) - # Turn on precompiled headers if appropriate. - if precompiled_header: - precompiled_header = os.path.split(precompiled_header)[1] - _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "Use") - _ToolAppend( - msbuild_settings, "ClCompile", "PrecompiledHeaderFile", precompiled_header - ) - _ToolAppend( - msbuild_settings, "ClCompile", "ForcedIncludeFiles", [precompiled_header] - ) - else: - _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "NotUsing") - # Turn off WinRT compilation - _ToolAppend(msbuild_settings, "ClCompile", "CompileAsWinRT", "false") - # Turn on import libraries if appropriate - if spec.get("msvs_requires_importlibrary"): - _ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "false") - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec["type"] == "loadable_module": - _ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "true") - # Set the module definition file if any. - if def_file: - _ToolAppend(msbuild_settings, "Link", "ModuleDefinitionFile", def_file) - configuration["finalized_msbuild_settings"] = msbuild_settings - if prebuild: - _ToolAppend(msbuild_settings, "PreBuildEvent", "Command", prebuild) - if postbuild: - _ToolAppend(msbuild_settings, "PostBuildEvent", "Command", postbuild) - - -def _GetValueFormattedForMSBuild(tool_name, name, value): - if type(value) == list: - # For some settings, VS2010 does not automatically extend the settings - # TODO(jeanluc) Is this what we want? - if name in [ - "AdditionalIncludeDirectories", - "AdditionalLibraryDirectories", - "AdditionalOptions", - "DelayLoadDLLs", - "DisableSpecificWarnings", - "PreprocessorDefinitions", - ]: - value.append("%%(%s)" % name) - # For most tools, entries in a list should be separated with ';' but some - # settings use a space. Check for those first. - exceptions = { - "ClCompile": ["AdditionalOptions"], - "Link": ["AdditionalOptions"], - "Lib": ["AdditionalOptions"], - } - if tool_name in exceptions and name in exceptions[tool_name]: - char = " " - else: - char = ";" - formatted_value = char.join( - [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value] - ) - else: - formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value) - return formatted_value - - -def _VerifySourcesExist(sources, root_dir): - """Verifies that all source files exist on disk.
- - Checks that all regular source files, i.e. not created at run time, - exist on disk. Missing files cause needless recompilation but no otherwise - visible errors. - - Arguments: - sources: A recursive list of Filter/file names. - root_dir: The root directory for the relative path names. - Returns: - A list of source files that cannot be found on disk. - """ - missing_sources = [] - for source in sources: - if isinstance(source, MSVSProject.Filter): - missing_sources.extend(_VerifySourcesExist(source.contents, root_dir)) - else: - if "$" not in source: - full_path = os.path.join(root_dir, source) - if not os.path.exists(full_path): - missing_sources.append(full_path) - return missing_sources - - -def _GetMSBuildSources( - spec, - sources, - exclusions, - rule_dependencies, - extension_to_rule_name, - actions_spec, - sources_handled_by_action, - list_excluded, -): - groups = [ - "none", - "masm", - "midl", - "include", - "compile", - "resource", - "rule", - "rule_dependency", - ] - grouped_sources = {} - for g in groups: - grouped_sources[g] = [] - - _AddSources2( - spec, - sources, - exclusions, - grouped_sources, - rule_dependencies, - extension_to_rule_name, - sources_handled_by_action, - list_excluded, - ) - sources = [] - for g in groups: - if grouped_sources[g]: - sources.append(["ItemGroup"] + grouped_sources[g]) - if actions_spec: - sources.append(["ItemGroup"] + actions_spec) - return sources - - -def _AddSources2( - spec, - sources, - exclusions, - grouped_sources, - rule_dependencies, - extension_to_rule_name, - sources_handled_by_action, - list_excluded, -): - extensions_excluded_from_precompile = [] - for source in sources: - if isinstance(source, MSVSProject.Filter): - _AddSources2( - spec, - source.contents, - exclusions, - grouped_sources, - rule_dependencies, - extension_to_rule_name, - sources_handled_by_action, - list_excluded, - ) - else: - if source not in sources_handled_by_action: - detail = [] - excluded_configurations = exclusions.get(source, []) - if len(excluded_configurations) == len(spec["configurations"]): - detail.append(["ExcludedFromBuild", "true"]) - else: - for config_name, configuration in sorted(excluded_configurations): - condition = _GetConfigurationCondition( - config_name, configuration - ) - detail.append( - ["ExcludedFromBuild", {"Condition": condition}, "true"] - ) - # Add precompile if needed - for config_name, configuration in spec["configurations"].items(): - precompiled_source = configuration.get( - "msvs_precompiled_source", "" - ) - if precompiled_source != "": - precompiled_source = _FixPath(precompiled_source) - if not extensions_excluded_from_precompile: - # If the precompiled header is generated by a C source, - # we must not try to use it for C++ sources, - # and vice versa. - basename, extension = os.path.splitext(precompiled_source) - if extension == ".c": - extensions_excluded_from_precompile = [ - ".cc", - ".cpp", - ".cxx", - ] - else: - extensions_excluded_from_precompile = [".c"] - - if precompiled_source == source: - condition = _GetConfigurationCondition( - config_name, configuration, spec - ) - detail.append( - ["PrecompiledHeader", {"Condition": condition}, "Create"] - ) - else: - # Turn off precompiled header usage for source files of a - # different type than the file that generated the - # precompiled header. 
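(A worked example of the exclusion rule described just above, under a hypothetical configuration:)

    # msvs_precompiled_source = "pch.c"  -> its extension is ".c", so
    # extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"].
    # "pch.c" itself gets PrecompiledHeader=Create for that configuration;
    # a later "util.cpp" matches the exclusion list and gets, unconditionally,
    # PrecompiledHeader="" and ForcedIncludeFiles="", so the C-generated PCH
    # is never forced into C++ compilation (and vice versa for a "pch.cc").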
- for extension in extensions_excluded_from_precompile: - if source.endswith(extension): - detail.append(["PrecompiledHeader", ""]) - detail.append(["ForcedIncludeFiles", ""]) - - group, element = _MapFileToMsBuildSourceType( - source, - rule_dependencies, - extension_to_rule_name, - _GetUniquePlatforms(spec), - spec["toolset"], - ) - if group == "compile" and not os.path.isabs(source): - # Add an <ObjectFileName> value to support duplicate source - # file basenames, except for absolute paths to avoid paths - # with more than 260 characters. - file_name = os.path.splitext(source)[0] + ".obj" - if file_name.startswith("..\\"): - file_name = re.sub(r"^(\.\.\\)+", "", file_name) - elif file_name.startswith("$("): - file_name = re.sub(r"^\$\([^)]+\)\\", "", file_name) - detail.append(["ObjectFileName", "$(IntDir)\\" + file_name]) - grouped_sources[group].append([element, {"Include": source}] + detail) - - -def _GetMSBuildProjectReferences(project): - references = [] - if project.dependencies: - group = ["ItemGroup"] - added_dependency_set = set() - for dependency in project.dependencies: - dependency_spec = dependency.spec - should_skip_dep = False - if project.spec["toolset"] == "target": - if dependency_spec["toolset"] == "host": - if dependency_spec["type"] == "static_library": - should_skip_dep = True - if dependency.name.startswith("run_"): - should_skip_dep = False - if should_skip_dep: - continue - - canonical_name = dependency.name.replace("_host", "") - added_dependency_set.add(canonical_name) - guid = dependency.guid - project_dir = os.path.split(project.path)[0] - relative_path = gyp.common.RelativePath(dependency.path, project_dir) - project_ref = [ - "ProjectReference", - {"Include": relative_path}, - ["Project", guid], - ["ReferenceOutputAssembly", "false"], - ] - for config in dependency.spec.get("configurations", {}).values(): - if config.get("msvs_use_library_dependency_inputs", 0): - project_ref.append(["UseLibraryDependencyInputs", "true"]) - break - # If it's disabled in any config, turn it off in the reference. - if config.get("msvs_2010_disable_uldi_when_referenced", 0): - project_ref.append(["UseLibraryDependencyInputs", "false"]) - break - group.append(project_ref) - references.append(group) - return references - - -def _GenerateMSBuildProject(project, options, version, generator_flags, spec): - spec = project.spec - configurations = spec["configurations"] - toolset = spec["toolset"] - project_dir, project_file_name = os.path.split(project.path) - gyp.common.EnsureDirExists(project.path) - # Prepare list of sources and excluded sources. - - gyp_file = os.path.split(project.build_file)[1] - sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file) - # Add rules. - actions_to_add = {} - props_files_of_rules = set() - targets_files_of_rules = set() - rule_dependencies = set() - extension_to_rule_name = {} - list_excluded = generator_flags.get("msvs_list_excluded_files", True) - platforms = _GetUniquePlatforms(spec) - - # Don't generate rules if we are using an external builder like ninja.
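(A self-contained sketch of the ObjectFileName rewriting shown above; paths are hypothetical:)

    import os
    import re

    def object_file_name(source):
        # Mirrors the logic above for relative Windows-style paths.
        file_name = os.path.splitext(source)[0] + ".obj"
        if file_name.startswith("..\\"):
            file_name = re.sub(r"^(\.\.\\)+", "", file_name)      # strip leading ..\
        elif file_name.startswith("$("):
            file_name = re.sub(r"^\$\([^)]+\)\\", "", file_name)  # strip leading $(Var)\
        return "$(IntDir)\\" + file_name

    # object_file_name("..\\..\\src\\util.cc") -> "$(IntDir)\\src\\util.obj"
    # Two sources both named util.cc in different directories now get
    # distinct .obj paths instead of colliding in $(IntDir).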
- if not spec.get("msvs_external_builder"): - _GenerateRulesForMSBuild( - project_dir, - options, - spec, - sources, - excluded_sources, - props_files_of_rules, - targets_files_of_rules, - actions_to_add, - rule_dependencies, - extension_to_rule_name, - ) - else: - rules = spec.get("rules", []) - _AdjustSourcesForRules(rules, sources, excluded_sources, True) - - sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, project_dir, sources, excluded_sources, list_excluded, version - ) - - # Don't add actions if we are using an external builder like ninja. - if not spec.get("msvs_external_builder"): - _AddActions(actions_to_add, spec, project.build_file) - _AddCopies(actions_to_add, spec) - - # NOTE: this stanza must appear after all actions have been decided. - # Don't exclude sources with actions attached, or they won't run. - excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add) - - exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild( - spec, actions_to_add - ) - - _GenerateMSBuildFiltersFile( - project.path + ".filters", - sources, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, - ) - missing_sources = _VerifySourcesExist(sources, project_dir) - - for configuration in configurations.values(): - _FinalizeMSBuildSettings(spec, configuration) - - # Add attributes to root element - - import_default_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.Default.props"}] - ] - import_cpp_props_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.props"}] - ] - import_cpp_targets_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.targets"}] - ] - import_masm_props_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.props"}] - ] - import_masm_targets_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.targets"}] - ] - import_marmasm_props_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.props"}] - ] - import_marmasm_targets_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.targets"}] - ] - macro_section = [["PropertyGroup", {"Label": "UserMacros"}]] - - content = [ - "Project", - { - "xmlns": "http://schemas.microsoft.com/developer/msbuild/2003", - "ToolsVersion": version.ProjectVersion(), - "DefaultTargets": "Build", - }, - ] - - content += _GetMSBuildProjectConfigurations(configurations, spec) - content += _GetMSBuildGlobalProperties( - spec, version, project.guid, project_file_name - ) - content += import_default_section - content += _GetMSBuildConfigurationDetails(spec, project.build_file) - if spec.get("msvs_enable_winphone"): - content += _GetMSBuildLocalProperties("v120_wp81") - else: - content += _GetMSBuildLocalProperties(project.msbuild_toolset) - content += import_cpp_props_section - content += import_masm_props_section - if "arm64" in platforms and toolset == "target": - content += import_marmasm_props_section - content += _GetMSBuildExtensions(props_files_of_rules) - content += _GetMSBuildPropertySheets(configurations, spec) - content += macro_section - content += _GetMSBuildConfigurationGlobalProperties( - spec, configurations, project.build_file - ) - content += _GetMSBuildToolSettingsSections(spec, configurations) - content += _GetMSBuildSources( - spec, - sources, - exclusions, - rule_dependencies, -
extension_to_rule_name, - actions_spec, - sources_handled_by_action, - list_excluded, - ) - content += _GetMSBuildProjectReferences(project) - content += import_cpp_targets_section - content += import_masm_targets_section - if "arm64" in platforms and toolset == "target": - content += import_marmasm_targets_section - content += _GetMSBuildExtensionTargets(targets_files_of_rules) - - if spec.get("msvs_external_builder"): - content += _GetMSBuildExternalBuilderTargets(spec) - - # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS: - # has_run_as = _WriteMSVSUserFile(project.path, version, spec) - - easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True) - - return missing_sources - - -def _GetMSBuildExternalBuilderTargets(spec): - """Return a list of MSBuild targets for external builders. - - The "Build" and "Clean" targets are always generated. If the spec contains - 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also - be generated, to support building selected C/C++ files. - - Arguments: - spec: The gyp target spec. - Returns: - List of MSBuild 'Target' specs. - """ - build_cmd = _BuildCommandLineForRuleRaw( - spec, spec["msvs_external_builder_build_cmd"], False, False, False, False - ) - build_target = ["Target", {"Name": "Build"}] - build_target.append(["Exec", {"Command": build_cmd}]) - - clean_cmd = _BuildCommandLineForRuleRaw( - spec, spec["msvs_external_builder_clean_cmd"], False, False, False, False - ) - clean_target = ["Target", {"Name": "Clean"}] - clean_target.append(["Exec", {"Command": clean_cmd}]) - - targets = [build_target, clean_target] - - if spec.get("msvs_external_builder_clcompile_cmd"): - clcompile_cmd = _BuildCommandLineForRuleRaw( - spec, - spec["msvs_external_builder_clcompile_cmd"], - False, - False, - False, - False, - ) - clcompile_target = ["Target", {"Name": "ClCompile"}] - clcompile_target.append(["Exec", {"Command": clcompile_cmd}]) - targets.append(clcompile_target) - - return targets - - -def _GetMSBuildExtensions(props_files_of_rules): - extensions = ["ImportGroup", {"Label": "ExtensionSettings"}] - for props_file in props_files_of_rules: - extensions.append(["Import", {"Project": props_file}]) - return [extensions] - - -def _GetMSBuildExtensionTargets(targets_files_of_rules): - targets_node = ["ImportGroup", {"Label": "ExtensionTargets"}] - for targets_file in sorted(targets_files_of_rules): - targets_node.append(["Import", {"Project": targets_file}]) - return [targets_node] - - -def _GenerateActionsForMSBuild(spec, actions_to_add): - """Add actions accumulated into an actions_to_add, merging as needed. - - Arguments: - spec: the target project dict - actions_to_add: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - - Returns: - A pair of (action specification, the sources handled by this action). 
- """ - sources_handled_by_action = OrderedSet() - actions_spec = [] - for primary_input, actions in actions_to_add.items(): - if generator_supports_multiple_toolsets: - primary_input = primary_input.replace(".exe", "_host.exe") - inputs = OrderedSet() - outputs = OrderedSet() - descriptions = [] - commands = [] - for action in actions: - - def fixup_host_exe(i): - if "$(OutDir)" in i: - i = i.replace(".exe", "_host.exe") - return i - - if generator_supports_multiple_toolsets: - action["inputs"] = [fixup_host_exe(i) for i in action["inputs"]] - inputs.update(OrderedSet(action["inputs"])) - outputs.update(OrderedSet(action["outputs"])) - descriptions.append(action["description"]) - cmd = action["command"] - if generator_supports_multiple_toolsets: - cmd = cmd.replace(".exe", "_host.exe") - # For most actions, add 'call' so that actions that invoke batch files - # return and continue executing. msbuild_use_call provides a way to - # disable this but I have not seen any adverse effect from doing that - # for everything. - if action.get("msbuild_use_call", True): - cmd = "call " + cmd - commands.append(cmd) - # Add the custom build action for one input file. - description = ", and also ".join(descriptions) - - # We can't join the commands simply with && because the command line will - # get too long. See also _AddActions: cygwin's setup_env mustn't be called - # for every invocation or the command that sets the PATH will grow too - # long. - command = "\r\n".join( - [c + "\r\nif %errorlevel% neq 0 exit /b %errorlevel%" for c in commands] - ) - _AddMSBuildAction( - spec, - primary_input, - inputs, - outputs, - command, - description, - sources_handled_by_action, - actions_spec, - ) - return actions_spec, sources_handled_by_action - - -def _AddMSBuildAction( - spec, - primary_input, - inputs, - outputs, - cmd, - description, - sources_handled_by_action, - actions_spec, -): - command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd) - primary_input = _FixPath(primary_input) - inputs_array = _FixPaths(inputs) - outputs_array = _FixPaths(outputs) - additional_inputs = ";".join([i for i in inputs_array if i != primary_input]) - outputs = ";".join(outputs_array) - sources_handled_by_action.add(primary_input) - action_spec = ["CustomBuild", {"Include": primary_input}] - action_spec.extend( - # TODO(jeanluc) 'Document' for all or just if as_sources? - [ - ["FileType", "Document"], - ["Command", command], - ["Message", description], - ["Outputs", outputs], - ] - ) - if additional_inputs: - action_spec.append(["AdditionalInputs", additional_inputs]) - actions_spec.append(action_spec) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py deleted file mode 100755 index e001f417d53aa..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the msvs.py file. 
""" - -import gyp.generator.msvs as msvs -import unittest - -try: - from StringIO import StringIO # Python 2 -except ImportError: - from io import StringIO # Python 3 - - -class TestSequenceFunctions(unittest.TestCase): - def setUp(self): - self.stderr = StringIO() - - def test_GetLibraries(self): - self.assertEqual(msvs._GetLibraries({}), []) - self.assertEqual(msvs._GetLibraries({"libraries": []}), []) - self.assertEqual( - msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"] - ) - self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"]) - self.assertEqual( - msvs._GetLibraries( - { - "libraries": [ - "a.lib", - "b.lib", - "c.lib", - "-lb.lib", - "-lb.lib", - "d.lib", - "a.lib", - ] - } - ), - ["c.lib", "b.lib", "d.lib", "a.lib"], - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py deleted file mode 100644 index e064bad7ed840..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py +++ /dev/null @@ -1,2940 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from __future__ import print_function - -import collections -import copy -import hashlib -import json -import multiprocessing -import os.path -import re -import signal -import subprocess -import sys -import gyp -import gyp.common -import gyp.msvs_emulation -import gyp.MSVSUtil as MSVSUtil -import gyp.xcode_emulation - -try: - from cStringIO import StringIO -except ImportError: - from io import StringIO - -from gyp.common import GetEnvironFallback -import gyp.ninja_syntax as ninja_syntax - -generator_default_variables = { - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "SHARED_LIB_PREFIX": "lib", - # Gyp expects the following variables to be expandable by the build - # system to the appropriate locations. Ninja prefers paths to be - # known at gyp time. To resolve this, introduce special - # variables starting with $! and $| (which begin with a $ so gyp knows it - # should be treated specially, but is otherwise an invalid - # ninja/shell variable) that are passed to gyp here but expanded - # before writing out into the target .ninja files; see - # ExpandSpecial. - # $! is used for variables that represent a path and that can only appear at - # the start of a string, while $| is used for variables that can appear - # anywhere in a string. - "INTERMEDIATE_DIR": "$!INTERMEDIATE_DIR", - "SHARED_INTERMEDIATE_DIR": "$!PRODUCT_DIR/gen", - "PRODUCT_DIR": "$!PRODUCT_DIR", - "CONFIGURATION_NAME": "$|CONFIGURATION_NAME", - # Special variables that may be used by gyp 'rule' targets. - # We generate definitions for these variables on the fly when processing a - # rule. - "RULE_INPUT_ROOT": "${root}", - "RULE_INPUT_DIRNAME": "${dirname}", - "RULE_INPUT_PATH": "${source}", - "RULE_INPUT_EXT": "${ext}", - "RULE_INPUT_NAME": "${name}", -} - -# Placates pylint. 
-generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] -generator_filelist_paths = None - -generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() - - -def StripPrefix(arg, prefix): - if arg.startswith(prefix): - return arg[len(prefix) :] - return arg - - -def QuoteShellArgument(arg, flavor): - """Quote a string such that it will be interpreted as a single argument - by the shell.""" - # Rather than attempting to enumerate the bad shell characters, just - # allow common OK ones and quote anything else. - if re.match(r"^[a-zA-Z0-9_=.\\/-]+$", arg): - return arg # No quoting necessary. - if flavor == "win": - return gyp.msvs_emulation.QuoteForRspFile(arg) - return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'" - - -def Define(d, flavor): - """Takes a preprocessor define and returns a -D parameter that's ninja- and - shell-escaped.""" - if flavor == "win": - # cl.exe replaces literal # characters with = in preprocessor definitions for - # some reason. Octal-encode to work around that. - d = d.replace("#", "\\%03o" % ord("#")) - return QuoteShellArgument(ninja_syntax.escape("-D" + d), flavor) - - -def AddArch(output, arch): - """Adds an arch string to an output path.""" - output, extension = os.path.splitext(output) - return "%s.%s%s" % (output, arch, extension) - - -class Target(object): - """Target represents the paths used within a single gyp target. - - Conceptually, building a single target A is a series of steps: - - 1) actions/rules/copies generates source/resources/etc. - 2) compiles generates .o files - 3) link generates a binary (library/executable) - 4) bundle merges the above in a mac bundle - - (Any of these steps can be optional.) - - From a build ordering perspective, a dependent target B could just - depend on the last output of this series of steps. - - But some dependent commands sometimes need to reach inside the box. - For example, when linking B it needs to get the path to the static - library generated by A. - - This object stores those paths. To keep things simple, member - variables only store concrete paths to single files, while methods - compute derived values like "the last output of the target". - """ - - def __init__(self, type): - # Gyp type ("static_library", etc.) of this target. - self.type = type - # File representing whether any input dependencies necessary for - # dependent actions have completed. - self.preaction_stamp = None - # File representing whether any input dependencies necessary for - # dependent compiles have completed. - self.precompile_stamp = None - # File representing the completion of actions/rules/copies, if any. - self.actions_stamp = None - # Path to the output of the link step, if any. - self.binary = None - # Path to the file representing the completion of building the bundle, - # if any. - self.bundle = None - # On Windows, incremental linking requires linking against all the .objs - # that compose a .lib (rather than the .lib itself). That list is stored - # here. In this case, we also need to save the compile_deps for the target, - # so that the target that directly depends on the .objs can also depend - # on those. - self.component_objs = None - self.compile_deps = None - # Windows only. The import .lib is the output of a build step, but - # because dependents only link against the lib (not both the lib and the - # dll) we keep track of the import library here. 
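(A hypothetical illustration of the step outputs described in the Target docstring above; the accessor methods appear just below:)

    t = Target("shared_library")
    t.actions_stamp = "obj/foo/targ.actions_rules_copies.stamp"
    t.binary = "libfoo.dylib"
    # FinalOutput() prefers bundle, then binary, then actions_stamp:
    #   here it returns "libfoo.dylib"; a mac bundle target would return t.bundle.
    # Linkable() is True only for static_library / shared_library types.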
- self.import_lib = None - # Track if this target contains any C++ files, to decide if gcc or g++ - # should be used for linking. - self.uses_cpp = False - - def Linkable(self): - """Return true if this is a target that can be linked against.""" - return self.type in ("static_library", "shared_library") - - def UsesToc(self, flavor): - """Return true if the target should produce a restat rule based on a TOC - file.""" - # For bundles, the .TOC should be produced for the binary, not for - # FinalOutput(). But the naive approach would put the TOC file into the - # bundle, so don't do this for bundles for now. - if flavor == "win" or self.bundle: - return False - return self.type in ("shared_library", "loadable_module") - - def PreActionInput(self, flavor): - """Return the path, if any, that should be used as a dependency of - any dependent action step.""" - if self.UsesToc(flavor): - return self.FinalOutput() + ".TOC" - return self.FinalOutput() or self.preaction_stamp - - def PreCompileInput(self): - """Return the path, if any, that should be used as a dependency of - any dependent compile step.""" - return self.actions_stamp or self.precompile_stamp - - def FinalOutput(self): - """Return the last output of the target, which depends on all prior - steps.""" - return self.bundle or self.binary or self.actions_stamp - - -# A small discourse on paths as used within the Ninja build: -# All files we produce (both at gyp and at build time) appear in the -# build directory (e.g. out/Debug). -# -# Paths within a given .gyp file are always relative to the directory -# containing the .gyp file. Call these "gyp paths". This includes -# sources as well as the starting directory a given gyp rule/action -# expects to be run from. We call the path from the source root to -# the gyp file the "base directory" within the per-.gyp-file -# NinjaWriter code. -# -# All paths as written into the .ninja files are relative to the build -# directory. Call these paths "ninja paths". -# -# We translate between these two notions of paths with two helper -# functions: -# -# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file) -# into the equivalent ninja path. -# -# - GypPathToUniqueOutput translates a gyp path into a ninja path to write -# an output file; the result can be namespaced such that it is unique -# to the input file name as well as the output target name. - - -class NinjaWriter(object): - def __init__( - self, - hash_for_rules, - target_outputs, - base_dir, - build_dir, - output_file, - toplevel_build, - output_file_name, - flavor, - toplevel_dir=None, - ): - """ - base_dir: path from source root to directory containing this gyp file, - by gyp semantics, all input paths are relative to this - build_dir: path from source root to build output - toplevel_dir: path to the toplevel directory - """ - - self.hash_for_rules = hash_for_rules - self.target_outputs = target_outputs - self.base_dir = base_dir - self.build_dir = build_dir - self.ninja = ninja_syntax.Writer(output_file) - self.toplevel_build = toplevel_build - self.output_file_name = output_file_name - - self.flavor = flavor - self.abs_build_dir = None - if toplevel_dir is not None: - self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) - self.obj_ext = ".obj" if flavor == "win" else ".o" - if flavor == "win": - # See docstring of msvs_emulation.GenerateEnvironmentFiles(). - self.win_env = {} - for arch in ("x86", "x64"): - self.win_env[arch] = "environment." 
+ arch - - # Relative path from build output dir to base dir. - build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) - self.build_to_base = os.path.join(build_to_top, base_dir) - # Relative path from base dir to build dir. - base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) - self.base_to_build = os.path.join(base_to_top, build_dir) - - def ExpandSpecial(self, path, product_dir=None): - """Expand specials like $!PRODUCT_DIR in |path|. - - If |product_dir| is None, assumes the cwd is already the product - dir. Otherwise, |product_dir| is the relative path to the product - dir. - """ - - PRODUCT_DIR = "$!PRODUCT_DIR" - if PRODUCT_DIR in path: - if product_dir: - path = path.replace(PRODUCT_DIR, product_dir) - else: - path = path.replace(PRODUCT_DIR + "/", "") - path = path.replace(PRODUCT_DIR + "\\", "") - path = path.replace(PRODUCT_DIR, ".") - - INTERMEDIATE_DIR = "$!INTERMEDIATE_DIR" - if INTERMEDIATE_DIR in path: - int_dir = self.GypPathToUniqueOutput("gen") - # GypPathToUniqueOutput generates a path relative to the product dir, - # so insert product_dir in front if it is provided. - path = path.replace( - INTERMEDIATE_DIR, os.path.join(product_dir or "", int_dir) - ) - - CONFIGURATION_NAME = "$|CONFIGURATION_NAME" - path = path.replace(CONFIGURATION_NAME, self.config_name) - - return path - - def ExpandRuleVariables(self, path, root, dirname, source, ext, name): - if self.flavor == "win": - path = self.msvs_settings.ConvertVSMacros(path, config=self.config_name) - path = path.replace(generator_default_variables["RULE_INPUT_ROOT"], root) - path = path.replace(generator_default_variables["RULE_INPUT_DIRNAME"], dirname) - path = path.replace(generator_default_variables["RULE_INPUT_PATH"], source) - path = path.replace(generator_default_variables["RULE_INPUT_EXT"], ext) - path = path.replace(generator_default_variables["RULE_INPUT_NAME"], name) - return path - - def GypPathToNinja(self, path, env=None): - """Translate a gyp path to a ninja path, optionally expanding environment - variable references in |path| with |env|. - - See the above discourse on path conversions.""" - if env: - if self.flavor == "mac": - path = gyp.xcode_emulation.ExpandEnvVars(path, env) - elif self.flavor == "win": - path = gyp.msvs_emulation.ExpandMacros(path, env) - if path.startswith("$!"): - expanded = self.ExpandSpecial(path) - if self.flavor == "win": - expanded = os.path.normpath(expanded) - return expanded - if "$|" in path: - path = self.ExpandSpecial(path) - assert "$" not in path, path - return os.path.normpath(os.path.join(self.build_to_base, path)) - - def GypPathToUniqueOutput(self, path, qualified=True): - """Translate a gyp path to a ninja path for writing output. - - If qualified is True, qualify the resulting filename with the name - of the target. This is necessary when e.g. compiling the same - path twice for two separate output targets. - - See the above discourse on path conversions.""" - - path = self.ExpandSpecial(path) - assert not path.startswith("$"), path - - # Translate the path following this scheme: - # Input: foo/bar.gyp, target targ, references baz/out.o - # Output: obj/foo/baz/targ.out.o (if qualified) - # obj/foo/baz/out.o (otherwise) - # (and obj.host instead of obj for cross-compiles) - # - # Why this scheme and not some other one? - # 1) for a given input, you can compute all derived outputs by matching - # its path, even if the input is brought via a gyp file with '..'. - # 2) simple files like libraries and stamps have a simple filename. 
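(Concrete values for the translation scheme above, all hypothetical:)

    # base_dir = "foo" (directory of foo/bar.gyp), target name "targ":
    #   GypPathToUniqueOutput("baz/out.o")                  -> "obj/foo/baz/targ.out.o"
    #   GypPathToUniqueOutput("baz/out.o", qualified=False) -> "obj/foo/baz/out.o"
    # For the host toolset in a cross-compile the prefix becomes "obj.host".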
- - obj = "obj" - if self.toolset != "target": - obj += "." + self.toolset - - path_dir, path_basename = os.path.split(path) - assert not os.path.isabs(path_dir), ( - "'%s' can not be absolute path (see crbug.com/462153)." % path_dir - ) - - if qualified: - path_basename = self.name + "." + path_basename - return os.path.normpath( - os.path.join(obj, self.base_dir, path_dir, path_basename) - ) - - def WriteCollapsedDependencies(self, name, targets, order_only=None): - """Given a list of targets, return a path for a single file - representing the result of building all the targets or None. - - Uses a stamp file if necessary.""" - - assert targets == [item for item in targets if item], targets - if len(targets) == 0: - assert not order_only - return None - if len(targets) > 1 or order_only: - stamp = self.GypPathToUniqueOutput(name + ".stamp") - targets = self.ninja.build(stamp, "stamp", targets, order_only=order_only) - self.ninja.newline() - return targets[0] - - def _SubninjaNameForArch(self, arch): - output_file_base = os.path.splitext(self.output_file_name)[0] - return "%s.%s.ninja" % (output_file_base, arch) - - def WriteSpec(self, spec, config_name, generator_flags): - """The main entry point for NinjaWriter: write the build rules for a spec. - - Returns a Target object, which represents the output paths for this spec. - Returns None if there are no outputs (e.g. a settings-only 'none' type - target).""" - - self.config_name = config_name - self.name = spec["target_name"] - self.toolset = spec["toolset"] - config = spec["configurations"][config_name] - self.target = Target(spec["type"]) - self.is_standalone_static_library = bool( - spec.get("standalone_static_library", 0) - ) - - self.target_rpath = generator_flags.get("target_rpath", r"\$$ORIGIN/lib/") - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - self.xcode_settings = self.msvs_settings = None - if self.flavor == "mac": - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None) - if mac_toolchain_dir: - self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir - - if self.flavor == "win": - self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) - arch = self.msvs_settings.GetArch(config_name) - self.ninja.variable("arch", self.win_env[arch]) - self.ninja.variable("cc", "$cl_" + arch) - self.ninja.variable("cxx", "$cl_" + arch) - self.ninja.variable("cc_host", "$cl_" + arch) - self.ninja.variable("cxx_host", "$cl_" + arch) - self.ninja.variable("asm", "$ml_" + arch) - - if self.flavor == "mac": - self.archs = self.xcode_settings.GetActiveArchs(config_name) - if len(self.archs) > 1: - self.arch_subninjas = dict( - ( - arch, - ninja_syntax.Writer( - OpenOutput( - os.path.join( - self.toplevel_build, self._SubninjaNameForArch(arch) - ), - "w", - ) - ), - ) - for arch in self.archs - ) - - # Compute predepends for all rules. - # actions_depends is the dependencies this target depends on before running - # any of its action/rule/copy steps. - # compile_depends is the dependencies this target depends on before running - # any of its compile steps. - actions_depends = [] - compile_depends = [] - # TODO(evan): it is rather confusing which things are lists and which - # are strings. Fix these. 
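(Hypothetical inputs and outputs for WriteCollapsedDependencies above:)

    # []            -> None (order_only must also be empty)
    # ["a.stamp"]   -> "a.stamp" (a single target passes through, no extra stamp)
    # ["a", "b"]    -> one generated stamp target, roughly:
    #                    build obj/foo/targ.<name>.stamp: stamp a b
    #                  so later edges depend on one path instead of many.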
- if "dependencies" in spec: - for dep in spec["dependencies"]: - if dep in self.target_outputs: - target = self.target_outputs[dep] - actions_depends.append(target.PreActionInput(self.flavor)) - compile_depends.append(target.PreCompileInput()) - if target.uses_cpp: - self.target.uses_cpp = True - actions_depends = [item for item in actions_depends if item] - compile_depends = [item for item in compile_depends if item] - actions_depends = self.WriteCollapsedDependencies( - "actions_depends", actions_depends - ) - compile_depends = self.WriteCollapsedDependencies( - "compile_depends", compile_depends - ) - self.target.preaction_stamp = actions_depends - self.target.precompile_stamp = compile_depends - - # Write out actions, rules, and copies. These must happen before we - # compile any sources, so compute a list of predependencies for sources - # while we do it. - extra_sources = [] - mac_bundle_depends = [] - self.target.actions_stamp = self.WriteActionsRulesCopies( - spec, extra_sources, actions_depends, mac_bundle_depends - ) - - # If we have actions/rules/copies, we depend directly on those, but - # otherwise we depend on dependent target's actions/rules/copies etc. - # We never need to explicitly depend on previous target's link steps, - # because no compile ever depends on them. - compile_depends_stamp = self.target.actions_stamp or compile_depends - - # Write out the compilation steps, if any. - link_deps = [] - try: - sources = extra_sources + spec.get("sources", []) - except TypeError: - print("extra_sources: ", str(extra_sources)) - print('spec.get("sources"): ', str(spec.get("sources"))) - raise - if sources: - if self.flavor == "mac" and len(self.archs) > 1: - # Write subninja file containing compile and link commands scoped to - # a single arch if a fat binary is being built. - for arch in self.archs: - self.ninja.subninja(self._SubninjaNameForArch(arch)) - - pch = None - if self.flavor == "win": - gyp.msvs_emulation.VerifyMissingSources( - sources, self.abs_build_dir, generator_flags, self.GypPathToNinja - ) - pch = gyp.msvs_emulation.PrecompiledHeader( - self.msvs_settings, - config_name, - self.GypPathToNinja, - self.GypPathToUniqueOutput, - self.obj_ext, - ) - else: - pch = gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, - self.GypPathToNinja, - lambda path, lang: self.GypPathToUniqueOutput(path + "-" + lang), - ) - link_deps = self.WriteSources( - self.ninja, - config_name, - config, - sources, - compile_depends_stamp, - pch, - spec, - ) - # Some actions/rules output 'sources' that are already object files. - obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] - if obj_outputs: - if self.flavor != "mac" or len(self.archs) == 1: - link_deps += [self.GypPathToNinja(o) for o in obj_outputs] - else: - print( - "Warning: Actions/rules writing object files don't work with " - "multiarch targets, dropping. (target %s)" % spec["target_name"] - ) - elif self.flavor == "mac" and len(self.archs) > 1: - link_deps = collections.defaultdict(list) - - compile_deps = self.target.actions_stamp or actions_depends - if self.flavor == "win" and self.target.type == "static_library": - self.target.component_objs = link_deps - self.target.compile_deps = compile_deps - - # Write out a link step, if needed. 
- output = None - is_empty_bundle = not link_deps and not mac_bundle_depends - if link_deps or self.target.actions_stamp or actions_depends: - output = self.WriteTarget( - spec, config_name, config, link_deps, compile_deps - ) - if self.is_mac_bundle: - mac_bundle_depends.append(output) - - # Bundle all of the above together, if needed. - if self.is_mac_bundle: - output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle) - - if not output: - return None - - assert self.target.FinalOutput(), output - return self.target - - def _WinIdlRule(self, source, prebuild, outputs): - """Handle the implicit VS .idl rule for one source file. Fills |outputs| - with files that are generated.""" - outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData( - source, self.config_name - ) - outdir = self.GypPathToNinja(outdir) - - def fix_path(path, rel=None): - path = os.path.join(outdir, path) - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - path = self.ExpandRuleVariables(path, root, dirname, source, ext, basename) - if rel: - path = os.path.relpath(path, rel) - return path - - vars = [(name, fix_path(value, outdir)) for name, value in vars] - output = [fix_path(p) for p in output] - vars.append(("outdir", outdir)) - vars.append(("idlflags", flags)) - input = self.GypPathToNinja(source) - self.ninja.build(output, "idl", input, variables=vars, order_only=prebuild) - outputs.extend(output) - - def WriteWinIdlFiles(self, spec, prebuild): - """Writes rules to match MSVS's implicit idl handling.""" - assert self.flavor == "win" - if self.msvs_settings.HasExplicitIdlRulesOrActions(spec): - return [] - outputs = [] - for source in filter(lambda x: x.endswith(".idl"), spec["sources"]): - self._WinIdlRule(source, prebuild, outputs) - return outputs - - def WriteActionsRulesCopies( - self, spec, extra_sources, prebuild, mac_bundle_depends - ): - """Write out the Actions, Rules, and Copies steps. Return a path - representing the outputs of these steps.""" - outputs = [] - if self.is_mac_bundle: - mac_bundle_resources = spec.get("mac_bundle_resources", [])[:] - else: - mac_bundle_resources = [] - extra_mac_bundle_resources = [] - - if "actions" in spec: - outputs += self.WriteActions( - spec["actions"], extra_sources, prebuild, extra_mac_bundle_resources - ) - if "rules" in spec: - outputs += self.WriteRules( - spec["rules"], - extra_sources, - prebuild, - mac_bundle_resources, - extra_mac_bundle_resources, - ) - if "copies" in spec: - outputs += self.WriteCopies(spec["copies"], prebuild, mac_bundle_depends) - - if "sources" in spec and self.flavor == "win": - outputs += self.WriteWinIdlFiles(spec, prebuild) - - if self.xcode_settings and self.xcode_settings.IsIosFramework(): - self.WriteiOSFrameworkHeaders(spec, outputs, prebuild) - - stamp = self.WriteCollapsedDependencies("actions_rules_copies", outputs) - - if self.is_mac_bundle: - xcassets = self.WriteMacBundleResources( - extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends - ) - partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends) - self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends) - - return stamp - - def GenerateDescription(self, verb, message, fallback): - """Generate and return a description of a build step. - - |verb| is the short summary, e.g. ACTION or RULE. - |message| is a hand-written description, or None if not available. - |fallback| is the gyp-level name of the step, usable as a fallback. 
- """ - if self.toolset != "target": - verb += "(%s)" % self.toolset - if message: - return "%s %s" % (verb, self.ExpandSpecial(message)) - else: - return "%s %s: %s" % (verb, self.name, fallback) - - def WriteActions( - self, actions, extra_sources, prebuild, extra_mac_bundle_resources - ): - # Actions cd into the base directory. - env = self.GetToolchainEnv() - all_outputs = [] - for action in actions: - # First write out a rule for the action. - name = "%s_%s" % (action["action_name"], self.hash_for_rules) - description = self.GenerateDescription( - "ACTION", action.get("message", None), name - ) - is_cygwin = ( - self.msvs_settings.IsRuleRunUnderCygwin(action) - if self.flavor == "win" - else False - ) - args = action["action"] - depfile = action.get("depfile", None) - if depfile: - depfile = self.ExpandSpecial(depfile, self.base_to_build) - pool = "console" if int(action.get("ninja_use_console", 0)) else None - rule_name, _ = self.WriteNewNinjaRule( - name, args, description, is_cygwin, env, pool, depfile=depfile - ) - - inputs = [self.GypPathToNinja(i, env) for i in action["inputs"]] - if int(action.get("process_outputs_as_sources", False)): - extra_sources += action["outputs"] - if int(action.get("process_outputs_as_mac_bundle_resources", False)): - extra_mac_bundle_resources += action["outputs"] - outputs = [self.GypPathToNinja(o, env) for o in action["outputs"]] - - # Then write out an edge using the rule. - self.ninja.build(outputs, rule_name, inputs, order_only=prebuild) - all_outputs += outputs - - self.ninja.newline() - - return all_outputs - - def WriteRules( - self, - rules, - extra_sources, - prebuild, - mac_bundle_resources, - extra_mac_bundle_resources, - ): - env = self.GetToolchainEnv() - all_outputs = [] - for rule in rules: - # Skip a rule with no action and no inputs. - if "action" not in rule and not rule.get("rule_sources", []): - continue - - # First write out a rule for the rule action. - name = "%s_%s" % (rule["rule_name"], self.hash_for_rules) - - args = rule["action"] - description = self.GenerateDescription( - "RULE", - rule.get("message", None), - ("%s " + generator_default_variables["RULE_INPUT_PATH"]) % name, - ) - is_cygwin = ( - self.msvs_settings.IsRuleRunUnderCygwin(rule) - if self.flavor == "win" - else False - ) - pool = "console" if int(rule.get("ninja_use_console", 0)) else None - rule_name, args = self.WriteNewNinjaRule( - name, args, description, is_cygwin, env, pool - ) - - # TODO: if the command references the outputs directly, we should - # simplify it to just use $out. - - # Rules can potentially make use of some special variables which - # must vary per source file. - # Compute the list of variables we'll need to provide. - special_locals = ("source", "root", "dirname", "ext", "name") - needed_variables = set(["source"]) - for argument in args: - for var in special_locals: - if "${%s}" % var in argument: - needed_variables.add(var) - needed_variables = sorted(needed_variables) - - def cygwin_munge(path): - # pylint: disable=cell-var-from-loop - if is_cygwin: - return path.replace("\\", "/") - return path - - inputs = [self.GypPathToNinja(i, env) for i in rule.get("inputs", [])] - - # If there are n source files matching the rule, and m additional rule - # inputs, then adding 'inputs' to each build edge written below will - # write m * n inputs. Collapsing reduces this to m + n. 
- sources = rule.get("rule_sources", []) - num_inputs = len(inputs) - if prebuild: - num_inputs += 1 - if num_inputs > 2 and len(sources) > 2: - inputs = [ - self.WriteCollapsedDependencies( - rule["rule_name"], inputs, order_only=prebuild - ) - ] - prebuild = [] - - # For each source file, write an edge that generates all the outputs. - for source in sources: - source = os.path.normpath(source) - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - - # Gather the list of inputs and outputs, expanding $vars if possible. - outputs = [ - self.ExpandRuleVariables(o, root, dirname, source, ext, basename) - for o in rule["outputs"] - ] - - if int(rule.get("process_outputs_as_sources", False)): - extra_sources += outputs - - was_mac_bundle_resource = source in mac_bundle_resources - if was_mac_bundle_resource or int( - rule.get("process_outputs_as_mac_bundle_resources", False) - ): - extra_mac_bundle_resources += outputs - # Note: This is n_resources * n_outputs_in_rule. - # Put to-be-removed items in a set and - # remove them all in a single pass - # if this becomes a performance issue. - if was_mac_bundle_resource: - mac_bundle_resources.remove(source) - - extra_bindings = [] - for var in needed_variables: - if var == "root": - extra_bindings.append(("root", cygwin_munge(root))) - elif var == "dirname": - # '$dirname' is a parameter to the rule action, which means - # it shouldn't be converted to a Ninja path. But we don't - # want $!PRODUCT_DIR in there either. - dirname_expanded = self.ExpandSpecial( - dirname, self.base_to_build - ) - extra_bindings.append( - ("dirname", cygwin_munge(dirname_expanded)) - ) - elif var == "source": - # '$source' is a parameter to the rule action, which means - # it shouldn't be converted to a Ninja path. But we don't - # want $!PRODUCT_DIR in there either. - source_expanded = self.ExpandSpecial(source, self.base_to_build) - extra_bindings.append(("source", cygwin_munge(source_expanded))) - elif var == "ext": - extra_bindings.append(("ext", ext)) - elif var == "name": - extra_bindings.append(("name", cygwin_munge(basename))) - else: - assert var is None, repr(var) - - outputs = [self.GypPathToNinja(o, env) for o in outputs] - if self.flavor == "win": - # WriteNewNinjaRule uses unique_name to create a rsp file on win. - extra_bindings.append( - ("unique_name", hashlib.md5(outputs[0]).hexdigest()) - ) - - self.ninja.build( - outputs, - rule_name, - self.GypPathToNinja(source), - implicit=inputs, - order_only=prebuild, - variables=extra_bindings, - ) - - all_outputs.extend(outputs) - - return all_outputs - - def WriteCopies(self, copies, prebuild, mac_bundle_depends): - outputs = [] - if self.xcode_settings: - extra_env = self.xcode_settings.GetPerTargetSettings() - env = self.GetToolchainEnv(additional_settings=extra_env) - else: - env = self.GetToolchainEnv() - for to_copy in copies: - for path in to_copy["files"]: - # Normalize the path so trailing slashes don't confuse us. - path = os.path.normpath(path) - basename = os.path.split(path)[1] - src = self.GypPathToNinja(path, env) - dst = self.GypPathToNinja( - os.path.join(to_copy["destination"], basename), env - ) - outputs += self.ninja.build(dst, "copy", src, order_only=prebuild) - if self.is_mac_bundle: - # gyp has mac_bundle_resources to copy things into a bundle's - # Resources folder, but there's no built-in way to copy files - # to other places in the bundle. - # Hence, some targets use copies for this. 
- # Check if this file is copied into the current bundle, - # and if so add it to the bundle depends so - # that dependent targets get rebuilt if the copy input changes. - if dst.startswith( - self.xcode_settings.GetBundleContentsFolderPath() - ): - mac_bundle_depends.append(dst) - - return outputs - - def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild): - """Prebuild steps to generate hmap files and copy headers to destination.""" - framework = self.ComputeMacBundleOutput() - all_sources = spec["sources"] - copy_headers = spec["mac_framework_headers"] - output = self.GypPathToUniqueOutput("headers.hmap") - self.xcode_settings.header_map_path = output - all_headers = map( - self.GypPathToNinja, filter(lambda x: x.endswith((".h")), all_sources) - ) - variables = [ - ("framework", framework), - ("copy_headers", map(self.GypPathToNinja, copy_headers)), - ] - outputs.extend( - self.ninja.build( - output, - "compile_ios_framework_headers", - all_headers, - variables=variables, - order_only=prebuild, - ) - ) - - def WriteMacBundleResources(self, resources, bundle_depends): - """Writes ninja edges for 'mac_bundle_resources'.""" - xcassets = [] - - extra_env = self.xcode_settings.GetPerTargetSettings() - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) - - for output, res in gyp.xcode_emulation.GetMacBundleResources( - generator_default_variables["PRODUCT_DIR"], - self.xcode_settings, - map(self.GypPathToNinja, resources), - ): - output = self.ExpandSpecial(output) - if os.path.splitext(output)[-1] != ".xcassets": - self.ninja.build( - output, - "mac_tool", - res, - variables=[ - ("mactool_cmd", "copy-bundle-resource"), - ("env", env), - ("binary", isBinary), - ], - ) - bundle_depends.append(output) - else: - xcassets.append(res) - return xcassets - - def WriteMacXCassets(self, xcassets, bundle_depends): - """Writes ninja edges for 'mac_bundle_resources' .xcassets files. - - This adds an invocation of 'actool' via the 'mac_tool.py' helper script. - It assumes that the assets catalogs define at least one imageset and - thus an Assets.car file will be generated in the application resources - directory.
If this is not the case, then the build will probably be done - at each invocation of ninja.""" - if not xcassets: - return - - extra_arguments = {} - settings_to_arg = { - "XCASSETS_APP_ICON": "app-icon", - "XCASSETS_LAUNCH_IMAGE": "launch-image", - } - settings = self.xcode_settings.xcode_settings[self.config_name] - for settings_key, arg_name in settings_to_arg.items(): - value = settings.get(settings_key) - if value: - extra_arguments[arg_name] = value - - partial_info_plist = None - if extra_arguments: - partial_info_plist = self.GypPathToUniqueOutput( - "assetcatalog_generated_info.plist" - ) - extra_arguments["output-partial-info-plist"] = partial_info_plist - - outputs = [] - outputs.append( - os.path.join(self.xcode_settings.GetBundleResourceFolder(), "Assets.car") - ) - if partial_info_plist: - outputs.append(partial_info_plist) - - keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor) - extra_env = self.xcode_settings.GetPerTargetSettings() - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - - bundle_depends.extend( - self.ninja.build( - outputs, - "compile_xcassets", - xcassets, - variables=[("env", env), ("keys", keys)], - ) - ) - return partial_info_plist - - def WriteMacInfoPlist(self, partial_info_plist, bundle_depends): - """Write build rules for bundle Info.plist files.""" - info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( - generator_default_variables["PRODUCT_DIR"], - self.xcode_settings, - self.GypPathToNinja, - ) - if not info_plist: - return - out = self.ExpandSpecial(out) - if defines: - # Create an intermediate file to store preprocessed results. - intermediate_plist = self.GypPathToUniqueOutput( - os.path.basename(info_plist) - ) - defines = " ".join([Define(d, self.flavor) for d in defines]) - info_plist = self.ninja.build( - intermediate_plist, - "preprocess_infoplist", - info_plist, - variables=[("defines", defines)], - ) - - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - - if partial_info_plist: - intermediate_plist = self.GypPathToUniqueOutput("merged_info.plist") - info_plist = self.ninja.build( - intermediate_plist, "merge_infoplist", [partial_info_plist, info_plist] - ) - - keys = self.xcode_settings.GetExtraPlistItems(self.config_name) - keys = QuoteShellArgument(json.dumps(keys), self.flavor) - isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) - self.ninja.build( - out, - "copy_infoplist", - info_plist, - variables=[("env", env), ("keys", keys), ("binary", isBinary)], - ) - bundle_depends.append(out) - - def WriteSources( - self, - ninja_file, - config_name, - config, - sources, - predepends, - precompiled_header, - spec, - ): - """Write build rules to compile all of |sources|.""" - if self.toolset == "host": - self.ninja.variable("ar", "$ar_host") - self.ninja.variable("cc", "$cc_host") - self.ninja.variable("cxx", "$cxx_host") - self.ninja.variable("ld", "$ld_host") - self.ninja.variable("ldxx", "$ldxx_host") - self.ninja.variable("nm", "$nm_host") - self.ninja.variable("readelf", "$readelf_host") - - if self.flavor != "mac" or len(self.archs) == 1: - return self.WriteSourcesForArch( - self.ninja, - config_name, - config, - sources, - predepends, - precompiled_header, - spec, - ) - else: - return dict( - ( - arch, - self.WriteSourcesForArch( - self.arch_subninjas[arch], - config_name, - config, - sources, - predepends, - precompiled_header, - spec, - arch=arch, - ), - ) - for arch in 
self.archs - ) - - def WriteSourcesForArch( - self, - ninja_file, - config_name, - config, - sources, - predepends, - precompiled_header, - spec, - arch=None, - ): - """Write build rules to compile all of |sources|.""" - - extra_defines = [] - if self.flavor == "mac": - cflags = self.xcode_settings.GetCflags(config_name, arch=arch) - cflags_c = self.xcode_settings.GetCflagsC(config_name) - cflags_cc = self.xcode_settings.GetCflagsCC(config_name) - cflags_objc = ["$cflags_c"] + self.xcode_settings.GetCflagsObjC(config_name) - cflags_objcc = ["$cflags_cc"] + self.xcode_settings.GetCflagsObjCC( - config_name - ) - elif self.flavor == "win": - asmflags = self.msvs_settings.GetAsmflags(config_name) - cflags = self.msvs_settings.GetCflags(config_name) - cflags_c = self.msvs_settings.GetCflagsC(config_name) - cflags_cc = self.msvs_settings.GetCflagsCC(config_name) - extra_defines = self.msvs_settings.GetComputedDefines(config_name) - # See comment at cc_command for why there's two .pdb files. - pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName( - config_name, self.ExpandSpecial - ) - if not pdbpath_c: - obj = "obj" - if self.toolset != "target": - obj += "." + self.toolset - pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name)) - pdbpath_c = pdbpath + ".c.pdb" - pdbpath_cc = pdbpath + ".cc.pdb" - self.WriteVariableList(ninja_file, "pdbname_c", [pdbpath_c]) - self.WriteVariableList(ninja_file, "pdbname_cc", [pdbpath_cc]) - self.WriteVariableList(ninja_file, "pchprefix", [self.name]) - else: - cflags = config.get("cflags", []) - cflags_c = config.get("cflags_c", []) - cflags_cc = config.get("cflags_cc", []) - - # Respect environment variables related to build, but target-specific - # flags can still override them. - if self.toolset == "target": - cflags_c = ( - os.environ.get("CPPFLAGS", "").split() - + os.environ.get("CFLAGS", "").split() - + cflags_c - ) - cflags_cc = ( - os.environ.get("CPPFLAGS", "").split() - + os.environ.get("CXXFLAGS", "").split() - + cflags_cc - ) - elif self.toolset == "host": - cflags_c = ( - os.environ.get("CPPFLAGS_host", "").split() - + os.environ.get("CFLAGS_host", "").split() - + cflags_c - ) - cflags_cc = ( - os.environ.get("CPPFLAGS_host", "").split() - + os.environ.get("CXXFLAGS_host", "").split() - + cflags_cc - ) - - defines = config.get("defines", []) + extra_defines - self.WriteVariableList( - ninja_file, "defines", [Define(d, self.flavor) for d in defines] - ) - if self.flavor == "win": - self.WriteVariableList( - ninja_file, "asmflags", map(self.ExpandSpecial, asmflags) - ) - self.WriteVariableList( - ninja_file, - "rcflags", - [ - QuoteShellArgument(self.ExpandSpecial(f), self.flavor) - for f in self.msvs_settings.GetRcflags( - config_name, self.GypPathToNinja - ) - ], - ) - - include_dirs = config.get("include_dirs", []) - - env = self.GetToolchainEnv() - if self.flavor == "win": - include_dirs = self.msvs_settings.AdjustIncludeDirs( - include_dirs, config_name - ) - self.WriteVariableList( - ninja_file, - "includes", - [ - QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) - for i in include_dirs - ], - ) - - if self.flavor == "win": - midl_include_dirs = config.get("midl_include_dirs", []) - midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs( - midl_include_dirs, config_name - ) - self.WriteVariableList( - ninja_file, - "midl_includes", - [ - QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) - for i in midl_include_dirs - ], - ) - - pch_commands = 
precompiled_header.GetPchBuildCommands(arch) - if self.flavor == "mac": - # Most targets use no precompiled headers, so only write these if needed. - for ext, var in [ - ("c", "cflags_pch_c"), - ("cc", "cflags_pch_cc"), - ("m", "cflags_pch_objc"), - ("mm", "cflags_pch_objcc"), - ]: - include = precompiled_header.GetInclude(ext, arch) - if include: - ninja_file.variable(var, include) - - arflags = config.get("arflags", []) - - self.WriteVariableList(ninja_file, "cflags", map(self.ExpandSpecial, cflags)) - self.WriteVariableList( - ninja_file, "cflags_c", map(self.ExpandSpecial, cflags_c) - ) - self.WriteVariableList( - ninja_file, "cflags_cc", map(self.ExpandSpecial, cflags_cc) - ) - if self.flavor == "mac": - self.WriteVariableList( - ninja_file, "cflags_objc", map(self.ExpandSpecial, cflags_objc) - ) - self.WriteVariableList( - ninja_file, "cflags_objcc", map(self.ExpandSpecial, cflags_objcc) - ) - self.WriteVariableList(ninja_file, "arflags", map(self.ExpandSpecial, arflags)) - ninja_file.newline() - outputs = [] - has_rc_source = False - for source in sources: - filename, ext = os.path.splitext(source) - ext = ext[1:] - obj_ext = self.obj_ext - if ext in ("cc", "cpp", "cxx"): - command = "cxx" - self.target.uses_cpp = True - elif ext == "c" or (ext == "S" and self.flavor != "win"): - command = "cc" - elif ext == "s" and self.flavor != "win": # Doesn't generate .o.d files. - command = "cc_s" - elif ( - self.flavor == "win" - and ext == "asm" - and not self.msvs_settings.HasExplicitAsmRules(spec) - ): - command = "asm" - # Add the _asm suffix as msvs is capable of handling .cc and - # .asm files of the same name without collision. - obj_ext = "_asm.obj" - elif self.flavor == "mac" and ext == "m": - command = "objc" - elif self.flavor == "mac" and ext == "mm": - command = "objcxx" - self.target.uses_cpp = True - elif self.flavor == "win" and ext == "rc": - command = "rc" - obj_ext = ".res" - has_rc_source = True - else: - # Ignore unhandled extensions. 
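
Note: the long if/elif chain above reduces to a small mapping. The sketch below is illustrative only (not gyp code) and covers just the POSIX branch; the subtlety it preserves is that capital-.S assembly goes through $cc so it is preprocessed, while lowercase .s uses the cc_s rule, which emits no depfile.

    import os

    # Illustrative reduction of the extension dispatch above (POSIX only).
    def compile_rule_for(source):
        ext = os.path.splitext(source)[1][1:]
        if ext in ("cc", "cpp", "cxx"):
            return "cxx"     # C++ compilation
        if ext in ("c", "S"):
            return "cc"      # C, or assembly that needs the preprocessor
        if ext == "s":
            return "cc_s"    # raw assembly; doesn't generate .o.d files
        return None          # unhandled extensions are skipped

    assert compile_rule_for("foo.cpp") == "cxx"
    assert compile_rule_for("foo.S") == "cc"
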
- continue - input = self.GypPathToNinja(source) - output = self.GypPathToUniqueOutput(filename + obj_ext) - if arch is not None: - output = AddArch(output, arch) - implicit = precompiled_header.GetObjDependencies([input], [output], arch) - variables = [] - if self.flavor == "win": - variables, output, implicit = precompiled_header.GetFlagsModifications( - input, - output, - implicit, - command, - cflags_c, - cflags_cc, - self.ExpandSpecial, - ) - ninja_file.build( - output, - command, - input, - implicit=[gch for _, _, gch in implicit], - order_only=predepends, - variables=variables, - ) - outputs.append(output) - - if has_rc_source: - resource_include_dirs = config.get("resource_include_dirs", include_dirs) - self.WriteVariableList( - ninja_file, - "resource_includes", - [ - QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) - for i in resource_include_dirs - ], - ) - - self.WritePchTargets(ninja_file, pch_commands) - - ninja_file.newline() - return outputs - - def WritePchTargets(self, ninja_file, pch_commands): - """Writes ninja rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - var_name = { - "c": "cflags_pch_c", - "cc": "cflags_pch_cc", - "m": "cflags_pch_objc", - "mm": "cflags_pch_objcc", - }[lang] - - map = { - "c": "cc", - "cc": "cxx", - "m": "objc", - "mm": "objcxx", - } - cmd = map.get(lang) - ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)]) - - def WriteLink(self, spec, config_name, config, link_deps, compile_deps): - """Write out a link step. Fills out target.binary. """ - if self.flavor != "mac" or len(self.archs) == 1: - return self.WriteLinkForArch( - self.ninja, spec, config_name, config, link_deps, compile_deps - ) - else: - output = self.ComputeOutput(spec) - inputs = [ - self.WriteLinkForArch( - self.arch_subninjas[arch], - spec, - config_name, - config, - link_deps[arch], - compile_deps, - arch=arch, - ) - for arch in self.archs - ] - extra_bindings = [] - build_output = output - if not self.is_mac_bundle: - self.AppendPostbuildVariable(extra_bindings, spec, output, output) - - # TODO(yyanagisawa): more work needed to fix: - # https://code.google.com/p/gyp/issues/detail?id=411 - if ( - spec["type"] in ("shared_library", "loadable_module") - and not self.is_mac_bundle - ): - extra_bindings.append(("lib", output)) - self.ninja.build( - [output, output + ".TOC"], - "solipo", - inputs, - variables=extra_bindings, - ) - else: - self.ninja.build(build_output, "lipo", inputs, variables=extra_bindings) - return output - - def WriteLinkForArch( - self, ninja_file, spec, config_name, config, link_deps, compile_deps, arch=None - ): - """Write out a link step. Fills out target.binary. """ - command = { - "executable": "link", - "loadable_module": "solink_module", - "shared_library": "solink", - }[spec["type"]] - command_suffix = "" - - implicit_deps = set() - solibs = set() - order_deps = set() - - if compile_deps: - # Normally, the compiles of the target already depend on compile_deps, - # but a shared_library target might have no sources and only link together - # a few static_library deps, so the link step also needs to depend - # on compile_deps to make sure actions in the shared_library target - # get run before the link. - order_deps.add(compile_deps) - - if "dependencies" in spec: - # Two kinds of dependencies: - # - Linkable dependencies (like a .a or a .so): add them to the link line. 
- # - Non-linkable dependencies (like a rule that generates a file - # and writes a stamp file): add them to implicit_deps - extra_link_deps = set() - for dep in spec["dependencies"]: - target = self.target_outputs.get(dep) - if not target: - continue - linkable = target.Linkable() - if linkable: - new_deps = [] - if ( - self.flavor == "win" - and target.component_objs - and self.msvs_settings.IsUseLibraryDependencyInputs(config_name) - ): - new_deps = target.component_objs - if target.compile_deps: - order_deps.add(target.compile_deps) - elif self.flavor == "win" and target.import_lib: - new_deps = [target.import_lib] - elif target.UsesToc(self.flavor): - solibs.add(target.binary) - implicit_deps.add(target.binary + ".TOC") - else: - new_deps = [target.binary] - for new_dep in new_deps: - if new_dep not in extra_link_deps: - extra_link_deps.add(new_dep) - link_deps.append(new_dep) - - final_output = target.FinalOutput() - if not linkable or final_output != target.binary: - implicit_deps.add(final_output) - - extra_bindings = [] - if self.target.uses_cpp and self.flavor != "win": - extra_bindings.append(("ld", "$ldxx")) - - output = self.ComputeOutput(spec, arch) - if arch is None and not self.is_mac_bundle: - self.AppendPostbuildVariable(extra_bindings, spec, output, output) - - is_executable = spec["type"] == "executable" - # The ldflags config key is not used on mac or win. On those platforms - # linker flags are set via xcode_settings and msvs_settings, respectively. - env_ldflags = os.environ.get("LDFLAGS", "").split() - if self.flavor == "mac": - ldflags = self.xcode_settings.GetLdflags( - config_name, - self.ExpandSpecial(generator_default_variables["PRODUCT_DIR"]), - self.GypPathToNinja, - arch, - ) - ldflags = env_ldflags + ldflags - elif self.flavor == "win": - manifest_base_name = self.GypPathToUniqueOutput( - self.ComputeOutputFileName(spec) - ) - ( - ldflags, - intermediate_manifest, - manifest_files, - ) = self.msvs_settings.GetLdflags( - config_name, - self.GypPathToNinja, - self.ExpandSpecial, - manifest_base_name, - output, - is_executable, - self.toplevel_build, - ) - ldflags = env_ldflags + ldflags - self.WriteVariableList(ninja_file, "manifests", manifest_files) - implicit_deps = implicit_deps.union(manifest_files) - if intermediate_manifest: - self.WriteVariableList( - ninja_file, "intermediatemanifest", [intermediate_manifest] - ) - command_suffix = _GetWinLinkRuleNameSuffix( - self.msvs_settings.IsEmbedManifest(config_name) - ) - def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja) - if def_file: - implicit_deps.add(def_file) - else: - # Respect environment variables related to build, but target-specific - # flags can still override them. 
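
Note: the ordering described in the comment above matters because later flags win on typical compiler and linker command lines. A minimal illustration of the same precedence (hypothetical flags, not gyp code):

    import os

    # Environment flags come first, target flags last, so a target can
    # override, for example, an option set globally via LDFLAGS.
    env_ldflags = os.environ.get("LDFLAGS", "").split()
    target_ldflags = ["-Wl,--as-needed"]    # hypothetical target setting
    ldflags = env_ldflags + target_ldflags  # same ordering as the code below
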
- ldflags = env_ldflags + config.get("ldflags", []) - if is_executable and len(solibs): - rpath = "lib/" - if self.toolset != "target": - rpath += self.toolset - ldflags.append(r"-Wl,-rpath=\$$ORIGIN/%s" % rpath) - else: - ldflags.append("-Wl,-rpath=%s" % self.target_rpath) - ldflags.append("-Wl,-rpath-link=%s" % rpath) - self.WriteVariableList(ninja_file, "ldflags", map(self.ExpandSpecial, ldflags)) - - library_dirs = config.get("library_dirs", []) - if self.flavor == "win": - library_dirs = [ - self.msvs_settings.ConvertVSMacros(library_dir, config_name) - for library_dir in library_dirs - ] - library_dirs = [ - "/LIBPATH:" - + QuoteShellArgument(self.GypPathToNinja(library_dir), self.flavor) - for library_dir in library_dirs - ] - else: - library_dirs = [ - QuoteShellArgument("-L" + self.GypPathToNinja(library_dir), self.flavor) - for library_dir in library_dirs - ] - - libraries = gyp.common.uniquer( - map(self.ExpandSpecial, spec.get("libraries", [])) - ) - if self.flavor == "mac": - libraries = self.xcode_settings.AdjustLibraries(libraries, config_name) - elif self.flavor == "win": - libraries = self.msvs_settings.AdjustLibraries(libraries) - - self.WriteVariableList(ninja_file, "libs", library_dirs + libraries) - - linked_binary = output - - if command in ("solink", "solink_module"): - extra_bindings.append(("soname", os.path.split(output)[1])) - extra_bindings.append(("lib", gyp.common.EncodePOSIXShellArgument(output))) - if self.flavor != "win": - link_file_list = output - if self.is_mac_bundle: - # 'Dependency Framework.framework/Versions/A/Dependency Framework' - # -> 'Dependency Framework.framework.rsp' - link_file_list = self.xcode_settings.GetWrapperName() - if arch: - link_file_list += "." + arch - link_file_list += ".rsp" - # If an rspfile contains spaces, ninja surrounds the filename with - # quotes around it and then passes it to open(), creating a file with - # quotes in its name (and when looking for the rsp file, the name - # makes it through bash which strips the quotes) :-/ - link_file_list = link_file_list.replace(" ", "_") - extra_bindings.append( - ( - "link_file_list", - gyp.common.EncodePOSIXShellArgument(link_file_list), - ) - ) - if self.flavor == "win": - extra_bindings.append(("binary", output)) - if ( - "/NOENTRY" not in ldflags - and not self.msvs_settings.GetNoImportLibrary(config_name) - ): - self.target.import_lib = output + ".lib" - extra_bindings.append( - ("implibflag", "/IMPLIB:%s" % self.target.import_lib) - ) - pdbname = self.msvs_settings.GetPDBName( - config_name, self.ExpandSpecial, output + ".pdb" - ) - output = [output, self.target.import_lib] - if pdbname: - output.append(pdbname) - elif not self.is_mac_bundle: - output = [output, output + ".TOC"] - else: - command = command + "_notoc" - elif self.flavor == "win": - extra_bindings.append(("binary", output)) - pdbname = self.msvs_settings.GetPDBName( - config_name, self.ExpandSpecial, output + ".pdb" - ) - if pdbname: - output = [output, pdbname] - - if len(solibs): - extra_bindings.append( - ("solibs", gyp.common.EncodePOSIXShellList(sorted(solibs))) - ) - - ninja_file.build( - output, - command + command_suffix, - link_deps, - implicit=sorted(implicit_deps), - order_only=list(order_deps), - variables=extra_bindings, - ) - return linked_binary - - def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): - extra_link_deps = any( - self.target_outputs.get(dep).Linkable() - for dep in spec.get("dependencies", []) - if dep in self.target_outputs - ) - if spec["type"] == "none" 
or (not link_deps and not extra_link_deps): - # TODO(evan): don't call this function for 'none' target types, as - # it doesn't do anything, and we fake out a 'binary' with a stamp file. - self.target.binary = compile_deps - self.target.type = "none" - elif spec["type"] == "static_library": - self.target.binary = self.ComputeOutput(spec) - if ( - self.flavor not in ("mac", "openbsd", "netbsd", "win") - and not self.is_standalone_static_library - ): - self.ninja.build( - self.target.binary, "alink_thin", link_deps, order_only=compile_deps - ) - else: - variables = [] - if self.xcode_settings: - libtool_flags = self.xcode_settings.GetLibtoolflags(config_name) - if libtool_flags: - variables.append(("libtool_flags", libtool_flags)) - if self.msvs_settings: - libflags = self.msvs_settings.GetLibFlags( - config_name, self.GypPathToNinja - ) - variables.append(("libflags", libflags)) - - if self.flavor != "mac" or len(self.archs) == 1: - self.AppendPostbuildVariable( - variables, spec, self.target.binary, self.target.binary - ) - self.ninja.build( - self.target.binary, - "alink", - link_deps, - order_only=compile_deps, - variables=variables, - ) - else: - inputs = [] - for arch in self.archs: - output = self.ComputeOutput(spec, arch) - self.arch_subninjas[arch].build( - output, - "alink", - link_deps[arch], - order_only=compile_deps, - variables=variables, - ) - inputs.append(output) - # TODO: It's not clear if - # libtool_flags should be passed to the alink - # call that combines single-arch .a files into a fat .a file. - self.AppendPostbuildVariable( - variables, spec, self.target.binary, self.target.binary - ) - self.ninja.build( - self.target.binary, - "alink", - inputs, - # FIXME: test proving order_only=compile_deps isn't - # needed. - variables=variables, - ) - else: - self.target.binary = self.WriteLink( - spec, config_name, config, link_deps, compile_deps - ) - return self.target.binary - - def WriteMacBundle(self, spec, mac_bundle_depends, is_empty): - assert self.is_mac_bundle - package_framework = spec["type"] in ("shared_library", "loadable_module") - output = self.ComputeMacBundleOutput() - if is_empty: - output += ".stamp" - variables = [] - self.AppendPostbuildVariable( - variables, - spec, - output, - self.target.binary, - is_command_start=not package_framework, - ) - if package_framework and not is_empty: - if spec["type"] == "shared_library" and self.xcode_settings.isIOS: - self.ninja.build( - output, - "package_ios_framework", - mac_bundle_depends, - variables=variables, - ) - else: - variables.append(("version", self.xcode_settings.GetFrameworkVersion())) - self.ninja.build( - output, "package_framework", mac_bundle_depends, variables=variables - ) - else: - self.ninja.build(output, "stamp", mac_bundle_depends, variables=variables) - self.target.bundle = output - return output - - def GetToolchainEnv(self, additional_settings=None): - """Returns the variables toolchain would set for build steps.""" - env = self.GetSortedXcodeEnv(additional_settings=additional_settings) - if self.flavor == "win": - env = self.GetMsvsToolchainEnv(additional_settings=additional_settings) - return env - - def GetMsvsToolchainEnv(self, additional_settings=None): - """Returns the variables Visual Studio would set for build steps.""" - return self.msvs_settings.GetVSMacroEnv( - "$!PRODUCT_DIR", config=self.config_name - ) - - def GetSortedXcodeEnv(self, additional_settings=None): - """Returns the variables Xcode would set for build steps.""" - assert self.abs_build_dir - abs_build_dir = 
self.abs_build_dir - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, - abs_build_dir, - os.path.join(abs_build_dir, self.build_to_base), - self.config_name, - additional_settings, - ) - - def GetSortedXcodePostbuildEnv(self): - """Returns the variables Xcode would set for postbuild steps.""" - postbuild_settings = {} - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. - strip_save_file = self.xcode_settings.GetPerTargetSetting( - "CHROMIUM_STRIP_SAVE_FILE" - ) - if strip_save_file: - postbuild_settings["CHROMIUM_STRIP_SAVE_FILE"] = strip_save_file - return self.GetSortedXcodeEnv(additional_settings=postbuild_settings) - - def AppendPostbuildVariable( - self, variables, spec, output, binary, is_command_start=False - ): - """Adds a 'postbuild' variable if there is a postbuild for |output|.""" - postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start) - if postbuild: - variables.append(("postbuilds", postbuild)) - - def GetPostbuildCommand(self, spec, output, output_binary, is_command_start): - """Returns a shell command that runs all the postbuilds, and removes - |output| if any of them fails. If |is_command_start| is False, then the - returned string will start with ' && '.""" - if not self.xcode_settings or spec["type"] == "none" or not output: - return "" - output = QuoteShellArgument(output, self.flavor) - postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) - if output_binary is not None: - postbuilds = self.xcode_settings.AddImplicitPostbuilds( - self.config_name, - os.path.normpath(os.path.join(self.base_to_build, output)), - QuoteShellArgument( - os.path.normpath(os.path.join(self.base_to_build, output_binary)), - self.flavor, - ), - postbuilds, - quiet=True, - ) - - if not postbuilds: - return "" - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. - postbuilds.insert( - 0, gyp.common.EncodePOSIXShellList(["cd", self.build_to_base]) - ) - env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv()) - # G will be non-null if any postbuild fails. Run all postbuilds in a - # subshell. - commands = ( - env - + " (" - + " && ".join([ninja_syntax.escape(command) for command in postbuilds]) - ) - command_string = ( - commands - + "); G=$$?; " - # Remove the final output if any postbuild failed. 
- "((exit $$G) || rm -rf %s) " % output - + "&& exit $$G)" - ) - if is_command_start: - return "(" + command_string + " && " - else: - return "$ && (" + command_string - - def ComputeExportEnvString(self, env): - """Given an environment, returns a string looking like - 'export FOO=foo; export BAR="${FOO} bar;' - that exports |env| to the shell.""" - export_str = [] - for k, v in env: - export_str.append( - "export %s=%s;" - % (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))) - ) - return " ".join(export_str) - - def ComputeMacBundleOutput(self): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = generator_default_variables["PRODUCT_DIR"] - return self.ExpandSpecial( - os.path.join(path, self.xcode_settings.GetWrapperName()) - ) - - def ComputeOutputFileName(self, spec, type=None): - """Compute the filename of the final output for the current target.""" - if not type: - type = spec["type"] - - default_variables = copy.copy(generator_default_variables) - CalculateVariables(default_variables, {"flavor": self.flavor}) - - # Compute filename prefix: the product prefix, or a default for - # the product type. - DEFAULT_PREFIX = { - "loadable_module": default_variables["SHARED_LIB_PREFIX"], - "shared_library": default_variables["SHARED_LIB_PREFIX"], - "static_library": default_variables["STATIC_LIB_PREFIX"], - "executable": default_variables["EXECUTABLE_PREFIX"], - } - prefix = spec.get("product_prefix", DEFAULT_PREFIX.get(type, "")) - - # Compute filename extension: the product extension, or a default - # for the product type. - DEFAULT_EXTENSION = { - "loadable_module": default_variables["SHARED_LIB_SUFFIX"], - "shared_library": default_variables["SHARED_LIB_SUFFIX"], - "static_library": default_variables["STATIC_LIB_SUFFIX"], - "executable": default_variables["EXECUTABLE_SUFFIX"], - } - extension = spec.get("product_extension") - if extension: - extension = "." + extension - else: - extension = DEFAULT_EXTENSION.get(type, "") - - if "product_name" in spec: - # If we were given an explicit name, use that. - target = spec["product_name"] - else: - # Otherwise, derive a name from the target name. - target = spec["target_name"] - if prefix == "lib": - # Snip out an extra 'lib' from libs if appropriate. - target = StripPrefix(target, "lib") - - if type in ( - "static_library", - "loadable_module", - "shared_library", - "executable", - ): - return "%s%s%s" % (prefix, target, extension) - elif type == "none": - return "%s.stamp" % target - else: - raise Exception("Unhandled output type %s" % type) - - def ComputeOutput(self, spec, arch=None): - """Compute the path for the final output of the spec.""" - type = spec["type"] - - if self.flavor == "win": - override = self.msvs_settings.GetOutputName( - self.config_name, self.ExpandSpecial - ) - if override: - return override - - if ( - arch is None - and self.flavor == "mac" - and type - in ("static_library", "executable", "shared_library", "loadable_module") - ): - filename = self.xcode_settings.GetExecutablePath() - else: - filename = self.ComputeOutputFileName(spec, type) - - if arch is None and "product_dir" in spec: - path = os.path.join(spec["product_dir"], filename) - return self.ExpandSpecial(path) - - # Some products go into the output root, libraries go into shared library - # dir, and everything else goes into the normal place. 
- type_in_output_root = ["executable", "loadable_module"] - if self.flavor == "mac" and self.toolset == "target": - type_in_output_root += ["shared_library", "static_library"] - elif self.flavor == "win" and self.toolset == "target": - type_in_output_root += ["shared_library"] - - if arch is not None: - # Make sure partial executables don't end up in a bundle or the regular - # output directory. - archdir = "arch" - if self.toolset != "target": - archdir = os.path.join("arch", "%s" % self.toolset) - return os.path.join(archdir, AddArch(filename, arch)) - elif type in type_in_output_root or self.is_standalone_static_library: - return filename - elif type == "shared_library": - libdir = "lib" - if self.toolset != "target": - libdir = os.path.join("lib", "%s" % self.toolset) - return os.path.join(libdir, filename) - else: - return self.GypPathToUniqueOutput(filename, qualified=False) - - def WriteVariableList(self, ninja_file, var, values): - assert not isinstance(values, str) - if values is None: - values = [] - ninja_file.variable(var, " ".join(values)) - - def WriteNewNinjaRule( - self, name, args, description, is_cygwin, env, pool, depfile=None - ): - """Write out a new ninja "rule" statement for a given command. - - Returns the name of the new rule, and a copy of |args| with variables - expanded.""" - - if self.flavor == "win": - args = [ - self.msvs_settings.ConvertVSMacros( - arg, self.base_to_build, config=self.config_name - ) - for arg in args - ] - description = self.msvs_settings.ConvertVSMacros( - description, config=self.config_name - ) - elif self.flavor == "mac": - # |env| is an empty list on non-mac. - args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] - description = gyp.xcode_emulation.ExpandEnvVars(description, env) - - # TODO: we shouldn't need to qualify names; we do it because - # currently the ninja rule namespace is global, but it really - # should be scoped to the subninja. - rule_name = self.name - if self.toolset == "target": - rule_name += "." + self.toolset - rule_name += "." + name - rule_name = re.sub("[^a-zA-Z0-9_]", "_", rule_name) - - # Remove variable references, but not if they refer to the magic rule - # variables. This is not quite right, as it also protects these for - # actions, not just for rules where they are valid. Good enough. - protect = ["${root}", "${dirname}", "${source}", "${ext}", "${name}"] - protect = "(?!" + "|".join(map(re.escape, protect)) + ")" - description = re.sub(protect + r"\$", "_", description) - - # gyp dictates that commands are run from the base directory. - # cd into the directory before running, and adjust paths in - # the arguments to point to the proper locations. - rspfile = None - rspfile_content = None - args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args] - if self.flavor == "win": - rspfile = rule_name + ".$unique_name.rsp" - # The cygwin case handles this inside the bash sub-shell. - run_in = "" if is_cygwin else " " + self.build_to_base - if is_cygwin: - rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine( - args, self.build_to_base - ) - else: - rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args) - command = ( - "%s gyp-win-tool action-wrapper $arch " % sys.executable - + rspfile - + run_in - ) - else: - env = self.ComputeExportEnvString(env) - command = gyp.common.EncodePOSIXShellList(args) - command = "cd %s; " % self.build_to_base + env + command - - # GYP rules/actions express being no-ops by not touching their outputs. 
- # Avoid executing downstream dependencies in this case by specifying - # restat=1 to ninja. - self.ninja.rule( - rule_name, - command, - description, - depfile=depfile, - restat=True, - pool=pool, - rspfile=rspfile, - rspfile_content=rspfile_content, - ) - self.ninja.newline() - - return rule_name, args - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - global generator_additional_non_configuration_keys - global generator_additional_path_sections - flavor = gyp.common.GetFlavor(params) - if flavor == "mac": - default_variables.setdefault("OS", "mac") - default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib") - default_variables.setdefault( - "SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"] - ) - default_variables.setdefault( - "LIB_DIR", generator_default_variables["PRODUCT_DIR"] - ) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Ninja generator. - import gyp.generator.xcode as xcode_generator - - generator_additional_non_configuration_keys = getattr( - xcode_generator, "generator_additional_non_configuration_keys", [] - ) - generator_additional_path_sections = getattr( - xcode_generator, "generator_additional_path_sections", [] - ) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr( - xcode_generator, "generator_extra_sources_for_rules", [] - ) - elif flavor == "win": - exts = gyp.MSVSUtil.TARGET_TYPE_EXT - default_variables.setdefault("OS", "win") - default_variables["EXECUTABLE_SUFFIX"] = "." + exts["executable"] - default_variables["STATIC_LIB_PREFIX"] = "" - default_variables["STATIC_LIB_SUFFIX"] = "." + exts["static_library"] - default_variables["SHARED_LIB_PREFIX"] = "" - default_variables["SHARED_LIB_SUFFIX"] = "." + exts["shared_library"] - - # Copy additional generator configuration data from VS, which is shared - # by the Windows Ninja generator. - import gyp.generator.msvs as msvs_generator - - generator_additional_non_configuration_keys = getattr( - msvs_generator, "generator_additional_non_configuration_keys", [] - ) - generator_additional_path_sections = getattr( - msvs_generator, "generator_additional_path_sections", [] - ) - - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - else: - operating_system = flavor - if flavor == "android": - operating_system = "linux" # Keep this legacy behavior for now. - default_variables.setdefault("OS", operating_system) - default_variables.setdefault("SHARED_LIB_SUFFIX", ".so") - default_variables.setdefault( - "SHARED_LIB_DIR", os.path.join("$!PRODUCT_DIR", "lib") - ) - default_variables.setdefault("LIB_DIR", os.path.join("$!PRODUCT_DIR", "obj")) - - -def ComputeOutputDir(params): - """Returns the path from the toplevel_dir to the build output directory.""" - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to ninja easier, ninja doesn't put anything here. - generator_dir = os.path.relpath(params["options"].generator_output or ".") - - # output_dir: relative path from generator_dir to the build directory. - output_dir = params.get("generator_flags", {}).get("output_dir", "out") - - # Relative path from source root to our output files. e.g. "out" - return os.path.normpath(os.path.join(generator_dir, output_dir)) - - -def CalculateGeneratorInputInfo(params): - """Called by __init__ to initialize generator values based on params.""" - # E.g. 
"out/gypfiles" - toplevel = params["options"].toplevel_dir - qualified_out_dir = os.path.normpath( - os.path.join(toplevel, ComputeOutputDir(params), "gypfiles") - ) - - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": toplevel, - "qualified_out_dir": qualified_out_dir, - } - - -def OpenOutput(path, mode="w"): - """Open |path| for writing, creating directories if necessary.""" - gyp.common.EnsureDirExists(path) - return open(path, mode) - - -def CommandWithWrapper(cmd, wrappers, prog): - wrapper = wrappers.get(cmd, "") - if wrapper: - return wrapper + " " + prog - return prog - - -def GetDefaultConcurrentLinks(): - """Returns a best-guess for a number of concurrent links.""" - pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY", 0)) - if pool_size: - return pool_size - - if sys.platform in ("win32", "cygwin"): - import ctypes - - class MEMORYSTATUSEX(ctypes.Structure): - _fields_ = [ - ("dwLength", ctypes.c_ulong), - ("dwMemoryLoad", ctypes.c_ulong), - ("ullTotalPhys", ctypes.c_ulonglong), - ("ullAvailPhys", ctypes.c_ulonglong), - ("ullTotalPageFile", ctypes.c_ulonglong), - ("ullAvailPageFile", ctypes.c_ulonglong), - ("ullTotalVirtual", ctypes.c_ulonglong), - ("ullAvailVirtual", ctypes.c_ulonglong), - ("sullAvailExtendedVirtual", ctypes.c_ulonglong), - ] - - stat = MEMORYSTATUSEX() - stat.dwLength = ctypes.sizeof(stat) - ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) - - # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM - # on a 64 GB machine. - mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GB - hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX", 2 ** 32))) - return min(mem_limit, hard_cap) - elif sys.platform.startswith("linux"): - if os.path.exists("/proc/meminfo"): - with open("/proc/meminfo") as meminfo: - memtotal_re = re.compile(r"^MemTotal:\s*(\d*)\s*kB") - for line in meminfo: - match = memtotal_re.match(line) - if not match: - continue - # Allow 8Gb per link on Linux because Gold is quite memory hungry - return max(1, int(match.group(1)) // (8 * (2 ** 20))) - return 1 - elif sys.platform == "darwin": - try: - avail_bytes = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"])) - # A static library debug build of Chromium's unit_tests takes ~2.7GB, so - # 4GB per ld process allows for some more bloat. - return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB - except subprocess.CalledProcessError: - return 1 - else: - # TODO(scottmg): Implement this for other platforms. 
- return 1 - - -def _GetWinLinkRuleNameSuffix(embed_manifest): - """Returns the suffix used to select an appropriate linking rule depending on - whether the manifest embedding is enabled.""" - return "_embed" if embed_manifest else "" - - -def _AddWinLinkRules(master_ninja, embed_manifest): - """Adds link rules for Windows platform to |master_ninja|.""" - - def FullLinkCommand(ldcmd, out, binary_type): - resource_name = {"exe": "1", "dll": "2"}[binary_type] - return ( - "%(python)s gyp-win-tool link-with-manifests $arch %(embed)s " - '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' - "$manifests" - % { - "python": sys.executable, - "out": out, - "ldcmd": ldcmd, - "resname": resource_name, - "embed": embed_manifest, - } - ) - - rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest) - use_separate_mspdbsrv = int(os.environ.get("GYP_USE_SEPARATE_MSPDBSRV", "0")) != 0 - dlldesc = "LINK%s(DLL) $binary" % rule_name_suffix.upper() - dllcmd = ( - "%s gyp-win-tool link-wrapper $arch %s " - "$ld /nologo $implibflag /DLL /OUT:$binary " - "@$binary.rsp" % (sys.executable, use_separate_mspdbsrv) - ) - dllcmd = FullLinkCommand(dllcmd, "$binary", "dll") - master_ninja.rule( - "solink" + rule_name_suffix, - description=dlldesc, - command=dllcmd, - rspfile="$binary.rsp", - rspfile_content="$libs $in_newline $ldflags", - restat=True, - pool="link_pool", - ) - master_ninja.rule( - "solink_module" + rule_name_suffix, - description=dlldesc, - command=dllcmd, - rspfile="$binary.rsp", - rspfile_content="$libs $in_newline $ldflags", - restat=True, - pool="link_pool", - ) - # Note that ldflags goes at the end so that it has the option of - # overriding default settings earlier in the command line. - exe_cmd = ( - "%s gyp-win-tool link-wrapper $arch %s " - "$ld /nologo /OUT:$binary @$binary.rsp" - % (sys.executable, use_separate_mspdbsrv) - ) - exe_cmd = FullLinkCommand(exe_cmd, "$binary", "exe") - master_ninja.rule( - "link" + rule_name_suffix, - description="LINK%s $binary" % rule_name_suffix.upper(), - command=exe_cmd, - rspfile="$binary.rsp", - rspfile_content="$in_newline $libs $ldflags", - pool="link_pool", - ) - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): - options = params["options"] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get("generator_flags", {}) - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.normpath(os.path.join(ComputeOutputDir(params), config_name)) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - - master_ninja_file = OpenOutput(os.path.join(toplevel_build, "build.ninja")) - master_ninja = ninja_syntax.Writer(master_ninja_file, width=120) - - # Put build-time support tools in out/{config_name}. - gyp.common.CopyTool(flavor, toplevel_build, generator_flags) - - # Grab make settings for CC/CXX. - # The rules are - # - The priority from low to high is gcc/g++, the 'make_global_settings' in - # gyp, the environment variable. - # - If there is no 'make_global_settings' for CC.host/CXX.host or - # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set - # to cc/cxx. - if flavor == "win": - ar = "lib.exe" - # cc and cxx must be set to the correct architecture by overriding with one - # of cl_x86 or cl_x64 below. 
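
Note: the make_global_settings handling a little further down expects a list of [key, value] pairs. The fragment below is hypothetical (paths invented) but shows the shapes that loop recognizes: plain tool keys, '.host' variants for the host toolchain, '$(CC)' back-references resolved later, and '*_wrapper' keys that prefix a command:

    # Hypothetical 'make_global_settings' section of a .gyp file.
    make_global_settings = [
        ["CC", "third_party/clang/bin/clang"],
        ["CXX", "third_party/clang/bin/clang++"],
        ["CC.host", "$(CC)"],                   # host compiler tracks target CC
        ["LINK_wrapper", "tools/wrap_link.sh"], # becomes wrappers["LINK"]
    ]
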
- cc = "UNSET" - cxx = "UNSET" - ld = "link.exe" - ld_host = "$ld" - else: - ar = "ar" - cc = "cc" - cxx = "c++" - ld = "$cc" - ldxx = "$cxx" - ld_host = "$cc_host" - ldxx_host = "$cxx_host" - - ar_host = ar - cc_host = None - cxx_host = None - cc_host_global_setting = None - cxx_host_global_setting = None - clang_cl = None - nm = "nm" - nm_host = "nm" - readelf = "readelf" - readelf_host = "readelf" - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings = data[build_file].get("make_global_settings", []) - build_to_root = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) - wrappers = {} - for key, value in make_global_settings: - if key == "AR": - ar = os.path.join(build_to_root, value) - if key == "AR.host": - ar_host = os.path.join(build_to_root, value) - if key == "CC": - cc = os.path.join(build_to_root, value) - if cc.endswith("clang-cl"): - clang_cl = cc - if key == "CXX": - cxx = os.path.join(build_to_root, value) - if key == "CC.host": - cc_host = os.path.join(build_to_root, value) - cc_host_global_setting = value - if key == "CXX.host": - cxx_host = os.path.join(build_to_root, value) - cxx_host_global_setting = value - if key == "LD": - ld = os.path.join(build_to_root, value) - if key == "LD.host": - ld_host = os.path.join(build_to_root, value) - if key == "LDXX": - ldxx = os.path.join(build_to_root, value) - if key == "LDXX.host": - ldxx_host = os.path.join(build_to_root, value) - if key == "NM": - nm = os.path.join(build_to_root, value) - if key == "NM.host": - nm_host = os.path.join(build_to_root, value) - if key == "READELF": - readelf = os.path.join(build_to_root, value) - if key == "READELF.host": - readelf_host = os.path.join(build_to_root, value) - if key.endswith("_wrapper"): - wrappers[key[: -len("_wrapper")]] = os.path.join(build_to_root, value) - - # Support wrappers from environment variables too. - for key, value in os.environ.items(): - if key.lower().endswith("_wrapper"): - key_prefix = key[: -len("_wrapper")] - key_prefix = re.sub(r"\.HOST$", ".host", key_prefix) - wrappers[key_prefix] = os.path.join(build_to_root, value) - - mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None) - if mac_toolchain_dir: - wrappers["LINK"] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir - - if flavor == "win": - configs = [ - target_dicts[qualified_target]["configurations"][config_name] - for qualified_target in target_list - ] - shared_system_includes = None - if not generator_flags.get("ninja_use_custom_environment_files", 0): - shared_system_includes = gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes( - configs, generator_flags - ) - cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( - toplevel_build, generator_flags, shared_system_includes, OpenOutput - ) - for arch, path in sorted(cl_paths.items()): - if clang_cl: - # If we have selected clang-cl, use that instead. - path = clang_cl - command = CommandWithWrapper( - "CC", wrappers, QuoteShellArgument(path, "win") - ) - if clang_cl: - # Use clang-cl to cross-compile for x86 or x86_64. 
- command += " -m32" if arch == "x86" else " -m64" - master_ninja.variable("cl_" + arch, command) - - cc = GetEnvironFallback(["CC_target", "CC"], cc) - master_ninja.variable("cc", CommandWithWrapper("CC", wrappers, cc)) - cxx = GetEnvironFallback(["CXX_target", "CXX"], cxx) - master_ninja.variable("cxx", CommandWithWrapper("CXX", wrappers, cxx)) - - if flavor == "win": - master_ninja.variable("ld", ld) - master_ninja.variable("idl", "midl.exe") - master_ninja.variable("ar", ar) - master_ninja.variable("rc", "rc.exe") - master_ninja.variable("ml_x86", "ml.exe") - master_ninja.variable("ml_x64", "ml64.exe") - master_ninja.variable("mt", "mt.exe") - else: - master_ninja.variable("ld", CommandWithWrapper("LINK", wrappers, ld)) - master_ninja.variable("ldxx", CommandWithWrapper("LINK", wrappers, ldxx)) - master_ninja.variable("ar", GetEnvironFallback(["AR_target", "AR"], ar)) - if flavor != "mac": - # Mac does not use readelf/nm for .TOC generation, so avoiding polluting - # the master ninja with extra unused variables. - master_ninja.variable("nm", GetEnvironFallback(["NM_target", "NM"], nm)) - master_ninja.variable( - "readelf", GetEnvironFallback(["READELF_target", "READELF"], readelf) - ) - - if generator_supports_multiple_toolsets: - if not cc_host: - cc_host = cc - if not cxx_host: - cxx_host = cxx - - master_ninja.variable("ar_host", GetEnvironFallback(["AR_host"], ar_host)) - master_ninja.variable("nm_host", GetEnvironFallback(["NM_host"], nm_host)) - master_ninja.variable( - "readelf_host", GetEnvironFallback(["READELF_host"], readelf_host) - ) - cc_host = GetEnvironFallback(["CC_host"], cc_host) - cxx_host = GetEnvironFallback(["CXX_host"], cxx_host) - - # The environment variable could be used in 'make_global_settings', like - # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here. - if "$(CC)" in cc_host and cc_host_global_setting: - cc_host = cc_host_global_setting.replace("$(CC)", cc) - if "$(CXX)" in cxx_host and cxx_host_global_setting: - cxx_host = cxx_host_global_setting.replace("$(CXX)", cxx) - master_ninja.variable( - "cc_host", CommandWithWrapper("CC.host", wrappers, cc_host) - ) - master_ninja.variable( - "cxx_host", CommandWithWrapper("CXX.host", wrappers, cxx_host) - ) - if flavor == "win": - master_ninja.variable("ld_host", ld_host) - master_ninja.variable("ldxx_host", ldxx_host) - else: - master_ninja.variable( - "ld_host", CommandWithWrapper("LINK", wrappers, ld_host) - ) - master_ninja.variable( - "ldxx_host", CommandWithWrapper("LINK", wrappers, ldxx_host) - ) - - master_ninja.newline() - - master_ninja.pool("link_pool", depth=GetDefaultConcurrentLinks()) - master_ninja.newline() - - deps = "msvc" if flavor == "win" else "gcc" - - if flavor != "win": - master_ninja.rule( - "cc", - description="CC $out", - command=( - "$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c " - "$cflags_pch_c -c $in -o $out" - ), - depfile="$out.d", - deps=deps, - ) - master_ninja.rule( - "cc_s", - description="CC $out", - command=( - "$cc $defines $includes $cflags $cflags_c " - "$cflags_pch_c -c $in -o $out" - ), - ) - master_ninja.rule( - "cxx", - description="CXX $out", - command=( - "$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc " - "$cflags_pch_cc -c $in -o $out" - ), - depfile="$out.d", - deps=deps, - ) - else: - # TODO(scottmg) Separate pdb names is a test to see if it works around - # http://crbug.com/142362. It seems there's a race between the creation of - # the .pdb by the precompiled header step for .cc and the compilation of - # .c files. 
This should be handled by mspdbsrv, but rarely errors out with - # c1xx : fatal error C1033: cannot open program database - # By making the rules target separate pdb files this might be avoided. - cc_command = ( - "ninja -t msvc -e $arch " + "-- " - "$cc /nologo /showIncludes /FC " - "@$out.rsp /c $in /Fo$out /Fd$pdbname_c " - ) - cxx_command = ( - "ninja -t msvc -e $arch " + "-- " - "$cxx /nologo /showIncludes /FC " - "@$out.rsp /c $in /Fo$out /Fd$pdbname_cc " - ) - master_ninja.rule( - "cc", - description="CC $out", - command=cc_command, - rspfile="$out.rsp", - rspfile_content="$defines $includes $cflags $cflags_c", - deps=deps, - ) - master_ninja.rule( - "cxx", - description="CXX $out", - command=cxx_command, - rspfile="$out.rsp", - rspfile_content="$defines $includes $cflags $cflags_cc", - deps=deps, - ) - master_ninja.rule( - "idl", - description="IDL $in", - command=( - "%s gyp-win-tool midl-wrapper $arch $outdir " - "$tlb $h $dlldata $iid $proxy $in " - "$midl_includes $idlflags" % sys.executable - ), - ) - master_ninja.rule( - "rc", - description="RC $in", - # Note: $in must be last otherwise rc.exe complains. - command=( - "%s gyp-win-tool rc-wrapper " - "$arch $rc $defines $resource_includes $rcflags /fo$out $in" - % sys.executable - ), - ) - master_ninja.rule( - "asm", - description="ASM $out", - command=( - "%s gyp-win-tool asm-wrapper " - "$arch $asm $defines $includes $asmflags /c /Fo $out $in" - % sys.executable - ), - ) - - if flavor != "mac" and flavor != "win": - master_ninja.rule( - "alink", - description="AR $out", - command="rm -f $out && $ar rcs $arflags $out $in", - ) - master_ninja.rule( - "alink_thin", - description="AR $out", - command="rm -f $out && $ar rcsT $arflags $out $in", - ) - - # This allows targets that only need to depend on $lib's API to declare an - # order-only dependency on $lib.TOC and avoid relinking such downstream - # dependencies when $lib changes only in non-public ways. - # The resulting string leaves an uninterpolated %{suffix} which - # is used in the final substitution below. - mtime_preserving_solink_base = ( - "if [ ! -e $lib -o ! -e $lib.TOC ]; then " - "%(solink)s && %(extract_toc)s > $lib.TOC; else " - "%(solink)s && %(extract_toc)s > $lib.tmp && " - "if ! 
cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; " - "fi; fi" - % { - "solink": "$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s", - "extract_toc": ( - "{ $readelf -d $lib | grep SONAME ; " - "$nm -gD -f p $lib | cut -f1-2 -d' '; }" - ), - } - ) - - master_ninja.rule( - "solink", - description="SOLINK $lib", - restat=True, - command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"}, # noqa: E501 - rspfile="$link_file_list", - rspfile_content=("-Wl,--whole-archive $in $solibs -Wl," - "--no-whole-archive $libs"), - pool="link_pool", - ) - master_ninja.rule( - "solink_module", - description="SOLINK(module) $lib", - restat=True, - command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"}, - rspfile="$link_file_list", - rspfile_content="-Wl,--start-group $in $solibs $libs -Wl,--end-group", - pool="link_pool", - ) - master_ninja.rule( - "link", - description="LINK $out", - command=( - "$ld $ldflags -o $out " - "-Wl,--start-group $in $solibs $libs -Wl,--end-group" - ), - pool="link_pool", - ) - elif flavor == "win": - master_ninja.rule( - "alink", - description="LIB $out", - command=( - "%s gyp-win-tool link-wrapper $arch False " - "$ar /nologo /ignore:4221 /OUT:$out @$out.rsp" % sys.executable - ), - rspfile="$out.rsp", - rspfile_content="$in_newline $libflags", - ) - _AddWinLinkRules(master_ninja, embed_manifest=True) - _AddWinLinkRules(master_ninja, embed_manifest=False) - else: - master_ninja.rule( - "objc", - description="OBJC $out", - command=( - "$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc " - "$cflags_pch_objc -c $in -o $out" - ), - depfile="$out.d", - deps=deps, - ) - master_ninja.rule( - "objcxx", - description="OBJCXX $out", - command=( - "$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc " - "$cflags_pch_objcc -c $in -o $out" - ), - depfile="$out.d", - deps=deps, - ) - master_ninja.rule( - "alink", - description="LIBTOOL-STATIC $out, POSTBUILDS", - command="rm -f $out && " - "./gyp-mac-tool filter-libtool libtool $libtool_flags " - "-static -o $out $in" - "$postbuilds", - ) - master_ninja.rule( - "lipo", - description="LIPO $out, POSTBUILDS", - command="rm -f $out && lipo -create $in -output $out$postbuilds", - ) - master_ninja.rule( - "solipo", - description="SOLIPO $out, POSTBUILDS", - command=( - "rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&" - "%(extract_toc)s > $lib.TOC" - % { - "extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; " - "nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }" - } - ), - ) - - # Record the public interface of $lib in $lib.TOC. See the corresponding - # comment in the posix section above for details. - solink_base = "$ld %(type)s $ldflags -o $lib %(suffix)s" - mtime_preserving_solink_base = ( - "if [ ! -e $lib -o ! -e $lib.TOC ] || " - # Always force dependent targets to relink if this library - # reexports something. Handling this correctly would require - # recursive TOC dumping but this is rare in practice, so punt. - "otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then " - "%(solink)s && %(extract_toc)s > $lib.TOC; " - "else " - "%(solink)s && %(extract_toc)s > $lib.tmp && " - "if ! 
cmp -s $lib.tmp $lib.TOC; then " - "mv $lib.tmp $lib.TOC ; " - "fi; " - "fi" - % { - "solink": solink_base, - "extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; " - "nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }", - } - ) - - solink_suffix = "@$link_file_list$postbuilds" - master_ninja.rule( - "solink", - description="SOLINK $lib, POSTBUILDS", - restat=True, - command=mtime_preserving_solink_base - % {"suffix": solink_suffix, "type": "-shared"}, - rspfile="$link_file_list", - rspfile_content="$in $solibs $libs", - pool="link_pool", - ) - master_ninja.rule( - "solink_notoc", - description="SOLINK $lib, POSTBUILDS", - restat=True, - command=solink_base % {"suffix": solink_suffix, "type": "-shared"}, - rspfile="$link_file_list", - rspfile_content="$in $solibs $libs", - pool="link_pool", - ) - - master_ninja.rule( - "solink_module", - description="SOLINK(module) $lib, POSTBUILDS", - restat=True, - command=mtime_preserving_solink_base - % {"suffix": solink_suffix, "type": "-bundle"}, - rspfile="$link_file_list", - rspfile_content="$in $solibs $libs", - pool="link_pool", - ) - master_ninja.rule( - "solink_module_notoc", - description="SOLINK(module) $lib, POSTBUILDS", - restat=True, - command=solink_base % {"suffix": solink_suffix, "type": "-bundle"}, - rspfile="$link_file_list", - rspfile_content="$in $solibs $libs", - pool="link_pool", - ) - - master_ninja.rule( - "link", - description="LINK $out, POSTBUILDS", - command=("$ld $ldflags -o $out " "$in $solibs $libs$postbuilds"), - pool="link_pool", - ) - master_ninja.rule( - "preprocess_infoplist", - description="PREPROCESS INFOPLIST $out", - command=( - "$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && " - "plutil -convert xml1 $out $out" - ), - ) - master_ninja.rule( - "copy_infoplist", - description="COPY INFOPLIST $in", - command="$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys", - ) - master_ninja.rule( - "merge_infoplist", - description="MERGE INFOPLISTS $in", - command="$env ./gyp-mac-tool merge-info-plist $out $in", - ) - master_ninja.rule( - "compile_xcassets", - description="COMPILE XCASSETS $in", - command="$env ./gyp-mac-tool compile-xcassets $keys $in", - ) - master_ninja.rule( - "compile_ios_framework_headers", - description="COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in", - command="$env ./gyp-mac-tool compile-ios-framework-header-map $out " - "$framework $in && $env ./gyp-mac-tool " - "copy-ios-framework-headers $framework $copy_headers", - ) - master_ninja.rule( - "mac_tool", - description="MACTOOL $mactool_cmd $in", - command="$env ./gyp-mac-tool $mactool_cmd $in $out $binary", - ) - master_ninja.rule( - "package_framework", - description="PACKAGE FRAMEWORK $out, POSTBUILDS", - command="./gyp-mac-tool package-framework $out $version$postbuilds " - "&& touch $out", - ) - master_ninja.rule( - "package_ios_framework", - description="PACKAGE IOS FRAMEWORK $out, POSTBUILDS", - command="./gyp-mac-tool package-ios-framework $out $postbuilds " - "&& touch $out", - ) - if flavor == "win": - master_ninja.rule( - "stamp", - description="STAMP $out", - command="%s gyp-win-tool stamp $out" % sys.executable, - ) - else: - master_ninja.rule( - "stamp", description="STAMP $out", command="${postbuilds}touch $out" - ) - if flavor == "win": - master_ninja.rule( - "copy", - description="COPY $in $out", - command="%s gyp-win-tool recursive-mirror $in $out" % sys.executable, - ) - elif flavor == "zos": - master_ninja.rule( - "copy", - description="COPY $in $out", - command="rm -rf $out && cp -fRP $in $out", - ) - 
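
Note: the solink/.TOC rules defined above are what keep implementation-only edits to a shared library from relinking every dependent: the library and its .TOC are produced by one edge with restat=1, and dependents take an implicit dependency on the .TOC alone. A schematic of the resulting edges, written with the same ninja_syntax Writer this generator uses (file names hypothetical; the import path may differ by packaging):

    from io import StringIO
    import ninja_syntax  # the writer module bundled alongside this generator

    buf = StringIO()
    w = ninja_syntax.Writer(buf)
    # One edge produces both the library and its table of contents.
    w.build(["libfoo.so", "libfoo.so.TOC"], "solink", ["foo.o"])
    # Dependents depend on the TOC, not the library, so they relink only
    # when the exported interface recorded in the TOC actually changes.
    w.build("app", "link", ["main.o"], implicit=["libfoo.so.TOC"],
            variables=[("solibs", "libfoo.so")])
    print(buf.getvalue())
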
else: - master_ninja.rule( - "copy", - description="COPY $in $out", - command="ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)", - ) - master_ninja.newline() - - all_targets = set() - for build_file in params["build_files"]: - for target in gyp.common.AllTargets( - target_list, target_dicts, os.path.normpath(build_file) - ): - all_targets.add(target) - all_outputs = set() - - # target_outputs is a map from qualified target name to a Target object. - target_outputs = {} - # target_short_names is a map from target short name to a list of Target - # objects. - target_short_names = {} - - # short name of targets that were skipped because they didn't contain anything - # interesting. - # NOTE: there may be overlap between this an non_empty_target_names. - empty_target_names = set() - - # Set of non-empty short target names. - # NOTE: there may be overlap between this an empty_target_names. - non_empty_target_names = set() - - for qualified_target in target_list: - # qualified_target is like: third_party/icu/icu.gyp:icui18n#target - build_file, name, toolset = gyp.common.ParseQualifiedTarget(qualified_target) - - this_make_global_settings = data[build_file].get("make_global_settings", []) - assert make_global_settings == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets. %s vs. %s" - % (this_make_global_settings, make_global_settings) - ) - - spec = target_dicts[qualified_target] - if flavor == "mac": - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - # If build_file is a symlink, we must not follow it because there's a chance - # it could point to a path above toplevel_dir, and we cannot correctly deal - # with that case at the moment. - build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, False) - - qualified_target_for_hash = gyp.common.QualifiedTarget( - build_file, name, toolset - ) - qualified_target_for_hash = qualified_target_for_hash.encode("utf-8") - hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() - - base_path = os.path.dirname(build_file) - obj = "obj" - if toolset != "target": - obj += "." + toolset - output_file = os.path.join(obj, base_path, name + ".ninja") - - ninja_output = StringIO() - writer = NinjaWriter( - hash_for_rules, - target_outputs, - base_path, - build_dir, - ninja_output, - toplevel_build, - output_file, - flavor, - toplevel_dir=options.toplevel_dir, - ) - - target = writer.WriteSpec(spec, config_name, generator_flags) - - if ninja_output.tell() > 0: - # Only create files for ninja files that actually have contents. - with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file: - ninja_file.write(ninja_output.getvalue()) - ninja_output.close() - master_ninja.subninja(output_file) - - if target: - if name != target.FinalOutput() and spec["toolset"] == "target": - target_short_names.setdefault(name, []).append(target) - target_outputs[qualified_target] = target - if qualified_target in all_targets: - all_outputs.add(target.FinalOutput()) - non_empty_target_names.add(name) - else: - empty_target_names.add(name) - - if target_short_names: - # Write a short name to build this target. This benefits both the - # "build chrome" case as well as the gyp tests, which expect to be - # able to run actions and build libraries by their short name. 
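
Note: the loop that follows turns each short name into a plain phony edge, which is what makes a command like `ninja chrome` work even though the real artifact lives at a qualified path. A self-contained sketch with the same Writer API (target name and path invented):

    from io import StringIO
    import ninja_syntax  # see the note above about the import path

    buf = StringIO()
    w = ninja_syntax.Writer(buf)
    # One phony edge per short name, pointing at FinalOutput() of each
    # target that shares the name.
    w.build("chrome", "phony", ["out/Debug/chrome"])
    assert buf.getvalue().startswith("build chrome: phony")
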
- master_ninja.newline() - master_ninja.comment("Short names for targets.") - for short_name in sorted(target_short_names): - master_ninja.build( - short_name, - "phony", - [x.FinalOutput() for x in target_short_names[short_name]], - ) - - # Write phony targets for any empty targets that weren't written yet. As - # short names are not necessarily unique only do this for short names that - # haven't already been output for another target. - empty_target_names = empty_target_names - non_empty_target_names - if empty_target_names: - master_ninja.newline() - master_ninja.comment("Empty targets (output for completeness).") - for name in sorted(empty_target_names): - master_ninja.build(name, "phony") - - if all_outputs: - master_ninja.newline() - master_ninja.build("all", "phony", sorted(all_outputs)) - master_ninja.default(generator_flags.get("default_target", "all")) - - master_ninja_file.close() - - -def PerformBuild(data, configurations, params): - options = params["options"] - for config in configurations: - builddir = os.path.join(options.toplevel_dir, "out", config) - arguments = ["ninja", "-C", builddir] - print("Building [%s]: %s" % (config, arguments)) - subprocess.check_call(arguments) - - -def CallGenerateOutputForConfig(arglist): - # Ignore the interrupt signal so that the parent process catches it and - # kills all multiprocessing children. - signal.signal(signal.SIGINT, signal.SIG_IGN) - - (target_list, target_dicts, data, params, config_name) = arglist - GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) - - -def GenerateOutput(target_list, target_dicts, data, params): - # Update target_dicts for iOS device builds. - target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator( - target_dicts - ) - - user_config = params.get("generator_flags", {}).get("config", None) - if gyp.common.GetFlavor(params) == "win": - target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) - target_list, target_dicts = MSVSUtil.InsertLargePdbShims( - target_list, target_dicts, generator_default_variables - ) - - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) - else: - config_names = target_dicts[target_list[0]]["configurations"] - if params["parallel"]: - try: - pool = multiprocessing.Pool(len(config_names)) - arglists = [] - for config_name in config_names: - arglists.append( - (target_list, target_dicts, data, params, config_name) - ) - pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt as e: - pool.terminate() - raise e - else: - for config_name in config_names: - GenerateOutputForConfig( - target_list, target_dicts, data, params, config_name - ) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py deleted file mode 100644 index abadcd9828e8b..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the ninja.py file. 
""" - -import sys -import unittest - -import gyp.generator.ninja as ninja - - -class TestPrefixesAndSuffixes(unittest.TestCase): - def test_BinaryNamesWindows(self): - # These cannot run on non-Windows as they require a VS installation to - # correctly handle variable expansion. - if sys.platform.startswith("win"): - writer = ninja.NinjaWriter( - "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win" - ) - spec = {"target_name": "wee"} - self.assertTrue( - writer.ComputeOutputFileName(spec, "executable").endswith(".exe") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "static_library").endswith(".lib") - ) - - def test_BinaryNamesLinux(self): - writer = ninja.NinjaWriter( - "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux" - ) - spec = {"target_name": "wee"} - self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable")) - self.assertTrue( - writer.ComputeOutputFileName(spec, "shared_library").startswith("lib") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "static_library").startswith("lib") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "shared_library").endswith(".so") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "static_library").endswith(".a") - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py deleted file mode 100644 index 9e7e99e9e13d2..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py +++ /dev/null @@ -1,1395 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from __future__ import print_function - -import filecmp -import gyp.common -import gyp.xcodeproj_file -import gyp.xcode_ninja -import errno -import os -import sys -import posixpath -import re -import shutil -import subprocess -import tempfile - - -# Project files generated by this module will use _intermediate_var as a -# custom Xcode setting whose value is a DerivedSources-like directory that's -# project-specific and configuration-specific. The normal choice, -# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive -# as it is likely that multiple targets within a single project file will want -# to access the same set of generated files. The other option, -# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific, -# it is not configuration-specific. INTERMEDIATE_DIR is defined as -# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION). -_intermediate_var = "INTERMEDIATE_DIR" - -# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all -# targets that share the same BUILT_PRODUCTS_DIR. -_shared_intermediate_var = "SHARED_INTERMEDIATE_DIR" - -_library_search_paths_var = "LIBRARY_SEARCH_PATHS" - -generator_default_variables = { - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "SHARED_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "SHARED_LIB_SUFFIX": ".dylib", - # INTERMEDIATE_DIR is a place for targets to build up intermediate products. - # It is specific to each build environment. It is only guaranteed to exist - # and be constant within the context of a project, corresponding to a single - # input file. 
Some build environments may allow their intermediate directory - # to be shared on a wider scale, but this is not guaranteed. - "INTERMEDIATE_DIR": "$(%s)" % _intermediate_var, - "OS": "mac", - "PRODUCT_DIR": "$(BUILT_PRODUCTS_DIR)", - "LIB_DIR": "$(BUILT_PRODUCTS_DIR)", - "RULE_INPUT_ROOT": "$(INPUT_FILE_BASE)", - "RULE_INPUT_EXT": "$(INPUT_FILE_SUFFIX)", - "RULE_INPUT_NAME": "$(INPUT_FILE_NAME)", - "RULE_INPUT_PATH": "$(INPUT_FILE_PATH)", - "RULE_INPUT_DIRNAME": "$(INPUT_FILE_DIRNAME)", - "SHARED_INTERMEDIATE_DIR": "$(%s)" % _shared_intermediate_var, - "CONFIGURATION_NAME": "$(CONFIGURATION)", -} - -# The Xcode-specific sections that hold paths. -generator_additional_path_sections = [ - "mac_bundle_resources", - "mac_framework_headers", - "mac_framework_private_headers", - # 'mac_framework_dirs', input already handles _dirs endings. -] - -# The Xcode-specific keys that exist on targets and aren't moved down to -# configurations. -generator_additional_non_configuration_keys = [ - "ios_app_extension", - "ios_watch_app", - "ios_watchkit_extension", - "mac_bundle", - "mac_bundle_resources", - "mac_framework_headers", - "mac_framework_private_headers", - "mac_xctest_bundle", - "mac_xcuitest_bundle", - "xcode_create_dependents_test_runner", -] - -# We want to let any rules apply to files that are resources also. -generator_extra_sources_for_rules = [ - "mac_bundle_resources", - "mac_framework_headers", - "mac_framework_private_headers", -] - -generator_filelist_paths = None - -# Xcode's standard set of library directories, which don't need to be duplicated -# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay. -xcode_standard_library_dirs = frozenset( - ["$(SDKROOT)/usr/lib", "$(SDKROOT)/usr/local/lib"] -) - - -def CreateXCConfigurationList(configuration_names): - xccl = gyp.xcodeproj_file.XCConfigurationList({"buildConfigurations": []}) - if len(configuration_names) == 0: - configuration_names = ["Default"] - for configuration_name in configuration_names: - xcbc = gyp.xcodeproj_file.XCBuildConfiguration({"name": configuration_name}) - xccl.AppendProperty("buildConfigurations", xcbc) - xccl.SetProperty("defaultConfigurationName", configuration_names[0]) - return xccl - - -class XcodeProject(object): - def __init__(self, gyp_path, path, build_file_dict): - self.gyp_path = gyp_path - self.path = path - self.project = gyp.xcodeproj_file.PBXProject(path=path) - projectDirPath = gyp.common.RelativePath( - os.path.dirname(os.path.abspath(self.gyp_path)), - os.path.dirname(path) or ".", - ) - self.project.SetProperty("projectDirPath", projectDirPath) - self.project_file = gyp.xcodeproj_file.XCProjectFile( - {"rootObject": self.project} - ) - self.build_file_dict = build_file_dict - - # TODO(mark): add destructor that cleans up self.path if created_dir is - # True and things didn't complete successfully. Or do something even - # better with "try"? - self.created_dir = False - try: - os.makedirs(self.path) - self.created_dir = True - except OSError as e: - if e.errno != errno.EEXIST: - raise - - def Finalize1(self, xcode_targets, serialize_all_tests): - # Collect a list of all of the build configuration names used by the - # various targets in the file. It is very heavily advised to keep each - # target in an entire project (even across multiple project files) using - # the same set of configuration names. 
- configurations = [] - for xct in self.project.GetProperty("targets"): - xccl = xct.GetProperty("buildConfigurationList") - xcbcs = xccl.GetProperty("buildConfigurations") - for xcbc in xcbcs: - name = xcbc.GetProperty("name") - if name not in configurations: - configurations.append(name) - - # Replace the XCConfigurationList attached to the PBXProject object with - # a new one specifying all of the configuration names used by the various - # targets. - try: - xccl = CreateXCConfigurationList(configurations) - self.project.SetProperty("buildConfigurationList", xccl) - except Exception: - sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path) - raise - - # The need for this setting is explained above where _intermediate_var is - # defined. The comments below about wanting to avoid project-wide build - # settings apply here too, but this needs to be set on a project-wide basis - # so that files relative to the _intermediate_var setting can be displayed - # properly in the Xcode UI. - # - # Note that for configuration-relative files such as anything relative to - # _intermediate_var, for the purposes of UI tree view display, Xcode will - # only resolve the configuration name once, when the project file is - # opened. If the active build configuration is changed, the project file - # must be closed and reopened if it is desired for the tree view to update. - # This is filed as Apple radar 6588391. - xccl.SetBuildSetting( - _intermediate_var, "$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)" - ) - xccl.SetBuildSetting( - _shared_intermediate_var, "$(SYMROOT)/DerivedSources/$(CONFIGURATION)" - ) - - # Set user-specified project-wide build settings and config files. This - # is intended to be used very sparingly. Really, almost everything should - # go into target-specific build settings sections. The project-wide - # settings are only intended to be used in cases where Xcode attempts to - # resolve variable references in a project context as opposed to a target - # context, such as when resolving sourceTree references while building up - # the tree tree view for UI display. - # Any values set globally are applied to all configurations, then any - # per-configuration values are applied. - for xck, xcv in self.build_file_dict.get("xcode_settings", {}).items(): - xccl.SetBuildSetting(xck, xcv) - if "xcode_config_file" in self.build_file_dict: - config_ref = self.project.AddOrGetFileInRootGroup( - self.build_file_dict["xcode_config_file"] - ) - xccl.SetBaseConfiguration(config_ref) - build_file_configurations = self.build_file_dict.get("configurations", {}) - if build_file_configurations: - for config_name in configurations: - build_file_configuration_named = build_file_configurations.get( - config_name, {} - ) - if build_file_configuration_named: - xcc = xccl.ConfigurationNamed(config_name) - for xck, xcv in build_file_configuration_named.get( - "xcode_settings", {} - ).items(): - xcc.SetBuildSetting(xck, xcv) - if "xcode_config_file" in build_file_configuration_named: - config_ref = self.project.AddOrGetFileInRootGroup( - build_file_configurations[config_name]["xcode_config_file"] - ) - xcc.SetBaseConfiguration(config_ref) - - # Sort the targets based on how they appeared in the input. - # TODO(mark): Like a lot of other things here, this assumes internal - # knowledge of PBXProject - in this case, of its "targets" property. - - # ordinary_targets are ordinary targets that are already in the project - # file. 
run_test_targets are the targets that run unit tests and should be - used for the Run All Tests target. support_targets are the action/rule - targets used by GYP file targets, just kept for the assert check. - ordinary_targets = [] - run_test_targets = [] - support_targets = [] - - # targets is the full list of targets in the project. - targets = [] - - # does the project define its own "all"? - has_custom_all = False - - # targets_for_all is the list of ordinary_targets that should be listed - # in this project's "All" target. It includes each non_runtest_target - # that does not have suppress_wildcard set. - targets_for_all = [] - - for target in self.build_file_dict["targets"]: - target_name = target["target_name"] - toolset = target["toolset"] - qualified_target = gyp.common.QualifiedTarget( - self.gyp_path, target_name, toolset - ) - xcode_target = xcode_targets[qualified_target] - # Make sure that the target being added to the sorted list is already in - # the unsorted list. - assert xcode_target in self.project._properties["targets"] - targets.append(xcode_target) - ordinary_targets.append(xcode_target) - if xcode_target.support_target: - support_targets.append(xcode_target.support_target) - targets.append(xcode_target.support_target) - - if not int(target.get("suppress_wildcard", False)): - targets_for_all.append(xcode_target) - - if target_name.lower() == "all": - has_custom_all = True - - # If this target has a 'run_as' attribute, add its target to the - # targets, and add it to the test targets. - if target.get("run_as"): - # Make a target to run something. It should have one - # dependency, the parent xcode target. - xccl = CreateXCConfigurationList(configurations) - run_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - "name": "Run " + target_name, - "productName": xcode_target.GetProperty("productName"), - "buildConfigurationList": xccl, - }, - parent=self.project, - ) - run_target.AddDependency(xcode_target) - - command = target["run_as"] - script = "" - if command.get("working_directory"): - script = ( - script - + 'cd "%s"\n' - % gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - command.get("working_directory") - ) - ) - - if command.get("environment"): - script = ( - script - + "\n".join( - [ - 'export %s="%s"' - % ( - key, - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - val - ), - ) - for (key, val) in command.get("environment").items() - ] - ) - + "\n" - ) - - # Some tests end up using sockets, files on disk, etc. and can get - # confused if more than one test runs at a time. The generator - # flag 'xcode_serialize_all_test_runs' controls whether all - # tests are forced to run serially. It defaults to True. To get serial runs, this - # little bit of Python does the same as the Linux flock utility to - # make sure only one runs at a time. - command_prefix = "" - if serialize_all_tests: - command_prefix = """python -c "import fcntl, subprocess, sys -file = open('$TMPDIR/GYP_serialize_test_runs', 'a') -fcntl.flock(file.fileno(), fcntl.LOCK_EX) -sys.exit(subprocess.call(sys.argv[1:]))" """ - - # If we were unable to exec for some reason, we want to exit - # with an error, and fix up variable references to be shell - # syntax instead of xcode syntax.
- script = ( - script - + "exec " - + command_prefix - + "%s\nexit 1\n" - % gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - gyp.common.EncodePOSIXShellList(command.get("action")) - ) - ) - - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( - {"shellScript": script, "showEnvVarsInLog": 0} - ) - run_target.AppendProperty("buildPhases", ssbp) - - # Add the run target to the project file. - targets.append(run_target) - run_test_targets.append(run_target) - xcode_target.test_runner = run_target - - # Make sure that the list of targets being replaced is the same length as - # the one replacing it, but allow for the added test runner targets. - assert len(self.project._properties["targets"]) == len(ordinary_targets) + len( - support_targets - ) - - self.project._properties["targets"] = targets - - # Get rid of unnecessary levels of depth in groups like the Source group. - self.project.RootGroupsTakeOverOnlyChildren(True) - - # Sort the groups nicely. Do this after sorting the targets, because the - # Products group is sorted based on the order of the targets. - self.project.SortGroups() - - # Create an "All" target if there's more than one target in this project - # file and the project didn't define its own "All" target. Put a generated - # "All" target first so that people opening up the project for the first - # time will build everything by default. - if len(targets_for_all) > 1 and not has_custom_all: - xccl = CreateXCConfigurationList(configurations) - all_target = gyp.xcodeproj_file.PBXAggregateTarget( - {"buildConfigurationList": xccl, "name": "All"}, parent=self.project - ) - - for target in targets_for_all: - all_target.AddDependency(target) - - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._properties. It's important to get the "All" target first, - # though. - self.project._properties["targets"].insert(0, all_target) - - # The same, but for run_test_targets. - if len(run_test_targets) > 1: - xccl = CreateXCConfigurationList(configurations) - run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( - {"buildConfigurationList": xccl, "name": "Run All Tests"}, - parent=self.project, - ) - for run_test_target in run_test_targets: - run_all_tests_target.AddDependency(run_test_target) - - # Insert after the "All" target, which must exist if there is more than - # one run_test_target. - self.project._properties["targets"].insert(1, run_all_tests_target) - - def Finalize2(self, xcode_targets, xcode_target_to_target_dict): - # Finalize2 needs to happen in a separate step because the process of - # updating references to other projects depends on the ordering of targets - # within remote project files. Finalize1 is responsible for sorting duty, - # and once all project files are sorted, Finalize2 can come in and update - # these references. - - # To support making a "test runner" target that will run all the tests - # that are direct dependents of any given target, we look for - # xcode_create_dependents_test_runner being set on an Aggregate target, - # and generate a second target that will run the tests runners found under - # the marked target. 
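# Editor's note: the flock trick described in the comment above, as a
# standalone POSIX-only script. Save as serialize.py and invoke as
# `python serialize.py <command...>`; concurrent invocations queue on the
# exclusive lock, so only one command runs at a time. The lock path is
# hard-coded here where the generated phase uses $TMPDIR.
import fcntl
import subprocess
import sys

lock_file = open("/tmp/GYP_serialize_test_runs", "a")
fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX)  # blocks until the lock is free
sys.exit(subprocess.call(sys.argv[1:]))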
- for bf_tgt in self.build_file_dict["targets"]: - if int(bf_tgt.get("xcode_create_dependents_test_runner", 0)): - tgt_name = bf_tgt["target_name"] - toolset = bf_tgt["toolset"] - qualified_target = gyp.common.QualifiedTarget( - self.gyp_path, tgt_name, toolset - ) - xcode_target = xcode_targets[qualified_target] - if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): - # Collect all the run test targets. - all_run_tests = [] - pbxtds = xcode_target.GetProperty("dependencies") - for pbxtd in pbxtds: - pbxcip = pbxtd.GetProperty("targetProxy") - dependency_xct = pbxcip.GetProperty("remoteGlobalIDString") - if hasattr(dependency_xct, "test_runner"): - all_run_tests.append(dependency_xct.test_runner) - - # Directly depend on all the runners as they depend on the target - # that builds them. - if len(all_run_tests) > 0: - run_all_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - "name": "Run %s Tests" % tgt_name, - "productName": tgt_name, - }, - parent=self.project, - ) - for run_test_target in all_run_tests: - run_all_target.AddDependency(run_test_target) - - # Insert the test runner after the related target. - idx = self.project._properties["targets"].index(xcode_target) - self.project._properties["targets"].insert( - idx + 1, run_all_target - ) - - # Update all references to other projects, to make sure that the lists of - # remote products are complete. Otherwise, Xcode will fill them in when - # it opens the project file, which will result in unnecessary diffs. - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._other_pbxprojects. - for other_pbxproject in self.project._other_pbxprojects.keys(): - self.project.AddOrGetProjectReference(other_pbxproject) - - self.project.SortRemoteProductReferences() - - # Give everything an ID. - self.project_file.ComputeIDs() - - # Make sure that no two objects in the project file have the same ID. If - # multiple objects wind up with the same ID, upon loading the file, Xcode - # will only recognize one object (the last one in the file?) and the - # results are unpredictable. - self.project_file.EnsureNoIDCollisions() - - def Write(self): - # Write the project file to a temporary location first. Xcode watches for - # changes to the project file and presents a UI sheet offering to reload - # the project when it does change. However, in some cases, especially when - # multiple projects are open or when Xcode is busy, things don't work so - # seamlessly. Sometimes, Xcode is able to detect that a project file has - # changed but can't unload it because something else is referencing it. - # To mitigate this problem, and to avoid even having Xcode present the UI - # sheet when an open project is rewritten for inconsequential changes, the - # project file is written to a temporary file in the xcodeproj directory - # first. The new temporary file is then compared to the existing project - # file, if any. If they differ, the new file replaces the old; otherwise, - # the new project file is simply deleted. Xcode properly detects a file - # being renamed over an open project file as a change and so it remains - # able to present the "project file changed" sheet under this system. - # Writing to a temporary file first also avoids the possible problem of - # Xcode rereading an incomplete project file. 
- (output_fd, new_pbxproj_path) = tempfile.mkstemp( - suffix=".tmp", prefix="project.pbxproj.gyp.", dir=self.path - ) - - try: - output_file = os.fdopen(output_fd, "w") - - self.project_file.Print(output_file) - output_file.close() - - pbxproj_path = os.path.join(self.path, "project.pbxproj") - - same = False - try: - same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(new_pbxproj_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(0o77) - os.umask(umask) - - os.chmod(new_pbxproj_path, 0o666 & ~umask) - os.rename(new_pbxproj_path, pbxproj_path) - - except Exception: - # Don't leave turds behind. In fact, if this code was responsible for - # creating the xcodeproj directory, get rid of that too. - os.unlink(new_pbxproj_path) - if self.created_dir: - shutil.rmtree(self.path, True) - raise - - -def AddSourceToTarget(source, type, pbxp, xct): - # TODO(mark): Perhaps source_extensions and library_extensions can be made a - # little bit fancier. - source_extensions = ["c", "cc", "cpp", "cxx", "m", "mm", "s", "swift"] - - # .o is conceptually more of a "source" than a "library," but Xcode thinks - # of "sources" as things to compile and "libraries" (or "frameworks") as - # things to link with. Adding an object file to an Xcode target's frameworks - # phase works properly. - library_extensions = ["a", "dylib", "framework", "o"] - - basename = posixpath.basename(source) - (root, ext) = posixpath.splitext(basename) - if ext: - ext = ext[1:].lower() - - if ext in source_extensions and type != "none": - xct.SourcesPhase().AddFile(source) - elif ext in library_extensions and type != "none": - xct.FrameworksPhase().AddFile(source) - else: - # Files that aren't added to a sources or frameworks build phase can still - # go into the project file, just not as part of a build phase. - pbxp.AddOrGetFileInRootGroup(source) - - -def AddResourceToTarget(resource, pbxp, xct): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. - xct.ResourcesPhase().AddFile(resource) - - -def AddHeaderToTarget(header, pbxp, xct, is_public): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. - settings = "{ATTRIBUTES = (%s, ); }" % ("Private", "Public")[is_public] - xct.HeadersPhase().AddFile(header, settings) - - -_xcode_variable_re = re.compile(r"(\$\((.*?)\))") - - -def ExpandXcodeVariables(string, expansions): - """Expands Xcode-style $(VARIABLES) in string per the expansions dict. - - In some rare cases, it is appropriate to expand Xcode variables when a - project file is generated. For any substring $(VAR) in string, if VAR is a - key in the expansions dict, $(VAR) will be replaced with expansions[VAR]. - Any $(VAR) substring in string for which VAR is not a key in the expansions - dict will remain in the returned string. 
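# Editor's note: the compare-then-rename dance XcodeProject.Write performs
# above, reduced to a sketch. mkstemp creates the file mode 0600, so the mode
# is widened per the umask before the atomic rename; unlike the original,
# this sketch uses os.path.exists instead of catching ENOENT from filecmp.
import filecmp
import os
import tempfile


def write_if_changed(path, contents):
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
    with os.fdopen(fd, "w") as tmp_file:
        tmp_file.write(contents)
    if os.path.exists(path) and filecmp.cmp(path, tmp_path, False):
        os.unlink(tmp_path)  # identical: leave the old file untouched
        return
    umask = os.umask(0o77)  # no way to read the umask without setting one
    os.umask(umask)
    os.chmod(tmp_path, 0o666 & ~umask)
    os.rename(tmp_path, path)  # atomic; readers never see a partial file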
- """ - - matches = _xcode_variable_re.findall(string) - if matches is None: - return string - - matches.reverse() - for match in matches: - (to_replace, variable) = match - if variable not in expansions: - continue - - replacement = expansions[variable] - string = re.sub(re.escape(to_replace), replacement, string) - - return string - - -_xcode_define_re = re.compile(r"([\\\"\' ])") - - -def EscapeXcodeDefine(s): - """We must escape the defines that we give to XCode so that it knows not to - split on spaces and to respect backslash and quote literals. However, we - must not quote the define, or Xcode will incorrectly interpret variables - especially $(inherited).""" - return re.sub(_xcode_define_re, r"\\\1", s) - - -def PerformBuild(data, configurations, params): - options = params["options"] - - for build_file, build_file_dict in data.items(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != ".gyp": - continue - xcodeproj_path = build_file_root + options.suffix + ".xcodeproj" - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - - for config in configurations: - arguments = ["xcodebuild", "-project", xcodeproj_path] - arguments += ["-configuration", config] - print("Building [%s]: %s" % (config, arguments)) - subprocess.check_call(arguments) - - -def CalculateGeneratorInputInfo(params): - toplevel = params["options"].toplevel_dir - if params.get("flavor") == "ninja": - generator_dir = os.path.relpath(params["options"].generator_output or ".") - output_dir = params.get("generator_flags", {}).get("output_dir", "out") - output_dir = os.path.normpath(os.path.join(generator_dir, output_dir)) - qualified_out_dir = os.path.normpath( - os.path.join(toplevel, output_dir, "gypfiles-xcode-ninja") - ) - else: - output_dir = os.path.normpath(os.path.join(toplevel, "xcodebuild")) - qualified_out_dir = os.path.normpath( - os.path.join(toplevel, output_dir, "gypfiles") - ) - - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": toplevel, - "qualified_out_dir": qualified_out_dir, - } - - -def GenerateOutput(target_list, target_dicts, data, params): - # Optionally configure each spec to use ninja as the external builder. - ninja_wrapper = params.get("flavor") == "ninja" - if ninja_wrapper: - (target_list, target_dicts, data) = gyp.xcode_ninja.CreateWrapper( - target_list, target_dicts, data, params - ) - - options = params["options"] - generator_flags = params.get("generator_flags", {}) - parallel_builds = generator_flags.get("xcode_parallel_builds", True) - serialize_all_tests = generator_flags.get("xcode_serialize_all_test_runs", True) - upgrade_check_project_version = generator_flags.get( - "xcode_upgrade_check_project_version", None - ) - - # Format upgrade_check_project_version with leading zeros as needed. 
- if upgrade_check_project_version: - upgrade_check_project_version = str(upgrade_check_project_version) - while len(upgrade_check_project_version) < 4: - upgrade_check_project_version = "0" + upgrade_check_project_version - - skip_excluded_files = not generator_flags.get("xcode_list_excluded_files", True) - xcode_projects = {} - for build_file, build_file_dict in data.items(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != ".gyp": - continue - xcodeproj_path = build_file_root + options.suffix + ".xcodeproj" - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) - xcode_projects[build_file] = xcp - pbxp = xcp.project - - # Set project-level attributes from multiple options - project_attributes = {} - if parallel_builds: - project_attributes["BuildIndependentTargetsInParallel"] = "YES" - if upgrade_check_project_version: - project_attributes["LastUpgradeCheck"] = upgrade_check_project_version - project_attributes[ - "LastTestingUpgradeCheck" - ] = upgrade_check_project_version - project_attributes["LastSwiftUpdateCheck"] = upgrade_check_project_version - pbxp.SetProperty("attributes", project_attributes) - - # Add gyp/gypi files to project - if not generator_flags.get("standalone"): - main_group = pbxp.GetProperty("mainGroup") - build_group = gyp.xcodeproj_file.PBXGroup({"name": "Build"}) - main_group.AppendChild(build_group) - for included_file in build_file_dict["included_files"]: - build_group.AddOrGetFileByPath(included_file, False) - - xcode_targets = {} - xcode_target_to_target_dict = {} - for qualified_target in target_list: - [build_file, target_name, toolset] = gyp.common.ParseQualifiedTarget( - qualified_target - ) - - spec = target_dicts[qualified_target] - if spec["toolset"] != "target": - raise Exception( - "Multiple toolsets not supported in xcode build (target %s)" - % qualified_target - ) - configuration_names = [spec["default_configuration"]] - for configuration_name in sorted(spec["configurations"].keys()): - if configuration_name not in configuration_names: - configuration_names.append(configuration_name) - xcp = xcode_projects[build_file] - pbxp = xcp.project - - # Set up the configurations for the target according to the list of names - # supplied. - xccl = CreateXCConfigurationList(configuration_names) - - # Create an XCTarget subclass object for the target. The type with - # "+bundle" appended will be used if the target has "mac_bundle" set. - # loadable_modules not in a mac_bundle are mapped to - # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets - # to create a single-file mh_bundle. 
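# Editor's note: the padding loop above left-pads the version string to four
# digits; str.zfill is the one-line equivalent, shown here for reference.
assert "46".zfill(4) == "0046"
assert "1130".zfill(4) == "1130"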
- _types = { - "executable": "com.apple.product-type.tool", - "loadable_module": "com.googlecode.gyp.xcode.bundle", - "shared_library": "com.apple.product-type.library.dynamic", - "static_library": "com.apple.product-type.library.static", - "mac_kernel_extension": "com.apple.product-type.kernel-extension", - "executable+bundle": "com.apple.product-type.application", - "loadable_module+bundle": "com.apple.product-type.bundle", - "loadable_module+xctest": "com.apple.product-type.bundle.unit-test", - "loadable_module+xcuitest": "com.apple.product-type.bundle.ui-testing", - "shared_library+bundle": "com.apple.product-type.framework", - "executable+extension+bundle": "com.apple.product-type.app-extension", - "executable+watch+extension+bundle": - "com.apple.product-type.watchkit-extension", - "executable+watch+bundle": "com.apple.product-type.application.watchapp", - "mac_kernel_extension+bundle": "com.apple.product-type.kernel-extension", - } - - target_properties = { - "buildConfigurationList": xccl, - "name": target_name, - } - - type = spec["type"] - is_xctest = int(spec.get("mac_xctest_bundle", 0)) - is_xcuitest = int(spec.get("mac_xcuitest_bundle", 0)) - is_bundle = int(spec.get("mac_bundle", 0)) or is_xctest - is_app_extension = int(spec.get("ios_app_extension", 0)) - is_watchkit_extension = int(spec.get("ios_watchkit_extension", 0)) - is_watch_app = int(spec.get("ios_watch_app", 0)) - if type != "none": - type_bundle_key = type - if is_xcuitest: - type_bundle_key += "+xcuitest" - assert type == "loadable_module", ( - "mac_xcuitest_bundle targets must have type loadable_module " - "(target %s)" % target_name - ) - elif is_xctest: - type_bundle_key += "+xctest" - assert type == "loadable_module", ( - "mac_xctest_bundle targets must have type loadable_module " - "(target %s)" % target_name - ) - elif is_app_extension: - assert is_bundle, ( - "ios_app_extension flag requires mac_bundle " - "(target %s)" % target_name - ) - type_bundle_key += "+extension+bundle" - elif is_watchkit_extension: - assert is_bundle, ( - "ios_watchkit_extension flag requires mac_bundle " - "(target %s)" % target_name - ) - type_bundle_key += "+watch+extension+bundle" - elif is_watch_app: - assert is_bundle, ( - "ios_watch_app flag requires mac_bundle " - "(target %s)" % target_name - ) - type_bundle_key += "+watch+bundle" - elif is_bundle: - type_bundle_key += "+bundle" - - xctarget_type = gyp.xcodeproj_file.PBXNativeTarget - try: - target_properties["productType"] = _types[type_bundle_key] - except KeyError as e: - gyp.common.ExceptionAppend( - e, - "-- unknown product type while " "writing target %s" % target_name, - ) - raise - else: - xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget - assert not is_bundle, ( - 'mac_bundle targets cannot have type none (target "%s")' % target_name - ) - assert not is_xcuitest, ( - 'mac_xcuitest_bundle targets cannot have type none (target "%s")' - % target_name - ) - assert not is_xctest, ( - 'mac_xctest_bundle targets cannot have type none (target "%s")' - % target_name - ) - - target_product_name = spec.get("product_name") - if target_product_name is not None: - target_properties["productName"] = target_product_name - - xct = xctarget_type( - target_properties, - parent=pbxp, - force_outdir=spec.get("product_dir"), - force_prefix=spec.get("product_prefix"), - force_extension=spec.get("product_extension"), - ) - pbxp.AppendProperty("targets", xct) - xcode_targets[qualified_target] = xct - xcode_target_to_target_dict[xct] = spec - - spec_actions = spec.get("actions", []) - 
spec_rules = spec.get("rules", []) - - # Xcode has some "issues" with checking dependencies for the "Compile - # sources" step with any source files/headers generated by actions/rules. - # To work around this, if a target is building anything directly (not - # type "none"), then a second target is used to run the GYP actions/rules - # and is made a dependency of this target. This way the work is done - # before the dependency checks for what should be recompiled. - support_xct = None - # The Xcode "issues" don't affect xcode-ninja builds, since the dependency - # logic all happens in ninja. Don't bother creating the extra targets in - # that case. - if type != "none" and (spec_actions or spec_rules) and not ninja_wrapper: - support_xccl = CreateXCConfigurationList(configuration_names) - support_target_suffix = generator_flags.get( - "support_target_suffix", " Support" - ) - support_target_properties = { - "buildConfigurationList": support_xccl, - "name": target_name + support_target_suffix, - } - if target_product_name: - support_target_properties["productName"] = ( - target_product_name + " Support" - ) - support_xct = gyp.xcodeproj_file.PBXAggregateTarget( - support_target_properties, parent=pbxp - ) - pbxp.AppendProperty("targets", support_xct) - xct.AddDependency(support_xct) - # Hang the support target off the main target so it can be tested/found - # by the generator during Finalize. - xct.support_target = support_xct - - prebuild_index = 0 - - # Add custom shell script phases for "actions" sections. - for action in spec_actions: - # There's no need to write anything into the script to ensure that the - # output directories already exist, because Xcode will look at the - # declared outputs and automatically ensure that they exist for us. - - # Do we have a message to print when this action runs? - message = action.get("message") - if message: - message = "echo note: " + gyp.common.EncodePOSIXShellArgument(message) - else: - message = "" - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(action["action"]) - - # Convert Xcode-type variable references to sh-compatible environment - # variable references. - message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) - action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - action_string - ) - - script = "" - # Include the optional message - if message_sh: - script += message_sh + "\n" - # Be sure the script runs in exec, and that if exec fails, the script - # exits signalling an error. - script += "exec " + action_string_sh + "\nexit 1\n" - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( - { - "inputPaths": action["inputs"], - "name": 'Action "' + action["action_name"] + '"', - "outputPaths": action["outputs"], - "shellScript": script, - "showEnvVarsInLog": 0, - } - ) - - if support_xct: - support_xct.AppendProperty("buildPhases", ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. - xct._properties["buildPhases"].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # TODO(mark): Should verify that at most one of these is specified. 
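# Editor's note: a sketch of the action-phase script assembled above, using
# the same gyp.common helpers; the action dict is illustrative. `exec`
# replaces the shell with the action, so the trailing `exit 1` runs only if
# exec itself fails.
import gyp.common

action = {
    "action_name": "gen_header",
    "message": "generating x.h",
    "action": ["python", "tool.py", "--out", "x.h"],
}
script = "echo note: " + gyp.common.EncodePOSIXShellArgument(action["message"]) + "\n"
script += "exec " + gyp.common.EncodePOSIXShellList(action["action"]) + "\nexit 1\n"
print(script)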
- if int(action.get("process_outputs_as_sources", False)): - for output in action["outputs"]: - AddSourceToTarget(output, type, pbxp, xct) - - if int(action.get("process_outputs_as_mac_bundle_resources", False)): - for output in action["outputs"]: - AddResourceToTarget(output, pbxp, xct) - - # tgt_mac_bundle_resources holds the list of bundle resources so - # the rule processing can check against it. - if is_bundle: - tgt_mac_bundle_resources = spec.get("mac_bundle_resources", []) - else: - tgt_mac_bundle_resources = [] - - # Add custom shell script phases driving "make" for "rules" sections. - # - # Xcode's built-in rule support is almost powerful enough to use directly, - # but there are a few significant deficiencies that render them unusable. - # There are workarounds for some of its inadequacies, but in aggregate, - # the workarounds added complexity to the generator, and some workarounds - # actually require input files to be crafted more carefully than I'd like. - # Consequently, until Xcode rules are made more capable, "rules" input - # sections will be handled in Xcode output by shell script build phases - # performed prior to the compilation phase. - # - # The following problems with Xcode rules were found. The numbers are - # Apple radar IDs. I hope that these shortcomings are addressed, I really - # liked having the rules handled directly in Xcode during the period that - # I was prototyping this. - # - # 6588600 Xcode compiles custom script rule outputs too soon, compilation - # fails. This occurs when rule outputs from distinct inputs are - # interdependent. The only workaround is to put rules and their - # inputs in a separate target from the one that compiles the rule - # outputs. This requires input file cooperation and it means that - # process_outputs_as_sources is unusable. - # 6584932 Need to declare that custom rule outputs should be excluded from - # compilation. A possible workaround is to lie to Xcode about a - # rule's output, giving it a dummy file it doesn't know how to - # compile. The rule action script would need to touch the dummy. - # 6584839 I need a way to declare additional inputs to a custom rule. - # A possible workaround is a shell script phase prior to - # compilation that touches a rule's primary input files if any - # would-be additional inputs are newer than the output. Modifying - # the source tree - even just modification times - feels dirty. - # 6564240 Xcode "custom script" build rules always dump all environment - # variables. This is a low-prioroty problem and is not a - # show-stopper. - rules_by_ext = {} - for rule in spec_rules: - rules_by_ext[rule["extension"]] = rule - - # First, some definitions: - # - # A "rule source" is a file that was listed in a target's "sources" - # list and will have a rule applied to it on the basis of matching the - # rule's "extensions" attribute. Rule sources are direct inputs to - # rules. - # - # Rule definitions may specify additional inputs in their "inputs" - # attribute. These additional inputs are used for dependency tracking - # purposes. - # - # A "concrete output" is a rule output with input-dependent variables - # resolved. For example, given a rule with: - # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'], - # if the target's "sources" list contained "one.ext" and "two.ext", - # the "concrete output" for rule input "two.ext" would be "two.cc". If - # a rule specifies multiple outputs, each input file that the rule is - # applied to will have the same number of concrete outputs. 
- # - # If any concrete outputs are outdated or missing relative to their - # corresponding rule_source or to any specified additional input, the - # rule action must be performed to generate the concrete outputs. - - # concrete_outputs_by_rule_source will have an item at the same index - # as the rule['rule_sources'] that it corresponds to. Each item is a - # list of all of the concrete outputs for the rule_source. - concrete_outputs_by_rule_source = [] - - # concrete_outputs_all is a flat list of all concrete outputs that this - # rule is able to produce, given the known set of input files - # (rule_sources) that apply to it. - concrete_outputs_all = [] - - # messages & actions are keyed by the same indices as rule['rule_sources'] - # and concrete_outputs_by_rule_source. They contain the message and - # action to perform after resolving input-dependent variables. The - # message is optional, in which case None is stored for each rule source. - messages = [] - actions = [] - - for rule_source in rule.get("rule_sources", []): - rule_source_dirname, rule_source_basename = posixpath.split(rule_source) - (rule_source_root, rule_source_ext) = posixpath.splitext( - rule_source_basename - ) - - # These are the same variable names that Xcode uses for its own native - # rule support. Because Xcode's rule engine is not being used, they - # need to be expanded as they are written to the makefile. - rule_input_dict = { - "INPUT_FILE_BASE": rule_source_root, - "INPUT_FILE_SUFFIX": rule_source_ext, - "INPUT_FILE_NAME": rule_source_basename, - "INPUT_FILE_PATH": rule_source, - "INPUT_FILE_DIRNAME": rule_source_dirname, - } - - concrete_outputs_for_this_rule_source = [] - for output in rule.get("outputs", []): - # Fortunately, Xcode and make both use $(VAR) format for their - # variables, so the expansion is the only transformation necessary. - # Any remaining $(VAR)-type variables in the string can be given - # directly to make, which will pick up the correct settings from - # what Xcode puts into the environment. - concrete_output = ExpandXcodeVariables(output, rule_input_dict) - concrete_outputs_for_this_rule_source.append(concrete_output) - - # Add all concrete outputs to the project. - pbxp.AddOrGetFileInRootGroup(concrete_output) - - concrete_outputs_by_rule_source.append( - concrete_outputs_for_this_rule_source - ) - concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) - - # TODO(mark): Should verify that at most one of these is specified. - if int(rule.get("process_outputs_as_sources", False)): - for output in concrete_outputs_for_this_rule_source: - AddSourceToTarget(output, type, pbxp, xct) - - # If the file came from the mac_bundle_resources list or if the rule - # is marked to process outputs as bundle resource, do so. - was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources - if was_mac_bundle_resource or int( - rule.get("process_outputs_as_mac_bundle_resources", False) - ): - for output in concrete_outputs_for_this_rule_source: - AddResourceToTarget(output, pbxp, xct) - - # Do we have a message to print when this rule runs? - message = rule.get("message") - if message: - message = gyp.common.EncodePOSIXShellArgument(message) - message = ExpandXcodeVariables(message, rule_input_dict) - messages.append(message) - - # Turn the list into a string that can be passed to a shell. 
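# Editor's note: the input-dependent expansion described above, worked for a
# single rule source. With 'outputs': ['$(INPUT_FILE_BASE).cc'] (the example
# from the comment), the concrete output for "two.ext" comes out as "two.cc".
import posixpath

rule_source = "src/icu/two.ext"
dirname, basename = posixpath.split(rule_source)
root, ext = posixpath.splitext(basename)
rule_input_dict = {
    "INPUT_FILE_BASE": root,  # "two"
    "INPUT_FILE_SUFFIX": ext,  # ".ext"
    "INPUT_FILE_NAME": basename,  # "two.ext"
    "INPUT_FILE_PATH": rule_source,
    "INPUT_FILE_DIRNAME": dirname,  # "src/icu"
}
print(rule_input_dict)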
- action_string = gyp.common.EncodePOSIXShellList(rule["action"]) - - action = ExpandXcodeVariables(action_string, rule_input_dict) - actions.append(action) - - if len(concrete_outputs_all) > 0: - # TODO(mark): There's a possibility for collision here. Consider - # target "t" rule "A_r" and target "t_A" rule "r". - makefile_name = "%s.make" % re.sub( - "[^a-zA-Z0-9_]", "_", "%s_%s" % (target_name, rule["rule_name"]) - ) - makefile_path = os.path.join( - xcode_projects[build_file].path, makefile_name - ) - # TODO(mark): try/close? Write to a temporary file and swap it only - # if it's got changes? - makefile = open(makefile_path, "w") - - # make will build the first target in the makefile by default. By - # convention, it's called "all". List all (or at least one) - # concrete output for each rule source as a prerequisite of the "all" - # target. - makefile.write("all: \\\n") - for concrete_output_index, concrete_output_by_rule_source in enumerate( - concrete_outputs_by_rule_source - ): - # Only list the first (index [0]) concrete output of each input - # in the "all" target. Otherwise, a parallel make (-j > 1) would - # attempt to process each input multiple times simultaneously. - # Otherwise, "all" could just contain the entire list of - # concrete_outputs_all. - concrete_output = concrete_output_by_rule_source[0] - if ( - concrete_output_index - == len(concrete_outputs_by_rule_source) - 1 - ): - eol = "" - else: - eol = " \\" - makefile.write(" %s%s\n" % (concrete_output, eol)) - - for (rule_source, concrete_outputs, message, action) in zip( - rule["rule_sources"], - concrete_outputs_by_rule_source, - messages, - actions, - ): - makefile.write("\n") - - # Add a rule that declares it can build each concrete output of a - # rule source. Collect the names of the directories that are - # required. - concrete_output_dirs = [] - for concrete_output_index, concrete_output in enumerate( - concrete_outputs - ): - if concrete_output_index == 0: - bol = "" - else: - bol = " " - makefile.write("%s%s \\\n" % (bol, concrete_output)) - - concrete_output_dir = posixpath.dirname(concrete_output) - if ( - concrete_output_dir - and concrete_output_dir not in concrete_output_dirs - ): - concrete_output_dirs.append(concrete_output_dir) - - makefile.write(" : \\\n") - - # The prerequisites for this rule are the rule source itself and - # the set of additional rule inputs, if any. - prerequisites = [rule_source] - prerequisites.extend(rule.get("inputs", [])) - for prerequisite_index, prerequisite in enumerate(prerequisites): - if prerequisite_index == len(prerequisites) - 1: - eol = "" - else: - eol = " \\" - makefile.write(" %s%s\n" % (prerequisite, eol)) - - # Make sure that output directories exist before executing the rule - # action. - if len(concrete_output_dirs) > 0: - makefile.write( - '\t@mkdir -p "%s"\n' % '" "'.join(concrete_output_dirs) - ) - - # The rule message and action have already had - # the necessary variable substitutions performed. - if message: - # Mark it with note: so Xcode picks it up in build output. - makefile.write("\t@echo note: %s\n" % message) - makefile.write("\t%s\n" % action) - - makefile.close() - - # It might be nice to ensure that needed output directories exist - # here rather than in each target in the Makefile, but that wouldn't - # work if there ever was a concrete output that had an input-dependent - # variable anywhere other than in the leaf position. - - # Don't declare any inputPaths or outputPaths. 
If they're present, - # Xcode will provide a slight optimization by only running the script - # phase if any output is missing or outdated relative to any input. - # Unfortunately, it will also assume that all outputs are touched by - # the script, and if the outputs serve as files in a compilation - # phase, they will be unconditionally rebuilt. Since make might not - # rebuild everything that could be declared here as an output, this - # extra compilation activity is unnecessary. With inputPaths and - # outputPaths not supplied, make will always be called, but it knows - # enough to not do anything when everything is up-to-date. - - # To help speed things up, pass -j COUNT to make so it does some work - # in parallel. Don't use ncpus because Xcode will build ncpus targets - # in parallel and if each target happens to have a rules step, there - # would be ncpus^2 things going. With a machine that has 2 quad-core - # Xeons, a build can quickly run out of processes based on - # scheduling/other tasks, and randomly failing builds are no good. - script = ( - """JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" -if [ "${JOB_COUNT}" -gt 4 ]; then - JOB_COUNT=4 -fi -exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" -exit 1 -""" - % makefile_name - ) - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( - { - "name": 'Rule "' + rule["rule_name"] + '"', - "shellScript": script, - "showEnvVarsInLog": 0, - } - ) - - if support_xct: - support_xct.AppendProperty("buildPhases", ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move - # into xcodeproj_file itself. - xct._properties["buildPhases"].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # Extra rule inputs also go into the project file. Concrete outputs were - # already added when they were computed. - groups = ["inputs", "inputs_excluded"] - if skip_excluded_files: - groups = [x for x in groups if not x.endswith("_excluded")] - for group in groups: - for item in rule.get(group, []): - pbxp.AddOrGetFileInRootGroup(item) - - # Add "sources". - for source in spec.get("sources", []): - (source_root, source_extension) = posixpath.splitext(source) - if source_extension[1:] not in rules_by_ext: - # AddSourceToTarget will add the file to a root group if it's not - # already there. - AddSourceToTarget(source, type, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(source) - - # Add "mac_bundle_resources" and "mac_framework_private_headers" if - # it's a bundle of any type. - if is_bundle: - for resource in tgt_mac_bundle_resources: - (resource_root, resource_extension) = posixpath.splitext(resource) - if resource_extension[1:] not in rules_by_ext: - AddResourceToTarget(resource, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(resource) - - for header in spec.get("mac_framework_private_headers", []): - AddHeaderToTarget(header, pbxp, xct, False) - - # Add "mac_framework_headers". These can be valid for both frameworks - # and static libraries. - if is_bundle or type == "static_library": - for header in spec.get("mac_framework_headers", []): - AddHeaderToTarget(header, pbxp, xct, True) - - # Add "copies". - pbxcp_dict = {} - for copy_group in spec.get("copies", []): - dest = copy_group["destination"] - if dest[0] not in ("/", "$"): - # Relative paths are relative to $(SRCROOT). 
- dest = "$(SRCROOT)/" + dest - - code_sign = int(copy_group.get("xcode_code_sign", 0)) - settings = (None, "{ATTRIBUTES = (CodeSignOnCopy, ); }")[code_sign] - - # Coalesce multiple "copies" sections in the same target with the same - # "destination" property into the same PBXCopyFilesBuildPhase, otherwise - # they'll wind up with ID collisions. - pbxcp = pbxcp_dict.get(dest, None) - if pbxcp is None: - pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase( - {"name": "Copy to " + copy_group["destination"]}, parent=xct - ) - pbxcp.SetDestination(dest) - - # TODO(mark): The usual comment about this knowing too much about - # gyp.xcodeproj_file internals applies. - xct._properties["buildPhases"].insert(prebuild_index, pbxcp) - - pbxcp_dict[dest] = pbxcp - - for file in copy_group["files"]: - pbxcp.AddFile(file, settings) - - # Excluded files can also go into the project file. - if not skip_excluded_files: - for key in [ - "sources", - "mac_bundle_resources", - "mac_framework_headers", - "mac_framework_private_headers", - ]: - excluded_key = key + "_excluded" - for item in spec.get(excluded_key, []): - pbxp.AddOrGetFileInRootGroup(item) - - # So can "inputs" and "outputs" sections of "actions" groups. - groups = ["inputs", "inputs_excluded", "outputs", "outputs_excluded"] - if skip_excluded_files: - groups = [x for x in groups if not x.endswith("_excluded")] - for action in spec.get("actions", []): - for group in groups: - for item in action.get(group, []): - # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not - # sources. - if not item.startswith("$(BUILT_PRODUCTS_DIR)/"): - pbxp.AddOrGetFileInRootGroup(item) - - for postbuild in spec.get("postbuilds", []): - action_string_sh = gyp.common.EncodePOSIXShellList(postbuild["action"]) - script = "exec " + action_string_sh + "\nexit 1\n" - - # Make the postbuild step depend on the output of ld or ar from this - # target. Apparently putting the script step after the link step isn't - # sufficient to ensure proper ordering in all cases. With an input - # declared but no outputs, the script step should run every time, as - # desired. - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( - { - "inputPaths": ["$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)"], - "name": 'Postbuild "' + postbuild["postbuild_name"] + '"', - "shellScript": script, - "showEnvVarsInLog": 0, - } - ) - xct.AppendProperty("buildPhases", ssbp) - - # Add dependencies before libraries, because adding a dependency may imply - # adding a library. It's preferable to keep dependencies listed first - # during a link phase so that they can override symbols that would - # otherwise be provided by libraries, which will usually include system - # libraries. On some systems, ld is finicky and even requires the - # libraries to be ordered in such a way that unresolved symbols in - # earlier-listed libraries may only be resolved by later-listed libraries. - # The Mac linker doesn't work that way, but other platforms do, and so - # their linker invocations need to be constructed in this way. There's - # no compelling reason for Xcode's linker invocations to differ. - - if "dependencies" in spec: - for dependency in spec["dependencies"]: - xct.AddDependency(xcode_targets[dependency]) - # The support project also gets the dependencies (in case they are - # needed for the actions/rules to work). 
- if support_xct: - support_xct.AddDependency(xcode_targets[dependency]) - - if "libraries" in spec: - for library in spec["libraries"]: - xct.FrameworksPhase().AddFile(library) - # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. - # I wish Xcode handled this automatically. - library_dir = posixpath.dirname(library) - if library_dir not in xcode_standard_library_dirs and ( - not xct.HasBuildSetting(_library_search_paths_var) - or library_dir not in xct.GetBuildSetting(_library_search_paths_var) - ): - xct.AppendBuildSetting(_library_search_paths_var, library_dir) - - for configuration_name in configuration_names: - configuration = spec["configurations"][configuration_name] - xcbc = xct.ConfigurationNamed(configuration_name) - for include_dir in configuration.get("mac_framework_dirs", []): - xcbc.AppendBuildSetting("FRAMEWORK_SEARCH_PATHS", include_dir) - for include_dir in configuration.get("include_dirs", []): - xcbc.AppendBuildSetting("HEADER_SEARCH_PATHS", include_dir) - for library_dir in configuration.get("library_dirs", []): - if library_dir not in xcode_standard_library_dirs and ( - not xcbc.HasBuildSetting(_library_search_paths_var) - or library_dir - not in xcbc.GetBuildSetting(_library_search_paths_var) - ): - xcbc.AppendBuildSetting(_library_search_paths_var, library_dir) - - if "defines" in configuration: - for define in configuration["defines"]: - set_define = EscapeXcodeDefine(define) - xcbc.AppendBuildSetting("GCC_PREPROCESSOR_DEFINITIONS", set_define) - if "xcode_settings" in configuration: - for xck, xcv in configuration["xcode_settings"].items(): - xcbc.SetBuildSetting(xck, xcv) - if "xcode_config_file" in configuration: - config_ref = pbxp.AddOrGetFileInRootGroup( - configuration["xcode_config_file"] - ) - xcbc.SetBaseConfiguration(config_ref) - - build_files = [] - for build_file, build_file_dict in data.items(): - if build_file.endswith(".gyp"): - build_files.append(build_file) - - for build_file in build_files: - xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) - - for build_file in build_files: - xcode_projects[build_file].Finalize2(xcode_targets, xcode_target_to_target_dict) - - for build_file in build_files: - xcode_projects[build_file].Write() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py deleted file mode 100644 index 51fbca6a2a28f..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the xcode.py file. 
""" - -import gyp.generator.xcode as xcode -import unittest -import sys - - -class TestEscapeXcodeDefine(unittest.TestCase): - if sys.platform == "darwin": - - def test_InheritedRemainsUnescaped(self): - self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)") - - def test_Escaping(self): - self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\') - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/input.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/input.py deleted file mode 100644 index 5504390c0bb33..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/input.py +++ /dev/null @@ -1,3149 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from __future__ import print_function - -import ast - -import gyp.common -import gyp.simple_copy -import multiprocessing -import os.path -import re -import shlex -import signal -import subprocess -import sys -import threading -import traceback -from distutils.version import StrictVersion -from gyp.common import GypError -from gyp.common import OrderedSet - -PY3 = bytes != str - -# A list of types that are treated as linkable. -linkable_types = [ - "executable", - "shared_library", - "loadable_module", - "mac_kernel_extension", - "windows_driver", -] - -# A list of sections that contain links to other targets. -dependency_sections = ["dependencies", "export_dependent_settings"] - -# base_path_sections is a list of sections defined by GYP that contain -# pathnames. The generators can provide more keys, the two lists are merged -# into path_sections, but you should call IsPathSection instead of using either -# list directly. -base_path_sections = [ - "destination", - "files", - "include_dirs", - "inputs", - "libraries", - "outputs", - "sources", -] -path_sections = set() - -# These per-process dictionaries are used to cache build file data when loading -# in parallel mode. -per_process_data = {} -per_process_aux_data = {} - - -def IsPathSection(section): - # If section ends in one of the '=+?!' characters, it's applied to a section - # without the trailing characters. '/' is notably absent from this list, - # because there's no way for a regular expression to be treated as a path. - while section and section[-1:] in "=+?!": - section = section[:-1] - - if section in path_sections: - return True - - # Sections matching the regexp '_(dir|file|path)s?$' are also - # considered PathSections. Using manual string matching since that - # is much faster than the regexp and this can be called hundreds of - # thousands of times so micro performance matters. - if "_" in section: - tail = section[-6:] - if tail[-1] == "s": - tail = tail[:-1] - if tail[-5:] in ("_file", "_path"): - return True - return tail[-4:] == "_dir" - - return False - - -# base_non_configuration_keys is a list of key names that belong in the target -# itself and should not be propagated into its configurations. It is merged -# with a list that can come from the generator to -# create non_configuration_keys. -base_non_configuration_keys = [ - # Sections that must exist inside targets and not configurations. 
- "actions", - "configurations", - "copies", - "default_configuration", - "dependencies", - "dependencies_original", - "libraries", - "postbuilds", - "product_dir", - "product_extension", - "product_name", - "product_prefix", - "rules", - "run_as", - "sources", - "standalone_static_library", - "suppress_wildcard", - "target_name", - "toolset", - "toolsets", - "type", - # Sections that can be found inside targets or configurations, but that - # should not be propagated from targets into their configurations. - "variables", -] -non_configuration_keys = [] - -# Keys that do not belong inside a configuration dictionary. -invalid_configuration_keys = [ - "actions", - "all_dependent_settings", - "configurations", - "dependencies", - "direct_dependent_settings", - "libraries", - "link_settings", - "sources", - "standalone_static_library", - "target_name", - "type", -] - -# Controls whether or not the generator supports multiple toolsets. -multiple_toolsets = False - -# Paths for converting filelist paths to output paths: { -# toplevel, -# qualified_output_dir, -# } -generator_filelist_paths = None - - -def GetIncludedBuildFiles(build_file_path, aux_data, included=None): - """Return a list of all build files included into build_file_path. - - The returned list will contain build_file_path as well as all other files - that it included, either directly or indirectly. Note that the list may - contain files that were included into a conditional section that evaluated - to false and was not merged into build_file_path's dict. - - aux_data is a dict containing a key for each build file or included build - file. Those keys provide access to dicts whose "included" keys contain - lists of all other files included by the build file. - - included should be left at its default None value by external callers. It - is used for recursion. - - The returned list will not contain any duplicate entries. Each build file - in the list will be relative to the current directory. - """ - - if included is None: - included = [] - - if build_file_path in included: - return included - - included.append(build_file_path) - - for included_build_file in aux_data[build_file_path].get("included", []): - GetIncludedBuildFiles(included_build_file, aux_data, included) - - return included - - -def CheckedEval(file_contents): - """Return the eval of a gyp file. - The gyp file is restricted to dictionaries and lists only, and - repeated keys are not allowed. - Note that this is slower than eval() is. - """ - - syntax_tree = ast.parse(file_contents) - assert isinstance(syntax_tree, ast.Module) - c1 = syntax_tree.body - assert len(c1) == 1 - c2 = c1[0] - assert isinstance(c2, ast.Expr) - return CheckNode(c2.value, []) - - -def CheckNode(node, keypath): - if isinstance(node, ast.Dict): - dict = {} - for key, value in zip(node.keys, node.values): - assert isinstance(key, ast.Str) - key = key.s - if key in dict: - raise GypError( - "Key '" - + key - + "' repeated at level " - + repr(len(keypath) + 1) - + " with key path '" - + ".".join(keypath) - + "'" - ) - kp = list(keypath) # Make a copy of the list for descending this node. - kp.append(key) - dict[key] = CheckNode(value, kp) - return dict - elif isinstance(node, ast.List): - children = [] - for index, child in enumerate(node.elts): - kp = list(keypath) # Copy list. 
- kp.append(repr(index)) - children.append(CheckNode(child, kp)) - return children - elif isinstance(node, ast.Str): - return node.s - else: - raise TypeError( - "Unknown AST node at key path '" + ".".join(keypath) + "': " + repr(node) - ) - - -def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check): - if build_file_path in data: - return data[build_file_path] - - if os.path.exists(build_file_path): - # Open the build file for read ('r') with universal-newlines mode ('U') - # to make sure platform specific newlines ('\r\n' or '\r') are converted to '\n' - # which otherwise will fail eval() - if sys.platform == "zos": - # On z/OS, universal-newlines mode treats the file as an ascii file. - # But since node-gyp produces ebcdic files, do not use that mode. - build_file_contents = open(build_file_path, "r").read() - else: - build_file_contents = open(build_file_path, "rU").read() - else: - raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) - - build_file_data = None - try: - if check: - build_file_data = CheckedEval(build_file_contents) - else: - build_file_data = eval(build_file_contents, {"__builtins__": {}}, None) - except SyntaxError as e: - e.filename = build_file_path - raise - except Exception as e: - gyp.common.ExceptionAppend(e, "while reading " + build_file_path) - raise - - if type(build_file_data) is not dict: - raise GypError("%s does not evaluate to a dictionary." % build_file_path) - - data[build_file_path] = build_file_data - aux_data[build_file_path] = {} - - # Scan for includes and merge them in. - if "skip_includes" not in build_file_data or not build_file_data["skip_includes"]: - try: - if is_target: - LoadBuildFileIncludesIntoDict( - build_file_data, build_file_path, data, aux_data, includes, check - ) - else: - LoadBuildFileIncludesIntoDict( - build_file_data, build_file_path, data, aux_data, None, check - ) - except Exception as e: - gyp.common.ExceptionAppend( - e, "while reading includes of " + build_file_path - ) - raise - - return build_file_data - - -def LoadBuildFileIncludesIntoDict( - subdict, subdict_path, data, aux_data, includes, check -): - includes_list = [] - if includes is not None: - includes_list.extend(includes) - if "includes" in subdict: - for include in subdict["includes"]: - # "include" is specified relative to subdict_path, so compute the real - # path to include by appending the provided "include" to the directory - # in which subdict_path resides. - relative_include = os.path.normpath( - os.path.join(os.path.dirname(subdict_path), include) - ) - includes_list.append(relative_include) - # Unhook the includes list, it's no longer needed. - del subdict["includes"] - - # Merge in the included files. - for include in includes_list: - if "included" not in aux_data[subdict_path]: - aux_data[subdict_path]["included"] = [] - aux_data[subdict_path]["included"].append(include) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) - - MergeDicts( - subdict, - LoadOneBuildFile(include, data, aux_data, None, False, check), - subdict_path, - include, - ) - - # Recurse into subdictionaries. - for k, v in subdict.items(): - if type(v) is dict: - LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check) - elif type(v) is list: - LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, check) - - -# This recurses into lists so that it can look for dicts. 
-def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check): - for item in sublist: - if type(item) is dict: - LoadBuildFileIncludesIntoDict( - item, sublist_path, data, aux_data, None, check - ) - elif type(item) is list: - LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check) - - -# Processes toolsets in all the targets. This recurses into condition entries -# since they can contain toolsets as well. -def ProcessToolsetsInDict(data): - if "targets" in data: - target_list = data["targets"] - new_target_list = [] - for target in target_list: - # If this target already has an explicit 'toolset', and no 'toolsets' - # list, don't modify it further. - if "toolset" in target and "toolsets" not in target: - new_target_list.append(target) - continue - if multiple_toolsets: - toolsets = target.get("toolsets", ["target"]) - else: - toolsets = ["target"] - # Make sure this 'toolsets' definition is only processed once. - if "toolsets" in target: - del target["toolsets"] - if len(toolsets) > 0: - # Optimization: only do copies if more than one toolset is specified. - for build in toolsets[1:]: - new_target = gyp.simple_copy.deepcopy(target) - new_target["toolset"] = build - new_target_list.append(new_target) - target["toolset"] = toolsets[0] - new_target_list.append(target) - data["targets"] = new_target_list - if "conditions" in data: - for condition in data["conditions"]: - if type(condition) is list: - for condition_dict in condition[1:]: - if type(condition_dict) is dict: - ProcessToolsetsInDict(condition_dict) - - -# TODO(mark): I don't love this name. It just means that it's going to load -# a build file that contains targets and is expected to provide a targets dict -# that contains the targets... -def LoadTargetBuildFile( - build_file_path, - data, - aux_data, - variables, - includes, - depth, - check, - load_dependencies, -): - # If depth is set, predefine the DEPTH variable to be a relative path from - # this build file's directory to the directory identified by depth. - if depth: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) - if d == "": - variables["DEPTH"] = "." - else: - variables["DEPTH"] = d.replace("\\", "/") - - # The 'target_build_files' key is only set when loading target build files in - # the non-parallel code path, where LoadTargetBuildFile is called - # recursively. In the parallel code path, we don't need to check whether the - # |build_file_path| has already been loaded, because the 'scheduled' set in - # ParallelState guarantees that we never load the same |build_file_path| - # twice. - if "target_build_files" in data: - if build_file_path in data["target_build_files"]: - # Already loaded. - return False - data["target_build_files"].add(build_file_path) - - gyp.DebugOutput( - gyp.DEBUG_INCLUDES, "Loading Target Build File '%s'", build_file_path - ) - - build_file_data = LoadOneBuildFile( - build_file_path, data, aux_data, includes, True, check - ) - - # Store DEPTH for later use in generators. - build_file_data["_DEPTH"] = depth - - # Set up the included_files key indicating which .gyp files contributed to - # this target dict. 
- if "included_files" in build_file_data: - raise GypError(build_file_path + " must not contain included_files key") - - included = GetIncludedBuildFiles(build_file_path, aux_data) - build_file_data["included_files"] = [] - for included_file in included: - # included_file is relative to the current directory, but it needs to - # be made relative to build_file_path's directory. - included_relative = gyp.common.RelativePath( - included_file, os.path.dirname(build_file_path) - ) - build_file_data["included_files"].append(included_relative) - - # Do a first round of toolsets expansion so that conditions can be defined - # per toolset. - ProcessToolsetsInDict(build_file_data) - - # Apply "pre"/"early" variable expansions and condition evaluations. - ProcessVariablesAndConditionsInDict( - build_file_data, PHASE_EARLY, variables, build_file_path - ) - - # Since some toolsets might have been defined conditionally, perform - # a second round of toolsets expansion now. - ProcessToolsetsInDict(build_file_data) - - # Look at each project's target_defaults dict, and merge settings into - # targets. - if "target_defaults" in build_file_data: - if "targets" not in build_file_data: - raise GypError("Unable to find targets in build file %s" % build_file_path) - - index = 0 - while index < len(build_file_data["targets"]): - # This procedure needs to give the impression that target_defaults is - # used as defaults, and the individual targets inherit from that. - # The individual targets need to be merged into the defaults. Make - # a deep copy of the defaults for each target, merge the target dict - # as found in the input file into that copy, and then hook up the - # copy with the target-specific data merged into it as the replacement - # target dict. - old_target_dict = build_file_data["targets"][index] - new_target_dict = gyp.simple_copy.deepcopy( - build_file_data["target_defaults"] - ) - MergeDicts( - new_target_dict, old_target_dict, build_file_path, build_file_path - ) - build_file_data["targets"][index] = new_target_dict - index += 1 - - # No longer needed. - del build_file_data["target_defaults"] - - # Look for dependencies. This means that dependency resolution occurs - # after "pre" conditionals and variable expansion, but before "post" - - # in other words, you can't put a "dependencies" section inside a "post" - # conditional within a target. - - dependencies = [] - if "targets" in build_file_data: - for target_dict in build_file_data["targets"]: - if "dependencies" not in target_dict: - continue - for dependency in target_dict["dependencies"]: - dependencies.append( - gyp.common.ResolveTarget(build_file_path, dependency, None)[0] - ) - - if load_dependencies: - for dependency in dependencies: - try: - LoadTargetBuildFile( - dependency, - data, - aux_data, - variables, - includes, - depth, - check, - load_dependencies, - ) - except Exception as e: - gyp.common.ExceptionAppend( - e, "while loading dependencies of %s" % build_file_path - ) - raise - else: - return (build_file_path, dependencies) - - -def CallLoadTargetBuildFile( - global_flags, - build_file_path, - variables, - includes, - depth, - check, - generator_input_info, -): - """Wrapper around LoadTargetBuildFile for parallel processing. - - This wrapper is used when LoadTargetBuildFile is executed in - a worker process. - """ - - try: - signal.signal(signal.SIGINT, signal.SIG_IGN) - - # Apply globals so that the worker process behaves the same. 
- for key, value in global_flags.items(): - globals()[key] = value - - SetGeneratorGlobals(generator_input_info) - result = LoadTargetBuildFile( - build_file_path, - per_process_data, - per_process_aux_data, - variables, - includes, - depth, - check, - False, - ) - if not result: - return result - - (build_file_path, dependencies) = result - - # We can safely pop the build_file_data from per_process_data because it - # will never be referenced by this process again, so we don't need to keep - # it in the cache. - build_file_data = per_process_data.pop(build_file_path) - - # This gets serialized and sent back to the main process via a pipe. - # It's handled in LoadTargetBuildFileCallback. - return (build_file_path, build_file_data, dependencies) - except GypError as e: - sys.stderr.write("gyp: %s\n" % e) - return None - except Exception as e: - print("Exception:", e, file=sys.stderr) - print(traceback.format_exc(), file=sys.stderr) - return None - - -class ParallelProcessingError(Exception): - pass - - -class ParallelState(object): - """Class to keep track of state when processing input files in parallel. - - If build files are loaded in parallel, use this to keep track of - state during farming out and processing parallel jobs. It's stored - in a global so that the callback function can have access to it. - """ - - def __init__(self): - # The multiprocessing pool. - self.pool = None - # The condition variable used to protect this object and notify - # the main loop when there might be more data to process. - self.condition = None - # The "data" dict that was passed to LoadTargetBuildFileParallel - self.data = None - # The number of parallel calls outstanding; decremented when a response - # was received. - self.pending = 0 - # The set of all build files that have been scheduled, so we don't - # schedule the same one twice. - self.scheduled = set() - # A list of dependency build file paths that haven't been scheduled yet. - self.dependencies = [] - # Flag to indicate if there was an error in a child process. - self.error = False - - def LoadTargetBuildFileCallback(self, result): - """Handle the results of running LoadTargetBuildFile in another process. - """ - self.condition.acquire() - if not result: - self.error = True - self.condition.notify() - self.condition.release() - return - (build_file_path0, build_file_data0, dependencies0) = result - self.data[build_file_path0] = build_file_data0 - self.data["target_build_files"].add(build_file_path0) - for new_dependency in dependencies0: - if new_dependency not in self.scheduled: - self.scheduled.add(new_dependency) - self.dependencies.append(new_dependency) - self.pending -= 1 - self.condition.notify() - self.condition.release() - - -def LoadTargetBuildFilesParallel( - build_files, data, variables, includes, depth, check, generator_input_info -): - parallel_state = ParallelState() - parallel_state.condition = threading.Condition() - # Make copies of the build_files argument that we can modify while working. 
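# Added sketch of the scheduling pattern used below (illustrative only):
#
#     pool = multiprocessing.Pool(...)
#     while dependencies or pending:        # guarded by a threading.Condition
#         pool.apply_async(worker, args, callback=collect)
#
# apply_async returns immediately; each callback fires in the parent process
# and uses parallel_state.condition to wake this loop whenever a worker
# discovers new dependency build files to schedule.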
-    parallel_state.dependencies = list(build_files)
-    parallel_state.scheduled = set(build_files)
-    parallel_state.pending = 0
-    parallel_state.data = data
-
-    try:
-        parallel_state.condition.acquire()
-        while parallel_state.dependencies or parallel_state.pending:
-            if parallel_state.error:
-                break
-            if not parallel_state.dependencies:
-                parallel_state.condition.wait()
-                continue
-
-            dependency = parallel_state.dependencies.pop()
-
-            parallel_state.pending += 1
-            global_flags = {
-                "path_sections": globals()["path_sections"],
-                "non_configuration_keys": globals()["non_configuration_keys"],
-                "multiple_toolsets": globals()["multiple_toolsets"],
-            }
-
-            if not parallel_state.pool:
-                parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
-            parallel_state.pool.apply_async(
-                CallLoadTargetBuildFile,
-                args=(
-                    global_flags,
-                    dependency,
-                    variables,
-                    includes,
-                    depth,
-                    check,
-                    generator_input_info,
-                ),
-                callback=parallel_state.LoadTargetBuildFileCallback,
-            )
-    except KeyboardInterrupt as e:
-        parallel_state.pool.terminate()
-        raise e
-
-    parallel_state.condition.release()
-
-    parallel_state.pool.close()
-    parallel_state.pool.join()
-    parallel_state.pool = None
-
-    if parallel_state.error:
-        sys.exit(1)
-
-
-# Look for the bracket that matches the first bracket seen in a
-# string, and return the start and end as a tuple. For example, if
-# the input is something like "<(foo <(bar)) blah", then it would
-# return (1, 13), indicating the entire string except for the leading
-# "<" and trailing " blah".
-LBRACKETS = set("{[(")
-BRACKETS = {"}": "{", "]": "[", ")": "("}
-
-
-def FindEnclosingBracketGroup(input_str):
-    stack = []
-    start = -1
-    for index, char in enumerate(input_str):
-        if char in LBRACKETS:
-            stack.append(char)
-            if start == -1:
-                start = index
-        elif char in BRACKETS:
-            if not stack:
-                return (-1, -1)
-            if stack.pop() != BRACKETS[char]:
-                return (-1, -1)
-            if not stack:
-                return (start, index + 1)
-    return (-1, -1)
-
-
-def IsStrCanonicalInt(string):
-    """Returns True if |string| is in its canonical integer form.
-
-    The canonical form is such that str(int(string)) == string.
-    """
-    if type(string) is str:
-        # This function is called a lot so for maximum performance, avoid
-        # involving regexps which would otherwise make the code much
-        # shorter. Regexps would need twice the time of this function.
-        if string:
-            if string == "0":
-                return True
-            if string[0] == "-":
-                string = string[1:]
-                if not string:
-                    return False
-            if "1" <= string[0] <= "9":
-                return string.isdigit()
-
-    return False
-
-
-# This matches things like "<(asdf)", "<(foo.bar)", "<!baz(quux)" and
-# "<!interpreter(arguments)".
-early_variable_re = re.compile(
-    r"(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)"
-    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
-    r"\((?P<is_array>\s*\[?)"
-    r"(?P<content>.*?)(\]?)\))"
-)
-
-# This matches the same as early_variable_re, but with '>' instead of '<'.
-late_variable_re = re.compile(
-    r"(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)"
-    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
-    r"\((?P<is_array>\s*\[?)"
-    r"(?P<content>.*?)(\]?)\))"
-)
-
-# This matches the same as early_variable_re, but with '^' instead of '<'.
-latelate_variable_re = re.compile(
-    r"(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)"
-    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
-    r"\((?P<is_array>\s*\[?)"
-    r"(?P<content>.*?)(\]?)\))"
-)
-
-# Global cache of results from running commands so they don't have to be run
-# more than once.
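# Added illustration of what the three expansion patterns above match, using
# the group names defined in early_variable_re:
#
#     >>> m = early_variable_re.search("libs: <!(pkg-config --libs glib-2.0)")
#     >>> m.group("type"), m.group("command_string"), m.group("content")
#     ('<!', None, 'pkg-config --libs glib-2.0')
#     >>> late_variable_re.search(">(var)").group("type")
#     '>'
#     >>> latelate_variable_re.search("^(var)").group("type")
#     '^'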
-cached_command_results = {} - - -def FixupPlatformCommand(cmd): - if sys.platform == "win32": - if type(cmd) is list: - cmd = [re.sub("^cat ", "type ", cmd[0])] + cmd[1:] - else: - cmd = re.sub("^cat ", "type ", cmd) - return cmd - - -PHASE_EARLY = 0 -PHASE_LATE = 1 -PHASE_LATELATE = 2 - - -def ExpandVariables(input, phase, variables, build_file): - # Look for the pattern that gets expanded into variables - if phase == PHASE_EARLY: - variable_re = early_variable_re - expansion_symbol = "<" - elif phase == PHASE_LATE: - variable_re = late_variable_re - expansion_symbol = ">" - elif phase == PHASE_LATELATE: - variable_re = latelate_variable_re - expansion_symbol = "^" - else: - assert False - - input_str = str(input) - if IsStrCanonicalInt(input_str): - return int(input_str) - - # Do a quick scan to determine if an expensive regex search is warranted. - if expansion_symbol not in input_str: - return input_str - - # Get the entire list of matches as a list of MatchObject instances. - # (using findall here would return strings instead of MatchObjects). - matches = list(variable_re.finditer(input_str)) - if not matches: - return input_str - - output = input_str - # Reverse the list of matches so that replacements are done right-to-left. - # That ensures that earlier replacements won't mess up the string in a - # way that causes later calls to find the earlier substituted text instead - # of what's intended for replacement. - matches.reverse() - for match_group in matches: - match = match_group.groupdict() - gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match) - # match['replace'] is the substring to look for, match['type'] - # is the character code for the replacement type (< > ! <| >| <@ - # >@ !@), match['is_array'] contains a '[' for command - # arrays, and match['content'] is the name of the variable (< >) - # or command to run (!). match['command_string'] is an optional - # command string. Currently, only 'pymod_do_main' is supported. - - # run_command is true if a ! variant is used. - run_command = "!" in match["type"] - command_string = match["command_string"] - - # file_list is true if a | variant is used. - file_list = "|" in match["type"] - - # Capture these now so we can adjust them later. - replace_start = match_group.start("replace") - replace_end = match_group.end("replace") - - # Find the ending paren, and re-evaluate the contained string. - (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:]) - - # Adjust the replacement range to match the entire command - # found by FindEnclosingBracketGroup (since the variable_re - # probably doesn't match the entire command if it contained - # nested variables). - replace_end = replace_start + c_end - - # Find the "real" replacement, matching the appropriate closing - # paren, and adjust the replacement start and end. - replacement = input_str[replace_start:replace_end] - - # Figure out what the contents of the variable parens are. - contents_start = replace_start + c_start + 1 - contents_end = replace_end - 1 - contents = input_str[contents_start:contents_end] - - # Do filter substitution now for <|(). - # Admittedly, this is different than the evaluation order in other - # contexts. However, since filtration has no chance to run on <|(), - # this seems like the only obvious way to give them access to filters. 
- if file_list: - processed_variables = gyp.simple_copy.deepcopy(variables) - ProcessListFiltersInDict(contents, processed_variables) - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, processed_variables, build_file) - else: - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, variables, build_file) - - # Strip off leading/trailing whitespace so that variable matches are - # simpler below (and because they are rarely needed). - contents = contents.strip() - - # expand_to_list is true if an @ variant is used. In that case, - # the expansion should result in a list. Note that the caller - # is to be expecting a list in return, and not all callers do - # because not all are working in list context. Also, for list - # expansions, there can be no other text besides the variable - # expansion in the input string. - expand_to_list = "@" in match["type"] and input_str == replacement - - if run_command or file_list: - # Find the build file's directory, so commands can be run or file lists - # generated relative to it. - build_file_dir = os.path.dirname(build_file) - if build_file_dir == "" and not file_list: - # If build_file is just a leaf filename indicating a file in the - # current directory, build_file_dir might be an empty string. Set - # it to None to signal to subprocess.Popen that it should run the - # command in the current directory. - build_file_dir = None - - # Support <|(listfile.txt ...) which generates a file - # containing items from a gyp list, generated at gyp time. - # This works around actions/rules which have more inputs than will - # fit on the command line. - if file_list: - if type(contents) is list: - contents_list = contents - else: - contents_list = contents.split(" ") - replacement = contents_list[0] - if os.path.isabs(replacement): - raise GypError('| cannot handle absolute paths, got "%s"' % replacement) - - if not generator_filelist_paths: - path = os.path.join(build_file_dir, replacement) - else: - if os.path.isabs(build_file_dir): - toplevel = generator_filelist_paths["toplevel"] - rel_build_file_dir = gyp.common.RelativePath( - build_file_dir, toplevel - ) - else: - rel_build_file_dir = build_file_dir - qualified_out_dir = generator_filelist_paths["qualified_out_dir"] - path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement) - gyp.common.EnsureDirExists(path) - - replacement = gyp.common.RelativePath(path, build_file_dir) - f = gyp.common.WriteOnDiff(path) - for i in contents_list[1:]: - f.write("%s\n" % i) - f.close() - - elif run_command: - use_shell = True - if match["is_array"]: - contents = eval(contents) - use_shell = False - - # Check for a cached value to avoid executing commands, or generating - # file lists more than once. The cache key contains the command to be - # run as well as the directory to run it from, to account for commands - # that depend on their current directory. - # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory, - # someone could author a set of GYP files where each time the command - # is invoked it produces different output by design. When the need - # arises, the syntax should be extended to support no caching off a - # command's output so it is run every time. 
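# Added note: because the cache key below combines the command text with the
# directory it runs in, two identical <!(...) expansions (for example, a
# hypothetical <!(git rev-parse HEAD)) appearing in .gyp files in the same
# directory run the command once and share its output, while the same command
# issued from a different directory gets its own entry, since its result may
# legitimately differ there.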
-            cache_key = (str(contents), build_file_dir)
-            cached_value = cached_command_results.get(cache_key, None)
-            if cached_value is None:
-                gyp.DebugOutput(
-                    gyp.DEBUG_VARIABLES,
-                    "Executing command '%s' in directory '%s'",
-                    contents,
-                    build_file_dir,
-                )
-
-                replacement = ""
-
-                if command_string == "pymod_do_main":
-                    # <!pymod_do_main(modulename, args...) calls the indicated
-                    # python module's DoMain() function, passing the remaining
-                    # arguments as a list, and uses the return value as the
-                    # replacement.
-                    oldwd = os.getcwd()
-                    if build_file_dir:
-                        os.chdir(build_file_dir)
-                    sys.path.append(os.getcwd())
-                    try:
-                        parsed_contents = shlex.split(contents)
-                        try:
-                            py_module = __import__(parsed_contents[0])
-                        except ImportError as e:
-                            raise GypError(
-                                "Error importing pymod_do_main module (%s): %s"
-                                % (parsed_contents[0], e)
-                            )
-                        replacement = str(
-                            py_module.DoMain(parsed_contents[1:])
-                        ).rstrip()
-                    finally:
-                        os.chdir(oldwd)
-                        sys.path.pop()
-                elif command_string:
-                    raise GypError(
-                        "Unknown command string '%s' in '%s'."
-                        % (command_string, contents)
-                    )
-                else:
-                    # Fix up command with platform specific workarounds.
-                    contents = FixupPlatformCommand(contents)
-                    try:
-                        p = subprocess.Popen(
-                            contents,
-                            shell=use_shell,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            stdin=subprocess.PIPE,
-                            cwd=build_file_dir,
-                        )
-                    except Exception as e:
-                        raise GypError(
-                            "%s while executing command '%s' in %s"
-                            % (e, contents, build_file)
-                        )
-
-                    p_stdout, p_stderr = p.communicate("")
-                    if PY3:
-                        p_stdout = p_stdout.decode("utf-8")
-                        p_stderr = p_stderr.decode("utf-8")
-
-                    if p.wait() != 0 or p_stderr:
-                        sys.stderr.write(p_stderr)
-                        raise GypError(
-                            "Call to '%s' returned exit status %d while in %s."
-                            % (contents, p.returncode, build_file)
-                        )
-                    replacement = p_stdout.rstrip()
-
-                cached_command_results[cache_key] = replacement
-            else:
-                gyp.DebugOutput(
-                    gyp.DEBUG_VARIABLES,
-                    "Had cache value for command '%s' in directory '%s'",
-                    contents,
-                    build_file_dir,
-                )
-                replacement = cached_value
-
-        else:
-            # Not a command.
-            if contents not in variables:
-                if contents[-1] in ["!", "/"]:
-                    # In order to allow cross-compiles to happen more naturally,
-                    # we will allow references to >(sources/) etc. to resolve to
-                    # an empty list if undefined. This allows actions to:
-                    # 'action!': [
-                    #   '>@(_sources!)',
-                    # ],
-                    # 'action/': [
-                    #   '>@(_sources/)',
-                    # ],
-                    replacement = []
-                else:
-                    raise GypError(
-                        "Undefined variable " + contents + " in " + build_file
-                    )
-            else:
-                replacement = variables[contents]
-
-        if isinstance(replacement, bytes) and not isinstance(replacement, str):
-            replacement = replacement.decode("utf-8")  # done on Python 3 only
-        if type(replacement) is list:
-            for item in replacement:
-                if isinstance(item, bytes) and not isinstance(item, str):
-                    item = item.decode("utf-8")  # done on Python 3 only
-                if not contents[-1] == "/" and type(item) not in (str, int):
-                    raise GypError(
-                        "Variable "
-                        + contents
-                        + " must expand to a string or list of strings; "
-                        + "list contains a "
-                        + item.__class__.__name__
-                    )
-            # Run through the list and handle variable expansions in it. Since
-            # the list is guaranteed not to contain dicts, this won't do anything
-            # with conditions sections.
-            ProcessVariablesAndConditionsInList(
-                replacement, phase, variables, build_file
-            )
-        elif type(replacement) not in (str, int):
-            raise GypError(
-                "Variable "
-                + contents
-                + " must expand to a string or list of strings; "
-                + "found a "
-                + replacement.__class__.__name__
-            )
-
-        if expand_to_list:
-            # Expanding in list context. It's guaranteed that there's only one
-            # replacement to do in |input_str| and that it's this replacement. See
-            # above.
-            if type(replacement) is list:
-                # If it's already a list, make a copy.
-                output = replacement[:]
-            else:
-                # Split it the same way sh would split arguments.
-                output = shlex.split(str(replacement))
-        else:
-            # Expanding in string context.
-            encoded_replacement = ""
-            if type(replacement) is list:
-                # When expanding a list into string context, turn the list items
-                # into a string in a way that will work with a subprocess call.
-                #
-                # TODO(mark): This isn't completely correct. This should
-                # call a generator-provided function that observes the
-                # proper list-to-argument quoting rules on a specific
-                # platform instead of just calling the POSIX encoding
-                # routine.
-                encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
-            else:
-                encoded_replacement = replacement
-
-            output = (
-                output[:replace_start] + str(encoded_replacement) + output[replace_end:]
-            )
-        # Prepare for the next match iteration.
-        input_str = output
-
-    if output == input:
-        gyp.DebugOutput(
-            gyp.DEBUG_VARIABLES,
-            "Found only identity matches on %r, avoiding infinite " "recursion.",
-            output,
-        )
-    else:
-        # Look for more matches now that we've replaced some, to deal with
-        # expanding local variables (variables defined in the same
-        # variables block as this one).
-        gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
-        if type(output) is list:
-            if output and type(output[0]) is list:
-                # Leave output alone if it's a list of lists.
-                # We don't want such lists to be stringified.
-                pass
-            else:
-                new_output = []
-                for item in output:
-                    new_output.append(
-                        ExpandVariables(item, phase, variables, build_file)
-                    )
-                output = new_output
-        else:
-            output = ExpandVariables(output, phase, variables, build_file)
-
-    # Convert all strings that are canonically-represented integers into integers.
-    if type(output) is list:
-        for index, outstr in enumerate(output):
-            if IsStrCanonicalInt(outstr):
-                output[index] = int(outstr)
-    elif IsStrCanonicalInt(output):
-        output = int(output)
-
-    return output
-
-
-# The same condition is often evaluated over and over again so it
-# makes sense to cache as much as possible between evaluations.
-cached_conditions_asts = {}
-
-
-def EvalCondition(condition, conditions_key, phase, variables, build_file):
-    """Returns the dict that should be used or None if the result was
-    that nothing should be used."""
-    if type(condition) is not list:
-        raise GypError(conditions_key + " must be a list")
-    if len(condition) < 2:
-        # It's possible that condition[0] won't work in which case this
-        # attempt will raise its own IndexError. That's probably fine.
-        raise GypError(
-            conditions_key
-            + " "
-            + condition[0]
-            + " must be at least length 2, not "
-            + str(len(condition))
-        )
-
-    i = 0
-    result = None
-    while i < len(condition):
-        cond_expr = condition[i]
-        true_dict = condition[i + 1]
-        if type(true_dict) is not dict:
-            raise GypError(
-                "{} {} must be followed by a dictionary, not {}".format(
-                    conditions_key, cond_expr, type(true_dict)
-                )
-            )
-        if len(condition) > i + 2 and type(condition[i + 2]) is dict:
-            false_dict = condition[i + 2]
-            i = i + 3
-            if i != len(condition):
-                raise GypError(
-                    "{} {} has {} unexpected trailing items".format(
-                        conditions_key, cond_expr, len(condition) - i
-                    )
-                )
-        else:
-            false_dict = None
-            i = i + 2
-        if result is None:
-            result = EvalSingleCondition(
-                cond_expr, true_dict, false_dict, phase, variables, build_file
-            )
-
-    return result
-
-
-def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file):
-    """Returns true_dict if cond_expr evaluates to true, and false_dict
-    otherwise."""
-    # Do expansions on the condition itself. Since the condition can naturally
-    # contain variable references without needing to resort to GYP expansion
-    # syntax, this is of dubious value for variables, but someone might want to
-    # use a command expansion directly inside a condition.
-    cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, build_file)
-    if type(cond_expr_expanded) not in (str, int):
-        raise ValueError(
-            "Variable expansion in this context permits str and int "
-            + "only, found "
-            + cond_expr_expanded.__class__.__name__
-        )
-
-    try:
-        if cond_expr_expanded in cached_conditions_asts:
-            ast_code = cached_conditions_asts[cond_expr_expanded]
-        else:
-            ast_code = compile(cond_expr_expanded, "<string>", "eval")
-            cached_conditions_asts[cond_expr_expanded] = ast_code
-        env = {"__builtins__": {}, "v": StrictVersion}
-        if eval(ast_code, env, variables):
-            return true_dict
-        return false_dict
-    except SyntaxError as e:
-        syntax_error = SyntaxError(
-            "%s while evaluating condition '%s' in %s "
-            "at character %d."
% (str(e.args[0]), e.text, build_file, e.offset),
-            e.filename,
-            e.lineno,
-            e.offset,
-            e.text,
-        )
-        raise syntax_error
-    except NameError as e:
-        gyp.common.ExceptionAppend(
-            e,
-            "while evaluating condition '%s' in %s" % (cond_expr_expanded, build_file),
-        )
-        raise GypError(e)
-
-
-def ProcessConditionsInDict(the_dict, phase, variables, build_file):
-    # Process a 'conditions' or 'target_conditions' section in the_dict,
-    # depending on phase.
-    # early -> conditions
-    # late -> target_conditions
-    # latelate -> no conditions
-    #
-    # Each item in a conditions list consists of cond_expr, a string expression
-    # evaluated as the condition, and true_dict, a dict that will be merged into
-    # the_dict if cond_expr evaluates to true. Optionally, a third item,
-    # false_dict, may be present. false_dict is merged into the_dict if
-    # cond_expr evaluates to false.
-    #
-    # Any dict merged into the_dict will be recursively processed for nested
-    # conditionals and other expansions, also according to phase, immediately
-    # prior to being merged.
-
-    if phase == PHASE_EARLY:
-        conditions_key = "conditions"
-    elif phase == PHASE_LATE:
-        conditions_key = "target_conditions"
-    elif phase == PHASE_LATELATE:
-        return
-    else:
-        assert False
-
-    if conditions_key not in the_dict:
-        return
-
-    conditions_list = the_dict[conditions_key]
-    # Unhook the conditions list, it's no longer needed.
-    del the_dict[conditions_key]
-
-    for condition in conditions_list:
-        merge_dict = EvalCondition(
-            condition, conditions_key, phase, variables, build_file
-        )
-
-        if merge_dict is not None:
-            # Expand variables and nested conditionals in the merge_dict before
-            # merging it.
-            ProcessVariablesAndConditionsInDict(
-                merge_dict, phase, variables, build_file
-            )
-
-            MergeDicts(the_dict, merge_dict, build_file, build_file)
-
-
-def LoadAutomaticVariablesFromDict(variables, the_dict):
-    # Any keys with plain string values in the_dict become automatic variables.
-    # The variable name is the key name with a "_" character prepended.
-    for key, value in the_dict.items():
-        if type(value) in (str, int, list):
-            variables["_" + key] = value
-
-
-def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
-    # Any keys in the_dict's "variables" dict, if it has one, become variables.
-    # The variable name is the key name in the "variables" dict.
-    # Variables that end with the % character are set only if they are unset in
-    # the variables dict. the_dict_key is the name of the key that accesses
-    # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
-    # (it could be a list or it could be parentless because it is a root dict),
-    # the_dict_key will be None.
-    for key, value in the_dict.get("variables", {}).items():
-        if type(value) not in (str, int, list):
-            continue
-
-        if key.endswith("%"):
-            variable_name = key[:-1]
-            if variable_name in variables:
-                # If the variable is already set, don't set it.
-                continue
-            if the_dict_key == "variables" and variable_name in the_dict:
-                # If the variable is set without a % in the_dict, and the_dict is a
-                # variables dict (making |variables| a variables sub-dict of a
-                # variables dict), use the_dict's definition.
-                value = the_dict[variable_name]
-        else:
-            variable_name = key
-
-        variables[variable_name] = value
-
-
-def ProcessVariablesAndConditionsInDict(
-    the_dict, phase, variables_in, build_file, the_dict_key=None
-):
-    """Handle all variable and command expansion and conditional evaluation.
- - This function is the public entry point for all variable expansions and - conditional evaluations. The variables_in dictionary will not be modified - by this function. - """ - - # Make a copy of the variables_in dict that can be modified during the - # loading of automatics and the loading of the variables dict. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - - if "variables" in the_dict: - # Make sure all the local variables are added to the variables - # list before we process them so that you can reference one - # variable from another. They will be fully expanded by recursion - # in ExpandVariables. - for key, value in the_dict["variables"].items(): - variables[key] = value - - # Handle the associated variables dict first, so that any variable - # references within can be resolved prior to using them as variables. - # Pass a copy of the variables dict to avoid having it be tainted. - # Otherwise, it would have extra automatics added for everything that - # should just be an ordinary variable in this scope. - ProcessVariablesAndConditionsInDict( - the_dict["variables"], phase, variables, build_file, "variables" - ) - - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - for key, value in the_dict.items(): - # Skip "variables", which was already processed if present. - if key != "variables" and type(value) is str: - expanded = ExpandVariables(value, phase, variables, build_file) - if type(expanded) not in (str, int): - raise ValueError( - "Variable expansion in this context permits str and int " - + "only, found " - + expanded.__class__.__name__ - + " for " - + key - ) - the_dict[key] = expanded - - # Variable expansion may have resulted in changes to automatics. Reload. - # TODO(mark): Optimization: only reload if no changes were made. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Process conditions in this dict. This is done after variable expansion - # so that conditions may take advantage of expanded variables. For example, - # if the_dict contains: - # {'type': '<(library_type)', - # 'conditions': [['_type=="static_library"', { ... }]]}, - # _type, as used in the condition, will only be set to the value of - # library_type if variable expansion is performed before condition - # processing. However, condition processing should occur prior to recursion - # so that variables (both automatic and "variables" dict type) may be - # adjusted by conditions sections, merged into the_dict, and have the - # intended impact on contained dicts. - # - # This arrangement means that a "conditions" section containing a "variables" - # section will only have those variables effective in subdicts, not in - # the_dict. The workaround is to put a "conditions" section within a - # "variables" section. For example: - # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will not result in "IS_MAC" being appended to the "defines" list in the - # current scope but would result in it being appended to the "defines" list - # within "my_subdict". By comparison: - # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will append "IS_MAC" to both "defines" lists. 
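# Added illustration of the evaluation model (EvalSingleCondition above holds
# the real logic): a condition string is compiled and eval'd with the
# variables dict as its namespace, e.g.
#
#     variables = {"OS": "mac", "chromium": 1}
#     code = compile('OS=="mac" and chromium==1', "<string>", "eval")
#     eval(code, {"__builtins__": {}}, variables)   # -> True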
-
-    # Evaluate conditions sections, allowing variable expansions within them
-    # as well as nested conditionals. This will process a 'conditions' or
-    # 'target_conditions' section, perform appropriate merging and recursive
-    # conditional and variable processing, and then remove the conditions section
-    # from the_dict if it is present.
-    ProcessConditionsInDict(the_dict, phase, variables, build_file)
-
-    # Conditional processing may have resulted in changes to automatics or the
-    # variables dict. Reload.
-    variables = variables_in.copy()
-    LoadAutomaticVariablesFromDict(variables, the_dict)
-    LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
-    # Recurse into child dicts, or process child lists which may result in
-    # further recursion into descendant dicts.
-    for key, value in the_dict.items():
-        # Skip "variables" and string values, which were already processed if
-        # present.
-        if key == "variables" or type(value) is str:
-            continue
-        if type(value) is dict:
-            # Pass a copy of the variables dict so that subdicts can't influence
-            # parents.
-            ProcessVariablesAndConditionsInDict(
-                value, phase, variables, build_file, key
-            )
-        elif type(value) is list:
-            # The list itself can't influence the variables dict, and
-            # ProcessVariablesAndConditionsInList will make copies of the variables
-            # dict if it needs to pass it to something that can influence it. No
-            # copy is necessary here.
-            ProcessVariablesAndConditionsInList(value, phase, variables, build_file)
-        elif type(value) is not int:
-            raise TypeError("Unknown type " + value.__class__.__name__ + " for " + key)
-
-
-def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file):
-    # Iterate using an index so that new values can be assigned into the_list.
-    index = 0
-    while index < len(the_list):
-        item = the_list[index]
-        if type(item) is dict:
-            # Make a copy of the variables dict so that it won't influence anything
-            # outside of its own scope.
-            ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
-        elif type(item) is list:
-            ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
-        elif type(item) is str:
-            expanded = ExpandVariables(item, phase, variables, build_file)
-            if type(expanded) in (str, int):
-                the_list[index] = expanded
-            elif type(expanded) is list:
-                the_list[index : index + 1] = expanded
-                index += len(expanded)
-
-                # index now identifies the next item to examine. Continue right now
-                # without falling into the index increment below.
-                continue
-            else:
-                raise ValueError(
-                    "Variable expansion in this context permits strings and "
-                    + "lists only, found "
-                    + expanded.__class__.__name__
-                    + " at "
-                    + str(index)
-                )
-        elif type(item) is not int:
-            raise TypeError(
-                "Unknown type " + item.__class__.__name__ + " at index " + str(index)
-            )
-        index = index + 1
-
-
-def BuildTargetsDict(data):
-    """Builds a dict mapping fully-qualified target names to their target dicts.
-
-    |data| is a dict mapping loaded build files by pathname relative to the
-    current directory. Values in |data| are build file contents. For each
-    |data| value with a "targets" key, the value of the "targets" key is taken
-    as a list containing target dicts. Each target's fully-qualified name is
-    constructed from the pathname of the build file (|data| key) and its
-    "target_name" property. These fully-qualified names are used as the keys
-    in the returned dict. These keys provide access to the target dicts,
-    the dicts in the "targets" lists.
- """ - - targets = {} - for build_file in data["target_build_files"]: - for target in data[build_file].get("targets", []): - target_name = gyp.common.QualifiedTarget( - build_file, target["target_name"], target["toolset"] - ) - if target_name in targets: - raise GypError("Duplicate target definitions for " + target_name) - targets[target_name] = target - - return targets - - -def QualifyDependencies(targets): - """Make dependency links fully-qualified relative to the current directory. - - |targets| is a dict mapping fully-qualified target names to their target - dicts. For each target in this dict, keys known to contain dependency - links are examined, and any dependencies referenced will be rewritten - so that they are fully-qualified and relative to the current directory. - All rewritten dependencies are suitable for use as keys to |targets| or a - similar dict. - """ - - all_dependency_sections = [ - dep + op for dep in dependency_sections for op in ("", "!", "/") - ] - - for target, target_dict in targets.items(): - target_build_file = gyp.common.BuildFile(target) - toolset = target_dict["toolset"] - for dependency_key in all_dependency_sections: - dependencies = target_dict.get(dependency_key, []) - for index, dep in enumerate(dependencies): - dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( - target_build_file, dep, toolset - ) - if not multiple_toolsets: - # Ignore toolset specification in the dependency if it is specified. - dep_toolset = toolset - dependency = gyp.common.QualifiedTarget( - dep_file, dep_target, dep_toolset - ) - dependencies[index] = dependency - - # Make sure anything appearing in a list other than "dependencies" also - # appears in the "dependencies" list. - if ( - dependency_key != "dependencies" - and dependency not in target_dict["dependencies"] - ): - raise GypError( - "Found " - + dependency - + " in " - + dependency_key - + " of " - + target - + ", but not in dependencies" - ) - - -def ExpandWildcardDependencies(targets, data): - """Expands dependencies specified as build_file:*. - - For each target in |targets|, examines sections containing links to other - targets. If any such section contains a link of the form build_file:*, it - is taken as a wildcard link, and is expanded to list each target in - build_file. The |data| dict provides access to build file dicts. - - Any target that does not wish to be included by wildcard can provide an - optional "suppress_wildcard" key in its target dict. When present and - true, a wildcard dependency link will not include such targets. - - All dependency names, including the keys to |targets| and the values in each - dependency list, must be qualified when this function is called. - """ - - for target, target_dict in targets.items(): - target_build_file = gyp.common.BuildFile(target) - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - - # Loop this way instead of "for dependency in" or "for index in range" - # because the dependencies list will be modified within the loop body. - index = 0 - while index < len(dependencies): - ( - dependency_build_file, - dependency_target, - dependency_toolset, - ) = gyp.common.ParseQualifiedTarget(dependencies[index]) - if dependency_target != "*" and dependency_toolset != "*": - # Not a wildcard. Keep it moving. - index = index + 1 - continue - - if dependency_build_file == target_build_file: - # It's an error for a target to depend on all other targets in - # the same file, because a target cannot depend on itself. 
-                    raise GypError(
-                        "Found wildcard in "
-                        + dependency_key
-                        + " of "
-                        + target
-                        + " referring to same build file"
-                    )
-
-                # Take the wildcard out and adjust the index so that the next
-                # dependency in the list will be processed the next time through the
-                # loop.
-                del dependencies[index]
-                index = index - 1
-
-                # Loop through the targets in the other build file, adding them to
-                # this target's list of dependencies in place of the removed
-                # wildcard.
-                dependency_target_dicts = data[dependency_build_file]["targets"]
-                for dependency_target_dict in dependency_target_dicts:
-                    if int(dependency_target_dict.get("suppress_wildcard", False)):
-                        continue
-                    dependency_target_name = dependency_target_dict["target_name"]
-                    if (
-                        dependency_target != "*"
-                        and dependency_target != dependency_target_name
-                    ):
-                        continue
-                    dependency_target_toolset = dependency_target_dict["toolset"]
-                    if (
-                        dependency_toolset != "*"
-                        and dependency_toolset != dependency_target_toolset
-                    ):
-                        continue
-                    dependency = gyp.common.QualifiedTarget(
-                        dependency_build_file,
-                        dependency_target_name,
-                        dependency_target_toolset,
-                    )
-                    index = index + 1
-                    dependencies.insert(index, dependency)
-
-                index = index + 1
-
-
-def Unify(items):
-    """Removes duplicate elements from items, keeping the first element."""
-    seen = {}
-    return [seen.setdefault(e, e) for e in items if e not in seen]
-
-
-def RemoveDuplicateDependencies(targets):
-    """Makes sure every dependency appears only once in all targets' dependency
-    lists."""
-    for target_name, target_dict in targets.items():
-        for dependency_key in dependency_sections:
-            dependencies = target_dict.get(dependency_key, [])
-            if dependencies:
-                target_dict[dependency_key] = Unify(dependencies)
-
-
-def Filter(items, item):
-    """Removes item from items."""
-    res = {}
-    return [res.setdefault(e, e) for e in items if e != item]
-
-
-def RemoveSelfDependencies(targets):
-    """Remove self dependencies from targets that have the prune_self_dependency
-    variable set."""
-    for target_name, target_dict in targets.items():
-        for dependency_key in dependency_sections:
-            dependencies = target_dict.get(dependency_key, [])
-            if dependencies:
-                for t in dependencies:
-                    if t == target_name:
-                        if (
-                            targets[t]
-                            .get("variables", {})
-                            .get("prune_self_dependency", 0)
-                        ):
-                            target_dict[dependency_key] = Filter(
-                                dependencies, target_name
-                            )
-
-
-def RemoveLinkDependenciesFromNoneTargets(targets):
-    """Remove dependencies having the 'link_dependency' attribute from the 'none'
-    targets."""
-    for target_name, target_dict in targets.items():
-        for dependency_key in dependency_sections:
-            dependencies = target_dict.get(dependency_key, [])
-            if dependencies:
-                for t in dependencies:
-                    if target_dict.get("type", None) == "none":
-                        if targets[t].get("variables", {}).get("link_dependency", 0):
-                            target_dict[dependency_key] = Filter(
-                                target_dict[dependency_key], t
-                            )
-
-
-class DependencyGraphNode(object):
-    """
-
-    Attributes:
-        ref: A reference to an object that this DependencyGraphNode represents.
-        dependencies: List of DependencyGraphNodes on which this one depends.
-        dependents: List of DependencyGraphNodes that depend on this one.
-    """
-
-    class CircularException(GypError):
-        pass
-
-    def __init__(self, ref):
-        self.ref = ref
-        self.dependencies = []
-        self.dependents = []
-
-    def __repr__(self):
-        return "<DependencyGraphNode: %r>" % self.ref
-
-    def FlattenToList(self):
-        # flat_list is the sorted list of dependencies - actually, the list items
-        # are the "ref" attributes of DependencyGraphNodes. Every target will
-        # appear in flat_list after all of its dependencies, and before all of its
-        # dependents.
-        flat_list = OrderedSet()
-
-        def ExtractNodeRef(node):
-            """Extracts the object that the node represents from the given node."""
-            return node.ref
-
-        # in_degree_zeros is the list of DependencyGraphNodes that have no
-        # dependencies not in flat_list. Initially, it is a copy of the children
-        # of this node, because when the graph was built, nodes with no
-        # dependencies were made implicit dependents of the root node.
-        in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
-
-        while in_degree_zeros:
-            # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
-            # can be appended to flat_list. Take these nodes out of in_degree_zeros
-            # as work progresses, so that the next node to process from the list can
-            # always be accessed at a consistent position.
-            node = in_degree_zeros.pop()
-            flat_list.add(node.ref)
-
-            # Look at dependents of the node just added to flat_list. Some of them
-            # may now belong in in_degree_zeros.
-            for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
-                is_in_degree_zero = True
-                # TODO: We want to check through the
-                # node_dependent.dependencies list but if it's long and we
-                # always start at the beginning, then we get O(n^2) behaviour.
-                for node_dependent_dependency in sorted(
-                    node_dependent.dependencies, key=ExtractNodeRef
-                ):
-                    if node_dependent_dependency.ref not in flat_list:
-                        # The dependent has one or more dependencies not in
-                        # flat_list. There will be more chances to add it to
-                        # flat_list when examining it again as a dependent of
-                        # those other dependencies, provided that there are no
-                        # cycles.
-                        is_in_degree_zero = False
-                        break
-
-                if is_in_degree_zero:
-                    # All of the dependent's dependencies are already in flat_list. Add
-                    # it to in_degree_zeros where it will be processed in a future
-                    # iteration of the outer loop.
-                    in_degree_zeros += [node_dependent]
-
-        return list(flat_list)
-
-    def FindCycles(self):
-        """
-        Returns a list of cycles in the graph, where each cycle is its own list.
-        """
-        results = []
-        visited = set()
-
-        def Visit(node, path):
-            for child in node.dependents:
-                if child in path:
-                    results.append([child] + path[: path.index(child) + 1])
-                elif child not in visited:
-                    visited.add(child)
-                    Visit(child, [child] + path)
-
-        visited.add(self)
-        Visit(self, [self])
-
-        return results
-
-    def DirectDependencies(self, dependencies=None):
-        """Returns a list of just direct dependencies."""
-        if dependencies is None:
-            dependencies = []
-
-        for dependency in self.dependencies:
-            # Check for None, corresponding to the root node.
-            if dependency.ref and dependency.ref not in dependencies:
-                dependencies.append(dependency.ref)
-
-        return dependencies
-
-    def _AddImportedDependencies(self, targets, dependencies=None):
-        """Given a list of direct dependencies, adds indirect dependencies that
-        other dependencies have declared to export their settings.
-
-        This method does not operate on self. Rather, it operates on the list
-        of dependencies in the |dependencies| argument. For each dependency in
-        that list, if any declares that it exports the settings of one of its
-        own dependencies, those dependencies whose settings are "passed through"
-        are added to the list. As new items are added to the list, they too will
-        be processed, so it is possible to import settings through multiple levels
-        of dependencies.
- - This method is not terribly useful on its own, it depends on being - "primed" with a list of direct dependencies such as one provided by - DirectDependencies. DirectAndImportedDependencies is intended to be the - public entry point. - """ - - if dependencies is None: - dependencies = [] - - index = 0 - while index < len(dependencies): - dependency = dependencies[index] - dependency_dict = targets[dependency] - # Add any dependencies whose settings should be imported to the list - # if not already present. Newly-added items will be checked for - # their own imports when the list iteration reaches them. - # Rather than simply appending new items, insert them after the - # dependency that exported them. This is done to more closely match - # the depth-first method used by DeepDependencies. - add_index = 1 - for imported_dependency in dependency_dict.get( - "export_dependent_settings", [] - ): - if imported_dependency not in dependencies: - dependencies.insert(index + add_index, imported_dependency) - add_index = add_index + 1 - index = index + 1 - - return dependencies - - def DirectAndImportedDependencies(self, targets, dependencies=None): - """Returns a list of a target's direct dependencies and all indirect - dependencies that a dependency has advertised settings should be exported - through the dependency for. - """ - - dependencies = self.DirectDependencies(dependencies) - return self._AddImportedDependencies(targets, dependencies) - - def DeepDependencies(self, dependencies=None): - """Returns an OrderedSet of all of a target's dependencies, recursively.""" - if dependencies is None: - # Using a list to get ordered output and a set to do fast "is it - # already added" checks. - dependencies = OrderedSet() - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. - if dependency.ref is None: - continue - if dependency.ref not in dependencies: - dependency.DeepDependencies(dependencies) - dependencies.add(dependency.ref) - - return dependencies - - def _LinkDependenciesInternal( - self, targets, include_shared_libraries, dependencies=None, initial=True - ): - """Returns an OrderedSet of dependency targets that are linked - into this target. - - This function has a split personality, depending on the setting of - |initial|. Outside callers should always leave |initial| at its default - setting. - - When adding a target to the list of dependencies, this function will - recurse into itself with |initial| set to False, to collect dependencies - that are linked into the linkable target for which the list is being built. - - If |include_shared_libraries| is False, the resulting dependencies will not - include shared_library targets that are linked into this target. - """ - if dependencies is None: - # Using a list to get ordered output and a set to do fast "is it - # already added" checks. - dependencies = OrderedSet() - - # Check for None, corresponding to the root node. - if self.ref is None: - return dependencies - - # It's kind of sucky that |targets| has to be passed into this function, - # but that's presently the easiest way to access the target dicts so that - # this function can find target types. 
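# Added illustration of the contract implemented below: for an executable A
# depending on static_library B, which depends on shared_library C,
# DependenciesToLinkAgainst(targets) computed on A's node yields {A, B, C};
# with include_shared_libraries=False (the link_settings propagation case),
# C is omitted from the result.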
-
- if "target_name" not in targets[self.ref]:
- raise GypError("Missing 'target_name' field in target.")
-
- if "type" not in targets[self.ref]:
- raise GypError(
- "Missing 'type' field in target %s" % targets[self.ref]["target_name"]
- )
-
- target_type = targets[self.ref]["type"]
-
- is_linkable = target_type in linkable_types
-
- if initial and not is_linkable:
- # If this is the first target being examined and it's not linkable,
- # return an empty list of link dependencies, because the link
- # dependencies are intended to apply to the target itself (initial is
- # True) and this target won't be linked.
- return dependencies
-
- # Don't traverse 'none' targets if explicitly excluded.
- if target_type == "none" and not targets[self.ref].get(
- "dependencies_traverse", True
- ):
- dependencies.add(self.ref)
- return dependencies
-
- # Executables, mac kernel extensions, windows drivers and loadable modules
- # are already fully and finally linked. Nothing else can be a link
- # dependency of them; there can only be dependencies in the sense that a
- # dependent target might run an executable or load the loadable_module.
- if not initial and target_type in (
- "executable",
- "loadable_module",
- "mac_kernel_extension",
- "windows_driver",
- ):
- return dependencies
-
- # Shared libraries are already fully linked. They should only be included
- # in |dependencies| when adjusting static library dependencies (in order to
- # link against the shared_library's import lib), but should not be included
- # in |dependencies| when propagating link_settings.
- # The |include_shared_libraries| flag controls which of these two cases we
- # are handling.
- if (
- not initial
- and target_type == "shared_library"
- and not include_shared_libraries
- ):
- return dependencies
-
- # The target is linkable, add it to the list of link dependencies.
- if self.ref not in dependencies:
- dependencies.add(self.ref)
- if initial or not is_linkable:
- # If this is a subsequent target and it's linkable, don't look any
- # further for linkable dependencies, as they'll already be linked into
- # this linkable target. Always look at dependencies of the initial
- # target, and always look at dependencies of non-linkables.
- for dependency in self.dependencies:
- dependency._LinkDependenciesInternal(
- targets, include_shared_libraries, dependencies, False
- )
-
- return dependencies
-
- def DependenciesForLinkSettings(self, targets):
- """
- Returns a list of dependency targets whose link_settings should be merged
- into this target.
- """
-
- # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
- # link_settings are propagated. So for now, we will allow it, unless the
- # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
- # False. Once chrome is fixed, we can remove this flag.
- include_shared_libraries = targets[self.ref].get(
- "allow_sharedlib_linksettings_propagation", True
- )
- return self._LinkDependenciesInternal(targets, include_shared_libraries)
-
- def DependenciesToLinkAgainst(self, targets):
- """
- Returns a list of dependency targets that are linked into this target.
- """
- return self._LinkDependenciesInternal(targets, True)
-
-
-def BuildDependencyList(targets):
- # Create a DependencyGraphNode for each target. Put it into a dict for easy
- # access.
- dependency_nodes = {}
- for target, spec in targets.items():
- if target not in dependency_nodes:
- dependency_nodes[target] = DependencyGraphNode(target)
-
- # Set up the dependency links.
Targets that have no dependencies are treated - # as dependent on root_node. - root_node = DependencyGraphNode(None) - for target, spec in targets.items(): - target_node = dependency_nodes[target] - dependencies = spec.get("dependencies") - if not dependencies: - target_node.dependencies = [root_node] - root_node.dependents.append(target_node) - else: - for dependency in dependencies: - dependency_node = dependency_nodes.get(dependency) - if not dependency_node: - raise GypError( - "Dependency '%s' not found while " - "trying to load target %s" % (dependency, target) - ) - target_node.dependencies.append(dependency_node) - dependency_node.dependents.append(target_node) - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(targets): - if not root_node.dependents: - # If all targets have dependencies, add the first target as a dependent - # of root_node so that the cycle can be discovered from root_node. - target = next(iter(targets)) - target_node = dependency_nodes[target] - target_node.dependencies.append(root_node) - root_node.dependents.append(target_node) - - cycles = [] - for cycle in root_node.FindCycles(): - paths = [node.ref for node in cycle] - cycles.append("Cycle: %s" % " -> ".join(paths)) - raise DependencyGraphNode.CircularException( - "Cycles in dependency graph detected:\n" + "\n".join(cycles) - ) - - return [dependency_nodes, flat_list] - - -def VerifyNoGYPFileCircularDependencies(targets): - # Create a DependencyGraphNode for each gyp file containing a target. Put - # it into a dict for easy access. - dependency_nodes = {} - for target in targets: - build_file = gyp.common.BuildFile(target) - if build_file not in dependency_nodes: - dependency_nodes[build_file] = DependencyGraphNode(build_file) - - # Set up the dependency links. - for target, spec in targets.items(): - build_file = gyp.common.BuildFile(target) - build_file_node = dependency_nodes[build_file] - target_dependencies = spec.get("dependencies", []) - for dependency in target_dependencies: - try: - dependency_build_file = gyp.common.BuildFile(dependency) - except GypError as e: - gyp.common.ExceptionAppend( - e, "while computing dependencies of .gyp file %s" % build_file - ) - raise - - if dependency_build_file == build_file: - # A .gyp file is allowed to refer back to itself. - continue - dependency_node = dependency_nodes.get(dependency_build_file) - if not dependency_node: - raise GypError("Dependency '%s' not found" % dependency_build_file) - if dependency_node not in build_file_node.dependencies: - build_file_node.dependencies.append(dependency_node) - dependency_node.dependents.append(build_file_node) - - # Files that have no dependencies are treated as dependent on root_node. - root_node = DependencyGraphNode(None) - for build_file_node in dependency_nodes.values(): - if len(build_file_node.dependencies) == 0: - build_file_node.dependencies.append(root_node) - root_node.dependents.append(build_file_node) - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(dependency_nodes): - if not root_node.dependents: - # If all files have dependencies, add the first file as a dependent - # of root_node so that the cycle can be discovered from root_node. 
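In both builders above, the only cycle signal is FlattenToList coming back short; FindCycles is then used purely to produce a readable diagnostic. A self-contained sketch of the underlying check, assuming a dict-of-lists graph:

    def has_cycle(nodes):
        # nodes: dict mapping name -> list of dependency names.
        visiting, done = set(), set()

        def visit(name):
            if name in done:
                return False
            if name in visiting:
                return True  # back edge: a cycle runs through this node
            visiting.add(name)
            cyclic = any(visit(dep) for dep in nodes[name])
            visiting.discard(name)
            done.add(name)
            return cyclic

        return any(visit(name) for name in sorted(nodes))

    assert not has_cycle({"a": ["b"], "b": []})
    assert has_cycle({"a": ["b"], "b": ["a"]})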
- file_node = next(iter(dependency_nodes.values()))
- file_node.dependencies.append(root_node)
- root_node.dependents.append(file_node)
- cycles = []
- for cycle in root_node.FindCycles():
- paths = [node.ref for node in cycle]
- cycles.append("Cycle: %s" % " -> ".join(paths))
- raise DependencyGraphNode.CircularException(
- "Cycles in .gyp file dependency graph detected:\n" + "\n".join(cycles)
- )
-
-
-def DoDependentSettings(key, flat_list, targets, dependency_nodes):
- # key should be one of all_dependent_settings, direct_dependent_settings,
- # or link_settings.
-
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
-
- if key == "all_dependent_settings":
- dependencies = dependency_nodes[target].DeepDependencies()
- elif key == "direct_dependent_settings":
- dependencies = dependency_nodes[target].DirectAndImportedDependencies(
- targets
- )
- elif key == "link_settings":
- dependencies = dependency_nodes[target].DependenciesForLinkSettings(targets)
- else:
- raise GypError(
- "DoDependentSettings doesn't know how to determine "
- "dependencies for " + key
- )
-
- for dependency in dependencies:
- dependency_dict = targets[dependency]
- if key not in dependency_dict:
- continue
- dependency_build_file = gyp.common.BuildFile(dependency)
- MergeDicts(
- target_dict, dependency_dict[key], build_file, dependency_build_file
- )
-
-
-def AdjustStaticLibraryDependencies(
- flat_list, targets, dependency_nodes, sort_dependencies
-):
- # Recompute target "dependencies" properties. For each static library
- # target, remove "dependencies" entries referring to other static libraries,
- # unless the dependency has the "hard_dependency" attribute set. For each
- # linkable target, add a "dependencies" entry referring to all of the
- # target's computed list of link dependencies (including static libraries)
- # if no such entry is already present.
- for target in flat_list:
- target_dict = targets[target]
- target_type = target_dict["type"]
-
- if target_type == "static_library":
- if "dependencies" not in target_dict:
- continue
-
- target_dict["dependencies_original"] = target_dict.get("dependencies", [])[
- :
- ]
-
- # A static library should not depend on another static library unless
- # the dependency relationship is "hard," which should only be done when
- # a dependent relies on some side effect other than just the build
- # product, like a rule or action output. Further, if a target has a
- # non-hard dependency, but that dependency exports a hard dependency,
- # the non-hard dependency can safely be removed, but the exported hard
- # dependency must be added to the target to keep the same dependency
- # ordering.
- dependencies = dependency_nodes[target].DirectAndImportedDependencies(
- targets
- )
- index = 0
- while index < len(dependencies):
- dependency = dependencies[index]
- dependency_dict = targets[dependency]
-
- # Remove every non-hard static library dependency and remove every
- # non-static library dependency that isn't a direct dependency.
- if (
- dependency_dict["type"] == "static_library"
- and not dependency_dict.get("hard_dependency", False)
- ) or (
- dependency_dict["type"] != "static_library"
- and dependency not in target_dict["dependencies"]
- ):
- # Take the dependency out of the list, and don't increment index
- # because the next dependency to analyze will shift into the index
- # formerly occupied by the one being removed.
- del dependencies[index]
- else:
- index = index + 1
-
- # Update the dependencies.
If the dependencies list is empty, it's not
- # needed, so unhook it.
- if len(dependencies) > 0:
- target_dict["dependencies"] = dependencies
- else:
- del target_dict["dependencies"]
-
- elif target_type in linkable_types:
- # Get a list of dependency targets that should be linked into this
- # target. Add them to the dependencies list if they're not already
- # present.
-
- link_dependencies = dependency_nodes[target].DependenciesToLinkAgainst(
- targets
- )
- for dependency in link_dependencies:
- if dependency == target:
- continue
- if "dependencies" not in target_dict:
- target_dict["dependencies"] = []
- if dependency not in target_dict["dependencies"]:
- target_dict["dependencies"].append(dependency)
- # Sort the dependencies list in the order from dependents to dependencies.
- # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
- # Note: flat_list is already sorted in the order from dependencies to
- # dependents.
- if sort_dependencies and "dependencies" in target_dict:
- target_dict["dependencies"] = [
- dep
- for dep in reversed(flat_list)
- if dep in target_dict["dependencies"]
- ]
-
-
-# Initialize this here to speed up MakePathRelative.
-exception_re = re.compile(r"""["']?[-/$<>^]""")
-
-
-def MakePathRelative(to_file, fro_file, item):
- # If item is a relative path, it's relative to the build file dict that it's
- # coming from. Fix it up to make it relative to the build file dict that
- # it's going into.
- # Exception: any |item| that begins with these special characters is
- # returned without modification.
- # / Used when a path is already absolute (shortcut optimization;
- # such paths would be returned as absolute anyway)
- # $ Used for build environment variables
- # - Used for some build environment flags (such as -lapr-1 in a
- # "libraries" section)
- # < Used for our own variable and command expansions (see ExpandVariables)
- # > Used for our own variable and command expansions (see ExpandVariables)
- # ^ Used for our own variable and command expansions (see ExpandVariables)
- #
- # "/' Used when a value is quoted. If these are present, then we
- # check the second character instead.
- #
- if to_file == fro_file or exception_re.match(item):
- return item
- else:
- # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
- # temporary measure. This should really be addressed by keeping all paths
- # in POSIX until actual project generation.
- ret = os.path.normpath(
- os.path.join(
- gyp.common.RelativePath(
- os.path.dirname(fro_file), os.path.dirname(to_file)
- ),
- item,
- )
- ).replace("\\", "/")
- if item.endswith("/"):
- ret += "/"
- return ret
-
-
-def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
- # The Python documentation recommends that objects which do not support
- # hashing set __hash__ to None. Python library objects follow this rule.
- def is_hashable(val):
- return val.__hash__
-
- # If x is hashable, returns whether x is in s. Else returns whether x is in items.
- def is_in_set_or_list(x, s, items):
- if is_hashable(x):
- return x in s
- return x in items
-
- prepend_index = 0
-
- # Make membership testing of hashables in |to| (in particular, strings)
- # faster.
- hashable_to_set = set(x for x in to if is_hashable(x))
- for item in fro:
- singleton = False
- if type(item) in (str, int):
- # The cheap and easy case.
- if is_paths: - to_item = MakePathRelative(to_file, fro_file, item) - else: - to_item = item - - if not (type(item) is str and item.startswith("-")): - # Any string that doesn't begin with a "-" is a singleton - it can - # only appear once in a list, to be enforced by the list merge append - # or prepend. - singleton = True - elif type(item) is dict: - # Make a copy of the dictionary, continuing to look for paths to fix. - # The other intelligent aspects of merge processing won't apply because - # item is being merged into an empty dict. - to_item = {} - MergeDicts(to_item, item, to_file, fro_file) - elif type(item) is list: - # Recurse, making a copy of the list. If the list contains any - # descendant dicts, path fixing will occur. Note that here, custom - # values for is_paths and append are dropped; those are only to be - # applied to |to| and |fro|, not sublists of |fro|. append shouldn't - # matter anyway because the new |to_item| list is empty. - to_item = [] - MergeLists(to_item, item, to_file, fro_file) - else: - raise TypeError( - "Attempt to merge list item of unsupported type " - + item.__class__.__name__ - ) - - if append: - # If appending a singleton that's already in the list, don't append. - # This ensures that the earliest occurrence of the item will stay put. - if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to): - to.append(to_item) - if is_hashable(to_item): - hashable_to_set.add(to_item) - else: - # If prepending a singleton that's already in the list, remove the - # existing instance and proceed with the prepend. This ensures that the - # item appears at the earliest possible position in the list. - while singleton and to_item in to: - to.remove(to_item) - - # Don't just insert everything at index 0. That would prepend the new - # items to the list in reverse order, which would be an unwelcome - # surprise. - to.insert(prepend_index, to_item) - if is_hashable(to_item): - hashable_to_set.add(to_item) - prepend_index = prepend_index + 1 - - -def MergeDicts(to, fro, to_file, fro_file): - # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.items(): - # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give - # copy semantics. Something else may want to merge from the |fro| dict - # later, and having the same dict ref pointed to twice in the tree isn't - # what anyone wants considering that the dicts may subsequently be - # modified. - if k in to: - bad_merge = False - if type(v) in (str, int): - if type(to[k]) not in (str, int): - bad_merge = True - elif not isinstance(v, type(to[k])): - bad_merge = True - - if bad_merge: - raise TypeError( - "Attempt to merge dict value of type " - + v.__class__.__name__ - + " into incompatible type " - + to[k].__class__.__name__ - + " for key " - + k - ) - if type(v) in (str, int): - # Overwrite the existing value, if any. Cheap and easy. - is_path = IsPathSection(k) - if is_path: - to[k] = MakePathRelative(to_file, fro_file, v) - else: - to[k] = v - elif type(v) is dict: - # Recurse, guaranteeing copies will be made of objects that require it. - if k not in to: - to[k] = {} - MergeDicts(to[k], v, to_file, fro_file) - elif type(v) is list: - # Lists in dicts can be merged with different policies, depending on - # how the key in the "from" dict (k, the from-key) is written. - # - # If the from-key has ...the to-list will have this action - # this character appended:... applied when receiving the from-list: - # = replace - # + prepend - # ? 
set, only if to-list does not yet exist
- # (none) append
- #
- # This logic is list-specific, but since it relies on the associated
- # dict key, it's checked in this dict-oriented function.
- ext = k[-1]
- append = True
- if ext == "=":
- list_base = k[:-1]
- lists_incompatible = [list_base, list_base + "?"]
- to[list_base] = []
- elif ext == "+":
- list_base = k[:-1]
- lists_incompatible = [list_base + "=", list_base + "?"]
- append = False
- elif ext == "?":
- list_base = k[:-1]
- lists_incompatible = [list_base, list_base + "=", list_base + "+"]
- else:
- list_base = k
- lists_incompatible = [list_base + "=", list_base + "?"]
-
- # Some combinations of merge policies appearing together are meaningless.
- # It's stupid to replace and append simultaneously, for example. Append
- # and prepend are the only policies that can coexist.
- for list_incompatible in lists_incompatible:
- if list_incompatible in fro:
- raise GypError(
- "Incompatible list policies " + k + " and " + list_incompatible
- )
-
- if list_base in to:
- if ext == "?":
- # If the key ends in "?", the list will only be merged if it doesn't
- # already exist.
- continue
- elif type(to[list_base]) is not list:
- # This may not have been checked above if merging in a list with an
- # extension character.
- raise TypeError(
- "Attempt to merge dict value of type "
- + v.__class__.__name__
- + " into incompatible type "
- + to[list_base].__class__.__name__
- + " for key "
- + list_base
- + " ("
- + k
- + ")"
- )
- else:
- to[list_base] = []
-
- # Call MergeLists, which will make copies of objects that require it.
- # MergeLists can recurse back into MergeDicts, although this will only be
- # to make copies of dicts (with paths fixed); there will be no
- # subsequent dict "merging" once entering a list because lists are
- # always replaced, appended to, or prepended to.
- is_paths = IsPathSection(list_base)
- MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
- else:
- raise TypeError(
- "Attempt to merge dict value of unsupported type "
- + v.__class__.__name__
- + " for key "
- + k
- )
-
-
-def MergeConfigWithInheritance(
- new_configuration_dict, build_file, target_dict, configuration, visited
-):
- # Skip if previously visited.
- if configuration in visited:
- return
-
- # Look at this configuration.
- configuration_dict = target_dict["configurations"][configuration]
-
- # Merge in parents.
- for parent in configuration_dict.get("inherit_from", []):
- MergeConfigWithInheritance(
- new_configuration_dict,
- build_file,
- target_dict,
- parent,
- visited + [configuration],
- )
-
- # Merge it into the new config.
- MergeDicts(new_configuration_dict, configuration_dict, build_file, build_file)
-
- # Drop abstract.
- if "abstract" in new_configuration_dict:
- del new_configuration_dict["abstract"]
-
-
-def SetUpConfigurations(target, target_dict):
- # key_suffixes is a list of key suffixes that might appear on key names.
- # These suffixes are handled in conditional evaluations (for =, +, and ?)
- # and rules/exclude processing (for ! and /). Keys with these suffixes
- # should be treated the same as keys without.
- key_suffixes = ["=", "+", "?", "!", "/"]
-
- build_file = gyp.common.BuildFile(target)
-
- # Provide a single configuration by default if none exists.
- # TODO(mark): Signal an error if default_configurations exists but
- # configurations does not.
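Stepping back to the list-policy table in MergeDicts above, here is a hedged mini-reimplementation of just the suffix dispatch (no path fixing, no singleton handling; not gyp's actual MergeDicts):

    def merge_list_key(to, key, value):
        ext = key[-1]
        base = key[:-1] if ext in "=+?" else key
        if ext == "=":
            to[base] = list(value)                     # replace
        elif ext == "+":
            to[base] = list(value) + to.get(base, [])  # prepend
        elif ext == "?":
            to.setdefault(base, list(value))           # set only if absent
        else:
            to.setdefault(base, []).extend(value)      # plain append

    d = {"defines": ["A"]}
    merge_list_key(d, "defines+", ["B"])
    assert d == {"defines": ["B", "A"]}
    merge_list_key(d, "defines?", ["C"])
    assert d == {"defines": ["B", "A"]}  # "?" is a no-op once the key exists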
- if "configurations" not in target_dict: - target_dict["configurations"] = {"Default": {}} - if "default_configuration" not in target_dict: - concrete = [ - i - for (i, config) in target_dict["configurations"].items() - if not config.get("abstract") - ] - target_dict["default_configuration"] = sorted(concrete)[0] - - merged_configurations = {} - configs = target_dict["configurations"] - for (configuration, old_configuration_dict) in configs.items(): - # Skip abstract configurations (saves work only). - if old_configuration_dict.get("abstract"): - continue - # Configurations inherit (most) settings from the enclosing target scope. - # Get the inheritance relationship right by making a copy of the target - # dict. - new_configuration_dict = {} - for (key, target_val) in target_dict.items(): - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if key_base not in non_configuration_keys: - new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val) - - # Merge in configuration (with all its parents first). - MergeConfigWithInheritance( - new_configuration_dict, build_file, target_dict, configuration, [] - ) - - merged_configurations[configuration] = new_configuration_dict - - # Put the new configurations back into the target dict as a configuration. - for configuration in merged_configurations.keys(): - target_dict["configurations"][configuration] = merged_configurations[ - configuration - ] - - # Now drop all the abstract ones. - configs = target_dict["configurations"] - target_dict["configurations"] = { - k: v for k, v in configs.items() if not v.get("abstract") - } - - # Now that all of the target's configurations have been built, go through - # the target dict's keys and remove everything that's been moved into a - # "configurations" section. - delete_keys = [] - for key in target_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if key_base not in non_configuration_keys: - delete_keys.append(key) - for key in delete_keys: - del target_dict[key] - - # Check the configurations to see if they contain invalid keys. - for configuration in target_dict["configurations"].keys(): - configuration_dict = target_dict["configurations"][configuration] - for key in configuration_dict.keys(): - if key in invalid_configuration_keys: - raise GypError( - "%s not allowed in the %s configuration, found in " - "target %s" % (key, configuration, target) - ) - - -def ProcessListFiltersInDict(name, the_dict): - """Process regular expression and exclusion-based filters on lists. - - An exclusion list is in a dict key named with a trailing "!", like - "sources!". Every item in such a list is removed from the associated - main list, which in this example, would be "sources". Removed items are - placed into a "sources_excluded" list in the dict. - - Regular expression (regex) filters are contained in dict keys named with a - trailing "/", such as "sources/" to operate on the "sources" list. Regex - filters in a dict take the form: - 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'], - ['include', '_mac\\.cc$'] ], - The first filter says to exclude all files ending in _linux.cc, _mac.cc, and - _win.cc. The second filter then includes all files ending in _mac.cc that - are now or were once in the "sources" list. Items matching an "exclude" - filter are subject to the same processing as would occur if they were listed - by name in an exclusion list (ending in "!"). 
Items matching an "include"
- filter are brought back into the main list if previously excluded by an
- exclusion list or exclusion regex filter. Subsequent matching "exclude"
- patterns can still cause items to be excluded after matching an "include".
- """
-
- # Look through the dictionary for any lists whose keys end in "!" or "/".
- # These are lists that will be treated as exclude lists and regular
- # expression-based exclude/include lists. Collect the lists that are
- # needed first, looking for the lists that they operate on, and assemble
- # them into |lists|. This is done in a separate loop up front, because
- # the _included and _excluded keys need to be added to the_dict, and that
- # can't be done while iterating through it.
-
- lists = []
- del_lists = []
- for key, value in the_dict.items():
- operation = key[-1]
- if operation != "!" and operation != "/":
- continue
-
- if type(value) is not list:
- raise ValueError(
- name + " key " + key + " must be list, not " + value.__class__.__name__
- )
-
- list_key = key[:-1]
- if list_key not in the_dict:
- # This happens when there's a list like "sources!" but no corresponding
- # "sources" list. Since there's nothing for it to operate on, queue up
- # the "sources!" list for deletion now.
- del_lists.append(key)
- continue
-
- if type(the_dict[list_key]) is not list:
- value = the_dict[list_key]
- raise ValueError(
- name
- + " key "
- + list_key
- + " must be list, not "
- + value.__class__.__name__
- + " when applying "
- + {"!": "exclusion", "/": "regex"}[operation]
- )
-
- if list_key not in lists:
- lists.append(list_key)
-
- # Delete the lists that are known to be unneeded at this point.
- for del_list in del_lists:
- del the_dict[del_list]
-
- for list_key in lists:
- the_list = the_dict[list_key]
-
- # Initialize the list_actions list, which is parallel to the_list. Each
- # item in list_actions identifies whether the corresponding item in
- # the_list should be excluded, unconditionally preserved (included), or
- # whether no exclusion or inclusion has been applied. Items for which
- # no exclusion or inclusion has been applied (yet) have value -1, items
- # excluded have value 0, and items included have value 1. Includes and
- # excludes override previous actions. All items in list_actions are
- # initialized to -1 because no excludes or includes have been processed
- # yet.
- list_actions = list((-1,) * len(the_list))
-
- exclude_key = list_key + "!"
- if exclude_key in the_dict:
- for exclude_item in the_dict[exclude_key]:
- for index, list_item in enumerate(the_list):
- if exclude_item == list_item:
- # This item matches the exclude_item, so set its action to 0
- # (exclude).
- list_actions[index] = 0
-
- # The "whatever!" list is no longer needed, dump it.
- del the_dict[exclude_key]
-
- regex_key = list_key + "/"
- if regex_key in the_dict:
- for regex_item in the_dict[regex_key]:
- [action, pattern] = regex_item
- pattern_re = re.compile(pattern)
-
- if action == "exclude":
- # This item matches an exclude regex, set its value to 0 (exclude).
- action_value = 0
- elif action == "include":
- # This item matches an include regex, set its value to 1 (include).
- action_value = 1
- else:
- # This is an action that doesn't make any sense.
- raise ValueError( - "Unrecognized action " - + action - + " in " - + name - + " key " - + regex_key - ) - - for index, list_item in enumerate(the_list): - if list_actions[index] == action_value: - # Even if the regex matches, nothing will change so continue - # (regex searches are expensive). - continue - if pattern_re.search(list_item): - # Regular expression match. - list_actions[index] = action_value - - # The "whatever/" list is no longer needed, dump it. - del the_dict[regex_key] - - # Add excluded items to the excluded list. - # - # Note that exclude_key ("sources!") is different from excluded_key - # ("sources_excluded"). The exclude_key list is input and it was already - # processed and deleted; the excluded_key list is output and it's about - # to be created. - excluded_key = list_key + "_excluded" - if excluded_key in the_dict: - raise GypError( - name + " key " + excluded_key + " must not be present prior " - " to applying exclusion/regex filters for " + list_key - ) - - excluded_list = [] - - # Go backwards through the list_actions list so that as items are deleted, - # the indices of items that haven't been seen yet don't shift. That means - # that things need to be prepended to excluded_list to maintain them in the - # same order that they existed in the_list. - for index in range(len(list_actions) - 1, -1, -1): - if list_actions[index] == 0: - # Dump anything with action 0 (exclude). Keep anything with action 1 - # (include) or -1 (no include or exclude seen for the item). - excluded_list.insert(0, the_list[index]) - del the_list[index] - - # If anything was excluded, put the excluded list into the_dict at - # excluded_key. - if len(excluded_list) > 0: - the_dict[excluded_key] = excluded_list - - # Now recurse into subdicts and lists that may contain dicts. - for key, value in the_dict.items(): - if type(value) is dict: - ProcessListFiltersInDict(key, value) - elif type(value) is list: - ProcessListFiltersInList(key, value) - - -def ProcessListFiltersInList(name, the_list): - for item in the_list: - if type(item) is dict: - ProcessListFiltersInDict(name, item) - elif type(item) is list: - ProcessListFiltersInList(name, item) - - -def ValidateTargetType(target, target_dict): - """Ensures the 'type' field on the target is one of the known types. - - Arguments: - target: string, name of target. - target_dict: dict, target spec. - - Raises an exception on error. - """ - VALID_TARGET_TYPES = ( - "executable", - "loadable_module", - "static_library", - "shared_library", - "mac_kernel_extension", - "none", - "windows_driver", - ) - target_type = target_dict.get("type", None) - if target_type not in VALID_TARGET_TYPES: - raise GypError( - "Target %s has an invalid target type '%s'. " - "Must be one of %s." % (target, target_type, "/".join(VALID_TARGET_TYPES)) - ) - if ( - target_dict.get("standalone_static_library", 0) - and not target_type == "static_library" - ): - raise GypError( - "Target %s has type %s but standalone_static_library flag is" - " only valid for static_library type." % (target, target_type) - ) - - -def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): - """Ensures that the rules sections in target_dict are valid and consistent, - and determines which sources they apply to. - - Arguments: - target: string, name of target. - target_dict: dict, target spec containing "rules" and "sources" lists. - extra_sources_for_rules: a list of keys to scan for rule matches in - addition to 'sources'. 
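A worked example of the exclusion and regex filters described above, with hypothetical file names (this mirrors the list_actions bookkeeping; later filters win):

    import re

    sources = ["a_win.cc", "b_mac.cc", "c.cc"]
    excludes = ["c.cc"]                                # the "sources!" list
    regexes = [["exclude", r"_(linux|mac|win)\.cc$"],  # the "sources/" list
               ["include", r"_mac\.cc$"]]

    actions = [-1] * len(sources)  # -1 untouched, 0 exclude, 1 include
    for i, item in enumerate(sources):
        if item in excludes:
            actions[i] = 0
    for verb, pattern in regexes:
        value = 0 if verb == "exclude" else 1
        for i, item in enumerate(sources):
            if re.search(pattern, item):
                actions[i] = value

    kept = [s for s, a in zip(sources, actions) if a != 0]
    assert kept == ["b_mac.cc"]  # the include regex rescued b_mac.cc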
- """ - - # Dicts to map between values found in rules' 'rule_name' and 'extension' - # keys and the rule dicts themselves. - rule_names = {} - rule_extensions = {} - - rules = target_dict.get("rules", []) - for rule in rules: - # Make sure that there's no conflict among rule names and extensions. - rule_name = rule["rule_name"] - if rule_name in rule_names: - raise GypError( - "rule %s exists in duplicate, target %s" % (rule_name, target) - ) - rule_names[rule_name] = rule - - rule_extension = rule["extension"] - if rule_extension.startswith("."): - rule_extension = rule_extension[1:] - if rule_extension in rule_extensions: - raise GypError( - ( - "extension %s associated with multiple rules, " - + "target %s rules %s and %s" - ) - % ( - rule_extension, - target, - rule_extensions[rule_extension]["rule_name"], - rule_name, - ) - ) - rule_extensions[rule_extension] = rule - - # Make sure rule_sources isn't already there. It's going to be - # created below if needed. - if "rule_sources" in rule: - raise GypError( - "rule_sources must not exist in input, target %s rule %s" - % (target, rule_name) - ) - - rule_sources = [] - source_keys = ["sources"] - source_keys.extend(extra_sources_for_rules) - for source_key in source_keys: - for source in target_dict.get(source_key, []): - (source_root, source_extension) = os.path.splitext(source) - if source_extension.startswith("."): - source_extension = source_extension[1:] - if source_extension == rule_extension: - rule_sources.append(source) - - if len(rule_sources) > 0: - rule["rule_sources"] = rule_sources - - -def ValidateRunAsInTarget(target, target_dict, build_file): - target_name = target_dict.get("target_name") - run_as = target_dict.get("run_as") - if not run_as: - return - if type(run_as) is not dict: - raise GypError( - "The 'run_as' in target %s from file %s should be a " - "dictionary." % (target_name, build_file) - ) - action = run_as.get("action") - if not action: - raise GypError( - "The 'run_as' in target %s from file %s must have an " - "'action' section." % (target_name, build_file) - ) - if type(action) is not list: - raise GypError( - "The 'action' for 'run_as' in target %s from file %s " - "must be a list." % (target_name, build_file) - ) - working_directory = run_as.get("working_directory") - if working_directory and type(working_directory) is not str: - raise GypError( - "The 'working_directory' for 'run_as' in target %s " - "in file %s should be a string." % (target_name, build_file) - ) - environment = run_as.get("environment") - if environment and type(environment) is not dict: - raise GypError( - "The 'environment' for 'run_as' in target %s " - "in file %s should be a dictionary." % (target_name, build_file) - ) - - -def ValidateActionsInTarget(target, target_dict, build_file): - """Validates the inputs to the actions in a target.""" - target_name = target_dict.get("target_name") - actions = target_dict.get("actions", []) - for action in actions: - action_name = action.get("action_name") - if not action_name: - raise GypError( - "Anonymous action in target %s. " - "An action must have an 'action_name' field." % target_name - ) - inputs = action.get("inputs", None) - if inputs is None: - raise GypError("Action in target %s has no inputs." % target_name) - action_command = action.get("action") - if action_command and not action_command[0]: - raise GypError("Empty action as command in target %s." % target_name) - - -def TurnIntIntoStrInDict(the_dict): - """Given dict the_dict, recursively converts all integers into strings. 
- """
- # Iterate over a snapshot of the items: the loop below may delete an
- # integer key and reinsert it as a string, and mutating the dict while
- # iterating over a live view would raise a RuntimeError in Python 3.
- for k, v in list(the_dict.items()):
- if type(v) is int:
- v = str(v)
- the_dict[k] = v
- elif type(v) is dict:
- TurnIntIntoStrInDict(v)
- elif type(v) is list:
- TurnIntIntoStrInList(v)
-
- if type(k) is int:
- del the_dict[k]
- the_dict[str(k)] = v
-
-
-def TurnIntIntoStrInList(the_list):
- """Given list the_list, recursively converts all integers into strings.
- """
- for index, item in enumerate(the_list):
- if type(item) is int:
- the_list[index] = str(item)
- elif type(item) is dict:
- TurnIntIntoStrInDict(item)
- elif type(item) is list:
- TurnIntIntoStrInList(item)
-
-
-def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data):
- """Return only the targets that are deep dependencies of |root_targets|."""
- qualified_root_targets = []
- for target in root_targets:
- target = target.strip()
- qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
- if not qualified_targets:
- raise GypError("Could not find target %s" % target)
- qualified_root_targets.extend(qualified_targets)
-
- wanted_targets = {}
- for target in qualified_root_targets:
- wanted_targets[target] = targets[target]
- for dependency in dependency_nodes[target].DeepDependencies():
- wanted_targets[dependency] = targets[dependency]
-
- wanted_flat_list = [t for t in flat_list if t in wanted_targets]
-
- # Prune unwanted targets from each build_file's data dict.
- for build_file in data["target_build_files"]:
- if "targets" not in data[build_file]:
- continue
- new_targets = []
- for target in data[build_file]["targets"]:
- qualified_name = gyp.common.QualifiedTarget(
- build_file, target["target_name"], target["toolset"]
- )
- if qualified_name in wanted_targets:
- new_targets.append(target)
- data[build_file]["targets"] = new_targets
-
- return wanted_targets, wanted_flat_list
-
-
-def VerifyNoCollidingTargets(targets):
- """Verify that no two targets in the same directory share the same name.
-
- Arguments:
- targets: A list of targets in the form 'path/to/file.gyp:target_name'.
- """
- # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
- used = {}
- for target in targets:
- # Separate out 'path/to/file.gyp', 'target_name' from
- # 'path/to/file.gyp:target_name'.
- path, name = target.rsplit(":", 1)
- # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
- subdir, gyp = os.path.split(path)
- # Use '.' for the current directory '', so that the error messages make
- # more sense.
- if not subdir:
- subdir = "."
- # Prepare a key like 'path/to:target_name'.
- key = subdir + ":" + name
- if key in used:
- # Complain if this target is already used.
- raise GypError(
- 'Duplicate target name "%s" in directory "%s" used both '
- 'in "%s" and "%s".' % (name, subdir, gyp, used[key])
- )
- used[key] = gyp
-
-
-def SetGeneratorGlobals(generator_input_info):
- # Set up path_sections and non_configuration_keys with the default data plus
- # the generator-specific data.
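A quick usage example of TurnIntIntoStrInDict above (assuming gyp's pylib directory is on sys.path, as the unit tests further down also assume):

    import gyp.input

    d = {"type": "none", 1: [2, {"v": 3}]}
    gyp.input.TurnIntIntoStrInDict(d)
    assert d == {"type": "none", "1": ["2", {"v": "3"}]}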
- global path_sections - path_sections = set(base_path_sections) - path_sections.update(generator_input_info["path_sections"]) - - global non_configuration_keys - non_configuration_keys = base_non_configuration_keys[:] - non_configuration_keys.extend(generator_input_info["non_configuration_keys"]) - - global multiple_toolsets - multiple_toolsets = generator_input_info["generator_supports_multiple_toolsets"] - - global generator_filelist_paths - generator_filelist_paths = generator_input_info["generator_filelist_paths"] - - -def Load( - build_files, - variables, - includes, - depth, - generator_input_info, - check, - circular_check, - parallel, - root_targets, -): - SetGeneratorGlobals(generator_input_info) - # A generator can have other lists (in addition to sources) be processed - # for rules. - extra_sources_for_rules = generator_input_info["extra_sources_for_rules"] - - # Load build files. This loads every target-containing build file into - # the |data| dictionary such that the keys to |data| are build file names, - # and the values are the entire build file contents after "early" or "pre" - # processing has been done and includes have been resolved. - # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as - # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps - # track of the keys corresponding to "target" files. - data = {"target_build_files": set()} - # Normalize paths everywhere. This is important because paths will be - # used as keys to the data dict and for references between input files. - build_files = set(map(os.path.normpath, build_files)) - if parallel: - LoadTargetBuildFilesParallel( - build_files, data, variables, includes, depth, check, generator_input_info - ) - else: - aux_data = {} - for build_file in build_files: - try: - LoadTargetBuildFile( - build_file, data, aux_data, variables, includes, depth, check, True - ) - except Exception as e: - gyp.common.ExceptionAppend(e, "while trying to load %s" % build_file) - raise - - # Build a dict to access each target's subdict by qualified name. - targets = BuildTargetsDict(data) - - # Fully qualify all dependency links. - QualifyDependencies(targets) - - # Remove self-dependencies from targets that have 'prune_self_dependencies' - # set to 1. - RemoveSelfDependencies(targets) - - # Expand dependencies specified as build_file:*. - ExpandWildcardDependencies(targets, data) - - # Remove all dependencies marked as 'link_dependency' from the targets of - # type 'none'. - RemoveLinkDependenciesFromNoneTargets(targets) - - # Apply exclude (!) and regex (/) list filters only for dependency_sections. - for target_name, target_dict in targets.items(): - tmp_dict = {} - for key_base in dependency_sections: - for op in ("", "!", "/"): - key = key_base + op - if key in target_dict: - tmp_dict[key] = target_dict[key] - del target_dict[key] - ProcessListFiltersInDict(target_name, tmp_dict) - # Write the results back to |target_dict|. - for key in tmp_dict: - target_dict[key] = tmp_dict[key] - - # Make sure every dependency appears at most once. - RemoveDuplicateDependencies(targets) - - if circular_check: - # Make sure that any targets in a.gyp don't contain dependencies in other - # .gyp files that further depend on a.gyp. - VerifyNoGYPFileCircularDependencies(targets) - - [dependency_nodes, flat_list] = BuildDependencyList(targets) - - if root_targets: - # Remove, from |targets| and |flat_list|, the targets that are not deep - # dependencies of the targets specified in |root_targets|. 
- targets, flat_list = PruneUnwantedTargets( - targets, flat_list, dependency_nodes, root_targets, data - ) - - # Check that no two targets in the same directory have the same name. - VerifyNoCollidingTargets(flat_list) - - # Handle dependent settings of various types. - for settings_type in [ - "all_dependent_settings", - "direct_dependent_settings", - "link_settings", - ]: - DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) - - # Take out the dependent settings now that they've been published to all - # of the targets that require them. - for target in flat_list: - if settings_type in targets[target]: - del targets[target][settings_type] - - # Make sure static libraries don't declare dependencies on other static - # libraries, but that linkables depend on all unlinked static libraries - # that they need so that their link steps will be correct. - gii = generator_input_info - if gii["generator_wants_static_library_dependencies_adjusted"]: - AdjustStaticLibraryDependencies( - flat_list, - targets, - dependency_nodes, - gii["generator_wants_sorted_dependencies"], - ) - - # Apply "post"/"late"/"target" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATE, variables, build_file - ) - - # Move everything that can go into a "configurations" section into one. - for target in flat_list: - target_dict = targets[target] - SetUpConfigurations(target, target_dict) - - # Apply exclude (!) and regex (/) list filters. - for target in flat_list: - target_dict = targets[target] - ProcessListFiltersInDict(target, target_dict) - - # Apply "latelate" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATELATE, variables, build_file - ) - - # Make sure that the rules make sense, and build up rule_sources lists as - # needed. Not all generators will need to use the rule_sources lists, but - # some may, and it seems best to build the list in a common spot. - # Also validate actions and run_as elements in targets. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ValidateTargetType(target, target_dict) - ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) - ValidateRunAsInTarget(target, target_dict, build_file) - ValidateActionsInTarget(target, target_dict, build_file) - - # Generators might not expect ints. Turn them into strs. - TurnIntIntoStrInDict(data) - - # TODO(mark): Return |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - return [flat_list, targets, data] diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/input_test.py deleted file mode 100755 index 6672ddc014b15..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/input_test.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Unit tests for the input.py file.""" - -import gyp.input -import unittest - - -class TestFindCycles(unittest.TestCase): - def setUp(self): - self.nodes = {} - for x in ("a", "b", "c", "d", "e"): - self.nodes[x] = gyp.input.DependencyGraphNode(x) - - def _create_dependency(self, dependent, dependency): - dependent.dependencies.append(dependency) - dependency.dependents.append(dependent) - - def test_no_cycle_empty_graph(self): - for label, node in self.nodes.items(): - self.assertEqual([], node.FindCycles()) - - def test_no_cycle_line(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["b"], self.nodes["c"]) - self._create_dependency(self.nodes["c"], self.nodes["d"]) - - for label, node in self.nodes.items(): - self.assertEqual([], node.FindCycles()) - - def test_no_cycle_dag(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["a"], self.nodes["c"]) - self._create_dependency(self.nodes["b"], self.nodes["c"]) - - for label, node in self.nodes.items(): - self.assertEqual([], node.FindCycles()) - - def test_cycle_self_reference(self): - self._create_dependency(self.nodes["a"], self.nodes["a"]) - - self.assertEqual( - [[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles() - ) - - def test_cycle_two_nodes(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["b"], self.nodes["a"]) - - self.assertEqual( - [[self.nodes["a"], self.nodes["b"], self.nodes["a"]]], - self.nodes["a"].FindCycles(), - ) - self.assertEqual( - [[self.nodes["b"], self.nodes["a"], self.nodes["b"]]], - self.nodes["b"].FindCycles(), - ) - - def test_two_cycles(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["b"], self.nodes["a"]) - - self._create_dependency(self.nodes["b"], self.nodes["c"]) - self._create_dependency(self.nodes["c"], self.nodes["b"]) - - cycles = self.nodes["a"].FindCycles() - self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles) - self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles) - self.assertEqual(2, len(cycles)) - - def test_big_cycle(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["b"], self.nodes["c"]) - self._create_dependency(self.nodes["c"], self.nodes["d"]) - self._create_dependency(self.nodes["d"], self.nodes["e"]) - self._create_dependency(self.nodes["e"], self.nodes["a"]) - - self.assertEqual( - [ - [ - self.nodes["a"], - self.nodes["b"], - self.nodes["c"], - self.nodes["d"], - self.nodes["e"], - self.nodes["a"], - ] - ], - self.nodes["a"].FindCycles(), - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py deleted file mode 100755 index 07412578d19a2..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py +++ /dev/null @@ -1,776 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. 
-""" - -from __future__ import print_function - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import struct -import subprocess -import sys -import tempfile - -PY3 = bytes != str - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace("-", "") - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - convert_to_binary = convert_to_binary == "True" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == ".xib": - return self._CopyXIBFile(source, dest) - elif extension == ".storyboard": - return self._CopyXIBFile(source, dest) - elif extension == ".strings" and not convert_to_binary: - self._CopyStringsFile(source, dest) - else: - if os.path.exists(dest): - os.unlink(dest) - shutil.copy(source, dest) - - if convert_to_binary and extension in (".plist", ".strings"): - self._ConvertToBinary(dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
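The Dispatch mechanism above is purely name-based; a quick standalone check of the mangling _CommandifyName performs (not invoking gyp-mac-tool itself):

    name = "copy-info-plist"
    method = "Exec%s" % name.title().replace("-", "")
    assert method == "ExecCopyInfoPlist"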
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"] - - if os.environ["XCODE_VERSION_ACTUAL"] > "0700": - args.extend(["--auto-activate-custom-fonts"]) - if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ: - args.extend( - [ - "--target-device", - "iphone", - "--target-device", - "ipad", - "--minimum-deployment-target", - os.environ["IPHONEOS_DEPLOYMENT_TARGET"], - ] - ) - else: - args.extend( - [ - "--target-device", - "mac", - "--minimum-deployment-target", - os.environ["MACOSX_DEPLOYMENT_TARGET"], - ] - ) - - args.extend( - ["--output-format", "human-readable-text", "--compile", dest, source] - ) - - ibtool_section_re = re.compile(r"/\*.*\*/") - ibtool_re = re.compile(r".*note:.*is clipping its content") - try: - stdout = subprocess.check_output(args) - except subprocess.CalledProcessError as e: - print(e.output) - raise - current_section_header = None - for line in stdout.splitlines(): - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - print(current_section_header) - current_section_header = None - print(line) - return 0 - - def _ConvertToBinary(self, dest): - subprocess.check_call( - ["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest] - ) - - def _CopyStringsFile(self, source, dest): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - - with open(source, "rb") as in_file: - s = in_file.read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - with open(dest, "wb") as fp: - fp.write(s.decode(input_code).encode("UTF-16")) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - with open(file_name, "rb") as fp: - try: - header = fp.read(3) - except Exception: - return None - if header.startswith(b"\xFE\xFF"): - return "UTF-16" - elif header.startswith(b"\xFF\xFE"): - return "UTF-16" - elif header.startswith(b"\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - with open(source, "r") as fd: - lines = fd.read() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist.update(json.loads(keys[0])) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. 
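The encoding sniff in _DetectInputEncoding above keys off byte-order marks only; an equivalent standalone sketch:

    def sniff_bom(first_bytes):
        # Mirrors _DetectInputEncoding: recognize a BOM or give up.
        if first_bytes.startswith((b"\xFE\xFF", b"\xFF\xFE")):
            return "UTF-16"
        if first_bytes.startswith(b"\xEF\xBB\xBF"):
            return "UTF-8"
        return None

    assert sniff_bom(b"\xFF\xFEh\x00") == "UTF-16"
    assert sniff_bom(b"plain ascii") is None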
- IDENT_RE = re.compile(r"[_/\s]")
- for key in os.environ:
- if key.startswith("_"):
- continue
- evar = "${%s}" % key
- evalue = os.environ[key]
- lines = lines.replace(evar, evalue)
-
- # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = "${%s:identifier}" % key
- evalue = IDENT_RE.sub("_", os.environ[key])
- lines = lines.replace(evar, evalue)
-
- evar = "${%s:rfc1034identifier}" % key
- evalue = IDENT_RE.sub("-", os.environ[key])
- lines = lines.replace(evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.splitlines()
- for i in range(len(lines)):
- if lines[i].strip().startswith("${"):
- lines[i] = None
- lines[i - 1] = None
- lines = "\n".join(line for line in lines if line is not None)
-
- # Write out the file with variables replaced.
- with open(dest, "w") as fd:
- fd.write(lines)
-
- # Write out the PkgInfo file now that the Info.plist file has been
- # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == "True":
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist["CFBundlePackageType"]
- if package_type != "APPL":
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get("CFBundleSignature", "????")
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = "?" * 4
-
- dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
- with open(dest, "w") as fp:
- fp.write("%s%s" % (package_type, signature_code))
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(
- r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
- )
- libtool_re5 = re.compile(
- r"^.*libtool: warning for library: "
- + r".* the table of contents is empty "
- + r"\(no object file members in the library define global symbols\)$"
- )
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
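The three substitution forms above, applied to a made-up variable (the :identifier and :rfc1034identifier suffixes are the Xcode conventions the comment describes):

    import re

    IDENT_RE = re.compile(r"[_/\s]")
    value = "My App/Beta"
    line = "${PRODUCT} ${PRODUCT:identifier} ${PRODUCT:rfc1034identifier}"
    line = line.replace("${PRODUCT}", value)
    line = line.replace("${PRODUCT:identifier}", IDENT_RE.sub("_", value))
    line = line.replace("${PRODUCT:rfc1034identifier}", IDENT_RE.sub("-", value))
    assert line == "My App/Beta My_App_Beta My-App-Beta"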
- env["ZERO_AR_DATE"] = "1"
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- if PY3:
- err = err.decode("utf-8")
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print(line, file=sys.stderr)
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"):
- os.utime(cmd_list[i + 1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageIosFramework(self, framework):
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split(".")[0]
- module_path = os.path.join(framework, "Modules")
- if not os.path.exists(module_path):
- os.mkdir(module_path)
- module_template = (
- "framework module %s {\n"
- ' umbrella header "%s.h"\n'
- "\n"
- " export *\n"
- " module * { export * }\n"
- "}\n" % (binary, binary)
- )
-
- with open(os.path.join(module_path, "module.modulemap"), "w") as module_file:
- module_file.write(module_template)
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split(".")[0]
-
- CURRENT = "Current"
- RESOURCES = "Resources"
- VERSIONS = "Versions"
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
- framework_name = os.path.basename(framework).split(".")[0]
- all_headers = [os.path.abspath(header) for header in all_headers]
- filelist = {}
- for header in all_headers:
- filename = os.path.basename(header)
- filelist[filename] = header
- filelist[os.path.join(framework_name, filename)] = header
- WriteHmap(out, filelist)
-
- def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
- header_path = os.path.join(framework, "Headers")
- if not os.path.exists(header_path):
- os.makedirs(header_path)
- for header in copy_headers:
- shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
- This invokes 'actool' to compile all the input .xcassets files. The
- |keys| argument is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalog contains an application icon
- or a launch image.
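Given the docstring above, a sketch of how such a |keys| payload expands into actool flags (hypothetical key names; the values follow the bool/list/scalar rules in the method body that follows):

    import json

    keys = json.dumps({"app-icon": "AppIcon", "include-all-app-icons": True})
    args = []
    for key, value in json.loads(keys).items():
        flag = "--" + key
        if isinstance(value, bool):
            if value:
                args.append(flag)  # bare switch
        elif isinstance(value, list):
            for v in value:
                args.extend([flag, str(v)])
        else:
            args.extend([flag, str(value)])
    assert args == ["--app-icon", "AppIcon", "--include-all-app-icons"]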
- - Note that 'actool' does not create the Assets.car file if the asset - catalog does not contain an imageset. - """ - command_line = [ - "xcrun", - "actool", - "--output-format", - "human-readable-text", - "--compress-pngs", - "--notices", - "--warnings", - "--errors", - ] - is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ - if is_iphone_target: - platform = os.environ["CONFIGURATION"].split("-")[-1] - if platform not in ("iphoneos", "iphonesimulator"): - platform = "iphonesimulator" - command_line.extend( - [ - "--platform", - platform, - "--target-device", - "iphone", - "--target-device", - "ipad", - "--minimum-deployment-target", - os.environ["IPHONEOS_DEPLOYMENT_TARGET"], - "--compile", - os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]), - ] - ) - else: - command_line.extend( - [ - "--platform", - "macosx", - "--target-device", - "mac", - "--minimum-deployment-target", - os.environ["MACOSX_DEPLOYMENT_TARGET"], - "--compile", - os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]), - ] - ) - if keys: - keys = json.loads(keys) - for key, value in keys.items(): - arg_name = "--" + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if input paths are relative, so use os.path.abspath - # to get absolute path names for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. pick the provisioning profile that best matches the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 2. copy Entitlements.plist from user or SDK next to the bundle, - 3. code sign the bundle. - """ - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier() - ) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides - ) - - args = ["codesign", "--force", "--sign", key] - if preserve == "True": - args.extend(["--deep", "--preserve-metadata=identifier,entitlements"]) - else: - args.extend(["--entitlements", entitlements_path]) - args.extend(["--timestamp=none", path]) - subprocess.check_call(args) - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionaries: variable substitutions and values - to override when generating the entitlements file.
- """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier - ) - target_path = os.path.join( - os.environ["BUILT_PRODUCTS_DIR"], - os.environ["CONTENTS_FOLDER_PATH"], - "embedded.mobileprovision", - ) - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".") - return substitutions, provisioning_data["Entitlements"] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles" - ) - if not os.path.isdir(profiles_dir): - print( - "cannot find mobile provisioning for %s" % (bundle_identifier), - file=sys.stderr, - ) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + ".mobileprovision") - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, "*.mobileprovision") - ) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get("Entitlements", {}).get( - "application-identifier", "" - ) - for team_identifier in profile_data.get("TeamIdentifier", []): - app_id = "%s.%s" % (team_identifier, bundle_identifier) - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, - profile_data, - team_identifier, - ) - if not valid_provisioning_profiles: - print( - "cannot find mobile provisioning for %s" % (bundle_identifier), - file=sys.stderr, - ) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call( - ["security", "cms", "-D", "-i", profile_path, "-o", temp.name] - ) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.items(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except Exception: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(["plutil", "-convert", "xml1", temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - "CFBundleIdentifier": bundle_identifier, - "AppIdentifierPrefix": app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"] - ) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data["CFBundleIdentifier"] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent" - ) - if not source_path: - source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist") - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.items(): - data = data.replace("$(%s)" % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - - -def NextGreaterPowerOf2(x): - return 2 ** (x).bit_length() - - -def WriteHmap(output_name, filelist): - """Generates a header map based on |filelist|. - - Per Mark Mentovai: - A header map is structured essentially as a hash table, keyed by names used - in #includes, and providing pathnames to the actual files. - - The implementation below and the comment above comes from inspecting: - http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt - while also looking at the implementation in clang in: - https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp - """ - magic = 1751998832 - version = 1 - _reserved = 0 - count = len(filelist) - capacity = NextGreaterPowerOf2(count) - strings_offset = 24 + (12 * capacity) - max_value_length = max(len(value) for value in filelist.values()) - - out = open(output_name, "wb") - out.write( - struct.pack( - " 0 or arg.count("/") > 1: - arg = os.path.normpath(arg) - - # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes - # preceding it, and results in n backslashes + the quote. So we substitute - # in 2* what we match, +1 more, plus the quote. - arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg) - - # %'s also need to be doubled otherwise they're interpreted as batch - # positional arguments. Also make sure to escape the % so that they're - # passed literally through escaping so they can be singled to just the - # original %. Otherwise, trying to pass the literal representation that - # looks like an environment variable to the shell (e.g. %PATH%) would fail. - arg = arg.replace("%", "%%") - - # These commands are used in rsp files, so no escaping for the shell (via ^) - # is necessary. - - # Finally, wrap the whole thing in quotes so that the above quote rule - # applies and whitespace isn't a word break. - return '"' + arg + '"' - - -def EncodeRspFileList(args): - """Process a list of arguments using QuoteCmdExeArgument.""" - # Note that the first argument is assumed to be the command. Don't add - # quotes around it because then built-ins like 'echo', etc. won't work. 
- - -def EncodeRspFileList(args): - """Process a list of arguments using QuoteForRspFile.""" - # Note that the first argument is assumed to be the command. Don't add - # quotes around it because then built-ins like 'echo', etc. won't work. - # Take care to normpath only the path in the case of 'call ../x.bat' because - # otherwise the whole thing is incorrectly interpreted as a path and not - # normalized correctly. - if not args: - return "" - if args[0].startswith("call "): - call, program = args[0].split(" ", 1) - program = call + " " + os.path.normpath(program) - else: - program = os.path.normpath(args[0]) - return program + " " + " ".join(QuoteForRspFile(arg) for arg in args[1:]) - - -def _GenericRetrieve(root, default, path): - """Given a list of dictionary keys |path| and a tree of dicts |root|, find - value at path, or return |default| if any of the path doesn't exist.""" - if not root: - return default - if not path: - return root - return _GenericRetrieve(root.get(path[0]), default, path[1:]) - - -def _AddPrefix(element, prefix): - """Add |prefix| to |element| or each subelement if element is iterable.""" - if element is None: - return element - # Note, not Iterable because we don't want to handle strings like that. - if isinstance(element, list) or isinstance(element, tuple): - return [prefix + e for e in element] - else: - return prefix + element - - -def _DoRemapping(element, map): - """If |element| then remap it through |map|. If |element| is iterable then - each item will be remapped. Any elements not found will be removed.""" - if map is not None and element is not None: - if not callable(map): - map = map.get # Assume it's a dict, otherwise a callable to do the remap. - if isinstance(element, list) or isinstance(element, tuple): - element = filter(None, [map(elem) for elem in element]) - else: - element = map(element) - return element - - -def _AppendOrReturn(append, element): - """If |append| is None, simply return |element|. If |append| is not None, - then add |element| to it, adding each item in |element| if it's a list or - tuple.""" - if append is not None and element is not None: - if isinstance(element, list) or isinstance(element, tuple): - append.extend(element) - else: - append.append(element) - else: - return element - - -def _FindDirectXInstallation(): - """Try to find an installation location for the DirectX SDK. Check for the - standard environment variable, and if that doesn't exist, try to find - via the registry. May return None if not found in either location.""" - # Return previously calculated value, if there is one - if hasattr(_FindDirectXInstallation, "dxsdk_dir"): - return _FindDirectXInstallation.dxsdk_dir - - dxsdk_dir = os.environ.get("DXSDK_DIR") - if not dxsdk_dir: - # Setup params to pass to and attempt to launch reg.exe. - cmd = ["reg.exe", "query", r"HKLM\Software\Microsoft\DirectX", "/s"] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout = p.communicate()[0] - if PY3: - stdout = stdout.decode("utf-8") - for line in stdout.splitlines(): - if "InstallPath" in line: - dxsdk_dir = line.split(" ")[3] + "\\" - - # Cache return value - _FindDirectXInstallation.dxsdk_dir = dxsdk_dir - return dxsdk_dir - - -def GetGlobalVSMacroEnv(vs_version): - """Get a dict of variables mapping internal VS macro names to their gyp - equivalents. Returns all variables that are independent of the target.""" - env = {} - # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when - # Visual Studio is actually installed. - if vs_version.Path(): - env["$(VSInstallDir)"] = vs_version.Path() - env["$(VCInstallDir)"] = os.path.join(vs_version.Path(), "VC") + "\\" - # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be - # set.
This happens when the SDK is sync'd via src-internal, rather than - # by typical end-user installation of the SDK. If it's not set, we don't - # want to leave the unexpanded variable in the path, so simply strip it. - dxsdk_dir = _FindDirectXInstallation() - env["$(DXSDK_DIR)"] = dxsdk_dir if dxsdk_dir else "" - # Try to find an installation location for the Windows DDK by checking - # the WDK_DIR environment variable, may be None. - env["$(WDK_DIR)"] = os.environ.get("WDK_DIR", "") - return env - - -def ExtractSharedMSVSSystemIncludes(configs, generator_flags): - """Finds msvs_system_include_dirs that are common to all targets, removes - them from all targets, and returns an OrderedSet containing them.""" - all_system_includes = OrderedSet(configs[0].get("msvs_system_include_dirs", [])) - for config in configs[1:]: - system_includes = config.get("msvs_system_include_dirs", []) - all_system_includes = all_system_includes & OrderedSet(system_includes) - if not all_system_includes: - return None - # Expand macros in all_system_includes. - env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags)) - expanded_system_includes = OrderedSet( - [ExpandMacros(include, env) for include in all_system_includes] - ) - if any(["$" in include for include in expanded_system_includes]): - # Some path relies on target-specific variables, bail. - return None - - # Remove system includes shared by all targets from the targets. - for config in configs: - includes = config.get("msvs_system_include_dirs", []) - if includes: # Don't insert a msvs_system_include_dirs key if not needed. - # This must check the unexpanded includes list: - new_includes = [i for i in includes if i not in all_system_includes] - config["msvs_system_include_dirs"] = new_includes - return expanded_system_includes - - -class MsvsSettings(object): - """A class that understands the gyp 'msvs_...' values (especially the - msvs_settings field). They largely correspond to the VS2008 IDE DOM. This - class helps map those settings to command line options.""" - - def __init__(self, spec, generator_flags): - self.spec = spec - self.vs_version = GetVSVersion(generator_flags) - - supported_fields = [ - ("msvs_configuration_attributes", dict), - ("msvs_settings", dict), - ("msvs_system_include_dirs", list), - ("msvs_disabled_warnings", list), - ("msvs_precompiled_header", str), - ("msvs_precompiled_source", str), - ("msvs_configuration_platform", str), - ("msvs_target_platform", str), - ] - configs = spec["configurations"] - for field, default in supported_fields: - setattr(self, field, {}) - for configname, config in configs.items(): - getattr(self, field)[configname] = config.get(field, default()) - - self.msvs_cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."]) - - unsupported_fields = [ - "msvs_prebuild", - "msvs_postbuild", - ] - unsupported = [] - for field in unsupported_fields: - for config in configs.values(): - if field in config: - unsupported += [ - "%s not supported (target %s)." % (field, spec["target_name"]) - ] - if unsupported: - raise Exception("\n".join(unsupported)) - - def GetExtension(self): - """Returns the extension for the target, with no leading dot. - - Uses 'product_extension' if specified, otherwise uses MSVS defaults based on - the target type.
- """ - ext = self.spec.get("product_extension", None) - if ext: - return ext - return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "") - - def GetVSMacroEnv(self, base_to_build=None, config=None): - """Get a dict of variables mapping internal VS macro names to their gyp - equivalents.""" - target_arch = self.GetArch(config) - if target_arch == "x86": - target_platform = "Win32" - else: - target_platform = target_arch - target_name = self.spec.get("product_prefix", "") + self.spec.get( - "product_name", self.spec["target_name"] - ) - target_dir = base_to_build + "\\" if base_to_build else "" - target_ext = "." + self.GetExtension() - target_file_name = target_name + target_ext - - replacements = { - "$(InputName)": "${root}", - "$(InputPath)": "${source}", - "$(IntDir)": "$!INTERMEDIATE_DIR", - "$(OutDir)\\": target_dir, - "$(PlatformName)": target_platform, - "$(ProjectDir)\\": "", - "$(ProjectName)": self.spec["target_name"], - "$(TargetDir)\\": target_dir, - "$(TargetExt)": target_ext, - "$(TargetFileName)": target_file_name, - "$(TargetName)": target_name, - "$(TargetPath)": os.path.join(target_dir, target_file_name), - } - replacements.update(GetGlobalVSMacroEnv(self.vs_version)) - return replacements - - def ConvertVSMacros(self, s, base_to_build=None, config=None): - """Convert from VS macro names to something equivalent.""" - env = self.GetVSMacroEnv(base_to_build, config=config) - return ExpandMacros(s, env) - - def AdjustLibraries(self, libraries): - """Strip -l from library if it's specified with that.""" - libs = [lib[2:] if lib.startswith("-l") else lib for lib in libraries] - return [ - lib + ".lib" - if not lib.lower().endswith(".lib") and not lib.lower().endswith(".obj") - else lib - for lib in libs - ] - - def _GetAndMunge(self, field, path, default, prefix, append, map): - """Retrieve a value from |field| at |path| or return |default|. If - |append| is specified, and the item is found, it will be appended to that - object instead of returned. If |map| is specified, results will be - remapped through |map| before being returned or appended.""" - result = _GenericRetrieve(field, default, path) - result = _DoRemapping(result, map) - result = _AddPrefix(result, prefix) - return _AppendOrReturn(append, result) - - class _GetWrapper(object): - def __init__(self, parent, field, base_path, append=None): - self.parent = parent - self.field = field - self.base_path = [base_path] - self.append = append - - def __call__(self, name, map=None, prefix="", default=None): - return self.parent._GetAndMunge( - self.field, - self.base_path + [name], - default=default, - prefix=prefix, - append=self.append, - map=map, - ) - - def GetArch(self, config): - """Get architecture based on msvs_configuration_platform and - msvs_target_platform. Returns either 'x86' or 'x64'.""" - configuration_platform = self.msvs_configuration_platform.get(config, "") - platform = self.msvs_target_platform.get(config, "") - if not platform: # If no specific override, use the configuration's. - platform = configuration_platform - # Map from platform to architecture. - return {"Win32": "x86", "x64": "x64", "ARM64": "arm64"}.get(platform, "x86") - - def _TargetConfig(self, config): - """Returns the target-specific configuration.""" - # There's two levels of architecture/platform specification in VS. 
The - # first level is globally for the configuration (this is what we consider - # "the" config at the gyp level, which will be something like 'Debug' or - # 'Release'), VS2015 and later only use this level - if self.vs_version.short_name >= 2015: - return config - # and a second target-specific configuration, which is an - # override for the global one. |config| is remapped here to take into - # account the local target-specific overrides to the global configuration. - arch = self.GetArch(config) - if arch == "x64" and not config.endswith("_x64"): - config += "_x64" - if arch == "x86" and config.endswith("_x64"): - config = config.rsplit("_", 1)[0] - return config - - def _Setting(self, path, config, default=None, prefix="", append=None, map=None): - """_GetAndMunge for msvs_settings.""" - return self._GetAndMunge( - self.msvs_settings[config], path, default, prefix, append, map - ) - - def _ConfigAttrib( - self, path, config, default=None, prefix="", append=None, map=None - ): - """_GetAndMunge for msvs_configuration_attributes.""" - return self._GetAndMunge( - self.msvs_configuration_attributes[config], - path, - default, - prefix, - append, - map, - ) - - def AdjustIncludeDirs(self, include_dirs, config): - """Updates include_dirs to expand VS specific paths, and adds the system - include dirs used for platform SDK and similar.""" - config = self._TargetConfig(config) - includes = include_dirs + self.msvs_system_include_dirs[config] - includes.extend( - self._Setting( - ("VCCLCompilerTool", "AdditionalIncludeDirectories"), config, default=[] - ) - ) - return [self.ConvertVSMacros(p, config=config) for p in includes] - - def AdjustMidlIncludeDirs(self, midl_include_dirs, config): - """Updates midl_include_dirs to expand VS specific paths, and adds the - system include dirs used for platform SDK and similar.""" - config = self._TargetConfig(config) - includes = midl_include_dirs + self.msvs_system_include_dirs[config] - includes.extend( - self._Setting( - ("VCMIDLTool", "AdditionalIncludeDirectories"), config, default=[] - ) - ) - return [self.ConvertVSMacros(p, config=config) for p in includes] - - def GetComputedDefines(self, config): - """Returns the set of defines that are injected to the defines list based - on other VS settings.""" - config = self._TargetConfig(config) - defines = [] - if self._ConfigAttrib(["CharacterSet"], config) == "1": - defines.extend(("_UNICODE", "UNICODE")) - if self._ConfigAttrib(["CharacterSet"], config) == "2": - defines.append("_MBCS") - defines.extend( - self._Setting( - ("VCCLCompilerTool", "PreprocessorDefinitions"), config, default=[] - ) - ) - return defines - - def GetCompilerPdbName(self, config, expand_special): - """Get the pdb file name that should be used for compiler invocations, or - None if there's no explicit name specified.""" - config = self._TargetConfig(config) - pdbname = self._Setting(("VCCLCompilerTool", "ProgramDataBaseFileName"), config) - if pdbname: - pdbname = expand_special(self.ConvertVSMacros(pdbname)) - return pdbname - - def GetMapFileName(self, config, expand_special): - """Gets the explicitly overridden map file name for a target or returns None - if it's not set.""" - config = self._TargetConfig(config) - map_file = self._Setting(("VCLinkerTool", "MapFileName"), config) - if map_file: - map_file = expand_special(self.ConvertVSMacros(map_file, config=config)) - return map_file - - def GetOutputName(self, config, expand_special): - """Gets the explicitly overridden output name for a target or returns None - if it's not 
overridden.""" - config = self._TargetConfig(config) - type = self.spec["type"] - root = "VCLibrarianTool" if type == "static_library" else "VCLinkerTool" - # TODO(scottmg): Handle OutputDirectory without OutputFile. - output_file = self._Setting((root, "OutputFile"), config) - if output_file: - output_file = expand_special( - self.ConvertVSMacros(output_file, config=config) - ) - return output_file - - def GetPDBName(self, config, expand_special, default): - """Gets the explicitly overridden pdb name for a target or returns - default if it's not overridden, or if no pdb will be generated.""" - config = self._TargetConfig(config) - output_file = self._Setting(("VCLinkerTool", "ProgramDatabaseFile"), config) - generate_debug_info = self._Setting( - ("VCLinkerTool", "GenerateDebugInformation"), config - ) - if generate_debug_info == "true": - if output_file: - return expand_special(self.ConvertVSMacros(output_file, config=config)) - else: - return default - else: - return None - - def GetNoImportLibrary(self, config): - """If NoImportLibrary: true, ninja will not expect the output to include - an import library.""" - config = self._TargetConfig(config) - noimplib = self._Setting(("NoImportLibrary",), config) - return noimplib == "true" - - def GetAsmflags(self, config): - """Returns the flags that need to be added to ml invocations.""" - config = self._TargetConfig(config) - asmflags = [] - safeseh = self._Setting(("MASM", "UseSafeExceptionHandlers"), config) - if safeseh == "true": - asmflags.append("/safeseh") - return asmflags - - def GetCflags(self, config): - """Returns the flags that need to be added to .c and .cc compilations.""" - config = self._TargetConfig(config) - cflags = [] - cflags.extend(["/wd" + w for w in self.msvs_disabled_warnings[config]]) - cl = self._GetWrapper( - self, self.msvs_settings[config], "VCCLCompilerTool", append=cflags - ) - cl( - "Optimization", - map={"0": "d", "1": "1", "2": "2", "3": "x"}, - prefix="/O", - default="2", - ) - cl("InlineFunctionExpansion", prefix="/Ob") - cl("DisableSpecificWarnings", prefix="/wd") - cl("StringPooling", map={"true": "/GF"}) - cl("EnableFiberSafeOptimizations", map={"true": "/GT"}) - cl("OmitFramePointers", map={"false": "-", "true": ""}, prefix="/Oy") - cl("EnableIntrinsicFunctions", map={"false": "-", "true": ""}, prefix="/Oi") - cl("FavorSizeOrSpeed", map={"1": "t", "2": "s"}, prefix="/O") - cl( - "FloatingPointModel", - map={"0": "precise", "1": "strict", "2": "fast"}, - prefix="/fp:", - default="0", - ) - cl("CompileAsManaged", map={"false": "", "true": "/clr"}) - cl("WholeProgramOptimization", map={"true": "/GL"}) - cl("WarningLevel", prefix="/W") - cl("WarnAsError", map={"true": "/WX"}) - cl( - "CallingConvention", - map={"0": "d", "1": "r", "2": "z", "3": "v"}, - prefix="/G", - ) - cl("DebugInformationFormat", map={"1": "7", "3": "i", "4": "I"}, prefix="/Z") - cl("RuntimeTypeInfo", map={"true": "/GR", "false": "/GR-"}) - cl("EnableFunctionLevelLinking", map={"true": "/Gy", "false": "/Gy-"}) - cl("MinimalRebuild", map={"true": "/Gm"}) - cl("BufferSecurityCheck", map={"true": "/GS", "false": "/GS-"}) - cl("BasicRuntimeChecks", map={"1": "s", "2": "u", "3": "1"}, prefix="/RTC") - cl( - "RuntimeLibrary", - map={"0": "T", "1": "Td", "2": "D", "3": "Dd"}, - prefix="/M", - ) - cl("ExceptionHandling", map={"1": "sc", "2": "a"}, prefix="/EH") - cl("DefaultCharIsUnsigned", map={"true": "/J"}) - cl( - "TreatWChar_tAsBuiltInType", - map={"false": "-", "true": ""}, - prefix="/Zc:wchar_t", - ) - cl("EnablePREfast", map={"true": 
"/analyze"}) - cl("AdditionalOptions", prefix="") - cl( - "EnableEnhancedInstructionSet", - map={"1": "SSE", "2": "SSE2", "3": "AVX", "4": "IA32", "5": "AVX2"}, - prefix="/arch:", - ) - cflags.extend( - [ - "/FI" + f - for f in self._Setting( - ("VCCLCompilerTool", "ForcedIncludeFiles"), config, default=[] - ) - ] - ) - if self.vs_version.project_version >= 12.0: - # New flag introduced in VS2013 (project version 12.0) Forces writes to - # the program database (PDB) to be serialized through MSPDBSRV.EXE. - # https://msdn.microsoft.com/en-us/library/dn502518.aspx - cflags.append("/FS") - # ninja handles parallelism by itself, don't have the compiler do it too. - cflags = [x for x in cflags if not x.startswith("/MP")] - return cflags - - def _GetPchFlags(self, config, extension): - """Get the flags to be added to the cflags for precompiled header support. - """ - config = self._TargetConfig(config) - # The PCH is only built once by a particular source file. Usage of PCH must - # only be for the same language (i.e. C vs. C++), so only include the pch - # flags when the language matches. - if self.msvs_precompiled_header[config]: - source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1] - if _LanguageMatchesForPch(source_ext, extension): - pch = self.msvs_precompiled_header[config] - pchbase = os.path.split(pch)[1] - return ["/Yu" + pch, "/FI" + pch, "/Fp${pchprefix}." + pchbase + ".pch"] - return [] - - def GetCflagsC(self, config): - """Returns the flags that need to be added to .c compilations.""" - config = self._TargetConfig(config) - return self._GetPchFlags(config, ".c") - - def GetCflagsCC(self, config): - """Returns the flags that need to be added to .cc compilations.""" - config = self._TargetConfig(config) - return ["/TP"] + self._GetPchFlags(config, ".cc") - - def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): - """Get and normalize the list of paths in AdditionalLibraryDirectories - setting.""" - config = self._TargetConfig(config) - libpaths = self._Setting( - (root, "AdditionalLibraryDirectories"), config, default=[] - ) - libpaths = [ - os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(p, config=config))) - for p in libpaths - ] - return ['/LIBPATH:"' + p + '"' for p in libpaths] - - def GetLibFlags(self, config, gyp_to_build_path): - """Returns the flags that need to be added to lib commands.""" - config = self._TargetConfig(config) - libflags = [] - lib = self._GetWrapper( - self, self.msvs_settings[config], "VCLibrarianTool", append=libflags - ) - libflags.extend( - self._GetAdditionalLibraryDirectories( - "VCLibrarianTool", config, gyp_to_build_path - ) - ) - lib("LinkTimeCodeGeneration", map={"true": "/LTCG"}) - lib( - "TargetMachine", - map={"1": "X86", "17": "X64", "3": "ARM"}, - prefix="/MACHINE:", - ) - lib("AdditionalOptions") - return libflags - - def GetDefFile(self, gyp_to_build_path): - """Returns the .def file from sources, if any. Otherwise returns None.""" - spec = self.spec - if spec["type"] in ("shared_library", "loadable_module", "executable"): - def_files = [ - s for s in spec.get("sources", []) if s.lower().endswith(".def") - ] - if len(def_files) == 1: - return gyp_to_build_path(def_files[0]) - elif len(def_files) > 1: - raise Exception("Multiple .def files") - return None - - def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path): - """.def files get implicitly converted to a ModuleDefinitionFile for the - linker in the VS generator. 
Emulate that behaviour here.""" - def_file = self.GetDefFile(gyp_to_build_path) - if def_file: - ldflags.append('/DEF:"%s"' % def_file) - - def GetPGDName(self, config, expand_special): - """Gets the explicitly overridden pgd name for a target or returns None - if it's not overridden.""" - config = self._TargetConfig(config) - output_file = self._Setting(("VCLinkerTool", "ProfileGuidedDatabase"), config) - if output_file: - output_file = expand_special( - self.ConvertVSMacros(output_file, config=config) - ) - return output_file - - def GetLdflags( - self, - config, - gyp_to_build_path, - expand_special, - manifest_base_name, - output_name, - is_executable, - build_dir, - ): - """Returns the flags that need to be added to link commands, and the - manifest files.""" - config = self._TargetConfig(config) - ldflags = [] - ld = self._GetWrapper( - self, self.msvs_settings[config], "VCLinkerTool", append=ldflags - ) - self._GetDefFileAsLdflags(ldflags, gyp_to_build_path) - ld("GenerateDebugInformation", map={"true": "/DEBUG"}) - # TODO: These 'map' values come from machineTypeOption enum, - # and does not have an official value for ARM64 in VS2017 (yet). - # It needs to verify the ARM64 value when machineTypeOption is updated. - ld( - "TargetMachine", - map={"1": "X86", "17": "X64", "3": "ARM", "18": "ARM64"}, - prefix="/MACHINE:", - ) - ldflags.extend( - self._GetAdditionalLibraryDirectories( - "VCLinkerTool", config, gyp_to_build_path - ) - ) - ld("DelayLoadDLLs", prefix="/DELAYLOAD:") - ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"}) - out = self.GetOutputName(config, expand_special) - if out: - ldflags.append("/OUT:" + out) - pdb = self.GetPDBName(config, expand_special, output_name + ".pdb") - if pdb: - ldflags.append("/PDB:" + pdb) - pgd = self.GetPGDName(config, expand_special) - if pgd: - ldflags.append("/PGD:" + pgd) - map_file = self.GetMapFileName(config, expand_special) - ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"}) - ld("MapExports", map={"true": "/MAPINFO:EXPORTS"}) - ld("AdditionalOptions", prefix="") - - minimum_required_version = self._Setting( - ("VCLinkerTool", "MinimumRequiredVersion"), config, default="" - ) - if minimum_required_version: - minimum_required_version = "," + minimum_required_version - ld( - "SubSystem", - map={ - "1": "CONSOLE%s" % minimum_required_version, - "2": "WINDOWS%s" % minimum_required_version, - }, - prefix="/SUBSYSTEM:", - ) - - stack_reserve_size = self._Setting( - ("VCLinkerTool", "StackReserveSize"), config, default="" - ) - if stack_reserve_size: - stack_commit_size = self._Setting( - ("VCLinkerTool", "StackCommitSize"), config, default="" - ) - if stack_commit_size: - stack_commit_size = "," + stack_commit_size - ldflags.append("/STACK:%s%s" % (stack_reserve_size, stack_commit_size)) - - ld("TerminalServerAware", map={"1": ":NO", "2": ""}, prefix="/TSAWARE") - ld("LinkIncremental", map={"1": ":NO", "2": ""}, prefix="/INCREMENTAL") - ld("BaseAddress", prefix="/BASE:") - ld("FixedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/FIXED") - ld("RandomizedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/DYNAMICBASE") - ld("DataExecutionPrevention", map={"1": ":NO", "2": ""}, prefix="/NXCOMPAT") - ld("OptimizeReferences", map={"1": "NOREF", "2": "REF"}, prefix="/OPT:") - ld("ForceSymbolReferences", prefix="/INCLUDE:") - ld("EnableCOMDATFolding", map={"1": "NOICF", "2": "ICF"}, prefix="/OPT:") - ld( - "LinkTimeCodeGeneration", - map={"1": "", "2": ":PGINSTRUMENT", "3": ":PGOPTIMIZE", 
"4": ":PGUPDATE"}, - prefix="/LTCG", - ) - ld("IgnoreDefaultLibraryNames", prefix="/NODEFAULTLIB:") - ld("ResourceOnlyDLL", map={"true": "/NOENTRY"}) - ld("EntryPointSymbol", prefix="/ENTRY:") - ld("Profile", map={"true": "/PROFILE"}) - ld("LargeAddressAware", map={"1": ":NO", "2": ""}, prefix="/LARGEADDRESSAWARE") - # TODO(scottmg): This should sort of be somewhere else (not really a flag). - ld("AdditionalDependencies", prefix="") - - if self.GetArch(config) == "x86": - safeseh_default = "true" - else: - safeseh_default = None - ld( - "ImageHasSafeExceptionHandlers", - map={"false": ":NO", "true": ""}, - prefix="/SAFESEH", - default=safeseh_default, - ) - - # If the base address is not specifically controlled, DYNAMICBASE should - # be on by default. - if not any("DYNAMICBASE" in flag or flag == "/FIXED" for flag in ldflags): - ldflags.append("/DYNAMICBASE") - - # If the NXCOMPAT flag has not been specified, default to on. Despite the - # documentation that says this only defaults to on when the subsystem is - # Vista or greater (which applies to the linker), the IDE defaults it on - # unless it's explicitly off. - if not any("NXCOMPAT" in flag for flag in ldflags): - ldflags.append("/NXCOMPAT") - - have_def_file = any(flag.startswith("/DEF:") for flag in ldflags) - ( - manifest_flags, - intermediate_manifest, - manifest_files, - ) = self._GetLdManifestFlags( - config, - manifest_base_name, - gyp_to_build_path, - is_executable and not have_def_file, - build_dir, - ) - ldflags.extend(manifest_flags) - return ldflags, intermediate_manifest, manifest_files - - def _GetLdManifestFlags( - self, config, name, gyp_to_build_path, allow_isolation, build_dir - ): - """Returns a 3-tuple: - - the set of flags that need to be added to the link to generate - a default manifest - - the intermediate manifest that the linker will generate that should be - used to assert it doesn't add anything to the merged one. - - the list of all the manifest files to be merged by the manifest tool and - included into the link.""" - generate_manifest = self._Setting( - ("VCLinkerTool", "GenerateManifest"), config, default="true" - ) - if generate_manifest != "true": - # This means not only that the linker should not generate the intermediate - # manifest but also that the manifest tool should do nothing even when - # additional manifests are specified. - return ["/MANIFEST:NO"], [], [] - - output_name = name + ".intermediate.manifest" - flags = [ - "/MANIFEST", - "/ManifestFile:" + output_name, - ] - - # Instead of using the MANIFESTUAC flags, we generate a .manifest to - # include into the list of manifests. This allows us to avoid the need to - # do two passes during linking. The /MANIFEST flag and /ManifestFile are - # still used, and the intermediate manifest is used to assert that the - # final manifest we get from merging all the additional manifest files - # (plus the one we generate here) isn't modified by merging the - # intermediate into it. - - # Always NO, because we generate a manifest file that has what we want. 
- flags.append("/MANIFESTUAC:NO") - - config = self._TargetConfig(config) - enable_uac = self._Setting( - ("VCLinkerTool", "EnableUAC"), config, default="true" - ) - manifest_files = [] - generated_manifest_outer = ( - "" - "" - "%s" - ) - if enable_uac == "true": - execution_level = self._Setting( - ("VCLinkerTool", "UACExecutionLevel"), config, default="0" - ) - execution_level_map = { - "0": "asInvoker", - "1": "highestAvailable", - "2": "requireAdministrator", - } - - ui_access = self._Setting( - ("VCLinkerTool", "UACUIAccess"), config, default="false" - ) - - inner = """ - - - - - - -""" % ( - execution_level_map[execution_level], - ui_access, - ) - else: - inner = "" - - generated_manifest_contents = generated_manifest_outer % inner - generated_name = name + ".generated.manifest" - # Need to join with the build_dir here as we're writing it during - # generation time, but we return the un-joined version because the build - # will occur in that directory. We only write the file if the contents - # have changed so that simply regenerating the project files doesn't - # cause a relink. - build_dir_generated_name = os.path.join(build_dir, generated_name) - gyp.common.EnsureDirExists(build_dir_generated_name) - f = gyp.common.WriteOnDiff(build_dir_generated_name) - f.write(generated_manifest_contents) - f.close() - manifest_files = [generated_name] - - if allow_isolation: - flags.append("/ALLOWISOLATION") - - manifest_files += self._GetAdditionalManifestFiles(config, gyp_to_build_path) - return flags, output_name, manifest_files - - def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): - """Gets additional manifest files that are added to the default one - generated by the linker.""" - files = self._Setting( - ("VCManifestTool", "AdditionalManifestFiles"), config, default=[] - ) - if isinstance(files, str): - files = files.split(";") - return [ - os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(f, config=config))) - for f in files - ] - - def IsUseLibraryDependencyInputs(self, config): - """Returns whether the target should be linked via Use Library Dependency - Inputs (using component .objs of a given .lib).""" - config = self._TargetConfig(config) - uldi = self._Setting(("VCLinkerTool", "UseLibraryDependencyInputs"), config) - return uldi == "true" - - def IsEmbedManifest(self, config): - """Returns whether manifest should be linked into binary.""" - config = self._TargetConfig(config) - embed = self._Setting( - ("VCManifestTool", "EmbedManifest"), config, default="true" - ) - return embed == "true" - - def IsLinkIncremental(self, config): - """Returns whether the target should be linked incrementally.""" - config = self._TargetConfig(config) - link_inc = self._Setting(("VCLinkerTool", "LinkIncremental"), config) - return link_inc != "1" - - def GetRcflags(self, config, gyp_to_ninja_path): - """Returns the flags that need to be added to invocations of the resource - compiler.""" - config = self._TargetConfig(config) - rcflags = [] - rc = self._GetWrapper( - self, self.msvs_settings[config], "VCResourceCompilerTool", append=rcflags - ) - rc("AdditionalIncludeDirectories", map=gyp_to_ninja_path, prefix="/I") - rcflags.append("/I" + gyp_to_ninja_path(".")) - rc("PreprocessorDefinitions", prefix="/d") - # /l arg must be in hex without leading '0x' - rc("Culture", prefix="/l", map=lambda x: hex(int(x))[2:]) - return rcflags - - def BuildCygwinBashCommandLine(self, args, path_to_base): - """Build a command line that runs args via cygwin bash. 
- - def BuildCygwinBashCommandLine(self, args, path_to_base): - """Build a command line that runs args via cygwin bash. We assume that all - incoming paths are in Windows normpath'd form, so they need to be - converted to posix style for the part of the command line that's passed to - bash. We also have to do some Visual Studio macro emulation here because - various rules use magic VS names for things. Also note that rules that - contain ninja variables cannot be fixed here (for example ${source}), so - the outer generator needs to make sure that the paths that are written out - are in posix style, if the command line will be used here.""" - cygwin_dir = os.path.normpath( - os.path.join(path_to_base, self.msvs_cygwin_dirs[0]) - ) - cd = ("cd %s" % path_to_base).replace("\\", "/") - args = [a.replace("\\", "/").replace('"', '\\"') for a in args] - args = ["'%s'" % a.replace("'", "'\\''") for a in args] - bash_cmd = " ".join(args) - cmd = ( - 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir - + 'bash -c "%s ; %s"' % (cd, bash_cmd) - ) - return cmd - - def IsRuleRunUnderCygwin(self, rule): - """Determine if an action should be run under cygwin. If the variable is - unset, or set to 1 we use cygwin.""" - return ( - int(rule.get("msvs_cygwin_shell", self.spec.get("msvs_cygwin_shell", 1))) - != 0 - ) - - def _HasExplicitRuleForExtension(self, spec, extension): - """Determine if there's an explicit rule for a particular extension.""" - for rule in spec.get("rules", []): - if rule["extension"] == extension: - return True - return False - - def _HasExplicitIdlActions(self, spec): - """Determine if an action should not run midl for .idl files.""" - return any( - [action.get("explicit_idl_action", 0) for action in spec.get("actions", [])] - ) - - def HasExplicitIdlRulesOrActions(self, spec): - """Determine if there's an explicit rule or action for idl files. When - there isn't we need to generate implicit rules to build MIDL .idl files.""" - return self._HasExplicitRuleForExtension( - spec, "idl" - ) or self._HasExplicitIdlActions(spec) - - def HasExplicitAsmRules(self, spec): - """Determine if there's an explicit rule for asm files. When there isn't we - need to generate implicit rules to assemble .asm files.""" - return self._HasExplicitRuleForExtension(spec, "asm") - - def GetIdlBuildData(self, source, config): - """Determine the implicit outputs for an idl file. Returns output - directory, outputs, and variables and flags that are required.""" - config = self._TargetConfig(config) - midl_get = self._GetWrapper(self, self.msvs_settings[config], "VCMIDLTool") - - def midl(name, default=None): - return self.ConvertVSMacros(midl_get(name, default=default), config=config) - - tlb = midl("TypeLibraryName", default="${root}.tlb") - header = midl("HeaderFileName", default="${root}.h") - dlldata = midl("DLLDataFileName", default="dlldata.c") - iid = midl("InterfaceIdentifierFileName", default="${root}_i.c") - proxy = midl("ProxyFileName", default="${root}_p.c") - # Note that .tlb is not included in the outputs as it is not always - # generated depending on the content of the input idl file. - outdir = midl("OutputDirectory", default="") - output = [header, dlldata, iid, proxy] - variables = [ - ("tlb", tlb), - ("h", header), - ("dlldata", dlldata), - ("iid", iid), - ("proxy", proxy), - ] - # TODO(scottmg): Are there configuration settings to set these flags?
- target_platform = self.GetArch(config) - if target_platform == "x86": - target_platform = "win32" - flags = ["/char", "signed", "/env", target_platform, "/Oicf"] - return outdir, output, variables, flags - - -def _LanguageMatchesForPch(source_ext, pch_source_ext): - c_exts = (".c",) - cc_exts = (".cc", ".cxx", ".cpp") - return (source_ext in c_exts and pch_source_ext in c_exts) or ( - source_ext in cc_exts and pch_source_ext in cc_exts - ) - - -class PrecompiledHeader(object): - """Helper to generate dependencies and build rules to handle generation of - precompiled headers. Interface matches the GCH handler in xcode_emulation.py. - """ - - def __init__( - self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext - ): - self.settings = settings - self.config = config - pch_source = self.settings.msvs_precompiled_source[self.config] - self.pch_source = gyp_to_build_path(pch_source) - filename, _ = os.path.splitext(pch_source) - self.output_obj = gyp_to_unique_output(filename + obj_ext).lower() - - def _PchHeader(self): - """Get the header that will appear in an #include line for all source - files.""" - return self.settings.msvs_precompiled_header[self.config] - - def GetObjDependencies(self, sources, objs, arch): - """Given a list of sources files and the corresponding object files, - returns a list of the pch files that should be depended upon. The - additional wrapping in the return value is for interface compatibility - with make.py on Mac, and xcode_emulation.py.""" - assert arch is None - if not self._PchHeader(): - return [] - pch_ext = os.path.splitext(self.pch_source)[1] - for source in sources: - if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext): - return [(None, None, self.output_obj)] - return [] - - def GetPchBuildCommands(self, arch): - """Not used on Windows as there are no additional build steps required - (instead, existing steps are modified in GetFlagsModifications below).""" - return [] - - def GetFlagsModifications( - self, input, output, implicit, command, cflags_c, cflags_cc, expand_special - ): - """Get the modified cflags and implicit dependencies that should be used - for the pch compilation step.""" - if input == self.pch_source: - pch_output = ["/Yc" + self._PchHeader()] - if command == "cxx": - return ( - [("cflags_cc", map(expand_special, cflags_cc + pch_output))], - self.output_obj, - [], - ) - elif command == "cc": - return ( - [("cflags_c", map(expand_special, cflags_c + pch_output))], - self.output_obj, - [], - ) - return [], output, implicit - - -vs_version = None - - -def GetVSVersion(generator_flags): - global vs_version - if not vs_version: - vs_version = gyp.MSVSVersion.SelectVisualStudioVersion( - generator_flags.get("msvs_version", "auto"), allow_fallback=False - ) - return vs_version - - -def _GetVsvarsSetupArgs(generator_flags, arch): - vs = GetVSVersion(generator_flags) - return vs.SetupScript() - - -def ExpandMacros(string, expansions): - """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv - for the canonical way to retrieve a suitable dict.""" - if "$" in string: - for old, new in expansions.items(): - assert "$(" not in new, new - string = string.replace(old, new) - return string - - -def _ExtractImportantEnvironment(output_of_set): - """Extracts environment variables required for the toolchain to run from - a textual dump output by the cmd.exe 'set' command.""" - envvars_to_save = ( - "goma_.*", # TODO(scottmg): This is ugly, but needed for goma. 
- "include", - "lib", - "libpath", - "path", - "pathext", - "systemroot", - "temp", - "tmp", - ) - env = {} - # This occasionally happens and leads to misleading SYSTEMROOT error messages - # if not caught here. - if output_of_set.count("=") == 0: - raise Exception("Invalid output_of_set. Value is:\n%s" % output_of_set) - for line in output_of_set.splitlines(): - for envvar in envvars_to_save: - if re.match(envvar + "=", line.lower()): - var, setting = line.split("=", 1) - if envvar == "path": - # Our own rules (for running gyp-win-tool) and other actions in - # Chromium rely on python being in the path. Add the path to this - # python here so that if it's not in the path when ninja is run - # later, python will still be found. - setting = os.path.dirname(sys.executable) + os.pathsep + setting - env[var.upper()] = setting - break - for required in ("SYSTEMROOT", "TEMP", "TMP"): - if required not in env: - raise Exception( - 'Environment variable "%s" ' - "required to be set to valid path" % required - ) - return env - - -def _FormatAsEnvironmentBlock(envvar_dict): - """Format as an 'environment block' directly suitable for CreateProcess. - Briefly this is a list of key=value\0, terminated by an additional \0. See - CreateProcess documentation for more details.""" - block = "" - nul = "\0" - for key, value in envvar_dict.items(): - block += key + "=" + value + nul - block += nul - return block - - -def _ExtractCLPath(output_of_where): - """Gets the path to cl.exe based on the output of calling the environment - setup batch file, followed by the equivalent of `where`.""" - # Take the first line, as that's the first found in the PATH. - for line in output_of_where.strip().splitlines(): - if line.startswith("LOC:"): - return line[len("LOC:") :].strip() - - -def GenerateEnvironmentFiles( - toplevel_build_dir, generator_flags, system_includes, open_out -): - """It's not sufficient to have the absolute path to the compiler, linker, - etc. on Windows, as those tools rely on .dlls being in the PATH. We also - need to support both x86 and x64 compilers within the same build (to support - msvs_target_platform hackery). Different architectures require a different - compiler binary, and different supporting environment variables (INCLUDE, - LIB, LIBPATH). So, we extract the environment here, wrap all invocations - of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which - sets up the environment, and then we do not prefix the compiler with - an absolute path, instead preferring something like "cl.exe" in the rule - which will then run whichever the environment setup has put in the path. - When the following procedure to generate environment files does not - meet your requirement (e.g. for custom toolchains), you can pass - "-G ninja_use_custom_environment_files" to the gyp to suppress file - generation and use custom environment files prepared by yourself.""" - archs = ("x86", "x64") - if generator_flags.get("ninja_use_custom_environment_files", 0): - cl_paths = {} - for arch in archs: - cl_paths[arch] = "cl.exe" - return cl_paths - vs = GetVSVersion(generator_flags) - cl_paths = {} - for arch in archs: - # Extract environment variables for subprocesses. 
- - -def _ExtractCLPath(output_of_where): - """Gets the path to cl.exe based on the output of calling the environment - setup batch file, followed by the equivalent of `where`.""" - # Take the first line, as that's the first found in the PATH. - for line in output_of_where.strip().splitlines(): - if line.startswith("LOC:"): - return line[len("LOC:") :].strip() - - -def GenerateEnvironmentFiles( - toplevel_build_dir, generator_flags, system_includes, open_out -): - """It's not sufficient to have the absolute path to the compiler, linker, - etc. on Windows, as those tools rely on .dlls being in the PATH. We also - need to support both x86 and x64 compilers within the same build (to support - msvs_target_platform hackery). Different architectures require a different - compiler binary, and different supporting environment variables (INCLUDE, - LIB, LIBPATH). So, we extract the environment here, wrap all invocations - of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which - sets up the environment, and then we do not prefix the compiler with - an absolute path, instead preferring something like "cl.exe" in the rule - which will then run whichever the environment setup has put in the path. - When the following procedure to generate environment files does not - meet your requirements (e.g. for custom toolchains), you can pass - "-G ninja_use_custom_environment_files" to gyp to suppress file - generation and use custom environment files you have prepared yourself.""" - archs = ("x86", "x64") - if generator_flags.get("ninja_use_custom_environment_files", 0): - cl_paths = {} - for arch in archs: - cl_paths[arch] = "cl.exe" - return cl_paths - vs = GetVSVersion(generator_flags) - cl_paths = {} - for arch in archs: - # Extract environment variables for subprocesses. - args = vs.SetupScript(arch) - args.extend(("&&", "set")) - popen = subprocess.Popen( - args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - variables, _ = popen.communicate() - if PY3: - variables = variables.decode("utf-8") - if popen.returncode != 0: - raise Exception('"%s" failed with error %d' % (args, popen.returncode)) - env = _ExtractImportantEnvironment(variables) - - # Inject system includes from gyp files into INCLUDE. - if system_includes: - system_includes = system_includes | OrderedSet( - env.get("INCLUDE", "").split(";") - ) - env["INCLUDE"] = ";".join(system_includes) - - env_block = _FormatAsEnvironmentBlock(env) - f = open_out(os.path.join(toplevel_build_dir, "environment." + arch), "w") - f.write(env_block) - f.close() - - # Find cl.exe location for this architecture. - args = vs.SetupScript(arch) - args.extend( - ("&&", "for", "%i", "in", "(cl.exe)", "do", "@echo", "LOC:%~$PATH:i") - ) - popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE) - output, _ = popen.communicate() - if PY3: - output = output.decode("utf-8") - cl_paths[arch] = _ExtractCLPath(output) - return cl_paths - - -def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): - """Emulate behavior of msvs_error_on_missing_sources present in the msvs - generator: Check that all regular source files, i.e. not created at run time, - exist on disk. Missing files cause needless recompilation when building via - VS, and we want this check to match for people/bots that build using ninja, - so they're not surprised when the VS build fails.""" - if int(generator_flags.get("msvs_error_on_missing_sources", 0)): - no_specials = filter(lambda x: "$" not in x, sources) - relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials] - missing = [x for x in relative if not os.path.exists(x)] - if missing: - # They'll look like out\Release\..\..\stuff\things.cc, so normalize the - # path for a slightly less crazy looking output. - cleaned_up = [os.path.normpath(x) for x in missing] - raise Exception("Missing input files:\n%s" % "\n".join(cleaned_up)) - - -# Sets some values in default_variables, which are required for many -# generators, run on Windows. -def CalculateCommonVariables(default_variables, params): - generator_flags = params.get("generator_flags", {}) - - # Set a variable so conditions can be based on msvs_version. - msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) - default_variables["MSVS_VERSION"] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if "64" in os.environ.get("PROCESSOR_ARCHITECTURE", "") or "64" in os.environ.get( - "PROCESSOR_ARCHITEW6432", "" - ): - default_variables["MSVS_OS_BITS"] = 64 - else: - default_variables["MSVS_OS_BITS"] = 32 diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py deleted file mode 100644 index 14212358082a6..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py +++ /dev/null @@ -1,174 +0,0 @@ -# This file comes from -# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py -# Do not edit! Edit the upstream one instead.
- -"""Python module for generating .ninja files. - -Note that this is emphatically not a required piece of Ninja; it's -just a helpful utility for build-file-generation systems that already -use Python. -""" - -import textwrap - - -def escape_path(word): - return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:") - - -class Writer(object): - def __init__(self, output, width=78): - self.output = output - self.width = width - - def newline(self): - self.output.write("\n") - - def comment(self, text): - for line in textwrap.wrap(text, self.width - 2): - self.output.write("# " + line + "\n") - - def variable(self, key, value, indent=0): - if value is None: - return - if isinstance(value, list): - value = " ".join(filter(None, value)) # Filter out empty strings. - self._line("%s = %s" % (key, value), indent) - - def pool(self, name, depth): - self._line("pool %s" % name) - self.variable("depth", depth, indent=1) - - def rule( - self, - name, - command, - description=None, - depfile=None, - generator=False, - pool=None, - restat=False, - rspfile=None, - rspfile_content=None, - deps=None, - ): - self._line("rule %s" % name) - self.variable("command", command, indent=1) - if description: - self.variable("description", description, indent=1) - if depfile: - self.variable("depfile", depfile, indent=1) - if generator: - self.variable("generator", "1", indent=1) - if pool: - self.variable("pool", pool, indent=1) - if restat: - self.variable("restat", "1", indent=1) - if rspfile: - self.variable("rspfile", rspfile, indent=1) - if rspfile_content: - self.variable("rspfile_content", rspfile_content, indent=1) - if deps: - self.variable("deps", deps, indent=1) - - def build( - self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None - ): - outputs = self._as_list(outputs) - all_inputs = self._as_list(inputs)[:] - out_outputs = list(map(escape_path, outputs)) - all_inputs = list(map(escape_path, all_inputs)) - - if implicit: - implicit = map(escape_path, self._as_list(implicit)) - all_inputs.append("|") - all_inputs.extend(implicit) - if order_only: - order_only = map(escape_path, self._as_list(order_only)) - all_inputs.append("||") - all_inputs.extend(order_only) - - self._line( - "build %s: %s" % (" ".join(out_outputs), " ".join([rule] + all_inputs)) - ) - - if variables: - if isinstance(variables, dict): - iterator = iter(variables.items()) - else: - iterator = iter(variables) - - for key, val in iterator: - self.variable(key, val, indent=1) - - return outputs - - def include(self, path): - self._line("include %s" % path) - - def subninja(self, path): - self._line("subninja %s" % path) - - def default(self, paths): - self._line("default %s" % " ".join(self._as_list(paths))) - - def _count_dollars_before_index(self, s, i): - """Returns the number of '$' characters right in front of s[i].""" - dollar_count = 0 - dollar_index = i - 1 - while dollar_index > 0 and s[dollar_index] == "$": - dollar_count += 1 - dollar_index -= 1 - return dollar_count - - def _line(self, text, indent=0): - """Write 'text' word-wrapped at self.width characters.""" - leading_space = " " * indent - while len(leading_space) + len(text) > self.width: - # The text is too wide; wrap if possible. - - # Find the rightmost space that would obey our width constraint and - # that's not an escaped space. 
- available_space = self.width - len(leading_space) - len(" $") - space = available_space - while True: - space = text.rfind(" ", 0, space) - if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0: - break - - if space < 0: - # No such space; just use the first unescaped space we can find. - space = available_space - 1 - while True: - space = text.find(" ", space + 1) - if ( - space < 0 - or self._count_dollars_before_index(text, space) % 2 == 0 - ): - break - if space < 0: - # Give up on breaking. - break - - self.output.write(leading_space + text[0:space] + " $\n") - text = text[space + 1 :] - - # Subsequent lines are continuations, so indent them. - leading_space = " " * (indent + 2) - - self.output.write(leading_space + text + "\n") - - def _as_list(self, input): - if input is None: - return [] - if isinstance(input, list): - return input - return [input] - - -def escape(string): - """Escape a string such that it can be embedded into a Ninja file without - further interpretation.""" - assert "\n" not in string, "Ninja syntax does not allow newlines" - # We only have one special metacharacter: '$'. - return string.replace("$", "$$") diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py deleted file mode 100644 index e01106f9c4821..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2014 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""A clone of the default copy.deepcopy that doesn't handle cyclic -structures or complex types except for dicts and lists. This is -because gyp copies so large structure that small copy overhead ends up -taking seconds in a project the size of Chromium.""" - - -class Error(Exception): - pass - - -__all__ = ["Error", "deepcopy"] - - -def deepcopy(x): - """Deep copy operation on gyp objects such as strings, ints, dicts - and lists. More than twice as fast as copy.deepcopy but much less - generic.""" - - try: - return _deepcopy_dispatch[type(x)](x) - except KeyError: - raise Error( - "Unsupported type %s for deepcopy. Use copy.deepcopy " - "or expand simple_copy support." % type(x) - ) - - -_deepcopy_dispatch = d = {} - - -def _deepcopy_atomic(x): - return x - - -try: - types = bool, float, int, str, type, type(None), long, unicode -except NameError: # Python 3 - types = bool, float, int, str, type, type(None) - -for x in types: - d[x] = _deepcopy_atomic - - -def _deepcopy_list(x): - return [deepcopy(a) for a in x] - - -d[list] = _deepcopy_list - - -def _deepcopy_dict(x): - y = {} - for key, value in x.items(): - y[deepcopy(key)] = deepcopy(value) - return y - - -d[dict] = _deepcopy_dict - -del d diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py deleted file mode 100755 index 758e9f5c45622..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py +++ /dev/null @@ -1,385 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions for Windows builds. - -These functions are executed via gyp-win-tool when using the ninja generator.
-""" - -from __future__ import print_function - -import os -import re -import shutil -import subprocess -import stat -import string -import sys - -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) -PY3 = bytes != str - -# A regex matching an argument corresponding to the output filename passed to -# link.exe. -_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P.+)$", re.IGNORECASE) - - -def main(args): - executor = WinTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class WinTool(object): - """This class performs all the Windows tooling steps. The methods can either - be executed directly, or dispatched from an argument list.""" - - def _UseSeparateMspdbsrv(self, env, args): - """Allows to use a unique instance of mspdbsrv.exe per linker instead of a - shared one.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - if args[0] != "link.exe": - return - - # Use the output filename passed to the linker to generate an endpoint name - # for mspdbsrv.exe. - endpoint_name = None - for arg in args: - m = _LINK_EXE_OUT_ARG.match(arg) - if m: - endpoint_name = re.sub( - r"\W+", "", "%s_%d" % (m.group("out"), os.getpid()) - ) - break - - if endpoint_name is None: - return - - # Adds the appropriate environment variable. This will be read by link.exe - # to know which instance of mspdbsrv.exe it should connect to (if it's - # not set then the default endpoint is used). - env["_MSPDBSRV_ENDPOINT_"] = endpoint_name - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like recursive-mirror to RecursiveMirror.""" - return name_string.title().replace("-", "") - - def _GetEnv(self, arch): - """Gets the saved environment from a file for a given architecture.""" - # The environment is saved as an "environment block" (see CreateProcess - # and msvs_emulation for details). We convert to a dict here. - # Drop last 2 NULs, one for list terminator, one for trailing vs. separator. - pairs = open(arch).read()[:-2].split("\0") - kvs = [item.split("=", 1) for item in pairs] - return dict(kvs) - - def ExecStamp(self, path): - """Simple stamp command.""" - open(path, "w").close() - - def ExecRecursiveMirror(self, source, dest): - """Emulation of rm -rf out && cp -af in out.""" - if os.path.exists(dest): - if os.path.isdir(dest): - - def _on_error(fn, path, excinfo): - # The operation failed, possibly because the file is set to - # read-only. If that's why, make it writable and try the op again. - if not os.access(path, os.W_OK): - os.chmod(path, stat.S_IWRITE) - fn(path) - - shutil.rmtree(dest, onerror=_on_error) - else: - if not os.access(dest, os.W_OK): - # Attempt to make the file writable before deleting it. - os.chmod(dest, stat.S_IWRITE) - os.unlink(dest) - - if os.path.isdir(source): - shutil.copytree(source, dest) - else: - shutil.copy2(source, dest) - - def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args): - """Filter diagnostic output from link that looks like: - ' Creating library ui.dll.lib and object ui.dll.exp' - This happens when there are exports from the dll or exe. - """ - env = self._GetEnv(arch) - if use_separate_mspdbsrv == "True": - self._UseSeparateMspdbsrv(env, args) - if sys.platform == "win32": - args = list(args) # *args is a tuple by default, which is read-only. 
- args[0] = args[0].replace("/", "\\") - # https://docs.python.org/2/library/subprocess.html: - # "On Unix with shell=True [...] if args is a sequence, the first item - # specifies the command string, and any additional items will be treated as - # additional arguments to the shell itself. That is to say, Popen does the - # equivalent of: - # Popen(['/bin/sh', '-c', args[0], args[1], ...])" - # For that reason, since going through the shell doesn't seem necessary on - # non-Windows don't do that there. - link = subprocess.Popen( - args, - shell=sys.platform == "win32", - env=env, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - out, _ = link.communicate() - if PY3: - out = out.decode("utf-8") - for line in out.splitlines(): - if ( - not line.startswith(" Creating library ") - and not line.startswith("Generating code") - and not line.startswith("Finished generating code") - ): - print(line) - return link.returncode - - def ExecLinkWithManifests( - self, - arch, - embed_manifest, - out, - ldcmd, - resname, - mt, - rc, - intermediate_manifest, - *manifests - ): - """A wrapper for handling creating a manifest resource and then executing - a link command.""" - # The 'normal' way to do manifests is to have link generate a manifest - # based on gathering dependencies from the object files, then merge that - # manifest with other manifests supplied as sources, convert the merged - # manifest to a resource, and then *relink*, including the compiled - # version of the manifest resource. This breaks incremental linking, and - # is generally overly complicated. Instead, we merge all the manifests - # provided (along with one that includes what would normally be in the - # linker-generated one, see msvs_emulation.py), and include that into the - # first and only link. We still tell link to generate a manifest, but we - # only use that to assert that our simpler process did not miss anything. - variables = { - "python": sys.executable, - "arch": arch, - "out": out, - "ldcmd": ldcmd, - "resname": resname, - "mt": mt, - "rc": rc, - "intermediate_manifest": intermediate_manifest, - "manifests": " ".join(manifests), - } - add_to_ld = "" - if manifests: - subprocess.check_call( - "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo " - "-manifest %(manifests)s -out:%(out)s.manifest" % variables - ) - if embed_manifest == "True": - subprocess.check_call( - "%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest" - " %(out)s.manifest.rc %(resname)s" % variables - ) - subprocess.check_call( - "%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s " - "%(out)s.manifest.rc" % variables - ) - add_to_ld = " %(out)s.manifest.res" % variables - subprocess.check_call(ldcmd + add_to_ld) - - # Run mt.exe on the theoretically complete manifest we generated, merging - # it with the one the linker generated to confirm that the linker - # generated one does not add anything. This is strictly unnecessary for - # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not - # used in a #pragma comment. - if manifests: - # Merge the intermediate one with ours to .assert.manifest, then check - # that .assert.manifest is identical to ours. - subprocess.check_call( - "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo " - "-manifest %(out)s.manifest %(intermediate_manifest)s " - "-out:%(out)s.assert.manifest" % variables - ) - assert_manifest = "%(out)s.assert.manifest" % variables - our_manifest = "%(out)s.manifest" % variables - # Load and normalize the manifests. 
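A portability note on the whitespace-insensitive comparison just below: str.translate(None, string.whitespace) is the Python 2 spelling only; on Python 3 the same normalization would be written as in this sketch (helper name invented):

import string

def strip_ws(text):
    # Python 3 equivalent of text.translate(None, string.whitespace).
    return text.translate({ord(c): None for c in string.whitespace})

assert strip_ws("<a>\n  <b/>\n</a>") == "<a><b/></a>"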
mt.exe sometimes removes whitespace, - # and sometimes doesn't unfortunately. - with open(our_manifest, "r") as our_f: - with open(assert_manifest, "r") as assert_f: - our_data = our_f.read().translate(None, string.whitespace) - assert_data = assert_f.read().translate(None, string.whitespace) - if our_data != assert_data: - os.unlink(out) - - def dump(filename): - print(filename, file=sys.stderr) - print("-----", file=sys.stderr) - with open(filename, "r") as f: - print(f.read(), file=sys.stderr) - print("-----", file=sys.stderr) - - dump(intermediate_manifest) - dump(our_manifest) - dump(assert_manifest) - sys.stderr.write( - 'Linker generated manifest "%s" added to final manifest "%s" ' - '(result in "%s"). ' - "Were /MANIFEST switches used in #pragma statements? " - % (intermediate_manifest, our_manifest, assert_manifest) - ) - return 1 - - def ExecManifestWrapper(self, arch, *args): - """Run manifest tool with environment set. Strip out undesirable warning - (some XML blocks are recognized by the OS loader, but not the manifest - tool).""" - env = self._GetEnv(arch) - popen = subprocess.Popen( - args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - out, _ = popen.communicate() - if PY3: - out = out.decode("utf-8") - for line in out.splitlines(): - if line and "manifest authoring warning 81010002" not in line: - print(line) - return popen.returncode - - def ExecManifestToRc(self, arch, *args): - """Creates a resource file pointing a SxS assembly manifest. - |args| is tuple containing path to resource file, path to manifest file - and resource name which can be "1" (for executables) or "2" (for DLLs).""" - manifest_path, resource_path, resource_name = args - with open(resource_path, "w") as output: - output.write( - '#include <windows.h>\n%s RT_MANIFEST "%s"' - % (resource_name, os.path.abspath(manifest_path).replace("\\", "/")) - ) - - def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags): - """Filter noisy filenames output from MIDL compile step that isn't - quietable via command line flags. - """ - args = ( - ["midl", "/nologo"] - + list(flags) - + [ - "/out", - outdir, - "/tlb", - tlb, - "/h", - h, - "/dlldata", - dlldata, - "/iid", - iid, - "/proxy", - proxy, - idl, - ] - ) - env = self._GetEnv(arch) - popen = subprocess.Popen( - args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - out, _ = popen.communicate() - if PY3: - out = out.decode("utf-8") - # Filter junk out of stdout, and write filtered versions.
Output we want - # to filter is pairs of lines that look like this: - # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl - # objidl.idl - lines = out.splitlines() - prefixes = ("Processing ", "64 bit Processing ") - processing = set(os.path.basename(x) for x in lines if x.startswith(prefixes)) - for line in lines: - if not line.startswith(prefixes) and line not in processing: - print(line) - return popen.returncode - - def ExecAsmWrapper(self, arch, *args): - """Filter logo banner from invocations of asm.exe.""" - env = self._GetEnv(arch) - popen = subprocess.Popen( - args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - out, _ = popen.communicate() - if PY3: - out = out.decode("utf-8") - for line in out.splitlines(): - if ( - not line.startswith("Copyright (C) Microsoft Corporation") - and not line.startswith("Microsoft (R) Macro Assembler") - and not line.startswith(" Assembling: ") - and line - ): - print(line) - return popen.returncode - - def ExecRcWrapper(self, arch, *args): - """Filter logo banner from invocations of rc.exe. Older versions of RC - don't support the /nologo flag.""" - env = self._GetEnv(arch) - popen = subprocess.Popen( - args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - out, _ = popen.communicate() - if PY3: - out = out.decode("utf-8") - for line in out.splitlines(): - if ( - not line.startswith("Microsoft (R) Windows (R) Resource Compiler") - and not line.startswith("Copyright (C) Microsoft Corporation") - and line - ): - print(line) - return popen.returncode - - def ExecActionWrapper(self, arch, rspfile, *dir): - """Runs an action command line from a response file using the environment - for |arch|. If |dir| is supplied, use that as the working directory.""" - env = self._GetEnv(arch) - # TODO(scottmg): This is a temporary hack to get some specific variables - # through to actions that are set after gyp-time. http://crbug.com/333738. - for k, v in os.environ.items(): - if k not in env: - env[k] = v - args = open(rspfile).read() - dir = dir[0] if dir else None - return subprocess.call(args, shell=True, env=env, cwd=dir) - - def ExecClCompile(self, project_dir, selected_files): - """Executed by msvs-ninja projects when the 'ClCompile' target is used to - build selected C/C++ files.""" - project_dir = os.path.relpath(project_dir, BASE_DIR) - selected_files = selected_files.split(";") - ninja_targets = [ - os.path.join(project_dir, filename) + "^^" for filename in selected_files - ] - cmd = ["ninja.exe"] - cmd.extend(ninja_targets) - return subprocess.call(cmd, shell=True, cwd=BASE_DIR) - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py deleted file mode 100644 index 8af2b39f9a147..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py +++ /dev/null @@ -1,1941 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -This module contains classes that help to emulate xcodebuild behavior on top of -other build systems, such as make and ninja. 
-""" - -from __future__ import print_function - -import copy -import gyp.common -import os -import os.path -import re -import shlex -import subprocess -import sys -from gyp.common import GypError - -PY3 = bytes != str - -# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when -# "xcodebuild" is called too quickly (it has been found to return incorrect -# version number). -XCODE_VERSION_CACHE = None - -# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance -# corresponding to the installed version of Xcode. -XCODE_ARCHS_DEFAULT_CACHE = None - - -def XcodeArchsVariableMapping(archs, archs_including_64_bit=None): - """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable, - and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT).""" - mapping = {"$(ARCHS_STANDARD)": archs} - if archs_including_64_bit: - mapping["$(ARCHS_STANDARD_INCLUDING_64_BIT)"] = archs_including_64_bit - return mapping - - -class XcodeArchsDefault(object): - """A class to resolve ARCHS variable from xcode_settings, resolving Xcode - macros and implementing filtering by VALID_ARCHS. The expansion of macros - depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and - on the version of Xcode. - """ - - # Match variable like $(ARCHS_STANDARD). - variable_pattern = re.compile(r"\$\([a-zA-Z_][a-zA-Z0-9_]*\)$") - - def __init__(self, default, mac, iphonesimulator, iphoneos): - self._default = (default,) - self._archs = {"mac": mac, "ios": iphoneos, "iossim": iphonesimulator} - - def _VariableMapping(self, sdkroot): - """Returns the dictionary of variable mapping depending on the SDKROOT.""" - sdkroot = sdkroot.lower() - if "iphoneos" in sdkroot: - return self._archs["ios"] - elif "iphonesimulator" in sdkroot: - return self._archs["iossim"] - else: - return self._archs["mac"] - - def _ExpandArchs(self, archs, sdkroot): - """Expands variables references in ARCHS, and remove duplicates.""" - variable_mapping = self._VariableMapping(sdkroot) - expanded_archs = [] - for arch in archs: - if self.variable_pattern.match(arch): - variable = arch - try: - variable_expansion = variable_mapping[variable] - for arch in variable_expansion: - if arch not in expanded_archs: - expanded_archs.append(arch) - except KeyError: - print('Warning: Ignoring unsupported variable "%s".' % variable) - elif arch not in expanded_archs: - expanded_archs.append(arch) - return expanded_archs - - def ActiveArchs(self, archs, valid_archs, sdkroot): - """Expands variables references in ARCHS, and filter by VALID_ARCHS if it - is defined (if not set, Xcode accept any value in ARCHS, otherwise, only - values present in VALID_ARCHS are kept).""" - expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or "") - if valid_archs: - filtered_archs = [] - for arch in expanded_archs: - if arch in valid_archs: - filtered_archs.append(arch) - expanded_archs = filtered_archs - return expanded_archs - - -def GetXcodeArchsDefault(): - """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the - installed version of Xcode. The default values used by Xcode for ARCHS - and the expansion of the variables depends on the version of Xcode used. - - For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included - uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses - $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0 - and deprecated with Xcode 5.1. 
- - For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit - architecture as part of $(ARCHS_STANDARD) and default to only building it. - - For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part - of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they - are also part of $(ARCHS_STANDARD). - - All these rules are coded in the construction of the |XcodeArchsDefault| - object to use depending on the version of Xcode detected. The object is - for performance reason.""" - global XCODE_ARCHS_DEFAULT_CACHE - if XCODE_ARCHS_DEFAULT_CACHE: - return XCODE_ARCHS_DEFAULT_CACHE - xcode_version, _ = XcodeVersion() - if xcode_version < "0500": - XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( - "$(ARCHS_STANDARD)", - XcodeArchsVariableMapping(["i386"]), - XcodeArchsVariableMapping(["i386"]), - XcodeArchsVariableMapping(["armv7"]), - ) - elif xcode_version < "0510": - XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( - "$(ARCHS_STANDARD_INCLUDING_64_BIT)", - XcodeArchsVariableMapping(["x86_64"], ["x86_64"]), - XcodeArchsVariableMapping(["i386"], ["i386", "x86_64"]), - XcodeArchsVariableMapping( - ["armv7", "armv7s"], ["armv7", "armv7s", "arm64"] - ), - ) - else: - XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( - "$(ARCHS_STANDARD)", - XcodeArchsVariableMapping(["x86_64"], ["x86_64"]), - XcodeArchsVariableMapping(["i386", "x86_64"], ["i386", "x86_64"]), - XcodeArchsVariableMapping( - ["armv7", "armv7s", "arm64"], ["armv7", "armv7s", "arm64"] - ), - ) - return XCODE_ARCHS_DEFAULT_CACHE - - -class XcodeSettings(object): - """A class that understands the gyp 'xcode_settings' object.""" - - # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached - # at class-level for efficiency. - _sdk_path_cache = {} - _platform_path_cache = {} - _sdk_root_cache = {} - - # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so - # cached at class-level for efficiency. - _plist_cache = {} - - # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so - # cached at class-level for efficiency. - _codesigning_key_cache = {} - - def __init__(self, spec): - self.spec = spec - - self.isIOS = False - self.mac_toolchain_dir = None - self.header_map_path = None - - # Per-target 'xcode_settings' are pushed down into configs earlier by gyp. - # This means self.xcode_settings[config] always contains all settings - # for that config -- the per-target settings as well. Settings that are - # the same for all configs are implicitly per-target settings. - self.xcode_settings = {} - configs = spec["configurations"] - for configname, config in configs.items(): - self.xcode_settings[configname] = config.get("xcode_settings", {}) - self._ConvertConditionalKeys(configname) - if self.xcode_settings[configname].get("IPHONEOS_DEPLOYMENT_TARGET", None): - self.isIOS = True - - # This is only non-None temporarily during the execution of some methods. - self.configname = None - - # Used by _AdjustLibrary to match .a and .dylib entries in libraries. - self.library_re = re.compile(r"^lib([^/]+)\.(a|dylib)$") - - def _ConvertConditionalKeys(self, configname): - """Converts or warns on conditional keys. Xcode supports conditional keys, - such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. 
This is a partial implementation - with some keys converted while the rest force a warning.""" - settings = self.xcode_settings[configname] - conditional_keys = [key for key in settings if key.endswith("]")] - for key in conditional_keys: - # If you need more, speak up at http://crbug.com/122592 - if key.endswith("[sdk=iphoneos*]"): - if configname.endswith("iphoneos"): - new_key = key.split("[")[0] - settings[new_key] = settings[key] - else: - print( - "Warning: Conditional keys not implemented, ignoring:", - " ".join(conditional_keys), - ) - del settings[key] - - def _Settings(self): - assert self.configname - return self.xcode_settings[self.configname] - - def _Test(self, test_key, cond_key, default): - return self._Settings().get(test_key, default) == cond_key - - def _Appendf(self, lst, test_key, format_str, default=None): - if test_key in self._Settings(): - lst.append(format_str % str(self._Settings()[test_key])) - elif default: - lst.append(format_str % str(default)) - - def _WarnUnimplemented(self, test_key): - if test_key in self._Settings(): - print('Warning: Ignoring not yet implemented key "%s".' % test_key) - - def IsBinaryOutputFormat(self, configname): - default = "binary" if self.isIOS else "xml" - format = self.xcode_settings[configname].get("INFOPLIST_OUTPUT_FORMAT", default) - return format == "binary" - - def IsIosFramework(self): - return self.spec["type"] == "shared_library" and self._IsBundle() and self.isIOS - - def _IsBundle(self): - return ( - int(self.spec.get("mac_bundle", 0)) != 0 - or self._IsXCTest() - or self._IsXCUiTest() - ) - - def _IsXCTest(self): - return int(self.spec.get("mac_xctest_bundle", 0)) != 0 - - def _IsXCUiTest(self): - return int(self.spec.get("mac_xcuitest_bundle", 0)) != 0 - - def _IsIosAppExtension(self): - return int(self.spec.get("ios_app_extension", 0)) != 0 - - def _IsIosWatchKitExtension(self): - return int(self.spec.get("ios_watchkit_extension", 0)) != 0 - - def _IsIosWatchApp(self): - return int(self.spec.get("ios_watch_app", 0)) != 0 - - def GetFrameworkVersion(self): - """Returns the framework version of the current target. Only valid for - bundles.""" - assert self._IsBundle() - return self.GetPerTargetSetting("FRAMEWORK_VERSION", default="A") - - def GetWrapperExtension(self): - """Returns the bundle extension (.app, .framework, .plugin, etc). Only - valid for bundles.""" - assert self._IsBundle() - if self.spec["type"] in ("loadable_module", "shared_library"): - default_wrapper_extension = { - "loadable_module": "bundle", - "shared_library": "framework", - }[self.spec["type"]] - wrapper_extension = self.GetPerTargetSetting( - "WRAPPER_EXTENSION", default=default_wrapper_extension - ) - return "." + self.spec.get("product_extension", wrapper_extension) - elif self.spec["type"] == "executable": - if self._IsIosAppExtension() or self._IsIosWatchKitExtension(): - return "." + self.spec.get("product_extension", "appex") - else: - return "." + self.spec.get("product_extension", "app") - else: - assert False, "Don't know extension for '%s', target '%s'" % ( - self.spec["type"], - self.spec["target_name"], - ) - - def GetProductName(self): - """Returns PRODUCT_NAME.""" - return self.spec.get("product_name", self.spec["target_name"]) - - def GetFullProductName(self): - """Returns FULL_PRODUCT_NAME.""" - if self._IsBundle(): - return self.GetWrapperName() - else: - return self._GetStandaloneBinaryPath() - - def GetWrapperName(self): - """Returns the directory name of the bundle represented by this target. 
- Only valid for bundles.""" - assert self._IsBundle() - return self.GetProductName() + self.GetWrapperExtension() - - def GetBundleContentsFolderPath(self): - """Returns the qualified path to the bundle's contents folder. E.g. - Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles.""" - if self.isIOS: - return self.GetWrapperName() - assert self._IsBundle() - if self.spec["type"] == "shared_library": - return os.path.join( - self.GetWrapperName(), "Versions", self.GetFrameworkVersion() - ) - else: - # loadable_modules have a 'Contents' folder like executables. - return os.path.join(self.GetWrapperName(), "Contents") - - def GetBundleResourceFolder(self): - """Returns the qualified path to the bundle's resource folder. E.g. - Chromium.app/Contents/Resources. Only valid for bundles.""" - assert self._IsBundle() - if self.isIOS: - return self.GetBundleContentsFolderPath() - return os.path.join(self.GetBundleContentsFolderPath(), "Resources") - - def GetBundleExecutableFolderPath(self): - """Returns the qualified path to the bundle's executables folder. E.g. - Chromium.app/Contents/MacOS. Only valid for bundles.""" - assert self._IsBundle() - if self.spec["type"] in ("shared_library") or self.isIOS: - return self.GetBundleContentsFolderPath() - elif self.spec["type"] in ("executable", "loadable_module"): - return os.path.join(self.GetBundleContentsFolderPath(), "MacOS") - - def GetBundleJavaFolderPath(self): - """Returns the qualified path to the bundle's Java resource folder. - E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleResourceFolder(), "Java") - - def GetBundleFrameworksFolderPath(self): - """Returns the qualified path to the bundle's frameworks folder. E.g, - Chromium.app/Contents/Frameworks. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), "Frameworks") - - def GetBundleSharedFrameworksFolderPath(self): - """Returns the qualified path to the bundle's frameworks folder. E.g, - Chromium.app/Contents/SharedFrameworks. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), "SharedFrameworks") - - def GetBundleSharedSupportFolderPath(self): - """Returns the qualified path to the bundle's shared support folder. E.g, - Chromium.app/Contents/SharedSupport. Only valid for bundles.""" - assert self._IsBundle() - if self.spec["type"] == "shared_library": - return self.GetBundleResourceFolder() - else: - return os.path.join(self.GetBundleContentsFolderPath(), "SharedSupport") - - def GetBundlePlugInsFolderPath(self): - """Returns the qualified path to the bundle's plugins folder. E.g, - Chromium.app/Contents/PlugIns. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), "PlugIns") - - def GetBundleXPCServicesFolderPath(self): - """Returns the qualified path to the bundle's XPC services folder. E.g, - Chromium.app/Contents/XPCServices. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), "XPCServices") - - def GetBundlePlistPath(self): - """Returns the qualified path to the bundle's plist file. E.g. - Chromium.app/Contents/Info.plist. 
Only valid for bundles.""" - assert self._IsBundle() - if ( - self.spec["type"] in ("executable", "loadable_module") - or self.IsIosFramework() - ): - return os.path.join(self.GetBundleContentsFolderPath(), "Info.plist") - else: - return os.path.join( - self.GetBundleContentsFolderPath(), "Resources", "Info.plist" - ) - - def GetProductType(self): - """Returns the PRODUCT_TYPE of this target.""" - if self._IsIosAppExtension(): - assert self._IsBundle(), ( - "ios_app_extension flag requires mac_bundle " - "(target %s)" % self.spec["target_name"] - ) - return "com.apple.product-type.app-extension" - if self._IsIosWatchKitExtension(): - assert self._IsBundle(), ( - "ios_watchkit_extension flag requires " - "mac_bundle (target %s)" % self.spec["target_name"] - ) - return "com.apple.product-type.watchkit-extension" - if self._IsIosWatchApp(): - assert self._IsBundle(), ( - "ios_watch_app flag requires mac_bundle " - "(target %s)" % self.spec["target_name"] - ) - return "com.apple.product-type.application.watchapp" - if self._IsXCUiTest(): - assert self._IsBundle(), ( - "mac_xcuitest_bundle flag requires mac_bundle " - "(target %s)" % self.spec["target_name"] - ) - return "com.apple.product-type.bundle.ui-testing" - if self._IsBundle(): - return { - "executable": "com.apple.product-type.application", - "loadable_module": "com.apple.product-type.bundle", - "shared_library": "com.apple.product-type.framework", - }[self.spec["type"]] - else: - return { - "executable": "com.apple.product-type.tool", - "loadable_module": "com.apple.product-type.library.dynamic", - "shared_library": "com.apple.product-type.library.dynamic", - "static_library": "com.apple.product-type.library.static", - }[self.spec["type"]] - - def GetMachOType(self): - """Returns the MACH_O_TYPE of this target.""" - # Weird, but matches Xcode. - if not self._IsBundle() and self.spec["type"] == "executable": - return "" - return { - "executable": "mh_execute", - "static_library": "staticlib", - "shared_library": "mh_dylib", - "loadable_module": "mh_bundle", - }[self.spec["type"]] - - def _GetBundleBinaryPath(self): - """Returns the name of the bundle binary of by this target. - E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join( - self.GetBundleExecutableFolderPath(), self.GetExecutableName() - ) - - def _GetStandaloneExecutableSuffix(self): - if "product_extension" in self.spec: - return "." + self.spec["product_extension"] - return { - "executable": "", - "static_library": ".a", - "shared_library": ".dylib", - "loadable_module": ".so", - }[self.spec["type"]] - - def _GetStandaloneExecutablePrefix(self): - return self.spec.get( - "product_prefix", - { - "executable": "", - "static_library": "lib", - "shared_library": "lib", - # Non-bundled loadable_modules are called foo.so for some reason - # (that is, .so and no prefix) with the xcode build -- match that. - "loadable_module": "", - }[self.spec["type"]], - ) - - def _GetStandaloneBinaryPath(self): - """Returns the name of the non-bundle binary represented by this target. - E.g. hello_world. 
Only valid for non-bundles.""" - assert not self._IsBundle() - assert self.spec["type"] in ( - "executable", - "shared_library", - "static_library", - "loadable_module", - ), ("Unexpected type %s" % self.spec["type"]) - target = self.spec["target_name"] - if self.spec["type"] == "static_library": - if target[:3] == "lib": - target = target[3:] - elif self.spec["type"] in ("loadable_module", "shared_library"): - if target[:3] == "lib": - target = target[3:] - - target_prefix = self._GetStandaloneExecutablePrefix() - target = self.spec.get("product_name", target) - target_ext = self._GetStandaloneExecutableSuffix() - return target_prefix + target + target_ext - - def GetExecutableName(self): - """Returns the executable name of the bundle represented by this target. - E.g. Chromium.""" - if self._IsBundle(): - return self.spec.get("product_name", self.spec["target_name"]) - else: - return self._GetStandaloneBinaryPath() - - def GetExecutablePath(self): - """Returns the qualified path to the primary executable of the bundle - represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium.""" - if self._IsBundle(): - return self._GetBundleBinaryPath() - else: - return self._GetStandaloneBinaryPath() - - def GetActiveArchs(self, configname): - """Returns the architectures this target should be built for.""" - config_settings = self.xcode_settings[configname] - xcode_archs_default = GetXcodeArchsDefault() - return xcode_archs_default.ActiveArchs( - config_settings.get("ARCHS"), - config_settings.get("VALID_ARCHS"), - config_settings.get("SDKROOT"), - ) - - def _GetSdkVersionInfoItem(self, sdk, infoitem): - # xcodebuild requires Xcode and can't run on Command Line Tools-only - # systems from 10.7 onward. - # Since the CLT has no SDK paths anyway, returning None is the - # most sensible route and should still do the right thing. - try: - return GetStdoutQuiet(["xcrun", "--sdk", sdk, infoitem]) - except GypError: - pass - - def _SdkRoot(self, configname): - if configname is None: - configname = self.configname - return self.GetPerConfigSetting("SDKROOT", configname, default="") - - def _XcodePlatformPath(self, configname=None): - sdk_root = self._SdkRoot(configname) - if sdk_root not in XcodeSettings._platform_path_cache: - platform_path = self._GetSdkVersionInfoItem( - sdk_root, "--show-sdk-platform-path" - ) - XcodeSettings._platform_path_cache[sdk_root] = platform_path - return XcodeSettings._platform_path_cache[sdk_root] - - def _SdkPath(self, configname=None): - sdk_root = self._SdkRoot(configname) - if sdk_root.startswith("/"): - return sdk_root - return self._XcodeSdkPath(sdk_root) - - def _XcodeSdkPath(self, sdk_root): - if sdk_root not in XcodeSettings._sdk_path_cache: - sdk_path = self._GetSdkVersionInfoItem(sdk_root, "--show-sdk-path") - XcodeSettings._sdk_path_cache[sdk_root] = sdk_path - if sdk_root: - XcodeSettings._sdk_root_cache[sdk_path] = sdk_root - return XcodeSettings._sdk_path_cache[sdk_root] - - def _AppendPlatformVersionMinFlags(self, lst): - self._Appendf(lst, "MACOSX_DEPLOYMENT_TARGET", "-mmacosx-version-min=%s") - if "IPHONEOS_DEPLOYMENT_TARGET" in self._Settings(): - # TODO: Implement this better? 
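The SDK-basename test in the lines that follow decides between the device and simulator spellings of the deployment-target flag; extracted as a standalone sketch (function name invented):

import os

def ios_min_version_flag(sdk_path, version):
    if os.path.basename(sdk_path).lower().startswith("iphonesimulator"):
        return "-mios-simulator-version-min=%s" % version
    return "-miphoneos-version-min=%s" % version

assert (ios_min_version_flag("/SDKs/iPhoneSimulator9.3.sdk", "9.0")
        == "-mios-simulator-version-min=9.0")
assert (ios_min_version_flag("/SDKs/iPhoneOS9.3.sdk", "9.0")
        == "-miphoneos-version-min=9.0")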
- sdk_path_basename = os.path.basename(self._SdkPath()) - if sdk_path_basename.lower().startswith("iphonesimulator"): - self._Appendf( - lst, "IPHONEOS_DEPLOYMENT_TARGET", "-mios-simulator-version-min=%s" - ) - else: - self._Appendf( - lst, "IPHONEOS_DEPLOYMENT_TARGET", "-miphoneos-version-min=%s" - ) - - def GetCflags(self, configname, arch=None): - """Returns flags that need to be added to .c, .cc, .m, and .mm - compilations.""" - # This functions (and the similar ones below) do not offer complete - # emulation of all xcode_settings keys. They're implemented on demand. - - self.configname = configname - cflags = [] - - sdk_root = self._SdkPath() - if "SDKROOT" in self._Settings() and sdk_root: - cflags.append("-isysroot %s" % sdk_root) - - if self.header_map_path: - cflags.append("-I%s" % self.header_map_path) - - if self._Test("CLANG_WARN_CONSTANT_CONVERSION", "YES", default="NO"): - cflags.append("-Wconstant-conversion") - - if self._Test("GCC_CHAR_IS_UNSIGNED_CHAR", "YES", default="NO"): - cflags.append("-funsigned-char") - - if self._Test("GCC_CW_ASM_SYNTAX", "YES", default="YES"): - cflags.append("-fasm-blocks") - - if "GCC_DYNAMIC_NO_PIC" in self._Settings(): - if self._Settings()["GCC_DYNAMIC_NO_PIC"] == "YES": - cflags.append("-mdynamic-no-pic") - else: - pass - # TODO: In this case, it depends on the target. xcode passes - # mdynamic-no-pic by default for executable and possibly static lib - # according to mento - - if self._Test("GCC_ENABLE_PASCAL_STRINGS", "YES", default="YES"): - cflags.append("-mpascal-strings") - - self._Appendf(cflags, "GCC_OPTIMIZATION_LEVEL", "-O%s", default="s") - - if self._Test("GCC_GENERATE_DEBUGGING_SYMBOLS", "YES", default="YES"): - dbg_format = self._Settings().get("DEBUG_INFORMATION_FORMAT", "dwarf") - if dbg_format == "dwarf": - cflags.append("-gdwarf-2") - elif dbg_format == "stabs": - raise NotImplementedError("stabs debug format is not supported yet.") - elif dbg_format == "dwarf-with-dsym": - cflags.append("-gdwarf-2") - else: - raise NotImplementedError("Unknown debug format %s" % dbg_format) - - if self._Settings().get("GCC_STRICT_ALIASING") == "YES": - cflags.append("-fstrict-aliasing") - elif self._Settings().get("GCC_STRICT_ALIASING") == "NO": - cflags.append("-fno-strict-aliasing") - - if self._Test("GCC_SYMBOLS_PRIVATE_EXTERN", "YES", default="NO"): - cflags.append("-fvisibility=hidden") - - if self._Test("GCC_TREAT_WARNINGS_AS_ERRORS", "YES", default="NO"): - cflags.append("-Werror") - - if self._Test("GCC_WARN_ABOUT_MISSING_NEWLINE", "YES", default="NO"): - cflags.append("-Wnewline-eof") - - # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or - # llvm-gcc. It also requires a fairly recent libtool, and - # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the - # path to the libLTO.dylib that matches the used clang. - if self._Test("LLVM_LTO", "YES", default="NO"): - cflags.append("-flto") - - self._AppendPlatformVersionMinFlags(cflags) - - # TODO: - if self._Test("COPY_PHASE_STRIP", "YES", default="NO"): - self._WarnUnimplemented("COPY_PHASE_STRIP") - self._WarnUnimplemented("GCC_DEBUGGING_SYMBOLS") - self._WarnUnimplemented("GCC_ENABLE_OBJC_EXCEPTIONS") - - # TODO: This is exported correctly, but assigning to it is not supported. 
- self._WarnUnimplemented("MACH_O_TYPE") - self._WarnUnimplemented("PRODUCT_TYPE") - - if arch is not None: - archs = [arch] - else: - assert self.configname - archs = self.GetActiveArchs(self.configname) - if len(archs) != 1: - # TODO: Supporting fat binaries will be annoying. - self._WarnUnimplemented("ARCHS") - archs = ["i386"] - cflags.append("-arch " + archs[0]) - - if archs[0] in ("i386", "x86_64"): - if self._Test("GCC_ENABLE_SSE3_EXTENSIONS", "YES", default="NO"): - cflags.append("-msse3") - if self._Test( - "GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS", "YES", default="NO" - ): - cflags.append("-mssse3") # Note 3rd 's'. - if self._Test("GCC_ENABLE_SSE41_EXTENSIONS", "YES", default="NO"): - cflags.append("-msse4.1") - if self._Test("GCC_ENABLE_SSE42_EXTENSIONS", "YES", default="NO"): - cflags.append("-msse4.2") - - cflags += self._Settings().get("WARNING_CFLAGS", []) - - if self._IsXCTest(): - platform_root = self._XcodePlatformPath(configname) - if platform_root: - cflags.append("-F" + platform_root + "/Developer/Library/Frameworks/") - - if sdk_root: - framework_root = sdk_root - else: - framework_root = "" - config = self.spec["configurations"][self.configname] - framework_dirs = config.get("mac_framework_dirs", []) - for directory in framework_dirs: - cflags.append("-F" + directory.replace("$(SDKROOT)", framework_root)) - - self.configname = None - return cflags - - def GetCflagsC(self, configname): - """Returns flags that need to be added to .c, and .m compilations.""" - self.configname = configname - cflags_c = [] - if self._Settings().get("GCC_C_LANGUAGE_STANDARD", "") == "ansi": - cflags_c.append("-ansi") - else: - self._Appendf(cflags_c, "GCC_C_LANGUAGE_STANDARD", "-std=%s") - cflags_c += self._Settings().get("OTHER_CFLAGS", []) - self.configname = None - return cflags_c - - def GetCflagsCC(self, configname): - """Returns flags that need to be added to .cc, and .mm compilations.""" - self.configname = configname - cflags_cc = [] - - clang_cxx_language_standard = self._Settings().get( - "CLANG_CXX_LANGUAGE_STANDARD" - ) - # Note: Don't make c++0x to c++11 so that c++0x can be used with older - # clangs that don't understand c++11 yet (like Xcode 4.2's). - if clang_cxx_language_standard: - cflags_cc.append("-std=%s" % clang_cxx_language_standard) - - self._Appendf(cflags_cc, "CLANG_CXX_LIBRARY", "-stdlib=%s") - - if self._Test("GCC_ENABLE_CPP_RTTI", "NO", default="YES"): - cflags_cc.append("-fno-rtti") - if self._Test("GCC_ENABLE_CPP_EXCEPTIONS", "NO", default="YES"): - cflags_cc.append("-fno-exceptions") - if self._Test("GCC_INLINES_ARE_PRIVATE_EXTERN", "YES", default="NO"): - cflags_cc.append("-fvisibility-inlines-hidden") - if self._Test("GCC_THREADSAFE_STATICS", "NO", default="YES"): - cflags_cc.append("-fno-threadsafe-statics") - # Note: This flag is a no-op for clang, it only has an effect for gcc. - if self._Test("GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO", "NO", default="YES"): - cflags_cc.append("-Wno-invalid-offsetof") - - other_ccflags = [] - - for flag in self._Settings().get("OTHER_CPLUSPLUSFLAGS", ["$(inherited)"]): - # TODO: More general variable expansion. Missing in many other places too. 
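The loop continuing below folds "$(inherited)" (in all three spellings) down to OTHER_CFLAGS; the same logic as a standalone sketch (function name invented):

def expand_other_cplusplusflags(settings):
    out = []
    for flag in settings.get("OTHER_CPLUSPLUSFLAGS", ["$(inherited)"]):
        if flag in ("$inherited", "$(inherited)", "${inherited}",
                    "$OTHER_CFLAGS", "$(OTHER_CFLAGS)", "${OTHER_CFLAGS}"):
            out += settings.get("OTHER_CFLAGS", [])
        else:
            out.append(flag)
    return out

# With no OTHER_CPLUSPLUSFLAGS at all, C++ inherits the C flags:
assert expand_other_cplusplusflags({"OTHER_CFLAGS": ["-fno-common"]}) == ["-fno-common"]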
- if flag in ("$inherited", "$(inherited)", "${inherited}"): - flag = "$OTHER_CFLAGS" - if flag in ("$OTHER_CFLAGS", "$(OTHER_CFLAGS)", "${OTHER_CFLAGS}"): - other_ccflags += self._Settings().get("OTHER_CFLAGS", []) - else: - other_ccflags.append(flag) - cflags_cc += other_ccflags - - self.configname = None - return cflags_cc - - def _AddObjectiveCGarbageCollectionFlags(self, flags): - gc_policy = self._Settings().get("GCC_ENABLE_OBJC_GC", "unsupported") - if gc_policy == "supported": - flags.append("-fobjc-gc") - elif gc_policy == "required": - flags.append("-fobjc-gc-only") - - def _AddObjectiveCARCFlags(self, flags): - if self._Test("CLANG_ENABLE_OBJC_ARC", "YES", default="NO"): - flags.append("-fobjc-arc") - - def _AddObjectiveCMissingPropertySynthesisFlags(self, flags): - if self._Test( - "CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS", "YES", default="NO" - ): - flags.append("-Wobjc-missing-property-synthesis") - - def GetCflagsObjC(self, configname): - """Returns flags that need to be added to .m compilations.""" - self.configname = configname - cflags_objc = [] - self._AddObjectiveCGarbageCollectionFlags(cflags_objc) - self._AddObjectiveCARCFlags(cflags_objc) - self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc) - self.configname = None - return cflags_objc - - def GetCflagsObjCC(self, configname): - """Returns flags that need to be added to .mm compilations.""" - self.configname = configname - cflags_objcc = [] - self._AddObjectiveCGarbageCollectionFlags(cflags_objcc) - self._AddObjectiveCARCFlags(cflags_objcc) - self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc) - if self._Test("GCC_OBJC_CALL_CXX_CDTORS", "YES", default="NO"): - cflags_objcc.append("-fobjc-call-cxx-cdtors") - self.configname = None - return cflags_objcc - - def GetInstallNameBase(self): - """Return DYLIB_INSTALL_NAME_BASE for this target.""" - # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. - if self.spec["type"] != "shared_library" and ( - self.spec["type"] != "loadable_module" or self._IsBundle() - ): - return None - install_base = self.GetPerTargetSetting( - "DYLIB_INSTALL_NAME_BASE", - default="/Library/Frameworks" if self._IsBundle() else "/usr/local/lib", - ) - return install_base - - def _StandardizePath(self, path): - """Do :standardizepath processing for path.""" - # I'm not quite sure what :standardizepath does. Just call normpath(), - # but don't let @executable_path/../foo collapse to foo. - if "/" in path: - prefix, rest = "", path - if path.startswith("@"): - prefix, rest = path.split("/", 1) - rest = os.path.normpath(rest) # :standardizepath - path = os.path.join(prefix, rest) - return path - - def GetInstallName(self): - """Return LD_DYLIB_INSTALL_NAME for this target.""" - # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. - if self.spec["type"] != "shared_library" and ( - self.spec["type"] != "loadable_module" or self._IsBundle() - ): - return None - - default_install_name = ( - "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)" - ) - install_name = self.GetPerTargetSetting( - "LD_DYLIB_INSTALL_NAME", default=default_install_name - ) - - # Hardcode support for the variables used in chromium for now, to - # unblock people using the make build. 
- if "$" in install_name: - assert install_name in ( - "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/" - "$(WRAPPER_NAME)/$(PRODUCT_NAME)", - default_install_name, - ), ( - "Variables in LD_DYLIB_INSTALL_NAME are not generally supported " - "yet in target '%s' (got '%s')" - % (self.spec["target_name"], install_name) - ) - - install_name = install_name.replace( - "$(DYLIB_INSTALL_NAME_BASE:standardizepath)", - self._StandardizePath(self.GetInstallNameBase()), - ) - if self._IsBundle(): - # These are only valid for bundles, hence the |if|. - install_name = install_name.replace( - "$(WRAPPER_NAME)", self.GetWrapperName() - ) - install_name = install_name.replace( - "$(PRODUCT_NAME)", self.GetProductName() - ) - else: - assert "$(WRAPPER_NAME)" not in install_name - assert "$(PRODUCT_NAME)" not in install_name - - install_name = install_name.replace( - "$(EXECUTABLE_PATH)", self.GetExecutablePath() - ) - return install_name - - def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path): - """Checks if ldflag contains a filename and if so remaps it from - gyp-directory-relative to build-directory-relative.""" - # This list is expanded on demand. - # They get matched as: - # -exported_symbols_list file - # -Wl,exported_symbols_list file - # -Wl,exported_symbols_list,file - LINKER_FILE = r"(\S+)" - WORD = r"\S+" - linker_flags = [ - ["-exported_symbols_list", LINKER_FILE], # Needed for NaCl. - ["-unexported_symbols_list", LINKER_FILE], - ["-reexported_symbols_list", LINKER_FILE], - ["-sectcreate", WORD, WORD, LINKER_FILE], # Needed for remoting. - ] - for flag_pattern in linker_flags: - regex = re.compile("(?:-Wl,)?" + "[ ,]".join(flag_pattern)) - m = regex.match(ldflag) - if m: - ldflag = ( - ldflag[: m.start(1)] - + gyp_to_build_path(m.group(1)) - + ldflag[m.end(1) :] - ) - # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS, - # TODO(thakis): Update ffmpeg.gyp): - if ldflag.startswith("-L"): - ldflag = "-L" + gyp_to_build_path(ldflag[len("-L") :]) - return ldflag - - def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): - """Returns flags that need to be passed to the linker. - - Args: - configname: The name of the configuration to get ld flags for. - product_dir: The directory where products such static and dynamic - libraries are placed. This is added to the library search path. - gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build directory. - """ - self.configname = configname - ldflags = [] - - # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS - # can contain entries that depend on this. Explicitly absolutify these. 
- for ldflag in self._Settings().get("OTHER_LDFLAGS", []): - ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path)) - - if self._Test("DEAD_CODE_STRIPPING", "YES", default="NO"): - ldflags.append("-Wl,-dead_strip") - - if self._Test("PREBINDING", "YES", default="NO"): - ldflags.append("-Wl,-prebind") - - self._Appendf( - ldflags, "DYLIB_COMPATIBILITY_VERSION", "-compatibility_version %s" - ) - self._Appendf(ldflags, "DYLIB_CURRENT_VERSION", "-current_version %s") - - self._AppendPlatformVersionMinFlags(ldflags) - - if "SDKROOT" in self._Settings() and self._SdkPath(): - ldflags.append("-isysroot " + self._SdkPath()) - - for library_path in self._Settings().get("LIBRARY_SEARCH_PATHS", []): - ldflags.append("-L" + gyp_to_build_path(library_path)) - - if "ORDER_FILE" in self._Settings(): - ldflags.append( - "-Wl,-order_file " - + "-Wl," - + gyp_to_build_path(self._Settings()["ORDER_FILE"]) - ) - - if arch is not None: - archs = [arch] - else: - assert self.configname - archs = self.GetActiveArchs(self.configname) - if len(archs) != 1: - # TODO: Supporting fat binaries will be annoying. - self._WarnUnimplemented("ARCHS") - archs = ["i386"] - ldflags.append("-arch " + archs[0]) - - # Xcode adds the product directory by default. - # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838 - ldflags.append("-L" + (product_dir if product_dir != "." else "./")) - - install_name = self.GetInstallName() - if install_name and self.spec["type"] != "loadable_module": - ldflags.append("-install_name " + install_name.replace(" ", r"\ ")) - - for rpath in self._Settings().get("LD_RUNPATH_SEARCH_PATHS", []): - ldflags.append("-Wl,-rpath," + rpath) - - sdk_root = self._SdkPath() - if not sdk_root: - sdk_root = "" - config = self.spec["configurations"][self.configname] - framework_dirs = config.get("mac_framework_dirs", []) - for directory in framework_dirs: - ldflags.append("-F" + directory.replace("$(SDKROOT)", sdk_root)) - - if self._IsXCTest(): - platform_root = self._XcodePlatformPath(configname) - if sdk_root and platform_root: - ldflags.append("-F" + platform_root + "/Developer/Library/Frameworks/") - ldflags.append("-framework XCTest") - - is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension() - if sdk_root and is_extension: - # Adds the link flags for extensions. These flags are common for all - # extensions and provide loader and main function. - # These flags reflect the compilation options used by xcode to compile - # extensions. - xcode_version, _ = XcodeVersion() - if xcode_version < "0900": - ldflags.append("-lpkstart") - ldflags.append( - sdk_root - + "/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit" - ) - else: - ldflags.append("-e _NSExtensionMain") - ldflags.append("-fapplication-extension") - - self._Appendf(ldflags, "CLANG_CXX_LIBRARY", "-stdlib=%s") - - self.configname = None - return ldflags - - def GetLibtoolflags(self, configname): - """Returns flags that need to be passed to the static linker. - - Args: - configname: The name of the configuration to get ld flags for. - """ - self.configname = configname - libtoolflags = [] - - for libtoolflag in self._Settings().get("OTHER_LDFLAGS", []): - libtoolflags.append(libtoolflag) - # TODO(thakis): ARCHS? - - self.configname = None - return libtoolflags - - def GetPerTargetSettings(self): - """Gets a list of all the per-target settings. 
This will only fetch keys - whose values are the same across all configurations.""" - first_pass = True - result = {} - for configname in sorted(self.xcode_settings.keys()): - if first_pass: - result = dict(self.xcode_settings[configname]) - first_pass = False - else: - for key, value in self.xcode_settings[configname].items(): - if key not in result: - continue - elif result[key] != value: - del result[key] - return result - - def GetPerConfigSetting(self, setting, configname, default=None): - if configname in self.xcode_settings: - return self.xcode_settings[configname].get(setting, default) - else: - return self.GetPerTargetSetting(setting, default) - - def GetPerTargetSetting(self, setting, default=None): - """Tries to get xcode_settings.setting from spec. Assumes that the setting - has the same value in all configurations and throws otherwise.""" - is_first_pass = True - result = None - for configname in sorted(self.xcode_settings.keys()): - if is_first_pass: - result = self.xcode_settings[configname].get(setting, None) - is_first_pass = False - else: - assert result == self.xcode_settings[configname].get(setting, None), ( - "Expected per-target setting for '%s', got per-config setting " - "(target %s)" % (setting, self.spec["target_name"]) - ) - if result is None: - return default - return result - - def _GetStripPostbuilds(self, configname, output_binary, quiet): - """Returns a list of shell commands that contain the shell commands - necessary to strip this target's binary. These should be run as postbuilds - before the actual postbuilds run.""" - self.configname = configname - - result = [] - if self._Test("DEPLOYMENT_POSTPROCESSING", "YES", default="NO") and self._Test( - "STRIP_INSTALLED_PRODUCT", "YES", default="NO" - ): - - default_strip_style = "debugging" - if ( - self.spec["type"] == "loadable_module" or self._IsIosAppExtension() - ) and self._IsBundle(): - default_strip_style = "non-global" - elif self.spec["type"] == "executable": - default_strip_style = "all" - - strip_style = self._Settings().get("STRIP_STYLE", default_strip_style) - strip_flags = {"all": "", "non-global": "-x", "debugging": "-S"}[ - strip_style - ] - - explicit_strip_flags = self._Settings().get("STRIPFLAGS", "") - if explicit_strip_flags: - strip_flags += " " + _NormalizeEnvVarReferences(explicit_strip_flags) - - if not quiet: - result.append("echo STRIP\\(%s\\)" % self.spec["target_name"]) - result.append("strip %s %s" % (strip_flags, output_binary)) - - self.configname = None - return result - - def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet): - """Returns a list of shell commands that contain the shell commands - necessary to massage this target's debug information. These should be run - as postbuilds before the actual postbuilds run.""" - self.configname = configname - - # For static libraries, no dSYMs are created. 
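GetPerTargetSettings above keeps only keys that agree across every configuration; the surviving dict for a two-config target looks like this (sketch with invented settings):

xcode_settings = {
    "Debug": {"PRODUCT_NAME": "Foo", "GCC_OPTIMIZATION_LEVEL": "0"},
    "Release": {"PRODUCT_NAME": "Foo", "GCC_OPTIMIZATION_LEVEL": "2"},
}

per_target = dict(xcode_settings["Debug"])
for key, value in xcode_settings["Release"].items():
    if per_target.get(key) != value:
        per_target.pop(key, None)

# PRODUCT_NAME agrees everywhere and survives; the optimization level is
# per-config and drops out (GetPerTargetSetting would assert on it).
assert per_target == {"PRODUCT_NAME": "Foo"}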
- result = [] - if ( - self._Test("GCC_GENERATE_DEBUGGING_SYMBOLS", "YES", default="YES") - and self._Test( - "DEBUG_INFORMATION_FORMAT", "dwarf-with-dsym", default="dwarf" - ) - and self.spec["type"] != "static_library" - ): - if not quiet: - result.append("echo DSYMUTIL\\(%s\\)" % self.spec["target_name"]) - result.append("dsymutil %s -o %s" % (output_binary, output + ".dSYM")) - - self.configname = None - return result - - def _GetTargetPostbuilds(self, configname, output, output_binary, quiet=False): - """Returns a list of shell commands that contain the shell commands - to run as postbuilds for this target, before the actual postbuilds.""" - # dSYMs need to build before stripping happens. - return self._GetDebugInfoPostbuilds( - configname, output, output_binary, quiet - ) + self._GetStripPostbuilds(configname, output_binary, quiet) - - def _GetIOSPostbuilds(self, configname, output_binary): - """Return a shell command to codesign the iOS output binary so it can - be deployed to a device. This should be run as the very last step of the - build.""" - if not ( - self.isIOS - and (self.spec["type"] == "executable" or self._IsXCTest()) - or self.IsIosFramework() - ): - return [] - - postbuilds = [] - product_name = self.GetFullProductName() - settings = self.xcode_settings[configname] - - # Xcode expects XCTests to be copied into the TEST_HOST dir. - if self._IsXCTest(): - source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name) - test_host = os.path.dirname(settings.get("TEST_HOST")) - xctest_destination = os.path.join(test_host, "PlugIns", product_name) - postbuilds.extend(["ditto %s %s" % (source, xctest_destination)]) - - key = self._GetIOSCodeSignIdentityKey(settings) - if not key: - return postbuilds - - # Warn for any unimplemented signing xcode keys. - unimpl = ["OTHER_CODE_SIGN_FLAGS"] - unimpl = set(unimpl) & set(self.xcode_settings[configname].keys()) - if unimpl: - print( - "Warning: Some codesign keys not implemented, ignoring: %s" - % ", ".join(sorted(unimpl)) - ) - - if self._IsXCTest(): - # For device xctests, Xcode copies two extra frameworks into $TEST_HOST. 
- test_host = os.path.dirname(settings.get("TEST_HOST")) - frameworks_dir = os.path.join(test_host, "Frameworks") - platform_root = self._XcodePlatformPath(configname) - frameworks = [ - "Developer/Library/PrivateFrameworks/IDEBundleInjection.framework", - "Developer/Library/Frameworks/XCTest.framework", - ] - for framework in frameworks: - source = os.path.join(platform_root, framework) - destination = os.path.join(frameworks_dir, os.path.basename(framework)) - postbuilds.extend(["ditto %s %s" % (source, destination)]) - - # Then re-sign everything with 'preserve=True' - postbuilds.extend( - [ - '%s code-sign-bundle "%s" "%s" "%s" "%s" %s' - % ( - os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"), - key, - settings.get("CODE_SIGN_ENTITLEMENTS", ""), - settings.get("PROVISIONING_PROFILE", ""), - destination, - True, - ) - ] - ) - plugin_dir = os.path.join(test_host, "PlugIns") - targets = [os.path.join(plugin_dir, product_name), test_host] - for target in targets: - postbuilds.extend( - [ - '%s code-sign-bundle "%s" "%s" "%s" "%s" %s' - % ( - os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"), - key, - settings.get("CODE_SIGN_ENTITLEMENTS", ""), - settings.get("PROVISIONING_PROFILE", ""), - target, - True, - ) - ] - ) - - postbuilds.extend( - [ - '%s code-sign-bundle "%s" "%s" "%s" "%s" %s' - % ( - os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"), - key, - settings.get("CODE_SIGN_ENTITLEMENTS", ""), - settings.get("PROVISIONING_PROFILE", ""), - os.path.join("${BUILT_PRODUCTS_DIR}", product_name), - False, - ) - ] - ) - return postbuilds - - def _GetIOSCodeSignIdentityKey(self, settings): - identity = settings.get("CODE_SIGN_IDENTITY") - if not identity: - return None - if identity not in XcodeSettings._codesigning_key_cache: - output = subprocess.check_output( - ["security", "find-identity", "-p", "codesigning", "-v"] - ) - for line in output.splitlines(): - if identity in line: - fingerprint = line.split()[1] - cache = XcodeSettings._codesigning_key_cache - assert identity not in cache or fingerprint == cache[identity], ( - "Multiple codesigning fingerprints for identity: %s" % identity - ) - XcodeSettings._codesigning_key_cache[identity] = fingerprint - return XcodeSettings._codesigning_key_cache.get(identity, "") - - def AddImplicitPostbuilds( - self, configname, output, output_binary, postbuilds=[], quiet=False - ): - """Returns a list of shell commands that should run before and after - |postbuilds|.""" - assert output_binary is not None - pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet) - post = self._GetIOSPostbuilds(configname, output_binary) - return pre + postbuilds + post - - def _AdjustLibrary(self, library, config_name=None): - if library.endswith(".framework"): - l_flag = "-framework " + os.path.splitext(os.path.basename(library))[0] - else: - m = self.library_re.match(library) - if m: - l_flag = "-l" + m.group(1) - else: - l_flag = library - - sdk_root = self._SdkPath(config_name) - if not sdk_root: - sdk_root = "" - # Xcode 7 started shipping with ".tbd" (text based stubs) files instead of - # ".dylib" without providing real support for them. What it does, for - # "/usr/lib" libraries, is do "-L/usr/lib -lname" which is dependent on the - # library order and causes collisions when building Chrome.
- # - # Instead substitute ".tbd" for ".dylib" in the generated project when the - # following conditions are both true: - # - library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib", - # - the ".dylib" file does not exist but a ".tbd" file does. - library = l_flag.replace("$(SDKROOT)", sdk_root) - if l_flag.startswith("$(SDKROOT)"): - basename, ext = os.path.splitext(library) - if ext == ".dylib" and not os.path.exists(library): - tbd_library = basename + ".tbd" - if os.path.exists(tbd_library): - library = tbd_library - return library - - def AdjustLibraries(self, libraries, config_name=None): - """Transforms entries like 'Cocoa.framework' in libraries into entries like - '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc. - """ - libraries = [self._AdjustLibrary(library, config_name) for library in libraries] - return libraries - - def _BuildMachineOSBuild(self): - return GetStdout(["sw_vers", "-buildVersion"]) - - def _XcodeIOSDeviceFamily(self, configname): - family = self.xcode_settings[configname].get("TARGETED_DEVICE_FAMILY", "1") - return [int(x) for x in family.split(",")] - - def GetExtraPlistItems(self, configname=None): - """Returns a dictionary with extra items to insert into Info.plist.""" - if configname not in XcodeSettings._plist_cache: - cache = {} - cache["BuildMachineOSBuild"] = self._BuildMachineOSBuild() - - xcode_version, xcode_build = XcodeVersion() - cache["DTXcode"] = xcode_version - cache["DTXcodeBuild"] = xcode_build - compiler = self.xcode_settings[configname].get("GCC_VERSION") - if compiler is not None: - cache["DTCompiler"] = compiler - - sdk_root = self._SdkRoot(configname) - if not sdk_root: - sdk_root = self._DefaultSdkRoot() - sdk_version = self._GetSdkVersionInfoItem(sdk_root, "--show-sdk-version") - cache["DTSDKName"] = sdk_root + (sdk_version or "") - if xcode_version >= "0720": - cache["DTSDKBuild"] = self._GetSdkVersionInfoItem( - sdk_root, "--show-sdk-build-version" - ) - elif xcode_version >= "0430": - cache["DTSDKBuild"] = sdk_version - else: - cache["DTSDKBuild"] = cache["BuildMachineOSBuild"] - - if self.isIOS: - cache["MinimumOSVersion"] = self.xcode_settings[configname].get( - "IPHONEOS_DEPLOYMENT_TARGET" - ) - cache["DTPlatformName"] = sdk_root - cache["DTPlatformVersion"] = sdk_version - - if configname.endswith("iphoneos"): - cache["CFBundleSupportedPlatforms"] = ["iPhoneOS"] - cache["DTPlatformBuild"] = cache["DTSDKBuild"] - else: - cache["CFBundleSupportedPlatforms"] = ["iPhoneSimulator"] - # This is weird, but Xcode sets DTPlatformBuild to an empty field - # for simulator builds. - cache["DTPlatformBuild"] = "" - XcodeSettings._plist_cache[configname] = cache - - # Include extra plist items that are per-target, not per global - # XcodeSettings. - items = dict(XcodeSettings._plist_cache[configname]) - if self.isIOS: - items["UIDeviceFamily"] = self._XcodeIOSDeviceFamily(configname) - return items - - def _DefaultSdkRoot(self): - """Returns the default SDKROOT to use. - - Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode - project, then the environment variable was empty. Starting with this - version, Xcode uses the name of the newest SDK installed.
- """ - xcode_version, _ = XcodeVersion() - if xcode_version < "0500": - return "" - default_sdk_path = self._XcodeSdkPath("") - default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path) - if default_sdk_root: - return default_sdk_root - try: - all_sdks = GetStdout(["xcodebuild", "-showsdks"]) - except GypError: - # If xcodebuild fails, there will be no valid SDKs - return "" - for line in all_sdks.splitlines(): - items = line.split() - if len(items) >= 3 and items[-2] == "-sdk": - sdk_root = items[-1] - sdk_path = self._XcodeSdkPath(sdk_root) - if sdk_path == default_sdk_path: - return sdk_root - return "" - - -class MacPrefixHeader(object): - """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature. - - This feature consists of several pieces: - * If GCC_PREFIX_HEADER is present, all compilations in that project get an - additional |-include path_to_prefix_header| cflag. - * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is - instead compiled, and all other compilations in the project get an - additional |-include path_to_compiled_header| instead. - + Compiled prefix headers have the extension gch. There is one gch file for - every language used in the project (c, cc, m, mm), since gch files for - different languages aren't compatible. - + gch files themselves are built with the target's normal cflags, but they - obviously don't get the |-include| flag. Instead, they need a -x flag that - describes their language. - + All o files in the target need to depend on the gch file, to make sure - it's built before any o file is built. - - This class helps with some of these tasks, but it needs help from the build - system for writing dependencies to the gch files, for writing build commands - for the gch files, and for figuring out the location of the gch files. - """ - - def __init__( - self, xcode_settings, gyp_path_to_build_path, gyp_path_to_build_output - ): - """If xcode_settings is None, all methods on this class are no-ops. - - Args: - gyp_path_to_build_path: A function that takes a gyp-relative path, - and returns a path relative to the build directory. - gyp_path_to_build_output: A function that takes a gyp-relative path and - a language code ('c', 'cc', 'm', or 'mm'), and that returns a path - to where the output of precompiling that path for that language - should be placed (without the trailing '.gch'). - """ - # This doesn't support per-configuration prefix headers. Good enough - # for now. - self.header = None - self.compile_headers = False - if xcode_settings: - self.header = xcode_settings.GetPerTargetSetting("GCC_PREFIX_HEADER") - self.compile_headers = ( - xcode_settings.GetPerTargetSetting( - "GCC_PRECOMPILE_PREFIX_HEADER", default="NO" - ) - != "NO" - ) - self.compiled_headers = {} - if self.header: - if self.compile_headers: - for lang in ["c", "cc", "m", "mm"]: - self.compiled_headers[lang] = gyp_path_to_build_output( - self.header, lang - ) - self.header = gyp_path_to_build_path(self.header) - - def _CompiledHeader(self, lang, arch): - assert self.compile_headers - h = self.compiled_headers[lang] - if arch: - h += "." 
+ arch - return h - - def GetInclude(self, lang, arch=None): - """Gets the cflags to include the prefix header for language |lang|.""" - if self.compile_headers and lang in self.compiled_headers: - return "-include %s" % self._CompiledHeader(lang, arch) - elif self.header: - return "-include %s" % self.header - else: - return "" - - def _Gch(self, lang, arch): - """Returns the actual file name of the prefix header for language |lang|.""" - assert self.compile_headers - return self._CompiledHeader(lang, arch) + ".gch" - - def GetObjDependencies(self, sources, objs, arch=None): - """Given a list of source files and the corresponding object files, returns - a list of (source, object, gch) tuples, where |gch| is the build-directory - relative path to the gch file each object file depends on. |sources[i]| - has to be the source file belonging to |objs[i]|.""" - if not self.header or not self.compile_headers: - return [] - - result = [] - for source, obj in zip(sources, objs): - ext = os.path.splitext(source)[1] - lang = { - ".c": "c", - ".cpp": "cc", - ".cc": "cc", - ".cxx": "cc", - ".m": "m", - ".mm": "mm", - }.get(ext, None) - if lang: - result.append((source, obj, self._Gch(lang, arch))) - return result - - def GetPchBuildCommands(self, arch=None): - """Returns [(path_to_gch, language_flag, language, header)]. - |path_to_gch| and |header| are relative to the build directory. - """ - if not self.header or not self.compile_headers: - return [] - return [ - (self._Gch("c", arch), "-x c-header", "c", self.header), - (self._Gch("cc", arch), "-x c++-header", "cc", self.header), - (self._Gch("m", arch), "-x objective-c-header", "m", self.header), - (self._Gch("mm", arch), "-x objective-c++-header", "mm", self.header), - ] - - -def XcodeVersion(): - """Returns a tuple of version and build version of installed Xcode.""" - # `xcodebuild -version` output looks like - # Xcode 4.6.3 - # Build version 4H1503 - # or like - # Xcode 3.2.6 - # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0 - # BuildVersion: 10M2518 - # Convert that to ('0463', '4H1503') or ('0326', '10M2518'). - global XCODE_VERSION_CACHE - if XCODE_VERSION_CACHE: - return XCODE_VERSION_CACHE - version = "" - build = "" - try: - version_list = GetStdoutQuiet(["xcodebuild", "-version"]).splitlines() - # In some circumstances xcodebuild exits 0 but doesn't return - # the right results; for example, a user on 10.7 or 10.8 with - # a bogus path set via xcode-select. - # In that case this may be a CLT-only install so fall back to - # checking that version.
- if len(version_list) < 2: - raise GypError("xcodebuild returned unexpected results") - version = version_list[0].split()[-1] # Last word on first line - build = version_list[-1].split()[-1] # Last word on last line - except GypError: # Xcode not installed so look for Xcode Command Line Tools - version = CLTVersion() # macOS Catalina returns 11.0.0.0.1.1567737322 - if not version: - raise GypError("No Xcode or CLT version detected!") - # Be careful to convert "4.2.3" to "0423" and "11.0.0" to "1100": - version = version.split(".")[:3] # Just major, minor, micro - version[0] = version[0].zfill(2) # Add a leading zero if major is one digit - version = ("".join(version) + "00")[:4] # Limit to exactly four characters - XCODE_VERSION_CACHE = (version, build) - return XCODE_VERSION_CACHE - - -# This function is ported from the logic in Homebrew's CLT version check -def CLTVersion(): - """Returns the version of command-line tools from pkgutil.""" - # pkgutil output looks like - # package-id: com.apple.pkg.CLTools_Executables - # version: 5.0.1.0.1.1382131676 - # volume: / - # location: / - # install-time: 1382544035 - # groups: com.apple.FindSystemFiles.pkg-group - # com.apple.DevToolsBoth.pkg-group - # com.apple.DevToolsNonRelocatableShared.pkg-group - STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo" - FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI" - MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables" - - regex = re.compile("version: (?P<version>.+)") - for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]: - try: - output = GetStdout(["/usr/sbin/pkgutil", "--pkg-info", key]) - return re.search(regex, output).groupdict()["version"] - except GypError: - continue - - regex = re.compile(r'Command Line Tools for Xcode\s+(?P<version>\S+)') - try: - output = GetStdout(["/usr/sbin/softwareupdate", "--history"]) - return re.search(regex, output).groupdict()["version"] - except GypError: - return None - - -def GetStdoutQuiet(cmdlist): - """Returns the content of standard output returned by invoking |cmdlist|. - Ignores stderr. - Raises |GypError| if the command returns with a non-zero return code.""" - job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out = job.communicate()[0] - if PY3: - out = out.decode("utf-8") - if job.returncode != 0: - raise GypError("Error %d running %s" % (job.returncode, cmdlist[0])) - return out.rstrip("\n") - - -def GetStdout(cmdlist): - """Returns the content of standard output returned by invoking |cmdlist|. - Raises |GypError| if the command returns with a non-zero return code.""" - job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE) - out = job.communicate()[0] - if PY3: - out = out.decode("utf-8") - if job.returncode != 0: - sys.stderr.write(out + "\n") - raise GypError("Error %d running %s" % (job.returncode, cmdlist[0])) - return out.rstrip("\n") - - -def MergeGlobalXcodeSettingsToSpec(global_dict, spec): - """Merges the global xcode_settings dictionary into each configuration of the - target represented by spec. For keys that are both in the global and the local - xcode_settings dict, the local key gets precedence. - """ - # The xcode generator special-cases global xcode_settings and does something - # that amounts to merging in the global xcode_settings into each local - # xcode_settings dict.
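- # A minimal sketch of the merge with hypothetical values: a global - # {"SDKROOT": "macosx", "ARCHS": "x86_64"} merged into a config that sets - # {"ARCHS": "arm64"} yields {"SDKROOT": "macosx", "ARCHS": "arm64"}; the - # copy()-then-update() below is what lets the local value win.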
- global_xcode_settings = global_dict.get("xcode_settings", {}) - for config in spec["configurations"].values(): - if "xcode_settings" in config: - new_settings = global_xcode_settings.copy() - new_settings.update(config["xcode_settings"]) - config["xcode_settings"] = new_settings - - -def IsMacBundle(flavor, spec): - """Returns whether |spec| should be treated as a bundle. - - Bundles are directories with a certain subdirectory structure, instead of - just a single file. Bundle rules do not only produce a binary but also package - resources into that directory.""" - is_mac_bundle = ( - int(spec.get("mac_xctest_bundle", 0)) != 0 - or int(spec.get("mac_xcuitest_bundle", 0)) != 0 - or (int(spec.get("mac_bundle", 0)) != 0 and flavor == "mac") - ) - - if is_mac_bundle: - assert spec["type"] != "none", ( - 'mac_bundle targets cannot have type none (target "%s")' - % spec["target_name"] - ) - return is_mac_bundle - - -def GetMacBundleResources(product_dir, xcode_settings, resources): - """Yields (output, resource) pairs for every resource in |resources|. - Only call this for mac bundle targets. - - Args: - product_dir: Path to the directory containing the output bundle, - relative to the build directory. - xcode_settings: The XcodeSettings of the current target. - resources: A list of bundle resources, relative to the build directory. - """ - dest = os.path.join(product_dir, xcode_settings.GetBundleResourceFolder()) - for res in resources: - output = dest - - # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangeable. - assert " " not in res, "Spaces in resource filenames not supported (%s)" % res - - # Split into (path,file). - res_parts = os.path.split(res) - - # Now split the path into (prefix,maybe.lproj). - lproj_parts = os.path.split(res_parts[0]) - # If the resource lives in a .lproj bundle, add that to the destination. - if lproj_parts[1].endswith(".lproj"): - output = os.path.join(output, lproj_parts[1]) - - output = os.path.join(output, res_parts[1]) - # Compiled XIB files are referred to by .nib. - if output.endswith(".xib"): - output = os.path.splitext(output)[0] + ".nib" - # Compiled storyboard files are referred to by .storyboardc. - if output.endswith(".storyboard"): - output = os.path.splitext(output)[0] + ".storyboardc" - - yield output, res - - -def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path): - """Returns (info_plist, dest_plist, defines, extra_env), where: - * |info_plist| is the source plist path, relative to the - build directory, - * |dest_plist| is the destination plist path, relative to the - build directory, - * |defines| is a list of preprocessor defines (empty if the plist - shouldn't be preprocessed), - * |extra_env| is a dict of env variables that should be exported when - invoking |mac_tool copy-info-plist|. - - Only call this for mac bundle targets. - - Args: - product_dir: Path to the directory containing the output bundle, - relative to the build directory. - xcode_settings: The XcodeSettings of the current target. - gyp_path_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build directory. - """ - info_plist = xcode_settings.GetPerTargetSetting("INFOPLIST_FILE") - if not info_plist: - return None, None, [], {} - - # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangeable.
- assert " " not in info_plist, ( - "Spaces in Info.plist filenames not supported (%s)" % info_plist - ) - - info_plist = gyp_path_to_build_path(info_plist) - - # If explicitly set to preprocess the plist, invoke the C preprocessor and - # specify any defines as -D flags. - if ( - xcode_settings.GetPerTargetSetting("INFOPLIST_PREPROCESS", default="NO") - == "YES" - ): - # Create an intermediate file based on the path. - defines = shlex.split( - xcode_settings.GetPerTargetSetting( - "INFOPLIST_PREPROCESSOR_DEFINITIONS", default="" - ) - ) - else: - defines = [] - - dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath()) - extra_env = xcode_settings.GetPerTargetSettings() - - return info_plist, dest_plist, defines, extra_env - - -def _GetXcodeEnv( - xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None -): - """Return the environment variables that Xcode would set. See - http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 - for a full list. - - Args: - xcode_settings: An XcodeSettings object. If this is None, this function - returns an empty dict. - built_products_dir: Absolute path to the built products dir. - srcroot: Absolute path to the source root. - configuration: The build configuration name. - additional_settings: An optional dict with more values to add to the - result. - """ - - if not xcode_settings: - return {} - - # This function is considered a friend of XcodeSettings, so let it reach into - # its implementation details. - spec = xcode_settings.spec - - # These are filled in on an as-needed basis. - env = { - "BUILT_FRAMEWORKS_DIR": built_products_dir, - "BUILT_PRODUCTS_DIR": built_products_dir, - "CONFIGURATION": configuration, - "PRODUCT_NAME": xcode_settings.GetProductName(), - # For FULL_PRODUCT_NAME see: - # /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec # noqa: E501 - "SRCROOT": srcroot, - "SOURCE_ROOT": "${SRCROOT}", - # This is not true for static libraries, but currently the env is only - # written for bundles: - "TARGET_BUILD_DIR": built_products_dir, - "TEMP_DIR": "${TMPDIR}", - "XCODE_VERSION_ACTUAL": XcodeVersion()[0], - } - if xcode_settings.GetPerConfigSetting("SDKROOT", configuration): - env["SDKROOT"] = xcode_settings._SdkPath(configuration) - else: - env["SDKROOT"] = "" - - if xcode_settings.mac_toolchain_dir: - env["DEVELOPER_DIR"] = xcode_settings.mac_toolchain_dir - - if spec["type"] in ( - "executable", - "static_library", - "shared_library", - "loadable_module", - ): - env["EXECUTABLE_NAME"] = xcode_settings.GetExecutableName() - env["EXECUTABLE_PATH"] = xcode_settings.GetExecutablePath() - env["FULL_PRODUCT_NAME"] = xcode_settings.GetFullProductName() - mach_o_type = xcode_settings.GetMachOType() - if mach_o_type: - env["MACH_O_TYPE"] = mach_o_type - env["PRODUCT_TYPE"] = xcode_settings.GetProductType() - if xcode_settings._IsBundle(): - # xcodeproj_file.py sets the same Xcode subfolder value for this as for - # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value. 
- env["BUILT_FRAMEWORKS_DIR"] = os.path.join( - built_products_dir + os.sep + xcode_settings.GetBundleFrameworksFolderPath() - ) - env["CONTENTS_FOLDER_PATH"] = xcode_settings.GetBundleContentsFolderPath() - env["EXECUTABLE_FOLDER_PATH"] = xcode_settings.GetBundleExecutableFolderPath() - env[ - "UNLOCALIZED_RESOURCES_FOLDER_PATH" - ] = xcode_settings.GetBundleResourceFolder() - env["JAVA_FOLDER_PATH"] = xcode_settings.GetBundleJavaFolderPath() - env["FRAMEWORKS_FOLDER_PATH"] = xcode_settings.GetBundleFrameworksFolderPath() - env[ - "SHARED_FRAMEWORKS_FOLDER_PATH" - ] = xcode_settings.GetBundleSharedFrameworksFolderPath() - env[ - "SHARED_SUPPORT_FOLDER_PATH" - ] = xcode_settings.GetBundleSharedSupportFolderPath() - env["PLUGINS_FOLDER_PATH"] = xcode_settings.GetBundlePlugInsFolderPath() - env["XPCSERVICES_FOLDER_PATH"] = xcode_settings.GetBundleXPCServicesFolderPath() - env["INFOPLIST_PATH"] = xcode_settings.GetBundlePlistPath() - env["WRAPPER_NAME"] = xcode_settings.GetWrapperName() - - install_name = xcode_settings.GetInstallName() - if install_name: - env["LD_DYLIB_INSTALL_NAME"] = install_name - install_name_base = xcode_settings.GetInstallNameBase() - if install_name_base: - env["DYLIB_INSTALL_NAME_BASE"] = install_name_base - xcode_version, _ = XcodeVersion() - if xcode_version >= "0500" and not env.get("SDKROOT"): - sdk_root = xcode_settings._SdkRoot(configuration) - if not sdk_root: - sdk_root = xcode_settings._XcodeSdkPath("") - if sdk_root is None: - sdk_root = "" - env["SDKROOT"] = sdk_root - - if not additional_settings: - additional_settings = {} - else: - # Flatten lists to strings. - for k in additional_settings: - if not isinstance(additional_settings[k], str): - additional_settings[k] = " ".join(additional_settings[k]) - additional_settings.update(env) - - for k in additional_settings: - additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k]) - - return additional_settings - - -def _NormalizeEnvVarReferences(str): - """Takes a string containing variable references in the form ${FOO}, $(FOO), - or $FOO, and returns a string with all variable references in the form ${FOO}. - """ - # $FOO -> ${FOO} - str = re.sub(r"\$([a-zA-Z_][a-zA-Z0-9_]*)", r"${\1}", str) - - # $(FOO) -> ${FOO} - matches = re.findall(r"(\$\(([a-zA-Z0-9\-_]+)\))", str) - for match in matches: - to_replace, variable = match - assert "$(" not in match, "$($(FOO)) variables not supported: " + match - str = str.replace(to_replace, "${" + variable + "}") - - return str - - -def ExpandEnvVars(string, expansions): - """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the - expansions list. If the variable expands to something that references - another variable, this variable is expanded as well if it's in env -- - until no variables present in env are left.""" - for k, v in reversed(expansions): - string = string.replace("${" + k + "}", v) - string = string.replace("$(" + k + ")", v) - string = string.replace("$" + k, v) - return string - - -def _TopologicallySortedEnvVarKeys(env): - """Takes a dict |env| whose values are strings that can refer to other keys, - for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of - env such that key2 is after key1 in L if env[key2] refers to env[key1]. - - Throws an Exception in case of dependency cycles. - """ - # Since environment variables can refer to other variables, the evaluation - # order is important. Below is the logic to compute the dependency graph - # and sort it. 
- regex = re.compile(r"\$\{([a-zA-Z0-9\-_]+)\}") - - def GetEdges(node): - # Use a definition of edges such that user_of_variable -> used_variable. - # This happens to be easier in this case, since a variable's - # definition contains all variables it references in a single string. - # We can then reverse the result of the topological sort at the end. - # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) - matches = set([v for v in regex.findall(env[node]) if v in env]) - for dependee in matches: - assert "${" not in dependee, "Nested variables not supported: " + dependee - return matches - - try: - # Topologically sort, and then reverse, because we used an edge definition - # that's inverted from the expected result of this function (see comment - # above). - order = gyp.common.TopologicallySorted(env.keys(), GetEdges) - order.reverse() - return order - except gyp.common.CycleError as e: - raise GypError( - "Xcode environment variables are cyclically dependent: " + str(e.nodes) - ) - - -def GetSortedXcodeEnv( - xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None -): - env = _GetXcodeEnv( - xcode_settings, built_products_dir, srcroot, configuration, additional_settings - ) - return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)] - - -def GetSpecPostbuildCommands(spec, quiet=False): - """Returns the list of postbuilds explicitly defined on |spec|, in a form - executable by a shell.""" - postbuilds = [] - for postbuild in spec.get("postbuilds", []): - if not quiet: - postbuilds.append( - "echo POSTBUILD\\(%s\\) %s" - % (spec["target_name"], postbuild["postbuild_name"]) - ) - postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild["action"])) - return postbuilds - - -def _HasIOSTarget(targets): - """Returns true if any target contains the iOS specific key - IPHONEOS_DEPLOYMENT_TARGET.""" - for target_dict in targets.values(): - for config in target_dict["configurations"].values(): - if config.get("xcode_settings", {}).get("IPHONEOS_DEPLOYMENT_TARGET"): - return True - return False - - -def _AddIOSDeviceConfigurations(targets): - """Clone all targets and append -iphoneos to the name. Configure these targets - to build for iOS devices and use correct architectures for those builds.""" - for target_dict in targets.values(): - toolset = target_dict["toolset"] - configs = target_dict["configurations"] - for config_name, simulator_config_dict in dict(configs).items(): - iphoneos_config_dict = copy.deepcopy(simulator_config_dict) - configs[config_name + "-iphoneos"] = iphoneos_config_dict - configs[config_name + "-iphonesimulator"] = simulator_config_dict - if toolset == "target": - simulator_config_dict["xcode_settings"]["SDKROOT"] = "iphonesimulator" - iphoneos_config_dict["xcode_settings"]["SDKROOT"] = "iphoneos" - return targets - - -def CloneConfigurationForDeviceAndEmulator(target_dicts): - """If |target_dicts| contains any iOS targets, automatically create -iphoneos - targets for iOS device builds.""" - if _HasIOSTarget(target_dicts): - return _AddIOSDeviceConfigurations(target_dicts) - return target_dicts diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py deleted file mode 100644 index 10ddcbccd0310..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py +++ /dev/null @@ -1,302 +0,0 @@ -# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Xcode-ninja wrapper project file generator. - -This updates the data structures passed to the Xcode gyp generator to build -with ninja instead. The Xcode project itself is transformed into a list of -executable targets, each with a build step to build with ninja, and a target -with every source and resource file. This appears to sidestep some of the -major performance headaches experienced using complex projects and a large number -of targets within Xcode. -""" - -import errno -import gyp.generator.ninja -import os -import re -import xml.sax.saxutils - - -def _WriteWorkspace(main_gyp, sources_gyp, params): - """ Create a workspace to wrap main and sources gyp paths. """ - (build_file_root, build_file_ext) = os.path.splitext(main_gyp) - workspace_path = build_file_root + ".xcworkspace" - options = params["options"] - if options.generator_output: - workspace_path = os.path.join(options.generator_output, workspace_path) - try: - os.makedirs(workspace_path) - except OSError as e: - if e.errno != errno.EEXIST: - raise - output_string = ( - '<?xml version="1.0" encoding="UTF-8"?>\n' + '<Workspace version = "1.0">\n' - ) - for gyp_name in [main_gyp, sources_gyp]: - name = os.path.splitext(os.path.basename(gyp_name))[0] + ".xcodeproj" - name = xml.sax.saxutils.quoteattr("group:" + name) - output_string += "  <FileRef location = %s></FileRef>\n" % name - output_string += "</Workspace>\n" - - workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata") - - try: - with open(workspace_file, "r") as input_file: - input_string = input_file.read() - if input_string == output_string: - return - except IOError: - # Ignore errors if the file doesn't exist. - pass - - with open(workspace_file, "w") as output_file: - output_file.write(output_string) - - -def _TargetFromSpec(old_spec, params): - """ Create fake target for xcode-ninja wrapper. """ - # Determine ninja top level build dir (e.g. /path/to/out). - ninja_toplevel = None - jobs = 0 - if params: - options = params["options"] - ninja_toplevel = os.path.join( - options.toplevel_dir, gyp.generator.ninja.ComputeOutputDir(params) - ) - jobs = params.get("generator_flags", {}).get("xcode_ninja_jobs", 0) - - target_name = old_spec.get("target_name") - product_name = old_spec.get("product_name", target_name) - product_extension = old_spec.get("product_extension") - - ninja_target = {} - ninja_target["target_name"] = target_name - ninja_target["product_name"] = product_name - if product_extension: - ninja_target["product_extension"] = product_extension - ninja_target["toolset"] = old_spec.get("toolset") - ninja_target["default_configuration"] = old_spec.get("default_configuration") - ninja_target["configurations"] = {} - - # Tell Xcode to look in |ninja_toplevel| for build products.
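- # For example (hypothetical path), with a ninja_toplevel of /path/to/out, - # Xcode expands /path/to/out/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME) to - # /path/to/out/Debug for a macOS Debug build and to - # /path/to/out/Debug-iphoneos for an iOS device build.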
- new_xcode_settings = {} - if ninja_toplevel: - new_xcode_settings["CONFIGURATION_BUILD_DIR"] = ( - "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel - ) - - if "configurations" in old_spec: - for config in old_spec["configurations"]: - old_xcode_settings = old_spec["configurations"][config].get( - "xcode_settings", {} - ) - if "IPHONEOS_DEPLOYMENT_TARGET" in old_xcode_settings: - new_xcode_settings["CODE_SIGNING_REQUIRED"] = "NO" - new_xcode_settings["IPHONEOS_DEPLOYMENT_TARGET"] = old_xcode_settings[ - "IPHONEOS_DEPLOYMENT_TARGET" - ] - for key in ["BUNDLE_LOADER", "TEST_HOST"]: - if key in old_xcode_settings: - new_xcode_settings[key] = old_xcode_settings[key] - - ninja_target["configurations"][config] = {} - ninja_target["configurations"][config][ - "xcode_settings" - ] = new_xcode_settings - - ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0) - ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0) - ninja_target["ios_app_extension"] = old_spec.get("ios_app_extension", 0) - ninja_target["ios_watchkit_extension"] = old_spec.get("ios_watchkit_extension", 0) - ninja_target["ios_watchkit_app"] = old_spec.get("ios_watchkit_app", 0) - ninja_target["type"] = old_spec["type"] - if ninja_toplevel: - ninja_target["actions"] = [ - { - "action_name": "Compile and copy %s via ninja" % target_name, - "inputs": [], - "outputs": [], - "action": [ - "env", - "PATH=%s" % os.environ["PATH"], - "ninja", - "-C", - new_xcode_settings["CONFIGURATION_BUILD_DIR"], - target_name, - ], - "message": "Compile and copy %s via ninja" % target_name, - }, - ] - if jobs > 0: - ninja_target["actions"][0]["action"].extend(("-j", jobs)) - return ninja_target - - -def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec): - """Limit targets for Xcode wrapper. - - Xcode sometimes performs poorly with too many targets, so only include - proper executable targets, with filters to customize. - Arguments: - target_extras: Regular expression to always add, matching any target. - executable_target_pattern: Regular expression limiting executable targets. - spec: Specifications for target. - """ - target_name = spec.get("target_name") - # Always include targets matching target_extras. - if target_extras is not None and re.search(target_extras, target_name): - return True - - # Otherwise just show executable targets and xc_tests. - if int(spec.get("mac_xctest_bundle", 0)) != 0 or ( - spec.get("type", "") == "executable" - and spec.get("product_extension", "") != "bundle" - ): - - # If there is a filter and the target does not match, exclude the target. - if executable_target_pattern is not None: - if not re.search(executable_target_pattern, target_name): - return False - return True - return False - - -def CreateWrapper(target_list, target_dicts, data, params): - """Initialize targets for the ninja wrapper. - - This sets up the necessary variables in the targets to generate Xcode projects - that use ninja as an external builder. - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - data: Dict of flattened build files keyed on gyp path. - params: Dict of global options for gyp. - """ - orig_gyp = params["build_files"][0] - for gyp_name, gyp_dict in data.items(): - if gyp_name == orig_gyp: - depth = gyp_dict["_DEPTH"] - - # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE - # and prepend .ninja before the .gyp extension. 
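- # e.g. a hypothetical orig_gyp of build/all.gyp becomes build/all.ninja.gyp - # here, unless the xcode_ninja_main_gyp generator flag overrides it.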
- generator_flags = params.get("generator_flags", {}) - main_gyp = generator_flags.get("xcode_ninja_main_gyp", None) - if main_gyp is None: - (build_file_root, build_file_ext) = os.path.splitext(orig_gyp) - main_gyp = build_file_root + ".ninja" + build_file_ext - - # Create new |target_list|, |target_dicts| and |data| data structures. - new_target_list = [] - new_target_dicts = {} - new_data = {} - - # Set base keys needed for |data|. - new_data[main_gyp] = {} - new_data[main_gyp]["included_files"] = [] - new_data[main_gyp]["targets"] = [] - new_data[main_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {}) - - # Normally the xcode-ninja generator includes only valid executable targets. - # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to - # executable targets that match the pattern. (Default all) - executable_target_pattern = generator_flags.get( - "xcode_ninja_executable_target_pattern", None - ) - - # For including other non-executable targets, add the matching target name - # to the |xcode_ninja_target_pattern| regular expression. (Default none) - target_extras = generator_flags.get("xcode_ninja_target_pattern", None) - - for old_qualified_target in target_list: - spec = target_dicts[old_qualified_target] - if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec): - # Add to new_target_list. - target_name = spec.get("target_name") - new_target_name = "%s:%s#target" % (main_gyp, target_name) - new_target_list.append(new_target_name) - - # Add to new_target_dicts. - new_target_dicts[new_target_name] = _TargetFromSpec(spec, params) - - # Add to new_data. - for old_target in data[old_qualified_target.split(":")[0]]["targets"]: - if old_target["target_name"] == target_name: - new_data_target = {} - new_data_target["target_name"] = old_target["target_name"] - new_data_target["toolset"] = old_target["toolset"] - new_data[main_gyp]["targets"].append(new_data_target) - - # Create sources target. - sources_target_name = "sources_for_indexing" - sources_target = _TargetFromSpec( - { - "target_name": sources_target_name, - "toolset": "target", - "default_configuration": "Default", - "mac_bundle": "0", - "type": "executable", - }, - None, - ) - - # Tell Xcode to look everywhere for headers. - sources_target["configurations"] = {"Default": {"include_dirs": [depth]}} - - # Put excluded files into the sources target so they can be opened in Xcode. - skip_excluded_files = not generator_flags.get( - "xcode_ninja_list_excluded_files", True - ) - - sources = [] - for target, target_dict in target_dicts.items(): - base = os.path.dirname(target) - files = target_dict.get("sources", []) + target_dict.get( - "mac_bundle_resources", [] - ) - - if not skip_excluded_files: - files.extend( - target_dict.get("sources_excluded", []) - + target_dict.get("mac_bundle_resources_excluded", []) - ) - - for action in target_dict.get("actions", []): - files.extend(action.get("inputs", [])) - - if not skip_excluded_files: - files.extend(action.get("inputs_excluded", [])) - - # Remove files starting with $. These are mostly intermediate files for the - # build system. - files = [file for file in files if not file.startswith("$")] - - # Make sources relative to root build file. - relative_path = os.path.dirname(main_gyp) - sources += [ - os.path.relpath(os.path.join(base, file), relative_path) for file in files - ] - - sources_target["sources"] = sorted(set(sources)) - - # Put sources_to_index in its own gyp.
- sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp") - fully_qualified_target_name = "%s:%s#target" % (sources_gyp, sources_target_name) - - # Add to new_target_list, new_target_dicts and new_data. - new_target_list.append(fully_qualified_target_name) - new_target_dicts[fully_qualified_target_name] = sources_target - new_data_target = {} - new_data_target["target_name"] = sources_target["target_name"] - new_data_target["_DEPTH"] = depth - new_data_target["toolset"] = "target" - new_data[sources_gyp] = {} - new_data[sources_gyp]["targets"] = [] - new_data[sources_gyp]["included_files"] = [] - new_data[sources_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {}) - new_data[sources_gyp]["targets"].append(new_data_target) - - # Write workspace to file. - _WriteWorkspace(main_gyp, sources_gyp, params) - return (new_target_list, new_target_dicts, new_data) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py deleted file mode 100644 index d90dd99dccecf..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py +++ /dev/null @@ -1,3198 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Xcode project file generator. - -This module is both an Xcode project file generator and a documentation of the -Xcode project file format. Knowledge of the project file format was gained -based on extensive experience with Xcode, and by making changes to projects in -Xcode.app and observing the resultant changes in the associated project files. - -XCODE PROJECT FILES - -The generator targets the file format as written by Xcode 3.2 (specifically, -3.2.6), but past experience has taught that the format has not changed -significantly in the past several years, and future versions of Xcode are able -to read older project files. - -Xcode project files are "bundled": the project "file" from an end-user's -perspective is actually a directory with an ".xcodeproj" extension. The -project file from this module's perspective is actually a file inside this -directory, always named "project.pbxproj". This file contains a complete -description of the project and is all that is needed to use the xcodeproj. -Other files contained in the xcodeproj directory are simply used to store -per-user settings, such as the state of various UI elements in the Xcode -application. - -The project.pbxproj file is a property list, stored in a format almost -identical to the NeXTstep property list format. The file is able to carry -Unicode data, and is encoded in UTF-8. The root element in the property list -is a dictionary that contains several properties of minimal interest, and two -properties of immense interest. The most important property is a dictionary -named "objects". The entire structure of the project is represented by the -children of this property. The objects dictionary is keyed by unique 96-bit -values represented by 24 uppercase hexadecimal characters. Each value in the -objects dictionary is itself a dictionary, describing an individual object. - -Each object in the dictionary is a member of a class, which is identified by -the "isa" property of each object. A variety of classes are represented in a -project file. Objects can refer to other objects by ID, using the 24-character -hexadecimal object key. 
A project's objects form a tree, with a root object -of class PBXProject at the root. As an example, the PBXProject object serves -as parent to an XCConfigurationList object defining the build configurations -used in the project, a PBXGroup object serving as a container for all files -referenced in the project, and a list of target objects, each of which defines -a target in the project. There are several different types of target object, -such as PBXNativeTarget and PBXAggregateTarget. In this module, this -relationship is expressed by having each target type derive from an abstract -base named XCTarget. - -The project.pbxproj file's root dictionary also contains a property, sibling to -the "objects" dictionary, named "rootObject". The value of rootObject is a -24-character object key referring to the root PBXProject object in the -objects dictionary. - -In Xcode, every file used as input to a target or produced as a final product -of a target must appear somewhere in the hierarchy rooted at the PBXGroup -object referenced by the PBXProject's mainGroup property. A PBXGroup is -generally represented as a folder in the Xcode application. PBXGroups can -contain other PBXGroups as well as PBXFileReferences, which are pointers to -actual files. - -Each XCTarget contains a list of build phases, represented in this module by -the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations -are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the -"Compile Sources" and "Link Binary With Libraries" phases displayed in the -Xcode application. Files used as input to these phases (for example, source -files in the former case and libraries and frameworks in the latter) are -represented by PBXBuildFile objects, referenced by elements of "files" lists -in XCTarget objects. Each PBXBuildFile object refers to a PBXFileReference -object as a "weak" reference: it does not "own" the PBXFileReference, which is -owned by the root object's mainGroup or a descendant group. In most cases, the -layer of indirection between an XCBuildPhase and a PBXFileReference via a -PBXBuildFile appears extraneous, but there's actually one reason for this: -file-specific compiler flags are added to the PBXBuildFile object so as to -allow a single file to be a member of multiple targets while having distinct -compiler flags for each. These flags can be modified in the Xcode application -in the "Build" tab of a File Info window. - -When a project is open in the Xcode application, Xcode will rewrite it. As -such, this module is careful to adhere to the formatting used by Xcode, to -avoid insignificant changes appearing in the file when it is used in the -Xcode application. This will keep version control repositories happy, and -makes it possible to compare a project file used in Xcode to one generated by -this module to determine if any significant changes were made in the -application. - -Xcode has its own way of assigning 24-character identifiers to each object, -which is not duplicated here. Because the identifier is only generated -once, when an object is created, and is then left unchanged, there is no need -to attempt to duplicate Xcode's behavior in this area. The generator is free -to select any identifier, even at random, to refer to the objects it creates, -and Xcode will retain those identifiers and use them when subsequently -rewriting the project file.
However, the generator would choose new random -identifiers each time the project files are generated, leading to difficulties -comparing "used" project files to "pristine" ones produced by this module, -and causing the appearance of changes as every object identifier is changed -when updated projects are checked in to a version control repository. To -mitigate this problem, this module chooses identifiers in a more deterministic -way, by hashing a description of each object as well as its parent and ancestor -objects. This strategy should result in minimal "shift" in IDs as successive -generations of project files are produced. - -THIS MODULE - -This module introduces several classes, all derived from the XCObject class. -Nearly all of the "brains" are built into the XCObject class, which understands -how to create and modify objects, maintain the proper tree structure, compute -identifiers, and print objects. For the most part, classes derived from -XCObject need only provide a _schema class object, a dictionary that -expresses what properties objects of the class may contain. - -Given this structure, it's possible to build a minimal project file by creating -objects of the appropriate types and making the proper connections: - - config_list = XCConfigurationList() - group = PBXGroup() - project = PBXProject({'buildConfigurationList': config_list, - 'mainGroup': group}) - -With the project object set up, it can be added to an XCProjectFile object. -XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject -subclass that does not actually correspond to a class type found in a project -file. Rather, it is used to represent the project file's root dictionary. -Printing an XCProjectFile will print the entire project file, including the -full "objects" dictionary. - - project_file = XCProjectFile({'rootObject': project}) - project_file.ComputeIDs() - project_file.Print() - -Xcode project files are always encoded in UTF-8. This module will accept -strings of either the str class or the unicode class. Strings of class str -are assumed to already be encoded in UTF-8. Obviously, if you're just using -ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset. -Strings of class unicode are handled properly and encoded in UTF-8 when -a project file is output. -""" - -import gyp.common -import hashlib -import posixpath -import re -import struct -import sys - -try: - basestring, cmp, unicode -except NameError: # Python 3 - basestring = unicode = str - - def cmp(x, y): - return (x > y) - (x < y) - - -# See XCObject._EncodeString. This pattern is used to determine when a string -# can be printed unquoted. Strings that match this pattern may be printed -# unquoted. Strings that do not match must be quoted and may be further -# transformed to be properly encoded. Note that this expression matches the -# characters listed with "+", for 1 or more occurrences: if a string is empty, -# it must not match this pattern, because it needs to be encoded as "". -_unquoted = re.compile("^[A-Za-z0-9$./_]+$") - -# Strings that match this pattern are quoted regardless of what _unquoted says. -# Oddly, Xcode will quote any string with a run of three or more underscores. -_quoted = re.compile("___") - -# This pattern should match any character that needs to be escaped by -# XCObject._EncodeString. See that function. 
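- # Illustrative (hypothetical) strings: Source/main.m matches _unquoted and is - # printed bare; "hello world" contains a space, so it is quoted; a___b - # matches _quoted and is quoted despite containing only otherwise-safe - # characters; and the empty string is always printed as "".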
-_escaped = re.compile('[\\\\"]|[\x00-\x1f]') - - -# Used by SourceTreeAndPathFromPath -_path_leading_variable = re.compile(r"^\$\((.*?)\)(/(.*))?$") - - -def SourceTreeAndPathFromPath(input_path): - """Given input_path, returns a tuple with sourceTree and path values. - - Examples: - input_path (source_tree, output_path) - '$(VAR)/path' ('VAR', 'path') - '$(VAR)' ('VAR', None) - 'path' (None, 'path') - """ - - source_group_match = _path_leading_variable.match(input_path) - if source_group_match: - source_tree = source_group_match.group(1) - output_path = source_group_match.group(3) # This may be None. - else: - source_tree = None - output_path = input_path - - return (source_tree, output_path) - - -def ConvertVariablesToShellSyntax(input_string): - return re.sub(r"\$\((.*?)\)", "${\\1}", input_string) - - -class XCObject(object): - """The abstract base of all class types used in Xcode project files. - - Class variables: - _schema: A dictionary defining the properties of this class. The keys to - _schema are string property keys as used in project files. Values - are a list of four or five elements: - [ is_list, property_type, is_strong, is_required, default ] - is_list: True if the property described is a list, as opposed - to a single element. - property_type: The type to use as the value of the property, - or if is_list is True, the type to use for each - element of the value's list. property_type must - be an XCObject subclass, or one of the built-in - types str, int, or dict. - is_strong: If property_type is an XCObject subclass, is_strong - is True to assert that this class "owns," or serves - as parent, to the property value (or, if is_list is - True, values). is_strong must be False if - property_type is not an XCObject subclass. - is_required: True if the property is required for the class. - Note that is_required being True does not preclude - an empty string ("", in the case of property_type - str) or list ([], in the case of is_list True) from - being set for the property. - default: Optional. If is_required is True, default may be set - to provide a default value for objects that do not supply - their own value. If is_required is True and default - is not provided, users of the class must supply their own - value for the property. - Note that although the values of the array are expressed in - boolean terms, subclasses provide values as integers to conserve - horizontal space. - _should_print_single_line: False in XCObject. Subclasses whose objects - should be written to the project file in the - alternate single-line format, such as - PBXFileReference and PBXBuildFile, should - set this to True. - _encode_transforms: Used by _EncodeString to encode unprintable characters. - The index into this list is the ordinal of the - character to transform; each value is a string - used to represent the character in the output. XCObject - provides an _encode_transforms list suitable for most - XCObject subclasses. - _alternate_encode_transforms: Provided for subclasses that wish to use - the alternate encoding rules. Xcode seems - to use these rules when printing objects in - single-line format. Subclasses that desire - this behavior should set _encode_transforms - to _alternate_encode_transforms. - _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs - to construct this object's ID. 
Most classes that need custom - hashing behavior should do it by overriding Hashables, - but in some cases an object's parent may wish to push a - hashable value into its child, and it can do so by appending - to _hashables. - Attributes: - id: The object's identifier, a 24-character uppercase hexadecimal string. - Usually, objects being created should not set id until the entire - project file structure is built. At that point, UpdateIDs() should - be called on the root object to assign deterministic values for id to - each object in the tree. - parent: The object's parent. This is set by a parent XCObject when a child - object is added to it. - _properties: The object's property dictionary. An object's properties are - described by its class' _schema variable. - """ - - _schema = {} - _should_print_single_line = False - - # See _EncodeString. - _encode_transforms = [] - i = 0 - while i < ord(" "): - _encode_transforms.append("\\U%04x" % i) - i = i + 1 - _encode_transforms[7] = "\\a" - _encode_transforms[8] = "\\b" - _encode_transforms[9] = "\\t" - _encode_transforms[10] = "\\n" - _encode_transforms[11] = "\\v" - _encode_transforms[12] = "\\f" - _encode_transforms[13] = "\\n" - - _alternate_encode_transforms = list(_encode_transforms) - _alternate_encode_transforms[9] = chr(9) - _alternate_encode_transforms[10] = chr(10) - _alternate_encode_transforms[11] = chr(11) - - def __init__(self, properties=None, id=None, parent=None): - self.id = id - self.parent = parent - self._properties = {} - self._hashables = [] - self._SetDefaultsFromSchema() - self.UpdateProperties(properties) - - def __repr__(self): - try: - name = self.Name() - except NotImplementedError: - return "<%s at 0x%x>" % (self.__class__.__name__, id(self)) - return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self)) - - def Copy(self): - """Make a copy of this object. - - The new object will have its own copy of lists and dicts. Any XCObject - objects owned by this object (marked "strong") will be copied in the - new object, even those found in lists. If this object has any weak - references to other XCObjects, the same references are added to the new - object without making a copy. - """ - - that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.items(): - is_strong = self._schema[key][2] - - if isinstance(value, XCObject): - if is_strong: - new_value = value.Copy() - new_value.parent = that - that._properties[key] = new_value - else: - that._properties[key] = value - elif isinstance(value, (basestring, int)): - that._properties[key] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe to - # call Copy. - that._properties[key] = [] - for item in value: - new_item = item.Copy() - new_item.parent = that - that._properties[key].append(new_item) - else: - that._properties[key] = value[:] - elif isinstance(value, dict): - # dicts are never strong. - if is_strong: - raise TypeError( - "Strong dict for key " + key + " in " + self.__class__.__name__ - ) - else: - that._properties[key] = value.copy() - else: - raise TypeError( - "Unexpected type " - + value.__class__.__name__ - + " for key " - + key - + " in " - + self.__class__.__name__ - ) - - return that - - def Name(self): - """Return the name corresponding to an object. - - Not all objects necessarily need to be nameable, and not all that do have - a "name" property. Override as needed. 
-        """
-
-        # If the schema indicates that "name" is required, try to access the
-        # property even if it doesn't exist.  This will result in a KeyError
-        # being raised for the property that should be present, which seems more
-        # appropriate than NotImplementedError in this case.
-        if "name" in self._properties or (
-            "name" in self._schema and self._schema["name"][3]
-        ):
-            return self._properties["name"]
-
-        raise NotImplementedError(self.__class__.__name__ + " must implement Name")
-
-    def Comment(self):
-        """Return a comment string for the object.
-
-        Most objects just use their name as the comment, but PBXProject uses
-        different values.
-
-        The returned comment is not escaped and does not have any comment marker
-        strings applied to it.
-        """
-
-        return self.Name()
-
-    def Hashables(self):
-        hashables = [self.__class__.__name__]
-
-        name = self.Name()
-        if name is not None:
-            hashables.append(name)
-
-        hashables.extend(self._hashables)
-
-        return hashables
-
-    def HashablesForChild(self):
-        return None
-
-    def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
-        """Set "id" properties deterministically.
-
-        An object's "id" property is set based on a hash of its class type and
-        name, as well as the class type and name of all ancestor objects.  As
-        such, it is only advisable to call ComputeIDs once an entire project file
-        tree is built.
-
-        If recursive is True, recurse into all descendant objects and update their
-        hashes.
-
-        If overwrite is True, any existing value set in the "id" property will be
-        replaced.
-        """
-
-        def _HashUpdate(hash, data):
-            """Update hash with data's length and contents.
-
-            If the hash were updated only with the value of data, it would be
-            possible for clowns to induce collisions by manipulating the names of
-            their objects.  By adding the length, it's far less likely that
-            ID collisions will be encountered, intentionally or not.
-            """
-
-            hash.update(struct.pack(">i", len(data)))
-            hash.update(data)
-
-        if seed_hash is None:
-            seed_hash = hashlib.sha1()
-
-        hash = seed_hash.copy()
-
-        hashables = self.Hashables()
-        assert len(hashables) > 0
-        for hashable in hashables:
-            _HashUpdate(hash, hashable)
-
-        if recursive:
-            hashables_for_child = self.HashablesForChild()
-            if hashables_for_child is None:
-                child_hash = hash
-            else:
-                assert len(hashables_for_child) > 0
-                child_hash = seed_hash.copy()
-                for hashable in hashables_for_child:
-                    _HashUpdate(child_hash, hashable)
-
-            for child in self.Children():
-                child.ComputeIDs(recursive, overwrite, child_hash)
-
-        if overwrite or self.id is None:
-            # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest
-            # is 160 bits.  Instead of throwing out 64 bits of the digest, xor them
-            # into the portion that gets used.
-            assert hash.digest_size % 4 == 0
-            digest_int_count = hash.digest_size // 4
-            digest_ints = struct.unpack(">" + "I" * digest_int_count, hash.digest())
-            id_ints = [0, 0, 0]
-            for index in range(0, digest_int_count):
-                id_ints[index % 3] ^= digest_ints[index]
-            self.id = "%08X%08X%08X" % tuple(id_ints)
-
-    def EnsureNoIDCollisions(self):
-        """Verifies that no two objects have the same ID.  Checks all descendants.
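Restated as a standalone function (a sketch, not the module's API), the ID derivation `ComputeIDs` applies above is: length-prefixed SHA-1 updates over the hashables, with the 160-bit digest XOR-folded into a 96-bit, 24-hex-digit ID:

```python
import hashlib
import struct

def compute_id(hashables):
    # Length-prefix each hashable so names can't be shifted between fields
    # to engineer collisions, then fold the 20-byte SHA-1 digest to 96 bits.
    sha = hashlib.sha1()
    for hashable in hashables:
        data = hashable.encode("utf-8")
        sha.update(struct.pack(">i", len(data)))
        sha.update(data)
    digest_ints = struct.unpack(">5I", sha.digest())  # 20 bytes -> 5 words
    id_ints = [0, 0, 0]
    for index, word in enumerate(digest_ints):
        id_ints[index % 3] ^= word
    return "%08X%08X%08X" % tuple(id_ints)

print(compute_id(["PBXFileReference", "main.cc"]))  # 24 uppercase hex digits
```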
-        """
-
-        ids = {}
-        descendants = self.Descendants()
-        for descendant in descendants:
-            if descendant.id in ids:
-                other = ids[descendant.id]
-                raise KeyError(
-                    'Duplicate ID %s, objects "%s" and "%s" in "%s"'
-                    % (
-                        descendant.id,
-                        str(descendant._properties),
-                        str(other._properties),
-                        self._properties["rootObject"].Name(),
-                    )
-                )
-            ids[descendant.id] = descendant
-
-    def Children(self):
-        """Returns a list of all of this object's owned (strong) children."""
-
-        children = []
-        for property, attributes in self._schema.items():
-            (is_list, property_type, is_strong) = attributes[0:3]
-            if is_strong and property in self._properties:
-                if not is_list:
-                    children.append(self._properties[property])
-                else:
-                    children.extend(self._properties[property])
-        return children
-
-    def Descendants(self):
-        """Returns a list of all of this object's descendants, including this
-        object.
-        """
-
-        children = self.Children()
-        descendants = [self]
-        for child in children:
-            descendants.extend(child.Descendants())
-        return descendants
-
-    def PBXProjectAncestor(self):
-        # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
-        if self.parent:
-            return self.parent.PBXProjectAncestor()
-        return None
-
-    def _EncodeComment(self, comment):
-        """Encodes a comment to be placed in the project file output, mimicking
-        Xcode behavior.
-        """
-
-        # This mimics Xcode behavior by wrapping the comment in "/*" and "*/".  If
-        # the string already contains a "*/", it is turned into "(*)/".  This keeps
-        # the file writer from outputting something that would be treated as the
-        # end of a comment in the middle of something intended to be entirely a
-        # comment.
-
-        return "/* " + comment.replace("*/", "(*)/") + " */"
-
-    def _EncodeTransform(self, match):
-        # This function works closely with _EncodeString.  It will only be called
-        # by re.sub with match.group(0) containing a character matched by the
-        # _escaped expression.
-        char = match.group(0)
-
-        # Backslashes (\) and quotation marks (") are always replaced with a
-        # backslash-escaped version of the same.  Everything else gets its
-        # replacement from the class' _encode_transforms array.
-        if char == "\\":
-            return "\\\\"
-        if char == '"':
-            return '\\"'
-        return self._encode_transforms[ord(char)]
-
-    def _EncodeString(self, value):
-        """Encodes a string to be placed in the project file output, mimicking
-        Xcode behavior.
-        """
-
-        # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
-        # $ (dollar sign), . (period), and _ (underscore) is present.  Also use
-        # quotation marks to represent empty strings.
-        #
-        # Escape " (double-quote) and \ (backslash) by preceding them with a
-        # backslash.
-        #
-        # Some characters below the printable ASCII range are encoded specially:
-        #     7 ^G BEL is encoded as "\a"
-        #     8 ^H BS  is encoded as "\b"
-        #    11 ^K VT  is encoded as "\v"
-        #    12 ^L NP  is encoded as "\f"
-        #   127 ^? DEL is passed through as-is without escaping
-        #  - In PBXFileReference and PBXBuildFile objects:
-        #     9 ^I HT  is passed through as-is without escaping
-        #    10 ^J NL  is passed through as-is without escaping
-        #    13 ^M CR  is passed through as-is without escaping
-        #  - In other objects:
-        #     9 ^I HT  is encoded as "\t"
-        #    10 ^J NL  is encoded as "\n"
-        #    13 ^M CR  is encoded as "\n" rendering it indistinguishable from
-        #              10 ^J NL
-        # All other characters within the ASCII control character range (0 through
-        # 31 inclusive) are encoded as "\U001f" referring to the Unicode code point
-        # in hexadecimal.
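The comment-escaping rule from `_EncodeComment` above is small enough to check in isolation. A standalone sketch; the lowercased name marks it as a stand-in:

```python
def encode_comment(comment):
    # "*/" inside the text would terminate the emitted "/* ... */" wrapper
    # early, so it is rewritten to "(*)/" first.
    return "/* " + comment.replace("*/", "(*)/") + " */"

print(encode_comment("main.cc in Sources"))  # /* main.cc in Sources */
print(encode_comment("tricky */ text"))      # /* tricky (*)/ text */
```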
-        # For example, character 14 (^N SO) is encoded as "\U000e".
-        # Characters above the ASCII range are passed through to the output encoded
-        # as UTF-8 without any escaping.  These mappings are contained in the
-        # class' _encode_transforms list.
-
-        if _unquoted.search(value) and not _quoted.search(value):
-            return value
-
-        return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
-
-    def _XCPrint(self, file, tabs, line):
-        file.write("\t" * tabs + line)
-
-    def _XCPrintableValue(self, tabs, value, flatten_list=False):
-        """Returns a representation of value that may be printed in a project file,
-        mimicking Xcode's behavior.
-
-        _XCPrintableValue can handle str and int values, XCObjects (which are
-        made printable by returning their id property), and list and dict objects
-        composed of any of the above types.  When printing a list or dict, and
-        _should_print_single_line is False, the tabs parameter is used to determine
-        how much to indent the lines corresponding to the items in the list or
-        dict.
-
-        If flatten_list is True, single-element lists will be transformed into
-        strings.
-        """
-
-        printable = ""
-        comment = None
-
-        if self._should_print_single_line:
-            sep = " "
-            element_tabs = ""
-            end_tabs = ""
-        else:
-            sep = "\n"
-            element_tabs = "\t" * (tabs + 1)
-            end_tabs = "\t" * tabs
-
-        if isinstance(value, XCObject):
-            printable += value.id
-            comment = value.Comment()
-        elif isinstance(value, str):
-            printable += self._EncodeString(value)
-        elif isinstance(value, basestring):
-            printable += self._EncodeString(value.encode("utf-8"))
-        elif isinstance(value, int):
-            printable += str(value)
-        elif isinstance(value, list):
-            if flatten_list and len(value) <= 1:
-                if len(value) == 0:
-                    printable += self._EncodeString("")
-                else:
-                    printable += self._EncodeString(value[0])
-            else:
-                printable = "(" + sep
-                for item in value:
-                    printable += (
-                        element_tabs
-                        + self._XCPrintableValue(tabs + 1, item, flatten_list)
-                        + ","
-                        + sep
-                    )
-                printable += end_tabs + ")"
-        elif isinstance(value, dict):
-            printable = "{" + sep
-            for item_key, item_value in sorted(value.items()):
-                printable += (
-                    element_tabs
-                    + self._XCPrintableValue(tabs + 1, item_key, flatten_list)
-                    + " = "
-                    + self._XCPrintableValue(tabs + 1, item_value, flatten_list)
-                    + ";"
-                    + sep
-                )
-            printable += end_tabs + "}"
-        else:
-            raise TypeError("Can't make " + value.__class__.__name__ + " printable")
-
-        if comment:
-            printable += " " + self._EncodeComment(comment)
-
-        return printable
-
-    def _XCKVPrint(self, file, tabs, key, value):
-        """Prints a key and value, members of an XCObject's _properties dictionary,
-        to file.
-
-        tabs is an int identifying the indentation level.  If the class'
-        _should_print_single_line variable is True, tabs is ignored and the
-        key-value pair will be followed by a space instead of a newline.
-        """
-
-        if self._should_print_single_line:
-            printable = ""
-            after_kv = " "
-        else:
-            printable = "\t" * tabs
-            after_kv = "\n"
-
-        # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
-        # objects without comments.  Sometimes it prints them with comments, but
-        # the majority of the time, it doesn't.  To avoid unnecessary changes to
-        # the project file after Xcode opens it, don't write comments for
-        # remoteGlobalIDString.
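A minimal sketch of the quoting rule `_EncodeString` documents above. The `_bare` pattern here is an assumption standing in for the module's `_unquoted`/`_quoted` pair (defined earlier in the file), and the control-character transform table is omitted:

```python
import re

# Stand-in (an assumption) for the module's _unquoted/_quoted pair: bare
# emission is allowed only for nonempty runs of the characters listed above.
_bare = re.compile(r"^[A-Za-z0-9$._]+$")

def encode_string(value):
    if _bare.match(value):
        return value
    # The real method also maps control characters through _encode_transforms.
    escaped = value.replace("\\", "\\\\").replace('"', '\\"')
    return '"' + escaped + '"'

print(encode_string("main.cc"))      # main.cc
print(encode_string("hello world"))  # "hello world"
print(encode_string(""))             # ""
```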
-        # This is a sucky hack and it would certainly be cleaner to extend the
-        # schema to indicate whether or not a comment should be printed, but since
-        # this is the only case where the problem occurs and Xcode itself can't
-        # seem to make up its mind, the hack will suffice.
-        #
-        # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
-        if key == "remoteGlobalIDString" and isinstance(self, PBXContainerItemProxy):
-            value_to_print = value.id
-        else:
-            value_to_print = value
-
-        # PBXBuildFile's settings property is represented in the output as a dict,
-        # but a hack here has it represented as a string. Arrange to strip off the
-        # quotes so that it shows up in the output as expected.
-        if key == "settings" and isinstance(self, PBXBuildFile):
-            strip_value_quotes = True
-        else:
-            strip_value_quotes = False
-
-        # In another one-off, let's set flatten_list on buildSettings properties
-        # of XCBuildConfiguration objects, because that's how Xcode treats them.
-        if key == "buildSettings" and isinstance(self, XCBuildConfiguration):
-            flatten_list = True
-        else:
-            flatten_list = False
-
-        try:
-            printable_key = self._XCPrintableValue(tabs, key, flatten_list)
-            printable_value = self._XCPrintableValue(tabs, value_to_print, flatten_list)
-            if (
-                strip_value_quotes
-                and len(printable_value) > 1
-                and printable_value[0] == '"'
-                and printable_value[-1] == '"'
-            ):
-                printable_value = printable_value[1:-1]
-            printable += printable_key + " = " + printable_value + ";" + after_kv
-        except TypeError as e:
-            gyp.common.ExceptionAppend(e, 'while printing key "%s"' % key)
-            raise
-
-        self._XCPrint(file, 0, printable)
-
-    def Print(self, file=sys.stdout):
-        """Prints a representation of this object to file, adhering to Xcode output
-        formatting.
-        """
-
-        self.VerifyHasRequiredProperties()
-
-        if self._should_print_single_line:
-            # When printing an object in a single line, Xcode doesn't put any space
-            # between the beginning of a dictionary (or presumably a list) and the
-            # first contained item, so you wind up with snippets like
-            #   ...CDEF = {isa = PBXFileReference; fileRef = 0123...
-            # If it were me, I would have put a space in there after the opening
-            # curly, but I guess this is just another one of those inconsistencies
-            # between how Xcode prints PBXFileReference and PBXBuildFile objects as
-            # compared to other objects.  Mimic Xcode's behavior here by using an
-            # empty string for sep.
-            sep = ""
-            end_tabs = 0
-        else:
-            sep = "\n"
-            end_tabs = 2
-
-        # Start the object.  For example, '\t\tPBXProject = {\n'.
-        self._XCPrint(file, 2, self._XCPrintableValue(2, self) + " = {" + sep)
-
-        # "isa" isn't in the _properties dictionary, it's an intrinsic property
-        # of the class which the object belongs to.  Xcode always outputs "isa"
-        # as the first element of an object dictionary.
-        self._XCKVPrint(file, 3, "isa", self.__class__.__name__)
-
-        # The remaining elements of an object dictionary are sorted alphabetically.
-        for property, value in sorted(self._properties.items()):
-            self._XCKVPrint(file, 3, property, value)
-
-        # End the object.
-        self._XCPrint(file, end_tabs, "};\n")
-
-    def UpdateProperties(self, properties, do_copy=False):
-        """Merge the supplied properties into the _properties dictionary.
-
-        The input properties must adhere to the class schema or a KeyError or
-        TypeError exception will be raised.  If adding an object of an XCObject
-        subclass and the schema indicates a strong relationship, the object's
-        parent will be set to this object.
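A simplified restatement of the schema check this docstring promises, over plain values (the copying and parent wiring described next are omitted, and the two-entry `schema` is hypothetical):

```python
schema = {
    "name": [0, str, 0, 0],        # [is_list, property_type, is_strong, is_required]
    "children": [1, str, 1, 1],
}

def update_properties(props, updates):
    for key, value in updates.items():
        if key not in schema:
            raise KeyError(key + " not in Example")
        is_list, property_type = schema[key][0:2]
        if is_list:
            if not isinstance(value, list):
                raise TypeError(key + " must be list")
            for item in value:
                if not isinstance(item, property_type):
                    raise TypeError("item of " + key + " must be "
                                    + property_type.__name__)
        elif not isinstance(value, property_type):
            raise TypeError(key + " must be " + property_type.__name__)
        props[key] = value

props = {}
update_properties(props, {"name": "Source", "children": ["a.c", "b.c"]})
print(props)  # {'name': 'Source', 'children': ['a.c', 'b.c']}
```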
- - If do_copy is True, then lists, dicts, strong-owned XCObjects, and - strong-owned XCObjects in lists will be copied instead of having their - references added. - """ - - if properties is None: - return - - for property, value in properties.items(): - # Make sure the property is in the schema. - if property not in self._schema: - raise KeyError(property + " not in " + self.__class__.__name__) - - # Make sure the property conforms to the schema. - (is_list, property_type, is_strong) = self._schema[property][0:3] - if is_list: - if value.__class__ != list: - raise TypeError( - property - + " of " - + self.__class__.__name__ - + " must be list, not " - + value.__class__.__name__ - ) - for item in value: - if not isinstance(item, property_type) and not ( - isinstance(item, basestring) and property_type == str - ): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError( - "item of " - + property - + " of " - + self.__class__.__name__ - + " must be " - + property_type.__name__ - + ", not " - + item.__class__.__name__ - ) - elif not isinstance(value, property_type) and not ( - isinstance(value, basestring) and property_type == str - ): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError( - property - + " of " - + self.__class__.__name__ - + " must be " - + property_type.__name__ - + ", not " - + value.__class__.__name__ - ) - - # Checks passed, perform the assignment. - if do_copy: - if isinstance(value, XCObject): - if is_strong: - self._properties[property] = value.Copy() - else: - self._properties[property] = value - elif isinstance(value, (basestring, int)): - self._properties[property] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, - # so it's safe to call Copy. - self._properties[property] = [] - for item in value: - self._properties[property].append(item.Copy()) - else: - self._properties[property] = value[:] - elif isinstance(value, dict): - self._properties[property] = value.copy() - else: - raise TypeError( - "Don't know how to copy a " - + value.__class__.__name__ - + " object for " - + property - + " in " - + self.__class__.__name__ - ) - else: - self._properties[property] = value - - # Set up the child's back-reference to this object. Don't use |value| - # any more because it may not be right if do_copy is true. - if is_strong: - if not is_list: - self._properties[property].parent = self - else: - for item in self._properties[property]: - item.parent = self - - def HasProperty(self, key): - return key in self._properties - - def GetProperty(self, key): - return self._properties[key] - - def SetProperty(self, key, value): - self.UpdateProperties({key: value}) - - def DelProperty(self, key): - if key in self._properties: - del self._properties[key] - - def AppendProperty(self, key, value): - # TODO(mark): Support ExtendProperty too (and make this call that)? - - # Schema validation. - if key not in self._schema: - raise KeyError(key + " not in " + self.__class__.__name__) - - (is_list, property_type, is_strong) = self._schema[key][0:3] - if not is_list: - raise TypeError(key + " of " + self.__class__.__name__ + " must be list") - if not isinstance(value, property_type): - raise TypeError( - "item of " - + key - + " of " - + self.__class__.__name__ - + " must be " - + property_type.__name__ - + ", not " - + value.__class__.__name__ - ) - - # If the property doesn't exist yet, create a new empty list to receive the - # item. 
- self._properties[key] = self._properties.get(key, []) - - # Set up the ownership link. - if is_strong: - value.parent = self - - # Store the item. - self._properties[key].append(value) - - def VerifyHasRequiredProperties(self): - """Ensure that all properties identified as required by the schema are - set. - """ - - # TODO(mark): A stronger verification mechanism is needed. Some - # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.items(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and property not in self._properties: - raise KeyError(self.__class__.__name__ + " requires " + property) - - def _SetDefaultsFromSchema(self): - """Assign object default values according to the schema. This will not - overwrite properties that have already been set.""" - - defaults = {} - for property, attributes in self._schema.items(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if ( - is_required - and len(attributes) >= 5 - and property not in self._properties - ): - default = attributes[4] - - defaults[property] = default - - if len(defaults) > 0: - # Use do_copy=True so that each new object gets its own copy of strong - # objects, lists, and dicts. - self.UpdateProperties(defaults, do_copy=True) - - -class XCHierarchicalElement(XCObject): - """Abstract base for PBXGroup and PBXFileReference. Not represented in a - project file.""" - - # TODO(mark): Do name and path belong here? Probably so. - # If path is set and name is not, name may have a default value. Name will - # be set to the basename of path, if the basename of path is different from - # the full value of path. If path is already just a leaf name, name will - # not be set. - _schema = XCObject._schema.copy() - _schema.update( - { - "comments": [0, str, 0, 0], - "fileEncoding": [0, str, 0, 0], - "includeInIndex": [0, int, 0, 0], - "indentWidth": [0, int, 0, 0], - "lineEnding": [0, int, 0, 0], - "sourceTree": [0, str, 0, 1, ""], - "tabWidth": [0, int, 0, 0], - "usesTabs": [0, int, 0, 0], - "wrapsLines": [0, int, 0, 0], - } - ) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - if "path" in self._properties and "name" not in self._properties: - path = self._properties["path"] - name = posixpath.basename(path) - if name != "" and path != name: - self.SetProperty("name", name) - - if "path" in self._properties and ( - "sourceTree" not in self._properties - or self._properties["sourceTree"] == "" - ): - # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take - # the variable out and make the path be relative to that variable by - # assigning the variable name as the sourceTree. - (source_tree, path) = SourceTreeAndPathFromPath(self._properties["path"]) - if source_tree is not None: - self._properties["sourceTree"] = source_tree - if path is not None: - self._properties["path"] = path - if ( - source_tree is not None - and path is None - and "name" not in self._properties - ): - # The path was of the form "$(SDKROOT)" with no path following it. - # This object is now relative to that variable, so it has no path - # attribute of its own. It does, however, keep a name. 
- del self._properties["path"] - self._properties["name"] = source_tree - - def Name(self): - if "name" in self._properties: - return self._properties["name"] - elif "path" in self._properties: - return self._properties["path"] - else: - # This happens in the case of the root PBXGroup. - return None - - def Hashables(self): - """Custom hashables for XCHierarchicalElements. - - XCHierarchicalElements are special. Generally, their hashes shouldn't - change if the paths don't change. The normal XCObject implementation of - Hashables adds a hashable for each object, which means that if - the hierarchical structure changes (possibly due to changes caused when - TakeOverOnlyChild runs and encounters slight changes in the hierarchy), - the hashes will change. For example, if a project file initially contains - a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent - a/b. If someone later adds a/f2 to the project file, a/b can no longer be - collapsed, and f1 winds up with parent b and grandparent a. That would - be sufficient to change f1's hash. - - To counteract this problem, hashables for all XCHierarchicalElements except - for the main group (which has neither a name nor a path) are taken to be - just the set of path components. Because hashables are inherited from - parents, this provides assurance that a/b/f1 has the same set of hashables - whether its parent is b or a/b. - - The main group is a special case. As it is permitted to have no name or - path, it is permitted to use the standard XCObject hash mechanism. This - is not considered a problem because there can be only one main group. - """ - - if self == self.PBXProjectAncestor()._properties["mainGroup"]: - # super - return XCObject.Hashables(self) - - hashables = [] - - # Put the name in first, ensuring that if TakeOverOnlyChild collapses - # children into a top-level group like "Source", the name always goes - # into the list of hashables without interfering with path components. - if "name" in self._properties: - # Make it less likely for people to manipulate hashes by following the - # pattern of always pushing an object type value onto the list first. - hashables.append(self.__class__.__name__ + ".name") - hashables.append(self._properties["name"]) - - # NOTE: This still has the problem that if an absolute path is encountered, - # including paths with a sourceTree, they'll still inherit their parents' - # hashables, even though the paths aren't relative to their parents. This - # is not expected to be much of a problem in practice. - path = self.PathFromSourceTreeAndPath() - if path is not None: - components = path.split(posixpath.sep) - for component in components: - hashables.append(self.__class__.__name__ + ".path") - hashables.append(component) - - hashables.extend(self._hashables) - - return hashables - - def Compare(self, other): - # Allow comparison of these types. PBXGroup has the highest sort rank; - # PBXVariantGroup is treated as equal to PBXFileReference. - valid_class_types = { - PBXFileReference: "file", - PBXGroup: "group", - PBXVariantGroup: "file", - } - self_type = valid_class_types[self.__class__] - other_type = valid_class_types[other.__class__] - - if self_type == other_type: - # If the two objects are of the same sort rank, compare their names. - return cmp(self.Name(), other.Name()) - - # Otherwise, sort groups before everything else. 
-        if self_type == "group":
-            return -1
-        return 1
-
-    def CompareRootGroup(self, other):
-        # This function should be used only to compare direct children of the
-        # containing PBXProject's mainGroup.  These groups should appear in the
-        # listed order.
-        # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
-        # generator should have a way of influencing this list rather than having
-        # to hardcode for the generator here.
-        order = [
-            "Source",
-            "Intermediates",
-            "Projects",
-            "Frameworks",
-            "Products",
-            "Build",
-        ]
-
-        # If the groups aren't in the listed order, do a name comparison.
-        # Otherwise, groups in the listed order should come before those that
-        # aren't.
-        self_name = self.Name()
-        other_name = other.Name()
-        self_in = isinstance(self, PBXGroup) and self_name in order
-        other_in = isinstance(other, PBXGroup) and other_name in order
-        if not self_in and not other_in:
-            return self.Compare(other)
-        if self_name in order and other_name not in order:
-            return -1
-        if other_name in order and self_name not in order:
-            return 1
-
-        # If both groups are in the listed order, go by the defined order.
-        self_index = order.index(self_name)
-        other_index = order.index(other_name)
-        if self_index < other_index:
-            return -1
-        if self_index > other_index:
-            return 1
-        return 0
-
-    def PathFromSourceTreeAndPath(self):
-        # Turn the object's sourceTree and path properties into a single flat
-        # string of a form comparable to the path parameter.  If there's a
-        # sourceTree property other than "", wrap it in $(...) for the
-        # comparison.
-        components = []
-        if self._properties["sourceTree"] != "":
-            components.append("$(" + self._properties["sourceTree"] + ")")
-        if "path" in self._properties:
-            components.append(self._properties["path"])
-
-        if len(components) > 0:
-            return posixpath.join(*components)
-
-        return None
-
-    def FullPath(self):
-        # Returns a full path to self relative to the project file, or relative
-        # to some other source tree.  Start with self, and walk up the chain of
-        # parents prepending their paths, if any, until no more parents are
-        # available (project-relative path) or until a path relative to some
-        # source tree is found.
-        xche = self
-        path = None
-        while isinstance(xche, XCHierarchicalElement) and (
-            path is None or (not path.startswith("/") and not path.startswith("$"))
-        ):
-            this_path = xche.PathFromSourceTreeAndPath()
-            if this_path is not None and path is not None:
-                path = posixpath.join(this_path, path)
-            elif this_path is not None:
-                path = this_path
-            xche = xche.parent
-
-        return path
-
-
-class PBXGroup(XCHierarchicalElement):
-    """
-    Attributes:
-      _children_by_path: Maps pathnames of children of this PBXGroup to the
-        actual child XCHierarchicalElement objects.
-      _variant_children_by_name_and_path: Maps (name, path) tuples of
-        PBXVariantGroup children to the actual child PBXVariantGroup objects.
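The bookkeeping contract just described can be shown over a plain dict. A standalone sketch (the real code additionally keys a second dict by `(name, path)` for PBXVariantGroup children):

```python
children_by_path = {}

def add_child(child_path, child):
    # Mirrors _AddChildToDicts below: a duplicate flattened path is an error,
    # and lookups become O(1) instead of scanning the children list.
    if child_path in children_by_path:
        raise ValueError("Found multiple children with path " + child_path)
    children_by_path[child_path] = child

add_child("src/main.cc", "file-ref-1")
try:
    add_child("src/main.cc", "file-ref-2")
except ValueError as error:
    print(error)  # Found multiple children with path src/main.cc
```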
-    """
-
-    _schema = XCHierarchicalElement._schema.copy()
-    _schema.update(
-        {
-            "children": [1, XCHierarchicalElement, 1, 1, []],
-            "name": [0, str, 0, 0],
-            "path": [0, str, 0, 0],
-        }
-    )
-
-    def __init__(self, properties=None, id=None, parent=None):
-        # super
-        XCHierarchicalElement.__init__(self, properties, id, parent)
-        self._children_by_path = {}
-        self._variant_children_by_name_and_path = {}
-        for child in self._properties.get("children", []):
-            self._AddChildToDicts(child)
-
-    def Hashables(self):
-        # super
-        hashables = XCHierarchicalElement.Hashables(self)
-
-        # It is not sufficient to just rely on name and parent to build a unique
-        # hashable: a node could have two child PBXGroups sharing a common name.
-        # To add entropy, the hashable is enhanced with the names of all its
-        # children.
-        for child in self._properties.get("children", []):
-            child_name = child.Name()
-            if child_name is not None:
-                hashables.append(child_name)
-
-        return hashables
-
-    def HashablesForChild(self):
-        # To avoid a circular reference the hashables used to compute a child id do
-        # not include the child names.
-        return XCHierarchicalElement.Hashables(self)
-
-    def _AddChildToDicts(self, child):
-        # Sets up this PBXGroup object's dicts to reference the child properly.
-        child_path = child.PathFromSourceTreeAndPath()
-        if child_path:
-            if child_path in self._children_by_path:
-                raise ValueError("Found multiple children with path " + child_path)
-            self._children_by_path[child_path] = child
-
-        if isinstance(child, PBXVariantGroup):
-            child_name = child._properties.get("name", None)
-            key = (child_name, child_path)
-            if key in self._variant_children_by_name_and_path:
-                raise ValueError(
-                    "Found multiple PBXVariantGroup children with "
-                    + "name "
-                    + str(child_name)
-                    + " and path "
-                    + str(child_path)
-                )
-            self._variant_children_by_name_and_path[key] = child
-
-    def AppendChild(self, child):
-        # Callers should use this instead of calling
-        # AppendProperty('children', child) directly because this function
-        # maintains the group's dicts.
-        self.AppendProperty("children", child)
-        self._AddChildToDicts(child)
-
-    def GetChildByName(self, name):
-        # This is not currently optimized with a dict as GetChildByPath is because
-        # it has few callers.  Most callers probably want GetChildByPath.  This
-        # function is only useful to get children that have names but no paths,
-        # which is rare.  The children of the main group ("Source", "Products",
-        # etc.) are pretty much the only cases where this is likely to come up.
-        #
-        # TODO(mark): Maybe this should raise an error if more than one child is
-        # present with the same name.
-        if "children" not in self._properties:
-            return None
-
-        for child in self._properties["children"]:
-            if child.Name() == name:
-                return child
-
-        return None
-
-    def GetChildByPath(self, path):
-        if not path:
-            return None
-
-        if path in self._children_by_path:
-            return self._children_by_path[path]
-
-        return None
-
-    def GetChildByRemoteObject(self, remote_object):
-        # This method is a little bit esoteric.  Given a remote_object, which
-        # should be a PBXFileReference in another project file, this method will
-        # return this group's PBXReferenceProxy object serving as a local proxy
-        # for the remote PBXFileReference.
-        #
-        # This function might benefit from a dict optimization as GetChildByPath
-        # for some workloads, but profiling shows that it's not currently a
-        # problem.
- if "children" not in self._properties: - return None - - for child in self._properties["children"]: - if not isinstance(child, PBXReferenceProxy): - continue - - container_proxy = child._properties["remoteRef"] - if container_proxy._properties["remoteGlobalIDString"] == remote_object: - return child - - return None - - def AddOrGetFileByPath(self, path, hierarchical): - """Returns an existing or new file reference corresponding to path. - - If hierarchical is True, this method will create or use the necessary - hierarchical group structure corresponding to path. Otherwise, it will - look in and create an item in the current group only. - - If an existing matching reference is found, it is returned, otherwise, a - new one will be created, added to the correct group, and returned. - - If path identifies a directory by virtue of carrying a trailing slash, - this method returns a PBXFileReference of "folder" type. If path - identifies a variant, by virtue of it identifying a file inside a directory - with an ".lproj" extension, this method returns a PBXVariantGroup - containing the variant named by path, and possibly other variants. For - all other paths, a "normal" PBXFileReference will be returned. - """ - - # Adding or getting a directory? Directories end with a trailing slash. - is_dir = False - if path.endswith("/"): - is_dir = True - path = posixpath.normpath(path) - if is_dir: - path = path + "/" - - # Adding or getting a variant? Variants are files inside directories - # with an ".lproj" extension. Xcode uses variants for localization. For - # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named - # MainMenu.nib inside path/to, and give it a variant named Language. In - # this example, grandparent would be set to path/to and parent_root would - # be set to Language. - variant_name = None - parent = posixpath.dirname(path) - grandparent = posixpath.dirname(parent) - parent_basename = posixpath.basename(parent) - (parent_root, parent_ext) = posixpath.splitext(parent_basename) - if parent_ext == ".lproj": - variant_name = parent_root - if grandparent == "": - grandparent = None - - # Putting a directory inside a variant group is not currently supported. - assert not is_dir or variant_name is None - - path_split = path.split(posixpath.sep) - if ( - len(path_split) == 1 - or ((is_dir or variant_name is not None) and len(path_split) == 2) - or not hierarchical - ): - # The PBXFileReference or PBXVariantGroup will be added to or gotten from - # this PBXGroup, no recursion necessary. - if variant_name is None: - # Add or get a PBXFileReference. - file_ref = self.GetChildByPath(path) - if file_ref is not None: - assert file_ref.__class__ == PBXFileReference - else: - file_ref = PBXFileReference({"path": path}) - self.AppendChild(file_ref) - else: - # Add or get a PBXVariantGroup. The variant group name is the same - # as the basename (MainMenu.nib in the example above). grandparent - # specifies the path to the variant group itself, and path_split[-2:] - # is the path of the specific variant relative to its group. 
- variant_group_name = posixpath.basename(path) - variant_group_ref = self.AddOrGetVariantGroupByNameAndPath( - variant_group_name, grandparent - ) - variant_path = posixpath.sep.join(path_split[-2:]) - variant_ref = variant_group_ref.GetChildByPath(variant_path) - if variant_ref is not None: - assert variant_ref.__class__ == PBXFileReference - else: - variant_ref = PBXFileReference( - {"name": variant_name, "path": variant_path} - ) - variant_group_ref.AppendChild(variant_ref) - # The caller is interested in the variant group, not the specific - # variant file. - file_ref = variant_group_ref - return file_ref - else: - # Hierarchical recursion. Add or get a PBXGroup corresponding to the - # outermost path component, and then recurse into it, chopping off that - # path component. - next_dir = path_split[0] - group_ref = self.GetChildByPath(next_dir) - if group_ref is not None: - assert group_ref.__class__ == PBXGroup - else: - group_ref = PBXGroup({"path": next_dir}) - self.AppendChild(group_ref) - return group_ref.AddOrGetFileByPath( - posixpath.sep.join(path_split[1:]), hierarchical - ) - - def AddOrGetVariantGroupByNameAndPath(self, name, path): - """Returns an existing or new PBXVariantGroup for name and path. - - If a PBXVariantGroup identified by the name and path arguments is already - present as a child of this object, it is returned. Otherwise, a new - PBXVariantGroup with the correct properties is created, added as a child, - and returned. - - This method will generally be called by AddOrGetFileByPath, which knows - when to create a variant group based on the structure of the pathnames - passed to it. - """ - - key = (name, path) - if key in self._variant_children_by_name_and_path: - variant_group_ref = self._variant_children_by_name_and_path[key] - assert variant_group_ref.__class__ == PBXVariantGroup - return variant_group_ref - - variant_group_properties = {"name": name} - if path is not None: - variant_group_properties["path"] = path - variant_group_ref = PBXVariantGroup(variant_group_properties) - self.AppendChild(variant_group_ref) - - return variant_group_ref - - def TakeOverOnlyChild(self, recurse=False): - """If this PBXGroup has only one child and it's also a PBXGroup, take - it over by making all of its children this object's children. - - This function will continue to take over only children when those children - are groups. If there are three PBXGroups representing a, b, and c, with - c inside b and b inside a, and a and b have no other children, this will - result in a taking over both b and c, forming a PBXGroup for a/b/c. - - If recurse is True, this function will recurse into children and ask them - to collapse themselves by taking over only children as well. Assuming - an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f - (d1, d2, and f are files, the rest are groups), recursion will result in - a group for a/b/c containing a group for d3/e. - """ - - # At this stage, check that child class types are PBXGroup exactly, - # instead of using isinstance. The only subclass of PBXGroup, - # PBXVariantGroup, should not participate in reparenting in the same way: - # reparenting by merging different object types would be wrong. - while ( - len(self._properties["children"]) == 1 - and self._properties["children"][0].__class__ == PBXGroup - ): - # Loop to take over the innermost only-child group possible. - - child = self._properties["children"][0] - - # Assume the child's properties, including its children. 
Save a copy - # of this object's old properties, because they'll still be needed. - # This object retains its existing id and parent attributes. - old_properties = self._properties - self._properties = child._properties - self._children_by_path = child._children_by_path - - if ( - "sourceTree" not in self._properties - or self._properties["sourceTree"] == "" - ): - # The child was relative to its parent. Fix up the path. Note that - # children with a sourceTree other than "" are not relative to - # their parents, so no path fix-up is needed in that case. - if "path" in old_properties: - if "path" in self._properties: - # Both the original parent and child have paths set. - self._properties["path"] = posixpath.join( - old_properties["path"], self._properties["path"] - ) - else: - # Only the original parent has a path, use it. - self._properties["path"] = old_properties["path"] - if "sourceTree" in old_properties: - # The original parent had a sourceTree set, use it. - self._properties["sourceTree"] = old_properties["sourceTree"] - - # If the original parent had a name set, keep using it. If the original - # parent didn't have a name but the child did, let the child's name - # live on. If the name attribute seems unnecessary now, get rid of it. - if "name" in old_properties and old_properties["name"] not in ( - None, - self.Name(), - ): - self._properties["name"] = old_properties["name"] - if ( - "name" in self._properties - and "path" in self._properties - and self._properties["name"] == self._properties["path"] - ): - del self._properties["name"] - - # Notify all children of their new parent. - for child in self._properties["children"]: - child.parent = self - - # If asked to recurse, recurse. - if recurse: - for child in self._properties["children"]: - if child.__class__ == PBXGroup: - child.TakeOverOnlyChild(recurse) - - def SortGroup(self): - self._properties["children"] = sorted( - self._properties["children"], cmp=lambda x, y: x.Compare(y) - ) - - # Recurse. - for child in self._properties["children"]: - if isinstance(child, PBXGroup): - child.SortGroup() - - -class XCFileLikeElement(XCHierarchicalElement): - # Abstract base for objects that can be used as the fileRef property of - # PBXBuildFile. - - def PathHashables(self): - # A PBXBuildFile that refers to this object will call this method to - # obtain additional hashables specific to this XCFileLikeElement. Don't - # just use this object's hashables, they're not specific and unique enough - # on their own (without access to the parent hashables.) Instead, provide - # hashables that identify this object by path by getting its hashables as - # well as the hashables of ancestor XCHierarchicalElement objects. - - hashables = [] - xche = self - while isinstance(xche, XCHierarchicalElement): - xche_hashables = xche.Hashables() - for index, xche_hashable in enumerate(xche_hashables): - hashables.insert(index, xche_hashable) - xche = xche.parent - return hashables - - -class XCContainerPortal(XCObject): - # Abstract base for objects that can be used as the containerPortal property - # of PBXContainerItemProxy. - pass - - -class XCRemoteObject(XCObject): - # Abstract base for objects that can be used as the remoteGlobalIDString - # property of PBXContainerItemProxy. 
- pass - - -class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject): - _schema = XCFileLikeElement._schema.copy() - _schema.update( - { - "explicitFileType": [0, str, 0, 0], - "lastKnownFileType": [0, str, 0, 0], - "name": [0, str, 0, 0], - "path": [0, str, 0, 1], - } - ) - - # Weird output rules for PBXFileReference. - _should_print_single_line = True - # super - _encode_transforms = XCFileLikeElement._alternate_encode_transforms - - def __init__(self, properties=None, id=None, parent=None): - # super - XCFileLikeElement.__init__(self, properties, id, parent) - if "path" in self._properties and self._properties["path"].endswith("/"): - self._properties["path"] = self._properties["path"][:-1] - is_dir = True - else: - is_dir = False - - if ( - "path" in self._properties - and "lastKnownFileType" not in self._properties - and "explicitFileType" not in self._properties - ): - # TODO(mark): This is the replacement for a replacement for a quick hack. - # It is no longer incredibly sucky, but this list needs to be extended. - extension_map = { - "a": "archive.ar", - "app": "wrapper.application", - "bdic": "file", - "bundle": "wrapper.cfbundle", - "c": "sourcecode.c.c", - "cc": "sourcecode.cpp.cpp", - "cpp": "sourcecode.cpp.cpp", - "css": "text.css", - "cxx": "sourcecode.cpp.cpp", - "dart": "sourcecode", - "dylib": "compiled.mach-o.dylib", - "framework": "wrapper.framework", - "gyp": "sourcecode", - "gypi": "sourcecode", - "h": "sourcecode.c.h", - "hxx": "sourcecode.cpp.h", - "icns": "image.icns", - "java": "sourcecode.java", - "js": "sourcecode.javascript", - "kext": "wrapper.kext", - "m": "sourcecode.c.objc", - "mm": "sourcecode.cpp.objcpp", - "nib": "wrapper.nib", - "o": "compiled.mach-o.objfile", - "pdf": "image.pdf", - "pl": "text.script.perl", - "plist": "text.plist.xml", - "pm": "text.script.perl", - "png": "image.png", - "py": "text.script.python", - "r": "sourcecode.rez", - "rez": "sourcecode.rez", - "s": "sourcecode.asm", - "storyboard": "file.storyboard", - "strings": "text.plist.strings", - "swift": "sourcecode.swift", - "ttf": "file", - "xcassets": "folder.assetcatalog", - "xcconfig": "text.xcconfig", - "xcdatamodel": "wrapper.xcdatamodel", - "xcdatamodeld": "wrapper.xcdatamodeld", - "xib": "file.xib", - "y": "sourcecode.yacc", - } - - prop_map = { - "dart": "explicitFileType", - "gyp": "explicitFileType", - "gypi": "explicitFileType", - } - - if is_dir: - file_type = "folder" - prop_name = "lastKnownFileType" - else: - basename = posixpath.basename(self._properties["path"]) - (root, ext) = posixpath.splitext(basename) - # Check the map using a lowercase extension. - # TODO(mark): Maybe it should try with the original case first and fall - # back to lowercase, in case there are any instances where case - # matters. There currently aren't. - if ext != "": - ext = ext[1:].lower() - - # TODO(mark): "text" is the default value, but "file" is appropriate - # for unrecognized files not containing text. Xcode seems to choose - # based on content. - file_type = extension_map.get(ext, "text") - prop_name = prop_map.get(ext, "lastKnownFileType") - - self._properties[prop_name] = file_type - - -class PBXVariantGroup(PBXGroup, XCFileLikeElement): - """PBXVariantGroup is used by Xcode to represent localizations.""" - - # No additions to the schema relative to PBXGroup. - pass - - -# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below -# because it uses PBXContainerItemProxy, defined below. 
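A cut-down version of the extension lookup PBXFileReference performs in its `__init__` above: lowercase the extension, map it to a file type (defaulting to "text"), and pick the property name. The trimmed maps here carry only the entries exercised below:

```python
import posixpath

extension_map = {"cc": "sourcecode.cpp.cpp", "h": "sourcecode.c.h",
                 "gyp": "sourcecode"}
prop_map = {"gyp": "explicitFileType"}

def file_type_for(path):
    ext = posixpath.splitext(posixpath.basename(path))[1]
    if ext != "":
        ext = ext[1:].lower()
    prop_name = prop_map.get(ext, "lastKnownFileType")
    return prop_name, extension_map.get(ext, "text")

print(file_type_for("src/foo.cc"))     # ('lastKnownFileType', 'sourcecode.cpp.cpp')
print(file_type_for("build/all.gyp"))  # ('explicitFileType', 'sourcecode')
print(file_type_for("README"))         # ('lastKnownFileType', 'text')
```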
- - -class XCBuildConfiguration(XCObject): - _schema = XCObject._schema.copy() - _schema.update( - { - "baseConfigurationReference": [0, PBXFileReference, 0, 0], - "buildSettings": [0, dict, 0, 1, {}], - "name": [0, str, 0, 1], - } - ) - - def HasBuildSetting(self, key): - return key in self._properties["buildSettings"] - - def GetBuildSetting(self, key): - return self._properties["buildSettings"][key] - - def SetBuildSetting(self, key, value): - # TODO(mark): If a list, copy? - self._properties["buildSettings"][key] = value - - def AppendBuildSetting(self, key, value): - if key not in self._properties["buildSettings"]: - self._properties["buildSettings"][key] = [] - self._properties["buildSettings"][key].append(value) - - def DelBuildSetting(self, key): - if key in self._properties["buildSettings"]: - del self._properties["buildSettings"][key] - - def SetBaseConfiguration(self, value): - self._properties["baseConfigurationReference"] = value - - -class XCConfigurationList(XCObject): - # _configs is the default list of configurations. - _configs = [ - XCBuildConfiguration({"name": "Debug"}), - XCBuildConfiguration({"name": "Release"}), - ] - - _schema = XCObject._schema.copy() - _schema.update( - { - "buildConfigurations": [1, XCBuildConfiguration, 1, 1, _configs], - "defaultConfigurationIsVisible": [0, int, 0, 1, 1], - "defaultConfigurationName": [0, str, 0, 1, "Release"], - } - ) - - def Name(self): - return ( - "Build configuration list for " - + self.parent.__class__.__name__ - + ' "' - + self.parent.Name() - + '"' - ) - - def ConfigurationNamed(self, name): - """Convenience accessor to obtain an XCBuildConfiguration by name.""" - for configuration in self._properties["buildConfigurations"]: - if configuration._properties["name"] == name: - return configuration - - raise KeyError(name) - - def DefaultConfiguration(self): - """Convenience accessor to obtain the default XCBuildConfiguration.""" - return self.ConfigurationNamed(self._properties["defaultConfigurationName"]) - - def HasBuildSetting(self, key): - """Determines the state of a build setting in all XCBuildConfiguration - child objects. - - If all child objects have key in their build settings, and the value is the - same in all child objects, returns 1. - - If no child objects have the key in their build settings, returns 0. - - If some, but not all, child objects have the key in their build settings, - or if any children have different values for the key, returns -1. - """ - - has = None - value = None - for configuration in self._properties["buildConfigurations"]: - configuration_has = configuration.HasBuildSetting(key) - if has is None: - has = configuration_has - elif has != configuration_has: - return -1 - - if configuration_has: - configuration_value = configuration.GetBuildSetting(key) - if value is None: - value = configuration_value - elif value != configuration_value: - return -1 - - if not has: - return 0 - - return 1 - - def GetBuildSetting(self, key): - """Gets the build setting for key. - - All child XCConfiguration objects must have the same value set for the - setting, or a ValueError will be raised. - """ - - # TODO(mark): This is wrong for build settings that are lists. The list - # contents should be compared (and a list copy returned?) 
- - value = None - for configuration in self._properties["buildConfigurations"]: - configuration_value = configuration.GetBuildSetting(key) - if value is None: - value = configuration_value - else: - if value != configuration_value: - raise ValueError("Variant values for " + key) - - return value - - def SetBuildSetting(self, key, value): - """Sets the build setting for key to value in all child - XCBuildConfiguration objects. - """ - - for configuration in self._properties["buildConfigurations"]: - configuration.SetBuildSetting(key, value) - - def AppendBuildSetting(self, key, value): - """Appends value to the build setting for key, which is treated as a list, - in all child XCBuildConfiguration objects. - """ - - for configuration in self._properties["buildConfigurations"]: - configuration.AppendBuildSetting(key, value) - - def DelBuildSetting(self, key): - """Deletes the build setting key from all child XCBuildConfiguration - objects. - """ - - for configuration in self._properties["buildConfigurations"]: - configuration.DelBuildSetting(key) - - def SetBaseConfiguration(self, value): - """Sets the build configuration in all child XCBuildConfiguration objects. - """ - - for configuration in self._properties["buildConfigurations"]: - configuration.SetBaseConfiguration(value) - - -class PBXBuildFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update( - { - "fileRef": [0, XCFileLikeElement, 0, 1], - "settings": [0, str, 0, 0], # hack, it's a dict - } - ) - - # Weird output rules for PBXBuildFile. - _should_print_single_line = True - _encode_transforms = XCObject._alternate_encode_transforms - - def Name(self): - # Example: "main.cc in Sources" - return self._properties["fileRef"].Name() + " in " + self.parent.Name() - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # It is not sufficient to just rely on Name() to get the - # XCFileLikeElement's name, because that is not a complete pathname. - # PathHashables returns hashables unique enough that no two - # PBXBuildFiles should wind up with the same set of hashables, unless - # someone adds the same file multiple times to the same target. That - # would be considered invalid anyway. - hashables.extend(self._properties["fileRef"].PathHashables()) - - return hashables - - -class XCBuildPhase(XCObject): - """Abstract base for build phase classes. Not represented in a project - file. - - Attributes: - _files_by_path: A dict mapping each path of a child in the files list by - path (keys) to the corresponding PBXBuildFile children (values). - _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys) - to the corresponding PBXBuildFile children (values). - """ - - # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't - # actually have a "files" list. XCBuildPhase should not have "files" but - # another abstract subclass of it should provide this, and concrete build - # phase types that do have "files" lists should be derived from that new - # abstract subclass. XCBuildPhase should only provide buildActionMask and - # runOnlyForDeploymentPostprocessing, and not files or the various - # file-related methods and attributes. 
- - _schema = XCObject._schema.copy() - _schema.update( - { - "buildActionMask": [0, int, 0, 1, 0x7FFFFFFF], - "files": [1, PBXBuildFile, 1, 1, []], - "runOnlyForDeploymentPostprocessing": [0, int, 0, 1, 0], - } - ) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - - self._files_by_path = {} - self._files_by_xcfilelikeelement = {} - for pbxbuildfile in self._properties.get("files", []): - self._AddBuildFileToDicts(pbxbuildfile) - - def FileGroup(self, path): - # Subclasses must override this by returning a two-element tuple. The - # first item in the tuple should be the PBXGroup to which "path" should be - # added, either as a child or deeper descendant. The second item should - # be a boolean indicating whether files should be added into hierarchical - # groups or one single flat group. - raise NotImplementedError(self.__class__.__name__ + " must implement FileGroup") - - def _AddPathToDict(self, pbxbuildfile, path): - """Adds path to the dict tracking paths belonging to this build phase. - - If the path is already a member of this build phase, raises an exception. - """ - - if path in self._files_by_path: - raise ValueError("Found multiple build files with path " + path) - self._files_by_path[path] = pbxbuildfile - - def _AddBuildFileToDicts(self, pbxbuildfile, path=None): - """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. - - If path is specified, then it is the path that is being added to the - phase, and pbxbuildfile must contain either a PBXFileReference directly - referencing that path, or it must contain a PBXVariantGroup that itself - contains a PBXFileReference referencing the path. - - If path is not specified, either the PBXFileReference's path or the paths - of all children of the PBXVariantGroup are taken as being added to the - phase. - - If the path is already present in the phase, raises an exception. - - If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile - are already present in the phase, referenced by a different PBXBuildFile - object, raises an exception. This does not raise an exception when - a PBXFileReference or PBXVariantGroup reappear and are referenced by the - same PBXBuildFile that has already introduced them, because in the case - of PBXVariantGroup objects, they may correspond to multiple paths that are - not all added simultaneously. When this situation occurs, the path needs - to be added to _files_by_path, but nothing needs to change in - _files_by_xcfilelikeelement, and the caller should have avoided adding - the PBXBuildFile if it is already present in the list of children. - """ - - xcfilelikeelement = pbxbuildfile._properties["fileRef"] - - paths = [] - if path is not None: - # It's best when the caller provides the path. - if isinstance(xcfilelikeelement, PBXVariantGroup): - paths.append(path) - else: - # If the caller didn't provide a path, there can be either multiple - # paths (PBXVariantGroup) or one. - if isinstance(xcfilelikeelement, PBXVariantGroup): - for variant in xcfilelikeelement._properties["children"]: - paths.append(variant.FullPath()) - else: - paths.append(xcfilelikeelement.FullPath()) - - # Add the paths first, because if something's going to raise, the - # messages provided by _AddPathToDict are more useful owing to its - # having access to a real pathname and not just an object's Name(). 
- for a_path in paths: - self._AddPathToDict(pbxbuildfile, a_path) - - # If another PBXBuildFile references this XCFileLikeElement, there's a - # problem. - if ( - xcfilelikeelement in self._files_by_xcfilelikeelement - and self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile - ): - raise ValueError( - "Found multiple build files for " + xcfilelikeelement.Name() - ) - self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile - - def AppendBuildFile(self, pbxbuildfile, path=None): - # Callers should use this instead of calling - # AppendProperty('files', pbxbuildfile) directly because this function - # maintains the object's dicts. Better yet, callers can just call AddFile - # with a pathname and not worry about building their own PBXBuildFile - # objects. - self.AppendProperty("files", pbxbuildfile) - self._AddBuildFileToDicts(pbxbuildfile, path) - - def AddFile(self, path, settings=None): - (file_group, hierarchical) = self.FileGroup(path) - file_ref = file_group.AddOrGetFileByPath(path, hierarchical) - - if file_ref in self._files_by_xcfilelikeelement and isinstance( - file_ref, PBXVariantGroup - ): - # There's already a PBXBuildFile in this phase corresponding to the - # PBXVariantGroup. path just provides a new variant that belongs to - # the group. Add the path to the dict. - pbxbuildfile = self._files_by_xcfilelikeelement[file_ref] - self._AddBuildFileToDicts(pbxbuildfile, path) - else: - # Add a new PBXBuildFile to get file_ref into the phase. - if settings is None: - pbxbuildfile = PBXBuildFile({"fileRef": file_ref}) - else: - pbxbuildfile = PBXBuildFile({"fileRef": file_ref, "settings": settings}) - self.AppendBuildFile(pbxbuildfile, path) - - -class PBXHeadersBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return "Headers" - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXResourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return "Resources" - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXSourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return "Sources" - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXFrameworksBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return "Frameworks" - - def FileGroup(self, path): - (root, ext) = posixpath.splitext(path) - if ext != "": - ext = ext[1:].lower() - if ext == "o": - # .o files are added to Xcode Frameworks phases, but conceptually aren't - # frameworks, they're more like sources or intermediates. Redirect them - # to show up in one of those other groups. 
-            return self.PBXProjectAncestor().RootGroupForPath(path)
-        else:
-            return (self.PBXProjectAncestor().FrameworksGroup(), False)
-
-
-class PBXShellScriptBuildPhase(XCBuildPhase):
-    _schema = XCBuildPhase._schema.copy()
-    _schema.update(
-        {
-            "inputPaths": [1, str, 0, 1, []],
-            "name": [0, str, 0, 0],
-            "outputPaths": [1, str, 0, 1, []],
-            "shellPath": [0, str, 0, 1, "/bin/sh"],
-            "shellScript": [0, str, 0, 1],
-            "showEnvVarsInLog": [0, int, 0, 0],
-        }
-    )
-
-    def Name(self):
-        if "name" in self._properties:
-            return self._properties["name"]
-
-        return "ShellScript"
-
-
-class PBXCopyFilesBuildPhase(XCBuildPhase):
-    _schema = XCBuildPhase._schema.copy()
-    _schema.update(
-        {
-            "dstPath": [0, str, 0, 1],
-            "dstSubfolderSpec": [0, int, 0, 1],
-            "name": [0, str, 0, 0],
-        }
-    )
-
-    # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)".
-    # Match group 1 is "DIR", group 3 is "path" or "$(DIR2)" or "$(DIR2)/path"
-    # or None.  If group 3 is "path", group 4 will be None otherwise group 4 is
-    # "DIR2" and group 6 is "path".
-    path_tree_re = re.compile(r"^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$")
-
-    # path_tree_{first,second}_to_subfolder map names of Xcode variables to the
-    # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase
-    # object.
-    path_tree_first_to_subfolder = {
-        # Types that can be chosen via the Xcode UI.
-        "BUILT_PRODUCTS_DIR": 16,  # Products Directory
-        "BUILT_FRAMEWORKS_DIR": 10,  # Not an official Xcode macro.
-        # Existed before support for the
-        # names below was added. Maps to
-        # "Frameworks".
-    }
-
-    path_tree_second_to_subfolder = {
-        "WRAPPER_NAME": 1,  # Wrapper
-        # Although Xcode's friendly name is "Executables", the destination
-        # is demonstrably the value of the build setting
-        # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH.
-        "EXECUTABLE_FOLDER_PATH": 6,  # Executables.
-        "UNLOCALIZED_RESOURCES_FOLDER_PATH": 7,  # Resources
-        "JAVA_FOLDER_PATH": 15,  # Java Resources
-        "FRAMEWORKS_FOLDER_PATH": 10,  # Frameworks
-        "SHARED_FRAMEWORKS_FOLDER_PATH": 11,  # Shared Frameworks
-        "SHARED_SUPPORT_FOLDER_PATH": 12,  # Shared Support
-        "PLUGINS_FOLDER_PATH": 13,  # PlugIns
-        # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec.
-        # Note that it re-uses the BUILT_PRODUCTS_DIR value for
-        # dstSubfolderSpec.  dstPath is set below.
-        "XPCSERVICES_FOLDER_PATH": 16,  # XPC Services.
-    }
-
-    def Name(self):
-        if "name" in self._properties:
-            return self._properties["name"]
-
-        return "CopyFiles"
-
-    def FileGroup(self, path):
-        return self.PBXProjectAncestor().RootGroupForPath(path)
-
-    def SetDestination(self, path):
-        """Set the dstSubfolderSpec and dstPath properties from path.
-
-        path may be specified in the same notation used for XCHierarchicalElements,
-        specifically, "$(DIR)/path".
-        """
-
-        path_tree_match = self.path_tree_re.search(path)
-        if path_tree_match:
-            path_tree = path_tree_match.group(1)
-            if path_tree in self.path_tree_first_to_subfolder:
-                subfolder = self.path_tree_first_to_subfolder[path_tree]
-                relative_path = path_tree_match.group(3)
-                if relative_path is None:
-                    relative_path = ""
-
-                if subfolder == 16 and path_tree_match.group(4) is not None:
-                    # BUILT_PRODUCTS_DIR (16) is the first element in a path whose
-                    # second element is possibly one of the variable names in
-                    # path_tree_second_to_subfolder.  Xcode sets the values of all these
-                    # variables to relative paths so .gyp files must prefix them with
-                    # BUILT_PRODUCTS_DIR, e.g.
-                    # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH).
Then - # xcode_emulation.py can export these variables with the same values - # as Xcode yet make & ninja files can determine the absolute path - # to the target. Xcode uses the dstSubfolderSpec value set here - # to determine the full path. - # - # An alternative of xcode_emulation.py setting the values to - # absolute paths when exporting these variables has been - # ruled out because then the values would be different - # depending on the build tool. - # - # Another alternative is to invent new names for the variables used - # to match to the subfolder indices in the second table. .gyp files - # then will not need to prepend $(BUILT_PRODUCTS_DIR) because - # xcode_emulation.py can set the values of those variables to - # the absolute paths when exporting. This is possibly the thinking - # behind BUILT_FRAMEWORKS_DIR which is used in exactly this manner. - # - # Requiring prepending BUILT_PRODUCTS_DIR has been chosen because - # this same way could be used to specify destinations in .gyp files - # that pre-date this addition to GYP. However they would only work - # with the Xcode generator. - # The previous version of xcode_emulation.py - # does not export these variables. Such files will get the benefit - # of the Xcode UI showing the proper destination name simply by - # regenerating the projects with this version of GYP. - path_tree = path_tree_match.group(4) - relative_path = path_tree_match.group(6) - separator = "/" - - if path_tree in self.path_tree_second_to_subfolder: - subfolder = self.path_tree_second_to_subfolder[path_tree] - if relative_path is None: - relative_path = "" - separator = "" - if path_tree == "XPCSERVICES_FOLDER_PATH": - relative_path = ( - "$(CONTENTS_FOLDER_PATH)/XPCServices" - + separator - + relative_path - ) - else: - # subfolder = 16 from above - # The second element of the path is an unrecognized variable. - # Include it and any remaining elements in relative_path. - relative_path = path_tree_match.group(3) - - else: - # The path starts with an unrecognized Xcode variable - # name like $(SRCROOT). Xcode will still handle this - # as an "absolute path" that starts with the variable. - subfolder = 0 - relative_path = path - elif path.startswith("/"): - # Special case. Absolute paths are in dstSubfolderSpec 0. - subfolder = 0 - relative_path = path[1:] - else: - raise ValueError( - "Can't use path %s in a %s" % (path, self.__class__.__name__) - ) - - self._properties["dstPath"] = relative_path - self._properties["dstSubfolderSpec"] = subfolder - - -class PBXBuildRule(XCObject): - _schema = XCObject._schema.copy() - _schema.update( - { - "compilerSpec": [0, str, 0, 1], - "filePatterns": [0, str, 0, 0], - "fileType": [0, str, 0, 1], - "isEditable": [0, int, 0, 1, 1], - "outputFiles": [1, str, 0, 1, []], - "script": [0, str, 0, 0], - } - ) - - def Name(self): - # Not very inspired, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. - hashables.append(self._properties["fileType"]) - if "filePatterns" in self._properties: - hashables.append(self._properties["filePatterns"]) - return hashables - - -class PBXContainerItemProxy(XCObject): - # When referencing an item in this project file, containerPortal is the - # PBXProject root object of this project file. When referencing an item in - # another project file, containerPortal is a PBXFileReference identifying - # the other project file. 
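To make the SetDestination() mapping above concrete, a small hypothetical illustration (not part of the vendored file), using the subfolder tables defined earlier:

    phase = PBXCopyFilesBuildPhase({"name": "Copy PlugIns"})

    # A recognized second-level variable maps to its own subfolder code, and
    # the remainder of the path (here empty) becomes dstPath.
    phase.SetDestination("$(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH)")
    assert phase.GetProperty("dstSubfolderSpec") == 13  # PlugIns
    assert phase.GetProperty("dstPath") == ""

    # Absolute paths get dstSubfolderSpec 0 with the leading "/" stripped.
    phase.SetDestination("/usr/local/lib")
    assert phase.GetProperty("dstSubfolderSpec") == 0
    assert phase.GetProperty("dstPath") == "usr/local/lib"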
-    #
-    # When serving as a proxy to an XCTarget (in this project file or another),
-    # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
-    # project file), proxyType is 2. Type 2 is used for references to the
-    # products of the other project file's targets.
-    #
-    # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
-    # a comment, indicating that it's tracked internally simply as a string, but
-    # sometimes it's printed with a comment (usually when the object is initially
-    # created), indicating that it's tracked as a project file object at least
-    # sometimes. This module always tracks it as an object, but contains a hack
-    # to prevent it from printing the comment in the project file output. See
-    # _XCKVPrint.
-    _schema = XCObject._schema.copy()
-    _schema.update(
-        {
-            "containerPortal": [0, XCContainerPortal, 0, 1],
-            "proxyType": [0, int, 0, 1],
-            "remoteGlobalIDString": [0, XCRemoteObject, 0, 1],
-            "remoteInfo": [0, str, 0, 1],
-        }
-    )
-
-    def __repr__(self):
-        props = self._properties
-        name = "%s.gyp:%s" % (props["containerPortal"].Name(), props["remoteInfo"])
-        return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self))
-
-    def Name(self):
-        # Admittedly not the best name, but it's what Xcode uses.
-        return self.__class__.__name__
-
-    def Hashables(self):
-        # super
-        hashables = XCObject.Hashables(self)
-
-        # Use the hashables of the weak objects that this object refers to.
-        hashables.extend(self._properties["containerPortal"].Hashables())
-        hashables.extend(self._properties["remoteGlobalIDString"].Hashables())
-        return hashables
-
-
-class PBXTargetDependency(XCObject):
-    # The "target" property accepts an XCTarget object, and obviously not
-    # NoneType. But XCTarget is defined below, so it can't be put into the
-    # schema yet. The definition of PBXTargetDependency can't be moved below
-    # XCTarget because XCTarget's own schema references PBXTargetDependency.
-    # Python doesn't deal well with this circular relationship, and doesn't have
-    # a real way to do forward declarations. To work around, the type of
-    # the "target" property is reset below, after XCTarget is defined.
-    #
-    # At least one of "name" and "target" is required.
-    _schema = XCObject._schema.copy()
-    _schema.update(
-        {
-            "name": [0, str, 0, 0],
-            "target": [0, None.__class__, 0, 0],
-            "targetProxy": [0, PBXContainerItemProxy, 1, 1],
-        }
-    )
-
-    def __repr__(self):
-        name = self._properties.get("name") or self._properties["target"].Name()
-        return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self))
-
-    def Name(self):
-        # Admittedly not the best name, but it's what Xcode uses.
-        return self.__class__.__name__
-
-    def Hashables(self):
-        # super
-        hashables = XCObject.Hashables(self)
-
-        # Use the hashables of the weak objects that this object refers to.
-        hashables.extend(self._properties["targetProxy"].Hashables())
-        return hashables
-
-
-class PBXReferenceProxy(XCFileLikeElement):
-    _schema = XCFileLikeElement._schema.copy()
-    _schema.update(
-        {
-            "fileType": [0, str, 0, 1],
-            "path": [0, str, 0, 1],
-            "remoteRef": [0, PBXContainerItemProxy, 1, 1],
-        }
-    )
-
-
-class XCTarget(XCRemoteObject):
-    # An XCTarget is really just an XCObject, the XCRemoteObject thing is just
-    # to allow XCTarget to be used in the remoteGlobalIDString property of
-    # PBXContainerItemProxy.
- # - # Setting a "name" property at instantiation may also affect "productName", - # which may in turn affect the "PRODUCT_NAME" build setting in children of - # "buildConfigurationList". See __init__ below. - _schema = XCRemoteObject._schema.copy() - _schema.update( - { - "buildConfigurationList": [ - 0, - XCConfigurationList, - 1, - 1, - XCConfigurationList(), - ], - "buildPhases": [1, XCBuildPhase, 1, 1, []], - "dependencies": [1, PBXTargetDependency, 1, 1, []], - "name": [0, str, 0, 1], - "productName": [0, str, 0, 1], - } - ) - - def __init__( - self, - properties=None, - id=None, - parent=None, - force_outdir=None, - force_prefix=None, - force_extension=None, - ): - # super - XCRemoteObject.__init__(self, properties, id, parent) - - # Set up additional defaults not expressed in the schema. If a "name" - # property was supplied, set "productName" if it is not present. Also set - # the "PRODUCT_NAME" build setting in each configuration, but only if - # the setting is not present in any build configuration. - if "name" in self._properties: - if "productName" not in self._properties: - self.SetProperty("productName", self._properties["name"]) - - if "productName" in self._properties: - if "buildConfigurationList" in self._properties: - configs = self._properties["buildConfigurationList"] - if configs.HasBuildSetting("PRODUCT_NAME") == 0: - configs.SetBuildSetting( - "PRODUCT_NAME", self._properties["productName"] - ) - - def AddDependency(self, other): - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject == other_pbxproject: - # Add a dependency to another target in the same project file. - container = PBXContainerItemProxy( - { - "containerPortal": pbxproject, - "proxyType": 1, - "remoteGlobalIDString": other, - "remoteInfo": other.Name(), - } - ) - dependency = PBXTargetDependency( - {"target": other, "targetProxy": container} - ) - self.AppendProperty("dependencies", dependency) - else: - # Add a dependency to a target in a different project file. - other_project_ref = pbxproject.AddOrGetProjectReference(other_pbxproject)[1] - container = PBXContainerItemProxy( - { - "containerPortal": other_project_ref, - "proxyType": 1, - "remoteGlobalIDString": other, - "remoteInfo": other.Name(), - } - ) - dependency = PBXTargetDependency( - {"name": other.Name(), "targetProxy": container} - ) - self.AppendProperty("dependencies", dependency) - - # Proxy all of these through to the build configuration list. - - def ConfigurationNamed(self, name): - return self._properties["buildConfigurationList"].ConfigurationNamed(name) - - def DefaultConfiguration(self): - return self._properties["buildConfigurationList"].DefaultConfiguration() - - def HasBuildSetting(self, key): - return self._properties["buildConfigurationList"].HasBuildSetting(key) - - def GetBuildSetting(self, key): - return self._properties["buildConfigurationList"].GetBuildSetting(key) - - def SetBuildSetting(self, key, value): - return self._properties["buildConfigurationList"].SetBuildSetting(key, value) - - def AppendBuildSetting(self, key, value): - return self._properties["buildConfigurationList"].AppendBuildSetting(key, value) - - def DelBuildSetting(self, key): - return self._properties["buildConfigurationList"].DelBuildSetting(key) - - -# Redefine the type of the "target" property. See PBXTargetDependency._schema -# above. 
-PBXTargetDependency._schema["target"][1] = XCTarget - - -class PBXNativeTarget(XCTarget): - # buildPhases is overridden in the schema to be able to set defaults. - # - # NOTE: Contrary to most objects, it is advisable to set parent when - # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject - # object. A parent reference is required for a PBXNativeTarget during - # construction to be able to set up the target defaults for productReference, - # because a PBXBuildFile object must be created for the target and it must - # be added to the PBXProject's mainGroup hierarchy. - _schema = XCTarget._schema.copy() - _schema.update( - { - "buildPhases": [ - 1, - XCBuildPhase, - 1, - 1, - [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()], - ], - "buildRules": [1, PBXBuildRule, 1, 1, []], - "productReference": [0, PBXFileReference, 0, 1], - "productType": [0, str, 0, 1], - } - ) - - # Mapping from Xcode product-types to settings. The settings are: - # filetype : used for explicitFileType in the project file - # prefix : the prefix for the file name - # suffix : the suffix for the file name - _product_filetypes = { - "com.apple.product-type.application": ["wrapper.application", "", ".app"], - "com.apple.product-type.application.watchapp": [ - "wrapper.application", - "", - ".app", - ], - "com.apple.product-type.watchkit-extension": [ - "wrapper.app-extension", - "", - ".appex", - ], - "com.apple.product-type.app-extension": ["wrapper.app-extension", "", ".appex"], - "com.apple.product-type.bundle": ["wrapper.cfbundle", "", ".bundle"], - "com.apple.product-type.framework": ["wrapper.framework", "", ".framework"], - "com.apple.product-type.library.dynamic": [ - "compiled.mach-o.dylib", - "lib", - ".dylib", - ], - "com.apple.product-type.library.static": ["archive.ar", "lib", ".a"], - "com.apple.product-type.tool": ["compiled.mach-o.executable", "", ""], - "com.apple.product-type.bundle.unit-test": ["wrapper.cfbundle", "", ".xctest"], - "com.apple.product-type.bundle.ui-testing": ["wrapper.cfbundle", "", ".xctest"], - "com.googlecode.gyp.xcode.bundle": ["compiled.mach-o.dylib", "", ".so"], - "com.apple.product-type.kernel-extension": ["wrapper.kext", "", ".kext"], - } - - def __init__( - self, - properties=None, - id=None, - parent=None, - force_outdir=None, - force_prefix=None, - force_extension=None, - ): - # super - XCTarget.__init__(self, properties, id, parent) - - if ( - "productName" in self._properties - and "productType" in self._properties - and "productReference" not in self._properties - and self._properties["productType"] in self._product_filetypes - ): - products_group = None - pbxproject = self.PBXProjectAncestor() - if pbxproject is not None: - products_group = pbxproject.ProductsGroup() - - if products_group is not None: - (filetype, prefix, suffix) = self._product_filetypes[ - self._properties["productType"] - ] - # Xcode does not have a distinct type for loadable modules that are - # pure BSD targets (not in a bundle wrapper). GYP allows such modules - # to be specified by setting a target type to loadable_module without - # having mac_bundle set. These are mapped to the pseudo-product type - # com.googlecode.gyp.xcode.bundle. - # - # By picking up this special type and converting it to a dynamic - # library (com.apple.product-type.library.dynamic) with fix-ups, - # single-file loadable modules can be produced. - # - # MACH_O_TYPE is changed to mh_bundle to produce the proper file type - # (as opposed to mh_dylib). 
In order for linking to succeed,
-                # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
-                # cleared. They are meaningless for type mh_bundle.
-                #
-                # Finally, the .so extension is forcibly applied over the default
-                # (.dylib), unless another forced extension is already selected.
-                # .dylib is plainly wrong, and .bundle is used by loadable_modules in
-                # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
-                # choice because it's used as the extension on many other systems that
-                # don't distinguish between linkable shared libraries and non-linkable
-                # loadable modules, but there's precedent: Python loadable modules on
-                # Mac OS X use an .so extension.
-                if self._properties["productType"] == "com.googlecode.gyp.xcode.bundle":
-                    self._properties[
-                        "productType"
-                    ] = "com.apple.product-type.library.dynamic"
-                    self.SetBuildSetting("MACH_O_TYPE", "mh_bundle")
-                    self.SetBuildSetting("DYLIB_CURRENT_VERSION", "")
-                    self.SetBuildSetting("DYLIB_COMPATIBILITY_VERSION", "")
-                    if force_extension is None:
-                        force_extension = suffix[1:]
-
-                if (
-                    self._properties["productType"]
-                    == "com.apple.product-type.bundle.unit-test"
-                    or self._properties["productType"]
-                    == "com.apple.product-type.bundle.ui-testing"
-                ):
-                    if force_extension is None:
-                        force_extension = suffix[1:]
-
-                if force_extension is not None:
-                    # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
-                    # Extension override.
-                    suffix = "." + force_extension
-                    if filetype.startswith("wrapper."):
-                        self.SetBuildSetting("WRAPPER_EXTENSION", force_extension)
-                    else:
-                        self.SetBuildSetting("EXECUTABLE_EXTENSION", force_extension)
-
-                    if filetype.startswith("compiled.mach-o.executable"):
-                        product_name = self._properties["productName"]
-                        product_name += suffix
-                        suffix = ""
-                        self.SetProperty("productName", product_name)
-                        self.SetBuildSetting("PRODUCT_NAME", product_name)
-
-                # Xcode handles most prefixes based on the target type, however there
-                # are exceptions. If a "BSD Dynamic Library" target is added in the
-                # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
-                # behavior.
-                if force_prefix is not None:
-                    prefix = force_prefix
-                if filetype.startswith("wrapper."):
-                    self.SetBuildSetting("WRAPPER_PREFIX", prefix)
-                else:
-                    self.SetBuildSetting("EXECUTABLE_PREFIX", prefix)
-
-                if force_outdir is not None:
-                    self.SetBuildSetting("TARGET_BUILD_DIR", force_outdir)
-
-                # TODO(tvl): Remove the below hack.
-                # http://code.google.com/p/gyp/issues/detail?id=122
-
-                # Some targets include the prefix in the target_name. These targets
-                # really should just add a product_name setting that doesn't include
-                # the prefix. For example:
-                # target_name = 'libevent', product_name = 'event'
-                # This check cleans up for them.
- product_name = self._properties["productName"] - prefix_len = len(prefix) - if prefix_len and (product_name[:prefix_len] == prefix): - product_name = product_name[prefix_len:] - self.SetProperty("productName", product_name) - self.SetBuildSetting("PRODUCT_NAME", product_name) - - ref_props = { - "explicitFileType": filetype, - "includeInIndex": 0, - "path": prefix + product_name + suffix, - "sourceTree": "BUILT_PRODUCTS_DIR", - } - file_ref = PBXFileReference(ref_props) - products_group.AppendChild(file_ref) - self.SetProperty("productReference", file_ref) - - def GetBuildPhaseByType(self, type): - if "buildPhases" not in self._properties: - return None - - the_phase = None - for phase in self._properties["buildPhases"]: - if isinstance(phase, type): - # Some phases may be present in multiples in a well-formed project file, - # but phases like PBXSourcesBuildPhase may only be present singly, and - # this function is intended as an aid to GetBuildPhaseByType. Loop - # over the entire list of phases and assert if more than one of the - # desired type is found. - assert the_phase is None - the_phase = phase - - return the_phase - - def HeadersPhase(self): - headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase) - if headers_phase is None: - headers_phase = PBXHeadersBuildPhase() - - # The headers phase should come before the resources, sources, and - # frameworks phases, if any. - insert_at = len(self._properties["buildPhases"]) - for index, phase in enumerate(self._properties["buildPhases"]): - if ( - isinstance(phase, PBXResourcesBuildPhase) - or isinstance(phase, PBXSourcesBuildPhase) - or isinstance(phase, PBXFrameworksBuildPhase) - ): - insert_at = index - break - - self._properties["buildPhases"].insert(insert_at, headers_phase) - headers_phase.parent = self - - return headers_phase - - def ResourcesPhase(self): - resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase) - if resources_phase is None: - resources_phase = PBXResourcesBuildPhase() - - # The resources phase should come before the sources and frameworks - # phases, if any. 
- insert_at = len(self._properties["buildPhases"]) - for index, phase in enumerate(self._properties["buildPhases"]): - if isinstance(phase, PBXSourcesBuildPhase) or isinstance( - phase, PBXFrameworksBuildPhase - ): - insert_at = index - break - - self._properties["buildPhases"].insert(insert_at, resources_phase) - resources_phase.parent = self - - return resources_phase - - def SourcesPhase(self): - sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase) - if sources_phase is None: - sources_phase = PBXSourcesBuildPhase() - self.AppendProperty("buildPhases", sources_phase) - - return sources_phase - - def FrameworksPhase(self): - frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase) - if frameworks_phase is None: - frameworks_phase = PBXFrameworksBuildPhase() - self.AppendProperty("buildPhases", frameworks_phase) - - return frameworks_phase - - def AddDependency(self, other): - # super - XCTarget.AddDependency(self, other) - - static_library_type = "com.apple.product-type.library.static" - shared_library_type = "com.apple.product-type.library.dynamic" - framework_type = "com.apple.product-type.framework" - if ( - isinstance(other, PBXNativeTarget) - and "productType" in self._properties - and self._properties["productType"] != static_library_type - and "productType" in other._properties - and ( - other._properties["productType"] == static_library_type - or ( - ( - other._properties["productType"] == shared_library_type - or other._properties["productType"] == framework_type - ) - and ( - (not other.HasBuildSetting("MACH_O_TYPE")) - or other.GetBuildSetting("MACH_O_TYPE") != "mh_bundle" - ) - ) - ) - ): - - file_ref = other.GetProperty("productReference") - - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject != other_pbxproject: - other_project_product_group = pbxproject.AddOrGetProjectReference( - other_pbxproject - )[0] - file_ref = other_project_product_group.GetChildByRemoteObject(file_ref) - - self.FrameworksPhase().AppendProperty( - "files", PBXBuildFile({"fileRef": file_ref}) - ) - - -class PBXAggregateTarget(XCTarget): - pass - - -class PBXProject(XCContainerPortal): - # A PBXProject is really just an XCObject, the XCContainerPortal thing is - # just to allow PBXProject to be used in the containerPortal property of - # PBXContainerItemProxy. - """ - - Attributes: - path: "sample.xcodeproj". TODO(mark) Document me! - _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each - value is a reference to the dict in the - projectReferences list associated with the keyed - PBXProject. 
- """ - - _schema = XCContainerPortal._schema.copy() - _schema.update( - { - "attributes": [0, dict, 0, 0], - "buildConfigurationList": [ - 0, - XCConfigurationList, - 1, - 1, - XCConfigurationList(), - ], - "compatibilityVersion": [0, str, 0, 1, "Xcode 3.2"], - "hasScannedForEncodings": [0, int, 0, 1, 1], - "mainGroup": [0, PBXGroup, 1, 1, PBXGroup()], - "projectDirPath": [0, str, 0, 1, ""], - "projectReferences": [1, dict, 0, 0], - "projectRoot": [0, str, 0, 1, ""], - "targets": [1, XCTarget, 1, 1, []], - } - ) - - def __init__(self, properties=None, id=None, parent=None, path=None): - self.path = path - self._other_pbxprojects = {} - # super - return XCContainerPortal.__init__(self, properties, id, parent) - - def Name(self): - name = self.path - if name[-10:] == ".xcodeproj": - name = name[:-10] - return posixpath.basename(name) - - def Path(self): - return self.path - - def Comment(self): - return "Project object" - - def Children(self): - # super - children = XCContainerPortal.Children(self) - - # Add children that the schema doesn't know about. Maybe there's a more - # elegant way around this, but this is the only case where we need to own - # objects in a dictionary (that is itself in a list), and three lines for - # a one-off isn't that big a deal. - if "projectReferences" in self._properties: - for reference in self._properties["projectReferences"]: - children.append(reference["ProductGroup"]) - - return children - - def PBXProjectAncestor(self): - return self - - def _GroupByName(self, name): - if "mainGroup" not in self._properties: - self.SetProperty("mainGroup", PBXGroup()) - - main_group = self._properties["mainGroup"] - group = main_group.GetChildByName(name) - if group is None: - group = PBXGroup({"name": name}) - main_group.AppendChild(group) - - return group - - # SourceGroup and ProductsGroup are created by default in Xcode's own - # templates. - def SourceGroup(self): - return self._GroupByName("Source") - - def ProductsGroup(self): - return self._GroupByName("Products") - - # IntermediatesGroup is used to collect source-like files that are generated - # by rules or script phases and are placed in intermediate directories such - # as DerivedSources. - def IntermediatesGroup(self): - return self._GroupByName("Intermediates") - - # FrameworksGroup and ProjectsGroup are top-level groups used to collect - # frameworks and projects. - def FrameworksGroup(self): - return self._GroupByName("Frameworks") - - def ProjectsGroup(self): - return self._GroupByName("Projects") - - def RootGroupForPath(self, path): - """Returns a PBXGroup child of this object to which path should be added. - - This method is intended to choose between SourceGroup and - IntermediatesGroup on the basis of whether path is present in a source - directory or an intermediates directory. For the purposes of this - determination, any path located within a derived file directory such as - PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates - directory. - - The returned value is a two-element tuple. The first element is the - PBXGroup, and the second element specifies whether that group should be - organized hierarchically (True) or as a single flat list (False). - """ - - # TODO(mark): make this a class variable and bind to self on call? - # Also, this list is nowhere near exhaustive. - # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by - # gyp.generator.xcode. There should probably be some way for that module - # to push the names in, rather than having to hard-code them here. 
- source_tree_groups = { - "DERIVED_FILE_DIR": (self.IntermediatesGroup, True), - "INTERMEDIATE_DIR": (self.IntermediatesGroup, True), - "PROJECT_DERIVED_FILE_DIR": (self.IntermediatesGroup, True), - "SHARED_INTERMEDIATE_DIR": (self.IntermediatesGroup, True), - } - - (source_tree, path) = SourceTreeAndPathFromPath(path) - if source_tree is not None and source_tree in source_tree_groups: - (group_func, hierarchical) = source_tree_groups[source_tree] - group = group_func() - return (group, hierarchical) - - # TODO(mark): make additional choices based on file extension. - - return (self.SourceGroup(), True) - - def AddOrGetFileInRootGroup(self, path): - """Returns a PBXFileReference corresponding to path in the correct group - according to RootGroupForPath's heuristics. - - If an existing PBXFileReference for path exists, it will be returned. - Otherwise, one will be created and returned. - """ - - (group, hierarchical) = self.RootGroupForPath(path) - return group.AddOrGetFileByPath(path, hierarchical) - - def RootGroupsTakeOverOnlyChildren(self, recurse=False): - """Calls TakeOverOnlyChild for all groups in the main group.""" - - for group in self._properties["mainGroup"]._properties["children"]: - if isinstance(group, PBXGroup): - group.TakeOverOnlyChild(recurse) - - def SortGroups(self): - # Sort the children of the mainGroup (like "Source" and "Products") - # according to their defined order. - self._properties["mainGroup"]._properties["children"] = sorted( - self._properties["mainGroup"]._properties["children"], - cmp=lambda x, y: x.CompareRootGroup(y), - ) - - # Sort everything else by putting group before files, and going - # alphabetically by name within sections of groups and files. SortGroup - # is recursive. - for group in self._properties["mainGroup"]._properties["children"]: - if not isinstance(group, PBXGroup): - continue - - if group.Name() == "Products": - # The Products group is a special case. Instead of sorting - # alphabetically, sort things in the order of the targets that - # produce the products. To do this, just build up a new list of - # products based on the targets. - products = [] - for target in self._properties["targets"]: - if not isinstance(target, PBXNativeTarget): - continue - product = target._properties["productReference"] - # Make sure that the product is already in the products group. - assert product in group._properties["children"] - products.append(product) - - # Make sure that this process doesn't miss anything that was already - # in the products group. - assert len(products) == len(group._properties["children"]) - group._properties["children"] = products - else: - group.SortGroup() - - def AddOrGetProjectReference(self, other_pbxproject): - """Add a reference to another project file (via PBXProject object) to this - one. - - Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in - this project file that contains a PBXReferenceProxy object for each - product of each PBXNativeTarget in the other project file. ProjectRef is - a PBXFileReference to the other project file. - - If this project file already references the other project file, the - existing ProductGroup and ProjectRef are returned. The ProductGroup will - still be updated if necessary. - """ - - if "projectReferences" not in self._properties: - self._properties["projectReferences"] = [] - - product_group = None - project_ref = None - - if other_pbxproject not in self._other_pbxprojects: - # This project file isn't yet linked to the other one. Establish the - # link. 
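A quick sketch of how the RootGroupForPath() heuristic above behaves; illustrative only, with hypothetical paths (SourceTreeAndPathFromPath splits a leading $(VAR) prefix off the path):

    project = PBXProject(path="sample.xcodeproj")

    # A recognized intermediates variable routes the file into the
    # "Intermediates" group; anything else defaults to "Source".
    group, hierarchical = project.RootGroupForPath("$(INTERMEDIATE_DIR)/gen.c")
    assert group is project.IntermediatesGroup() and hierarchical

    group, hierarchical = project.RootGroupForPath("src/foo.c")
    assert group is project.SourceGroup() and hierarchical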
-            product_group = PBXGroup({"name": "Products"})
-
-            # ProductGroup is strong.
-            product_group.parent = self
-
-            # There's nothing unique about this PBXGroup, and if left alone, it will
-            # wind up with the same set of hashables as all other PBXGroup objects
-            # owned by the projectReferences list. Add the hashables of the
-            # remote PBXProject that it's related to.
-            product_group._hashables.extend(other_pbxproject.Hashables())
-
-            # The other project reports its path as relative to the same directory
-            # that this project's path is relative to. The other project's path
-            # is not necessarily already relative to this project. Figure out the
-            # pathname that this project needs to use to refer to the other one.
-            this_path = posixpath.dirname(self.Path())
-            projectDirPath = self.GetProperty("projectDirPath")
-            if projectDirPath:
-                if posixpath.isabs(projectDirPath[0]):
-                    this_path = projectDirPath
-                else:
-                    this_path = posixpath.join(this_path, projectDirPath)
-            other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
-
-            # ProjectRef is weak (it's owned by the mainGroup hierarchy).
-            project_ref = PBXFileReference(
-                {
-                    "lastKnownFileType": "wrapper.pb-project",
-                    "path": other_path,
-                    "sourceTree": "SOURCE_ROOT",
-                }
-            )
-            self.ProjectsGroup().AppendChild(project_ref)
-
-            ref_dict = {"ProductGroup": product_group, "ProjectRef": project_ref}
-            self._other_pbxprojects[other_pbxproject] = ref_dict
-            self.AppendProperty("projectReferences", ref_dict)
-
-            # Xcode seems to sort this list case-insensitively
-            self._properties["projectReferences"] = sorted(
-                self._properties["projectReferences"],
-                cmp=lambda x, y: cmp(
-                    x["ProjectRef"].Name().lower(), y["ProjectRef"].Name().lower()
-                ),
-            )
-        else:
-            # The link already exists. Pull out the relevant data.
-            project_ref_dict = self._other_pbxprojects[other_pbxproject]
-            product_group = project_ref_dict["ProductGroup"]
-            project_ref = project_ref_dict["ProjectRef"]
-
-        self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
-
-        inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
-        targets = other_pbxproject.GetProperty("targets")
-        if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
-            dir_path = project_ref._properties["path"]
-            product_group._hashables.extend(dir_path)
-
-        return [product_group, project_ref]
-
-    def _AllSymrootsUnique(self, target, inherit_unique_symroot):
-        # Returns True if all configurations have a unique 'SYMROOT' attribute.
-        # The value of inherit_unique_symroot decides whether a configuration
-        # that doesn't define an explicit value for 'SYMROOT' is assumed to
-        # inherit a unique 'SYMROOT' attribute from its parent.
-        symroots = self._DefinedSymroots(target)
-        for s in self._DefinedSymroots(target):
-            if (
-                s is not None
-                and not self._IsUniqueSymrootForTarget(s)
-                or s is None
-                and not inherit_unique_symroot
-            ):
-                return False
-        return True if symroots else inherit_unique_symroot
-
-    def _DefinedSymroots(self, target):
-        # Returns all values for the 'SYMROOT' attribute defined in all
-        # configurations for this target. If any configuration doesn't define the
-        # 'SYMROOT' attribute, None is added to the returned set. If no
-        # configuration defines the 'SYMROOT' attribute, an empty set is
-        # returned.
- config_list = target.GetProperty("buildConfigurationList") - symroots = set() - for config in config_list.GetProperty("buildConfigurations"): - setting = config.GetProperty("buildSettings") - if "SYMROOT" in setting: - symroots.add(setting["SYMROOT"]) - else: - symroots.add(None) - if len(symroots) == 1 and None in symroots: - return set() - return symroots - - def _IsUniqueSymrootForTarget(self, symroot): - # This method returns True if all configurations in target contain a - # 'SYMROOT' attribute that is unique for the given target. A value is - # unique, if the Xcode macro '$SRCROOT' appears in it in any form. - uniquifier = ["$SRCROOT", "$(SRCROOT)"] - if any(x in symroot for x in uniquifier): - return True - return False - - def _SetUpProductReferences(self, other_pbxproject, product_group, project_ref): - # TODO(mark): This only adds references to products in other_pbxproject - # when they don't exist in this pbxproject. Perhaps it should also - # remove references from this pbxproject that are no longer present in - # other_pbxproject. Perhaps it should update various properties if they - # change. - for target in other_pbxproject._properties["targets"]: - if not isinstance(target, PBXNativeTarget): - continue - - other_fileref = target._properties["productReference"] - if product_group.GetChildByRemoteObject(other_fileref) is None: - # Xcode sets remoteInfo to the name of the target and not the name - # of its product, despite this proxy being a reference to the product. - container_item = PBXContainerItemProxy( - { - "containerPortal": project_ref, - "proxyType": 2, - "remoteGlobalIDString": other_fileref, - "remoteInfo": target.Name(), - } - ) - # TODO(mark): Does sourceTree get copied straight over from the other - # project? Can the other project ever have lastKnownFileType here - # instead of explicitFileType? (Use it if so?) Can path ever be - # unset? (I don't think so.) Can other_fileref have name set, and - # does it impact the PBXReferenceProxy if so? These are the questions - # that perhaps will be answered one day. - reference_proxy = PBXReferenceProxy( - { - "fileType": other_fileref._properties["explicitFileType"], - "path": other_fileref._properties["path"], - "sourceTree": other_fileref._properties["sourceTree"], - "remoteRef": container_item, - } - ) - - product_group.AppendChild(reference_proxy) - - def SortRemoteProductReferences(self): - # For each remote project file, sort the associated ProductGroup in the - # same order that the targets are sorted in the remote project file. This - # is the sort order used by Xcode. - - def CompareProducts(x, y, remote_products): - # x and y are PBXReferenceProxy objects. Go through their associated - # PBXContainerItem to get the remote PBXFileReference, which will be - # present in the remote_products list. - x_remote = x._properties["remoteRef"]._properties["remoteGlobalIDString"] - y_remote = y._properties["remoteRef"]._properties["remoteGlobalIDString"] - x_index = remote_products.index(x_remote) - y_index = remote_products.index(y_remote) - - # Use the order of each remote PBXFileReference in remote_products to - # determine the sort order. - return cmp(x_index, y_index) - - for other_pbxproject, ref_dict in self._other_pbxprojects.items(): - # Build up a list of products in the remote project file, ordered the - # same as the targets that produce them. 
- remote_products = [] - for target in other_pbxproject._properties["targets"]: - if not isinstance(target, PBXNativeTarget): - continue - remote_products.append(target._properties["productReference"]) - - # Sort the PBXReferenceProxy children according to the list of remote - # products. - product_group = ref_dict["ProductGroup"] - product_group._properties["children"] = sorted( - product_group._properties["children"], - cmp=lambda x, y, rp=remote_products: CompareProducts(x, y, rp), - ) - - -class XCProjectFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update( - { - "archiveVersion": [0, int, 0, 1, 1], - "classes": [0, dict, 0, 1, {}], - "objectVersion": [0, int, 0, 1, 46], - "rootObject": [0, PBXProject, 1, 1], - } - ) - - def ComputeIDs(self, recursive=True, overwrite=True, hash=None): - # Although XCProjectFile is implemented here as an XCObject, it's not a - # proper object in the Xcode sense, and it certainly doesn't have its own - # ID. Pass through an attempt to update IDs to the real root object. - if recursive: - self._properties["rootObject"].ComputeIDs(recursive, overwrite, hash) - - def Print(self, file=sys.stdout): - self.VerifyHasRequiredProperties() - - # Add the special "objects" property, which will be caught and handled - # separately during printing. This structure allows a fairly standard - # loop do the normal printing. - self._properties["objects"] = {} - self._XCPrint(file, 0, "// !$*UTF8*$!\n") - if self._should_print_single_line: - self._XCPrint(file, 0, "{ ") - else: - self._XCPrint(file, 0, "{\n") - for property, value in sorted( - self._properties.items(), cmp=lambda x, y: cmp(x, y) - ): - if property == "objects": - self._PrintObjects(file) - else: - self._XCKVPrint(file, 1, property, value) - self._XCPrint(file, 0, "}\n") - del self._properties["objects"] - - def _PrintObjects(self, file): - if self._should_print_single_line: - self._XCPrint(file, 0, "objects = {") - else: - self._XCPrint(file, 1, "objects = {\n") - - objects_by_class = {} - for object in self.Descendants(): - if object == self: - continue - class_name = object.__class__.__name__ - if class_name not in objects_by_class: - objects_by_class[class_name] = [] - objects_by_class[class_name].append(object) - - for class_name in sorted(objects_by_class): - self._XCPrint(file, 0, "\n") - self._XCPrint(file, 0, "/* Begin " + class_name + " section */\n") - for object in sorted( - objects_by_class[class_name], cmp=lambda x, y: cmp(x.id, y.id) - ): - object.Print(file) - self._XCPrint(file, 0, "/* End " + class_name + " section */\n") - - if self._should_print_single_line: - self._XCPrint(file, 0, "}; ") - else: - self._XCPrint(file, 1, "};\n") diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py deleted file mode 100644 index 0a945322b4587..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Applies a fix to CR LF TAB handling in xml.dom. - -Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293 -Working around this: http://bugs.python.org/issue5752 -TODO(bradnelson): Consider dropping this when we drop XP support. 
-""" - - -import xml.dom.minidom - - -def _Replacement_write_data(writer, data, is_attrib=False): - """Writes datachars to writer.""" - data = data.replace("&", "&").replace("<", "<") - data = data.replace('"', """).replace(">", ">") - if is_attrib: - data = data.replace("\r", " ").replace("\n", " ").replace("\t", " ") - writer.write(data) - - -def _Replacement_writexml(self, writer, indent="", addindent="", newl=""): - # indent = current indentation - # addindent = indentation to add to higher levels - # newl = newline string - writer.write(indent + "<" + self.tagName) - - attrs = self._get_attributes() - a_names = sorted(attrs.keys()) - - for a_name in a_names: - writer.write(' %s="' % a_name) - _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True) - writer.write('"') - if self.childNodes: - writer.write(">%s" % newl) - for node in self.childNodes: - node.writexml(writer, indent + addindent, addindent, newl) - writer.write("%s%s" % (indent, self.tagName, newl)) - else: - writer.write("/>%s" % newl) - - -class XmlFix(object): - """Object to manage temporary patching of xml.dom.minidom.""" - - def __init__(self): - # Preserve current xml.dom.minidom functions. - self.write_data = xml.dom.minidom._write_data - self.writexml = xml.dom.minidom.Element.writexml - # Inject replacement versions of a function and a method. - xml.dom.minidom._write_data = _Replacement_write_data - xml.dom.minidom.Element.writexml = _Replacement_writexml - - def Cleanup(self): - if self.write_data: - xml.dom.minidom._write_data = self.write_data - xml.dom.minidom.Element.writexml = self.writexml - self.write_data = None - - def __del__(self): - self.Cleanup() diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/requirements_dev.txt b/node_modules/libnpmexec/node_modules/node-gyp/gyp/requirements_dev.txt deleted file mode 100644 index 28ecacab60293..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/requirements_dev.txt +++ /dev/null @@ -1,2 +0,0 @@ -flake8 -pytest diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/setup.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/setup.py deleted file mode 100644 index d1869c1b52055..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/setup.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -from os import path - -from setuptools import setup - -here = path.abspath(path.dirname(__file__)) -# Get the long description from the README file -with open(path.join(here, "README.md")) as in_file: - long_description = in_file.read() - -setup( - name="gyp-next", - version="0.6.2", - description="A fork of the GYP build system for use in the Node.js projects", - long_description=long_description, - long_description_content_type="text/markdown", - author="Node.js contributors", - author_email="ryzokuken@disroot.org", - url="https://github.com/nodejs/gyp-next", - package_dir={"": "pylib"}, - packages=["gyp", "gyp.generator"], - entry_points={"console_scripts": ["gyp=gyp:script_main"]}, - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", - classifiers=[ - "Development Status :: 3 - Alpha", - "Environment :: Console", - "Intended Audience :: Developers", - "License :: OSI Approved :: BSD License", - "Natural Language :: English", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - ], -) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/test_gyp.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/test_gyp.py deleted file mode 100755 index 382e75272d8b6..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/test_gyp.py +++ /dev/null @@ -1,268 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
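The entry_points stanza in the setup.py above is what gives an installed gyp-next its gyp command: the generated console script simply calls gyp.script_main(). A hedged sketch of an equivalent manual launcher:

    import sys

    import gyp  # the package installed from pylib/ above

    if __name__ == "__main__":
        # Same call the generated "gyp" console script performs.
        sys.exit(gyp.script_main())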
- -"""gyptest.py -- test runner for GYP tests.""" - -from __future__ import print_function - -import argparse -import os -import platform -import subprocess -import sys -import time - - -def is_test_name(f): - return f.startswith("gyptest") and f.endswith(".py") - - -def find_all_gyptest_files(directory): - result = [] - for root, dirs, files in os.walk(directory): - result.extend([os.path.join(root, f) for f in files if is_test_name(f)]) - result.sort() - return result - - -def main(argv=None): - if argv is None: - argv = sys.argv - - parser = argparse.ArgumentParser() - parser.add_argument("-a", "--all", action="store_true", help="run all tests") - parser.add_argument("-C", "--chdir", action="store", help="change to directory") - parser.add_argument( - "-f", - "--format", - action="store", - default="", - help="run tests with the specified formats", - ) - parser.add_argument( - "-G", - "--gyp_option", - action="append", - default=[], - help="Add -G options to the gyp command line", - ) - parser.add_argument( - "-l", "--list", action="store_true", help="list available tests and exit" - ) - parser.add_argument( - "-n", - "--no-exec", - action="store_true", - help="no execute, just print the command line", - ) - parser.add_argument( - "--path", action="append", default=[], help="additional $PATH directory" - ) - parser.add_argument( - "-q", - "--quiet", - action="store_true", - help="quiet, don't print anything unless there are failures", - ) - parser.add_argument( - "-v", - "--verbose", - action="store_true", - help="print configuration info and test results.", - ) - parser.add_argument("tests", nargs="*") - args = parser.parse_args(argv[1:]) - - if args.chdir: - os.chdir(args.chdir) - - if args.path: - extra_path = [os.path.abspath(p) for p in args.path] - extra_path = os.pathsep.join(extra_path) - os.environ["PATH"] = extra_path + os.pathsep + os.environ["PATH"] - - if not args.tests: - if not args.all: - sys.stderr.write("Specify -a to get all tests.\n") - return 1 - args.tests = ["test"] - - tests = [] - for arg in args.tests: - if os.path.isdir(arg): - tests.extend(find_all_gyptest_files(os.path.normpath(arg))) - else: - if not is_test_name(os.path.basename(arg)): - print(arg, "is not a valid gyp test name.", file=sys.stderr) - sys.exit(1) - tests.append(arg) - - if args.list: - for test in tests: - print(test) - sys.exit(0) - - os.environ["PYTHONPATH"] = os.path.abspath("test/lib") - - if args.verbose: - print_configuration_info() - - if args.gyp_option and not args.quiet: - print("Extra Gyp options: %s\n" % args.gyp_option) - - if args.format: - format_list = args.format.split(",") - else: - format_list = { - "aix5": ["make"], - "freebsd7": ["make"], - "freebsd8": ["make"], - "openbsd5": ["make"], - "cygwin": ["msvs"], - "win32": ["msvs", "ninja"], - "linux": ["make", "ninja"], - "linux2": ["make", "ninja"], - "linux3": ["make", "ninja"], - # TODO: Re-enable xcode-ninja. 
- # https://bugs.chromium.org/p/gyp/issues/detail?id=530 - # 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'], - "darwin": ["make", "ninja", "xcode"], - }[sys.platform] - - gyp_options = [] - for option in args.gyp_option: - gyp_options += ["-G", option] - - runner = Runner(format_list, tests, gyp_options, args.verbose) - runner.run() - - if not args.quiet: - runner.print_results() - - if runner.failures: - return 1 - else: - return 0 - - -def print_configuration_info(): - print("Test configuration:") - if sys.platform == "darwin": - sys.path.append(os.path.abspath("test/lib")) - import TestMac - - print(" Mac %s %s" % (platform.mac_ver()[0], platform.mac_ver()[2])) - print(" Xcode %s" % TestMac.Xcode.Version()) - elif sys.platform == "win32": - sys.path.append(os.path.abspath("pylib")) - import gyp.MSVSVersion - - print(" Win %s %s\n" % platform.win32_ver()[0:2]) - print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description()) - elif sys.platform in ("linux", "linux2"): - print(" Linux %s" % " ".join(platform.linux_distribution())) - print(" Python %s" % platform.python_version()) - print(" PYTHONPATH=%s" % os.environ["PYTHONPATH"]) - print() - - -class Runner(object): - def __init__(self, formats, tests, gyp_options, verbose): - self.formats = formats - self.tests = tests - self.verbose = verbose - self.gyp_options = gyp_options - self.failures = [] - self.num_tests = len(formats) * len(tests) - num_digits = len(str(self.num_tests)) - self.fmt_str = "[%%%dd/%%%dd] (%%s) %%s" % (num_digits, num_digits) - self.isatty = sys.stdout.isatty() and not self.verbose - self.env = os.environ.copy() - self.hpos = 0 - - def run(self): - run_start = time.time() - - i = 1 - for fmt in self.formats: - for test in self.tests: - self.run_test(test, fmt, i) - i += 1 - - if self.isatty: - self.erase_current_line() - - self.took = time.time() - run_start - - def run_test(self, test, fmt, i): - if self.isatty: - self.erase_current_line() - - msg = self.fmt_str % (i, self.num_tests, fmt, test) - self.print_(msg) - - start = time.time() - cmd = [sys.executable, test] + self.gyp_options - self.env["TESTGYP_FORMAT"] = fmt - proc = subprocess.Popen( - cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=self.env - ) - proc.wait() - took = time.time() - start - - stdout = proc.stdout.read().decode("utf8") - if proc.returncode == 2: - res = "skipped" - elif proc.returncode: - res = "failed" - self.failures.append("(%s) %s" % (test, fmt)) - else: - res = "passed" - res_msg = " %s %.3fs" % (res, took) - self.print_(res_msg) - - if ( - stdout - and not stdout.endswith("PASSED\n") - and not (stdout.endswith("NO RESULT\n")) - ): - print() - print("\n".join(" %s" % line for line in stdout.splitlines())) - elif not self.isatty: - print() - - def print_(self, msg): - print(msg, end="") - index = msg.rfind("\n") - if index == -1: - self.hpos += len(msg) - else: - self.hpos = len(msg) - index - sys.stdout.flush() - - def erase_current_line(self): - print("\b" * self.hpos + " " * self.hpos + "\b" * self.hpos, end="") - sys.stdout.flush() - self.hpos = 0 - - def print_results(self): - num_failures = len(self.failures) - if num_failures: - print() - if num_failures == 1: - print("Failed the following test:") - else: - print("Failed the following %d tests:" % num_failures) - print("\t" + "\n\t".join(sorted(self.failures))) - print() - print( - "Ran %d tests in %.3fs, %d failed." 
-            % (self.num_tests, self.took, num_failures)
-        )
-        print()
-
-
-if __name__ == "__main__":
-    sys.exit(main())
diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec b/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
deleted file mode 100644
index 85e2e268a51b7..0000000000000
--- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
-  gyp.pbfilespec
-  GYP source file spec for Xcode 3
-
-  There is not much documentation available regarding the format
-  of .pbfilespec files. As a starting point, see for instance the
-  outdated documentation at:
-  http://maxao.free.fr/xcode-plugin-interface/specifications.html
-  and the files in:
-  /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
-
-  Place this file in directory:
-  ~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-*/
-
-(
-  {
-    Identifier = sourcecode.gyp;
-    BasedOn = sourcecode;
-    Name = "GYP Files";
-    Extensions = ("gyp", "gypi");
-    MIMETypes = ("text/gyp");
-    Language = "xcode.lang.gyp";
-    IsTextFile = YES;
-    IsSourceFile = YES;
-  }
-)
diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec b/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
deleted file mode 100644
index 3b3506d319e0f..0000000000000
--- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
-  Copyright (c) 2011 Google Inc. All rights reserved.
-  Use of this source code is governed by a BSD-style license that can be
-  found in the LICENSE file.
-
-  gyp.xclangspec
-  GYP language specification for Xcode 3
-
-  There is not much documentation available regarding the format
-  of .xclangspec files. As a starting point, see for instance the
-  outdated documentation at:
-  http://maxao.free.fr/xcode-plugin-interface/specifications.html
-  and the files in:
-  /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
-
-  Place this file in directory:
-  ~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-*/
-
-(
-
-  {
-    Identifier = "xcode.lang.gyp.keyword";
-    Syntax = {
-      Words = (
-        "and",
-        "or",
-(defun gyp-invalidate-parse-states-after (target-point)
-  (while (> (caar gyp-parse-history) target-point)
-    (setq gyp-parse-history (cdr gyp-parse-history))))
-
-(defun gyp-parse-point ()
-  "The point of the last parse state added by gyp-parse-to."
-  (caar gyp-parse-history))
-
-(defun gyp-parse-sections ()
-  "A list of section symbols holding at the last parse state point."
-  (cdar gyp-parse-history))
-
-(defun gyp-inside-dictionary-p ()
-  "Predicate returning true if the parser is inside a dictionary."
-  (not (eq (cadar gyp-parse-history) 'list)))
-
-(defun gyp-add-parse-history (point sections)
-  "Add parse state SECTIONS to the parse history at POINT so that parsing can be
-   resumed instantly."
-  (while (>= (caar gyp-parse-history) point)
-    (setq gyp-parse-history (cdr gyp-parse-history)))
-  (setq gyp-parse-history (cons (cons point sections) gyp-parse-history)))
-
-(defun gyp-parse-to (target-point)
-  "Parses from (point) to TARGET-POINT adding the parse state information to
-   gyp-parse-state-history. Parsing stops if TARGET-POINT is reached or if a
-   string literal has been parsed.
Returns nil if no further parsing can be - done, otherwise returns the position of the start of a parsed string, leaving - the point at the end of the string." - (let ((parsing t) - string-start) - (while parsing - (setq string-start nil) - ;; Parse up to a character that starts a sexp, or if the nesting - ;; level decreases. - (let ((state (parse-partial-sexp (gyp-parse-point) - target-point - -1 - t)) - (sections (gyp-parse-sections))) - (if (= (nth 0 state) -1) - (setq sections (cdr sections)) ; pop out a level - (cond ((looking-at-p "['\"]") ; a string - (setq string-start (point)) - (goto-char (scan-sexps (point) 1)) - (if (gyp-inside-dictionary-p) - ;; Look for sections inside a dictionary - (let ((section (gyp-section-name - (buffer-substring-no-properties - (+ 1 string-start) - (- (point) 1))))) - (setq sections (cons section (cdr sections))))) - ;; Stop after the string so it can be fontified. - (setq target-point (point))) - ((looking-at-p "{") - ;; Inside a dictionary. Increase nesting. - (forward-char 1) - (setq sections (cons 'unknown sections))) - ((looking-at-p "\\[") - ;; Inside a list. Increase nesting - (forward-char 1) - (setq sections (cons 'list sections))) - ((not (eobp)) - ;; other - (forward-char 1)))) - (gyp-add-parse-history (point) sections) - (setq parsing (< (point) target-point)))) - string-start)) - -(defun gyp-section-at-point () - "Transform the last parse state, which is a list of nested sections and return - the section symbol that should be used to determine font-lock information for - the string. Can return nil indicating the string should not have any attached - section." - (let ((sections (gyp-parse-sections))) - (cond - ((eq (car sections) 'conditions) - ;; conditions can occur in a variables section, but we still want to - ;; highlight it as a keyword. - nil) - ((and (eq (car sections) 'list) - (eq (cadr sections) 'list)) - ;; conditions and sources can have items in [[ ]] - (caddr sections)) - (t (cadr sections))))) - -(defun gyp-section-match (limit) - "Parse from (point) to LIMIT returning by means of match data what was - matched. The group of the match indicates what style font-lock should apply. - See also `gyp-add-font-lock-keywords'." - (gyp-invalidate-parse-states-after (point)) - (let ((group nil) - (string-start t)) - (while (and (< (point) limit) - (not group) - string-start) - (setq string-start (gyp-parse-to limit)) - (if string-start - (setq group (case (gyp-section-at-point) - ('dependencies 1) - ('variables 2) - ('conditions 2) - ('sources 3) - ('defines 4) - (nil nil))))) - (if group - (progn - ;; Set the match data to indicate to the font-lock mechanism the - ;; highlighting to be performed. - (set-match-data (append (list string-start (point)) - (make-list (* (1- group) 2) nil) - (list (1+ string-start) (1- (point))))) - t)))) - -;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for -;;; canonical list of keywords. -(defun gyp-add-font-lock-keywords () - "Add gyp-mode keywords to font-lock mechanism." - ;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match - ;; so that we can do the font-locking in a single font-lock pass. 
-  (font-lock-add-keywords
-   nil
-   (list
-    ;; Top-level keywords
-    (list (concat "['\"]\\("
-                  (regexp-opt (list "action" "action_name" "actions" "cflags"
-                                    "cflags_cc" "conditions" "configurations"
-                                    "copies" "defines" "dependencies" "destination"
-                                    "direct_dependent_settings"
-                                    "export_dependent_settings" "extension" "files"
-                                    "include_dirs" "includes" "inputs" "ldflags" "libraries"
-                                    "link_settings" "mac_bundle" "message"
-                                    "msvs_external_rule" "outputs" "product_name"
-                                    "process_outputs_as_sources" "rules" "rule_name"
-                                    "sources" "suppress_wildcard"
-                                    "target_conditions" "target_defaults"
-                                    "target_defines" "target_name" "toolsets"
-                                    "targets" "type" "variables" "xcode_settings"))
-                  "[!/+=]?\\)") 1 'font-lock-keyword-face t)
-    ;; Type of target
-    (list (concat "['\"]\\("
-                  (regexp-opt (list "loadable_module" "static_library"
-                                    "shared_library" "executable" "none"))
-                  "\\)") 1 'font-lock-type-face t)
-    (list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1
-          'font-lock-function-name-face t)
-    (list 'gyp-section-match
-          (list 1 'font-lock-function-name-face t t) ; dependencies
-          (list 2 'font-lock-variable-name-face t t) ; variables, conditions
-          (list 3 'font-lock-constant-face t t) ; sources
-          (list 4 'font-lock-preprocessor-face t t)) ; preprocessor
-    ;; Variable expansion
-    (list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
-    ;; Command expansion
-    (list "<!@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
-    )))
-            print('  "%s" -> "%s"' % (src, dst))
-
-    print("}")
-
-
-def main():
-    if len(sys.argv) < 2:
-        print(__doc__, file=sys.stderr)
-        print(file=sys.stderr)
-        print("usage: %s target1 target2..." % (sys.argv[0]), file=sys.stderr)
-        return 1
-
-    edges = LoadEdges("dump.json", sys.argv[1:])
-
-    WriteGraph(edges)
-    return 0
-
-
-if __name__ == "__main__":
-    sys.exit(main())
diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_gyp.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_gyp.py
deleted file mode 100755
index 7313b4fe1b671..0000000000000
--- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_gyp.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Pretty-prints the contents of a GYP file."""
-
-from __future__ import print_function
-
-import sys
-import re
-
-
-# Regex to remove comments when we're counting braces.
-COMMENT_RE = re.compile(r"\s*#.*")
-
-# Regex to remove quoted strings when we're counting braces.
-# It takes into account quoted quotes, and makes sure that the quotes match.
-# NOTE: It does not handle quotes that span more than one line, or
-# cases where an escaped quote is preceded by an escaped backslash.
-QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
-QUOTE_RE = re.compile(QUOTE_RE_STR)
-
-    if cnt > 0:
-        after = True
-
-    # This catches the special case of a closing brace having something
-    # other than just whitespace ahead of it -- we don't want to
-    # unindent that until after this line is printed so it stays with
-    # the previous indentation level.
-    if cnt < 0 and closing_prefix_re.match(stripline):
-        after = True
-    return (cnt, after)
-
-
-def prettyprint_input(lines):
-    """Does the main work of indenting the input based on the brace counts."""
-    indent = 0
-    basic_offset = 2
-    for line in lines:
-        if COMMENT_RE.match(line):
-            print(line)
-        else:
-            line = line.strip("\r\n\t ")  # Otherwise doesn't strip \r on Unix.
- if len(line) > 0: - (brace_diff, after) = count_braces(line) - if brace_diff != 0: - if after: - print(" " * (basic_offset * indent) + line) - indent += brace_diff - else: - indent += brace_diff - print(" " * (basic_offset * indent) + line) - else: - print(" " * (basic_offset * indent) + line) - else: - print("") - - -def main(): - if len(sys.argv) > 1: - data = open(sys.argv[1]).read().splitlines() - else: - data = sys.stdin.read().splitlines() - # Split up the double braces. - lines = split_double_braces(data) - - # Indent and print the output. - prettyprint_input(lines) - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_sln.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_sln.py deleted file mode 100755 index 2b1cb1de74757..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_sln.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Prints the information in a sln file in a diffable way. - - It first outputs each projects in alphabetical order with their - dependencies. - - Then it outputs a possible build order. -""" - -from __future__ import print_function - -import os -import re -import sys -import pretty_vcproj - -__author__ = "nsylvain (Nicolas Sylvain)" - - -def BuildProject(project, built, projects, deps): - # if all dependencies are done, we can build it, otherwise we try to build the - # dependency. - # This is not infinite-recursion proof. - for dep in deps[project]: - if dep not in built: - BuildProject(dep, built, projects, deps) - print(project) - built.append(project) - - -def ParseSolution(solution_file): - # All projects, their clsid and paths. - projects = dict() - - # A list of dependencies associated with a project. - dependencies = dict() - - # Regular expressions that matches the SLN format. - # The first line of a project definition. - begin_project = re.compile( - r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942' - r'}"\) = "(.*)", "(.*)", "(.*)"$' - ) - # The last line of a project definition. - end_project = re.compile("^EndProject$") - # The first line of a dependency list. - begin_dep = re.compile(r"ProjectSection\(ProjectDependencies\) = postProject$") - # The last line of a dependency list. - end_dep = re.compile("EndProjectSection$") - # A line describing a dependency. - dep_line = re.compile(" *({.*}) = ({.*})$") - - in_deps = False - solution = open(solution_file) - for line in solution: - results = begin_project.search(line) - if results: - # Hack to remove icu because the diff is too different. - if results.group(1).find("icu") != -1: - continue - # We remove "_gyp" from the names because it helps to diff them. 
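BuildProject above prints a dependency-first build order by straight recursion, with no cycle guard ("not infinite-recursion proof", as its comment says). A rough JavaScript equivalent of that walk, assuming deps maps each project name to its dependency list:

// Sketch of pretty_sln.py's BuildProject: print every dependency before
// the project that needs it. A dependency cycle would recurse forever,
// matching the caveat in the original.
function buildProject (project, built, deps) {
  for (const dep of deps[project]) {
    if (!built.includes(dep)) {
      buildProject(dep, built, deps)
    }
  }
  console.log(project)
  built.push(project)
}

// Example: c depends on b, b depends on a -> prints a, b, c.
buildProject('c', [], { c: ['b'], b: ['a'], a: [] })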
- current_project = results.group(1).replace("_gyp", "") - projects[current_project] = [ - results.group(2).replace("_gyp", ""), - results.group(3), - results.group(2), - ] - dependencies[current_project] = [] - continue - - results = end_project.search(line) - if results: - current_project = None - continue - - results = begin_dep.search(line) - if results: - in_deps = True - continue - - results = end_dep.search(line) - if results: - in_deps = False - continue - - results = dep_line.search(line) - if results and in_deps and current_project: - dependencies[current_project].append(results.group(1)) - continue - - # Change all dependencies clsid to name instead. - for project in dependencies: - # For each dependencies in this project - new_dep_array = [] - for dep in dependencies[project]: - # Look for the project name matching this cldis - for project_info in projects: - if projects[project_info][1] == dep: - new_dep_array.append(project_info) - dependencies[project] = sorted(new_dep_array) - - return (projects, dependencies) - - -def PrintDependencies(projects, deps): - print("---------------------------------------") - print("Dependencies for all projects") - print("---------------------------------------") - print("-- --") - - for (project, dep_list) in sorted(deps.items()): - print("Project : %s" % project) - print("Path : %s" % projects[project][0]) - if dep_list: - for dep in dep_list: - print(" - %s" % dep) - print("") - - print("-- --") - - -def PrintBuildOrder(projects, deps): - print("---------------------------------------") - print("Build order ") - print("---------------------------------------") - print("-- --") - - built = [] - for (project, _) in sorted(deps.items()): - if project not in built: - BuildProject(project, built, projects, deps) - - print("-- --") - - -def PrintVCProj(projects): - - for project in projects: - print("-------------------------------------") - print("-------------------------------------") - print(project) - print(project) - print(project) - print("-------------------------------------") - print("-------------------------------------") - - project_path = os.path.abspath( - os.path.join(os.path.dirname(sys.argv[1]), projects[project][2]) - ) - - pretty = pretty_vcproj - argv = [ - "", - project_path, - "$(SolutionDir)=%s\\" % os.path.dirname(sys.argv[1]), - ] - argv.extend(sys.argv[3:]) - pretty.main(argv) - - -def main(): - # check if we have exactly 1 parameter. - if len(sys.argv) < 2: - print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]) - return 1 - - (projects, deps) = ParseSolution(sys.argv[1]) - PrintDependencies(projects, deps) - PrintBuildOrder(projects, deps) - - if "--recursive" in sys.argv: - PrintVCProj(projects) - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_vcproj.py b/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_vcproj.py deleted file mode 100755 index b171fae6cf444..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/gyp/tools/pretty_vcproj.py +++ /dev/null @@ -1,345 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Make the format of a vcproj really pretty. - - This script normalize and sort an xml. It also fetches all the properties - inside linked vsprops and include them explicitly in the vcproj. - - It outputs the resulting xml to stdout. 
-""" - -from __future__ import print_function - -import os -import sys - -from xml.dom.minidom import parse -from xml.dom.minidom import Node - -__author__ = "nsylvain (Nicolas Sylvain)" - -try: - cmp -except NameError: - - def cmp(x, y): - return (x > y) - (x < y) - - -REPLACEMENTS = dict() -ARGUMENTS = None - - -class CmpTuple(object): - """Compare function between 2 tuple.""" - - def __call__(self, x, y): - return cmp(x[0], y[0]) - - -class CmpNode(object): - """Compare function between 2 xml nodes.""" - - def __call__(self, x, y): - def get_string(node): - node_string = "node" - node_string += node.nodeName - if node.nodeValue: - node_string += node.nodeValue - - if node.attributes: - # We first sort by name, if present. - node_string += node.getAttribute("Name") - - all_nodes = [] - for (name, value) in node.attributes.items(): - all_nodes.append((name, value)) - - all_nodes.sort(CmpTuple()) - for (name, value) in all_nodes: - node_string += name - node_string += value - - return node_string - - return cmp(get_string(x), get_string(y)) - - -def PrettyPrintNode(node, indent=0): - if node.nodeType == Node.TEXT_NODE: - if node.data.strip(): - print("%s%s" % (" " * indent, node.data.strip())) - return - - if node.childNodes: - node.normalize() - # Get the number of attributes - attr_count = 0 - if node.attributes: - attr_count = node.attributes.length - - # Print the main tag - if attr_count == 0: - print("%s<%s>" % (" " * indent, node.nodeName)) - else: - print("%s<%s" % (" " * indent, node.nodeName)) - - all_attributes = [] - for (name, value) in node.attributes.items(): - all_attributes.append((name, value)) - all_attributes.sort(CmpTuple()) - for (name, value) in all_attributes: - print('%s %s="%s"' % (" " * indent, name, value)) - print("%s>" % (" " * indent)) - if node.nodeValue: - print("%s %s" % (" " * indent, node.nodeValue)) - - for sub_node in node.childNodes: - PrettyPrintNode(sub_node, indent=indent + 2) - print("%s" % (" " * indent, node.nodeName)) - - -def FlattenFilter(node): - """Returns a list of all the node and sub nodes.""" - node_list = [] - - if node.attributes and node.getAttribute("Name") == "_excluded_files": - # We don't add the "_excluded_files" filter. 
- return [] - - for current in node.childNodes: - if current.nodeName == "Filter": - node_list.extend(FlattenFilter(current)) - else: - node_list.append(current) - - return node_list - - -def FixFilenames(filenames, current_directory): - new_list = [] - for filename in filenames: - if filename: - for key in REPLACEMENTS: - filename = filename.replace(key, REPLACEMENTS[key]) - os.chdir(current_directory) - filename = filename.strip("\"' ") - if filename.startswith("$"): - new_list.append(filename) - else: - new_list.append(os.path.abspath(filename)) - return new_list - - -def AbsoluteNode(node): - """Makes all the properties we know about in this node absolute.""" - if node.attributes: - for (name, value) in node.attributes.items(): - if name in [ - "InheritedPropertySheets", - "RelativePath", - "AdditionalIncludeDirectories", - "IntermediateDirectory", - "OutputDirectory", - "AdditionalLibraryDirectories", - ]: - # We want to fix up these paths - path_list = value.split(";") - new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1])) - node.setAttribute(name, ";".join(new_list)) - if not value: - node.removeAttribute(name) - - -def CleanupVcproj(node): - """For each sub node, we call recursively this function.""" - for sub_node in node.childNodes: - AbsoluteNode(sub_node) - CleanupVcproj(sub_node) - - # Normalize the node, and remove all extraneous whitespaces. - for sub_node in node.childNodes: - if sub_node.nodeType == Node.TEXT_NODE: - sub_node.data = sub_node.data.replace("\r", "") - sub_node.data = sub_node.data.replace("\n", "") - sub_node.data = sub_node.data.rstrip() - - # Fix all the semicolon separated attributes to be sorted, and we also - # remove the dups. - if node.attributes: - for (name, value) in node.attributes.items(): - sorted_list = sorted(value.split(";")) - unique_list = [] - for i in sorted_list: - if not unique_list.count(i): - unique_list.append(i) - node.setAttribute(name, ";".join(unique_list)) - if not value: - node.removeAttribute(name) - - if node.childNodes: - node.normalize() - - # For each node, take a copy, and remove it from the list. - node_array = [] - while node.childNodes and node.childNodes[0]: - # Take a copy of the node and remove it from the list. - current = node.childNodes[0] - node.removeChild(current) - - # If the child is a filter, we want to append all its children - # to this same list. - if current.nodeName == "Filter": - node_array.extend(FlattenFilter(current)) - else: - node_array.append(current) - - # Sort the list. - node_array.sort(CmpNode()) - - # Insert the nodes in the correct order. - for new_node in node_array: - # But don't append empty tool node. - if new_node.nodeName == "Tool": - if new_node.attributes and new_node.attributes.length == 1: - # This one was empty. - continue - if new_node.nodeName == "UserMacro": - continue - node.appendChild(new_node) - - -def GetConfiguationNodes(vcproj): - # TODO(nsylvain): Find a better way to navigate the xml. - nodes = [] - for node in vcproj.childNodes: - if node.nodeName == "Configurations": - for sub_node in node.childNodes: - if sub_node.nodeName == "Configuration": - nodes.append(sub_node) - - return nodes - - -def GetChildrenVsprops(filename): - dom = parse(filename) - if dom.documentElement.attributes: - vsprops = dom.documentElement.getAttribute("InheritedPropertySheets") - return FixFilenames(vsprops.split(";"), os.path.dirname(filename)) - return [] - - -def SeekToNode(node1, child2): - # A text node does not have properties. 
- if child2.nodeType == Node.TEXT_NODE: - return None - - # Get the name of the current node. - current_name = child2.getAttribute("Name") - if not current_name: - # There is no name. We don't know how to merge. - return None - - # Look through all the nodes to find a match. - for sub_node in node1.childNodes: - if sub_node.nodeName == child2.nodeName: - name = sub_node.getAttribute("Name") - if name == current_name: - return sub_node - - # No match. We give up. - return None - - -def MergeAttributes(node1, node2): - # No attributes to merge? - if not node2.attributes: - return - - for (name, value2) in node2.attributes.items(): - # Don't merge the 'Name' attribute. - if name == "Name": - continue - value1 = node1.getAttribute(name) - if value1: - # The attribute exist in the main node. If it's equal, we leave it - # untouched, otherwise we concatenate it. - if value1 != value2: - node1.setAttribute(name, ";".join([value1, value2])) - else: - # The attribute does not exist in the main node. We append this one. - node1.setAttribute(name, value2) - - # If the attribute was a property sheet attributes, we remove it, since - # they are useless. - if name == "InheritedPropertySheets": - node1.removeAttribute(name) - - -def MergeProperties(node1, node2): - MergeAttributes(node1, node2) - for child2 in node2.childNodes: - child1 = SeekToNode(node1, child2) - if child1: - MergeProperties(child1, child2) - else: - node1.appendChild(child2.cloneNode(True)) - - -def main(argv): - """Main function of this vcproj prettifier.""" - global ARGUMENTS - ARGUMENTS = argv - - # check if we have exactly 1 parameter. - if len(argv) < 2: - print( - 'Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' - "[key2=value2]" % argv[0] - ) - return 1 - - # Parse the keys - for i in range(2, len(argv)): - (key, value) = argv[i].split("=") - REPLACEMENTS[key] = value - - # Open the vcproj and parse the xml. - dom = parse(argv[1]) - - # First thing we need to do is find the Configuration Node and merge them - # with the vsprops they include. - for configuration_node in GetConfiguationNodes(dom.documentElement): - # Get the property sheets associated with this configuration. - vsprops = configuration_node.getAttribute("InheritedPropertySheets") - - # Fix the filenames to be absolute. - vsprops_list = FixFilenames( - vsprops.strip().split(";"), os.path.dirname(argv[1]) - ) - - # Extend the list of vsprops with all vsprops contained in the current - # vsprops. - for current_vsprops in vsprops_list: - vsprops_list.extend(GetChildrenVsprops(current_vsprops)) - - # Now that we have all the vsprops, we need to merge them. - for current_vsprops in vsprops_list: - MergeProperties(configuration_node, parse(current_vsprops).documentElement) - - # Now that everything is merged, we need to cleanup the xml. - CleanupVcproj(dom.documentElement) - - # Finally, we use the prett xml function to print the vcproj back to the - # user. 
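MergeAttributes above is the heart of the vsprops flattening: Name is never merged, a value that already exists and differs is concatenated with ';', and InheritedPropertySheets is dropped once its contents have been folded in. A condensed JavaScript sketch of those rules, with plain objects standing in for minidom nodes:

// Sketch of MergeAttributes from pretty_vcproj.py.
function mergeAttributes (target, source) {
  for (const [name, value2] of Object.entries(source)) {
    if (name === 'Name') continue // identity attribute, never merged
    const value1 = target[name]
    if (value1) {
      if (value1 !== value2) target[name] = value1 + ';' + value2 // concatenate
    } else {
      target[name] = value2 // absent in the main node: append
    }
    if (name === 'InheritedPropertySheets') delete target[name] // now useless
  }
}

const tool = { Name: 'VCCLCompilerTool', PreprocessorDefinitions: 'DEBUG' }
mergeAttributes(tool, { Name: 'ignored', PreprocessorDefinitions: 'UNICODE' })
// tool.PreprocessorDefinitions === 'DEBUG;UNICODE'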
- # print dom.toprettyxml(newl="\n") - PrettyPrintNode(dom.documentElement) - return 0 - - -if __name__ == "__main__": - sys.exit(main(sys.argv)) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/Find-VisualStudio.cs b/node_modules/libnpmexec/node_modules/node-gyp/lib/Find-VisualStudio.cs deleted file mode 100644 index d2e45a76275f5..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/Find-VisualStudio.cs +++ /dev/null @@ -1,250 +0,0 @@ -// Copyright 2017 - Refael Ackermann -// Distributed under MIT style license -// See accompanying file LICENSE at https://github.com/node4good/windows-autoconf - -// Usage: -// powershell -ExecutionPolicy Unrestricted -Command "Add-Type -Path Find-VisualStudio.cs; [VisualStudioConfiguration.Main]::PrintJson()" -// This script needs to be compatible with PowerShell v2 to run on Windows 2008R2 and Windows 7. - -using System; -using System.Text; -using System.Runtime.InteropServices; -using System.Collections.Generic; - -namespace VisualStudioConfiguration -{ - [Flags] - public enum InstanceState : uint - { - None = 0, - Local = 1, - Registered = 2, - NoRebootRequired = 4, - NoErrors = 8, - Complete = 4294967295, - } - - [Guid("6380BCFF-41D3-4B2E-8B2E-BF8A6810C848")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface IEnumSetupInstances - { - - void Next([MarshalAs(UnmanagedType.U4), In] int celt, - [MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.Interface), Out] ISetupInstance[] rgelt, - [MarshalAs(UnmanagedType.U4)] out int pceltFetched); - - void Skip([MarshalAs(UnmanagedType.U4), In] int celt); - - void Reset(); - - [return: MarshalAs(UnmanagedType.Interface)] - IEnumSetupInstances Clone(); - } - - [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupConfiguration - { - } - - [Guid("26AAB78C-4A60-49D6-AF3B-3C35BC93365D")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupConfiguration2 : ISetupConfiguration - { - - [return: MarshalAs(UnmanagedType.Interface)] - IEnumSetupInstances EnumInstances(); - - [return: MarshalAs(UnmanagedType.Interface)] - ISetupInstance GetInstanceForCurrentProcess(); - - [return: MarshalAs(UnmanagedType.Interface)] - ISetupInstance GetInstanceForPath([MarshalAs(UnmanagedType.LPWStr), In] string path); - - [return: MarshalAs(UnmanagedType.Interface)] - IEnumSetupInstances EnumAllInstances(); - } - - [Guid("B41463C3-8866-43B5-BC33-2B0676F7F42E")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupInstance - { - } - - [Guid("89143C9A-05AF-49B0-B717-72E218A2185C")] - [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] - [ComImport] - public interface ISetupInstance2 : ISetupInstance - { - [return: MarshalAs(UnmanagedType.BStr)] - string GetInstanceId(); - - [return: MarshalAs(UnmanagedType.Struct)] - System.Runtime.InteropServices.ComTypes.FILETIME GetInstallDate(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetInstallationName(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetInstallationPath(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetInstallationVersion(); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetDisplayName([MarshalAs(UnmanagedType.U4), In] int lcid); - - [return: MarshalAs(UnmanagedType.BStr)] - string GetDescription([MarshalAs(UnmanagedType.U4), In] int lcid); - - [return: MarshalAs(UnmanagedType.BStr)] - string 
ResolvePath([MarshalAs(UnmanagedType.LPWStr), In] string pwszRelativePath);
-
-        [return: MarshalAs(UnmanagedType.U4)]
-        InstanceState GetState();
-
-        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
-        ISetupPackageReference[] GetPackages();
-
-        ISetupPackageReference GetProduct();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetProductPath();
-
-        [return: MarshalAs(UnmanagedType.VariantBool)]
-        bool IsLaunchable();
-
-        [return: MarshalAs(UnmanagedType.VariantBool)]
-        bool IsComplete();
-
-        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
-        ISetupPropertyStore GetProperties();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetEnginePath();
-    }
-
-    [Guid("DA8D8A16-B2B6-4487-A2F1-594CCCCD6BF5")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface ISetupPackageReference
-    {
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetId();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetVersion();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetChip();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetLanguage();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetBranch();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetType();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetUniqueId();
-
-        [return: MarshalAs(UnmanagedType.VariantBool)]
-        bool GetIsExtension();
-    }
-
-    [Guid("c601c175-a3be-44bc-91f6-4568d230fc83")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface ISetupPropertyStore
-    {
-
-        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)]
-        string[] GetNames();
-
-        object GetValue([MarshalAs(UnmanagedType.LPWStr), In] string pwszName);
-    }
-
-    [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
-    [CoClass(typeof(SetupConfigurationClass))]
-    [ComImport]
-    public interface SetupConfiguration : ISetupConfiguration2, ISetupConfiguration
-    {
-    }
-
-    [Guid("177F0C4A-1CD3-4DE7-A32C-71DBBB9FA36D")]
-    [ClassInterface(ClassInterfaceType.None)]
-    [ComImport]
-    public class SetupConfigurationClass
-    {
-    }
-
-    public static class Main
-    {
-        public static void PrintJson()
-        {
-            ISetupConfiguration query = new SetupConfiguration();
-            ISetupConfiguration2 query2 = (ISetupConfiguration2)query;
-            IEnumSetupInstances e = query2.EnumAllInstances();
-
-            int pceltFetched;
-            ISetupInstance2[] rgelt = new ISetupInstance2[1];
-            List<string> instances = new List<string>();
-            while (true)
-            {
-                e.Next(1, rgelt, out pceltFetched);
-                if (pceltFetched <= 0)
-                {
-                    Console.WriteLine(String.Format("[{0}]", string.Join(",", instances.ToArray())));
-                    return;
-                }
-
-                try
-                {
-                    instances.Add(InstanceJson(rgelt[0]));
-                }
-                catch (COMException)
-                {
-                    // Ignore instances that can't be queried.
-                }
-            }
-        }
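PrintJson above emits a single JSON array of { path, version, packages } objects on stdout, which is the whole contract between this C# helper and node-gyp's find-visualstudio.js (deleted further below, where the PowerShell invocation and JSON.parse live). A minimal sketch of consuming that output; the sample values are invented for illustration:

// Sketch: parsing the array printed by [VisualStudioConfiguration.Main]::PrintJson().
const stdout = '[{"path":"C:\\\\VS2019","version":"16.4.29519.181",' +
  '"packages":["Microsoft.VisualStudio.VC.MSBuild.Base"]}]'
for (const info of JSON.parse(stdout)) {
  // path and version are strings; packages is an array of component ids.
  console.log(info.version, info.path, info.packages.length)
}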
-
-        private static string JsonString(string s)
-        {
-            return "\"" + s.Replace("\\", "\\\\").Replace("\"", "\\\"") + "\"";
-        }
-
-        private static string InstanceJson(ISetupInstance2 setupInstance2)
-        {
-            // Visual Studio component directory:
-            // https://docs.microsoft.com/en-us/visualstudio/install/workload-and-component-ids
-
-            StringBuilder json = new StringBuilder();
-            json.Append("{");
-
-            string path = JsonString(setupInstance2.GetInstallationPath());
-            json.Append(String.Format("\"path\":{0},", path));
-
-            string version = JsonString(setupInstance2.GetInstallationVersion());
-            json.Append(String.Format("\"version\":{0},", version));
-
-            List<string> packages = new List<string>();
-            foreach (ISetupPackageReference package in setupInstance2.GetPackages())
-            {
-                string id = JsonString(package.GetId());
-                packages.Add(id);
-            }
-            json.Append(String.Format("\"packages\":[{0}]", string.Join(",", packages.ToArray())));
-
-            json.Append("}");
-            return json.ToString();
-        }
-    }
-}
diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/build.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/build.js
deleted file mode 100644
index c2388fb348cc5..0000000000000
--- a/node_modules/libnpmexec/node_modules/node-gyp/lib/build.js
+++ /dev/null
@@ -1,204 +0,0 @@
-'use strict'
-
-const fs = require('graceful-fs')
-const path = require('path')
-const glob = require('glob')
-const log = require('npmlog')
-const which = require('which')
-const win = process.platform === 'win32'
-
-function build (gyp, argv, callback) {
-  var platformMake = 'make'
-  if (process.platform === 'aix') {
-    platformMake = 'gmake'
-  } else if (process.platform.indexOf('bsd') !== -1) {
-    platformMake = 'gmake'
-  } else if (win && argv.length > 0) {
-    argv = argv.map(function (target) {
-      return '/t:' + target
-    })
-  }
-
-  var makeCommand = gyp.opts.make || process.env.MAKE || platformMake
-  var command = win ? 'msbuild' : makeCommand
-  var jobs = gyp.opts.jobs || process.env.JOBS
-  var buildType
-  var config
-  var arch
-  var nodeDir
-  var guessedSolution
-
-  loadConfigGypi()
-
-  /**
-   * Load the "config.gypi" file that was generated during "configure".
-   */
-
-  function loadConfigGypi () {
-    var configPath = path.resolve('build', 'config.gypi')
-
-    fs.readFile(configPath, 'utf8', function (err, data) {
-      if (err) {
-        if (err.code === 'ENOENT') {
-          callback(new Error('You must run `node-gyp configure` first!'))
-        } else {
-          callback(err)
-        }
-        return
-      }
-      config = JSON.parse(data.replace(/#.+\n/, ''))
-
-      // get the 'arch', 'buildType', and 'nodeDir' vars from the config
-      buildType = config.target_defaults.default_configuration
-      arch = config.variables.target_arch
-      nodeDir = config.variables.nodedir
-
-      if ('debug' in gyp.opts) {
-        buildType = gyp.opts.debug ? 'Debug' : 'Release'
-      }
-      if (!buildType) {
-        buildType = 'Release'
-      }
-
-      log.verbose('build type', buildType)
-      log.verbose('architecture', arch)
-      log.verbose('node dev dir', nodeDir)
-
-      if (win) {
-        findSolutionFile()
-      } else {
-        doWhich()
-      }
-    })
-  }
-
-  /**
-   * On Windows, find the first build/*.sln file.
-   */
-
-  function findSolutionFile () {
-    glob('build/*.sln', function (err, files) {
-      if (err) {
-        return callback(err)
-      }
-      if (files.length === 0) {
-        return callback(new Error('Could not find *.sln file. Did you run "configure"?'))
-      }
-      guessedSolution = files[0]
-      log.verbose('found first Solution file', guessedSolution)
-      doWhich()
-    })
-  }
-
-  /**
-   * Uses node-which to locate the msbuild / make executable.
- */ - - function doWhich () { - // On Windows use msbuild provided by node-gyp configure - if (win) { - if (!config.variables.msbuild_path) { - return callback(new Error( - 'MSBuild is not set, please run `node-gyp configure`.')) - } - command = config.variables.msbuild_path - log.verbose('using MSBuild:', command) - doBuild() - return - } - // First make sure we have the build command in the PATH - which(command, function (err, execPath) { - if (err) { - // Some other error or 'make' not found on Unix, report that to the user - callback(err) - return - } - log.verbose('`which` succeeded for `' + command + '`', execPath) - doBuild() - }) - } - - /** - * Actually spawn the process and compile the module. - */ - - function doBuild () { - // Enable Verbose build - var verbose = log.levels[log.level] <= log.levels.verbose - var j - - if (!win && verbose) { - argv.push('V=1') - } - - if (win && !verbose) { - argv.push('/clp:Verbosity=minimal') - } - - if (win) { - // Turn off the Microsoft logo on Windows - argv.push('/nologo') - } - - // Specify the build type, Release by default - if (win) { - // Convert .gypi config target_arch to MSBuild /Platform - // Since there are many ways to state '32-bit Intel', default to it. - // N.B. msbuild's Condition string equality tests are case-insensitive. - var archLower = arch.toLowerCase() - var p = archLower === 'x64' ? 'x64' - : (archLower === 'arm' ? 'ARM' - : (archLower === 'arm64' ? 'ARM64' : 'Win32')) - argv.push('/p:Configuration=' + buildType + ';Platform=' + p) - if (jobs) { - j = parseInt(jobs, 10) - if (!isNaN(j) && j > 0) { - argv.push('/m:' + j) - } else if (jobs.toUpperCase() === 'MAX') { - argv.push('/m:' + require('os').cpus().length) - } - } - } else { - argv.push('BUILDTYPE=' + buildType) - // Invoke the Makefile in the 'build' dir. - argv.push('-C') - argv.push('build') - if (jobs) { - j = parseInt(jobs, 10) - if (!isNaN(j) && j > 0) { - argv.push('--jobs') - argv.push(j) - } else if (jobs.toUpperCase() === 'MAX') { - argv.push('--jobs') - argv.push(require('os').cpus().length) - } - } - } - - if (win) { - // did the user specify their own .sln file? - var hasSln = argv.some(function (arg) { - return path.extname(arg) === '.sln' - }) - if (!hasSln) { - argv.unshift(gyp.opts.solution || guessedSolution) - } - } - - var proc = gyp.spawn(command, argv) - proc.on('exit', onExit) - } - - function onExit (code, signal) { - if (code !== 0) { - return callback(new Error('`' + command + '` failed with exit code: ' + code)) - } - if (signal) { - return callback(new Error('`' + command + '` got signal: ' + signal)) - } - callback() - } -} - -module.exports = build -module.exports.usage = 'Invokes `' + (win ? 
'msbuild' : 'make') + '` and builds the module' diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/clean.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/clean.js deleted file mode 100644 index dbfa4dbb99d3c..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/clean.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict' - -const rm = require('rimraf') -const log = require('npmlog') - -function clean (gyp, argv, callback) { - // Remove the 'build' dir - var buildDir = 'build' - - log.verbose('clean', 'removing "%s" directory', buildDir) - rm(buildDir, callback) -} - -module.exports = clean -module.exports.usage = 'Removes any generated build files and the "out" dir' diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/configure.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/configure.js deleted file mode 100644 index d4342b9d76a60..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/configure.js +++ /dev/null @@ -1,386 +0,0 @@ -'use strict' - -const fs = require('graceful-fs') -const path = require('path') -const log = require('npmlog') -const os = require('os') -const processRelease = require('./process-release') -const win = process.platform === 'win32' -const findNodeDirectory = require('./find-node-directory') -const msgFormat = require('util').format -var findPython = require('./find-python') -if (win) { - var findVisualStudio = require('./find-visualstudio') -} - -function configure (gyp, argv, callback) { - var python - var buildDir = path.resolve('build') - var configNames = ['config.gypi', 'common.gypi'] - var configs = [] - var nodeDir - var release = processRelease(argv, gyp, process.version, process.release) - - findPython(gyp.opts.python, function (err, found) { - if (err) { - callback(err) - } else { - python = found - getNodeDir() - } - }) - - function getNodeDir () { - // 'python' should be set by now - process.env.PYTHON = python - - if (gyp.opts.nodedir) { - // --nodedir was specified. use that for the dev files - nodeDir = gyp.opts.nodedir.replace(/^~/, os.homedir()) - - log.verbose('get node dir', 'compiling against specified --nodedir dev files: %s', nodeDir) - createBuildDir() - } else { - // if no --nodedir specified, ensure node dependencies are installed - if ('v' + release.version !== process.version) { - // if --target was given, then determine a target version to compile for - log.verbose('get node dir', 'compiling against --target node version: %s', release.version) - } else { - // if no --target was specified then use the current host node version - log.verbose('get node dir', 'no --target version specified, falling back to host node version: %s', release.version) - } - - if (!release.semver) { - // could not parse the version string with semver - return callback(new Error('Invalid version number: ' + release.version)) - } - - // If the tarball option is set, always remove and reinstall the headers - // into devdir. Otherwise only install if they're not already there. 
-      gyp.opts.ensure = !gyp.opts.tarball
-
-      gyp.commands.install([release.version], function (err) {
-        if (err) {
-          return callback(err)
-        }
-        log.verbose('get node dir', 'target node version installed:', release.versionDir)
-        nodeDir = path.resolve(gyp.devDir, release.versionDir)
-        createBuildDir()
-      })
-    }
-  }
-
-  function createBuildDir () {
-    log.verbose('build dir', 'attempting to create "build" dir: %s', buildDir)
-    fs.mkdir(buildDir, { recursive: true }, function (err, isNew) {
-      if (err) {
-        return callback(err)
-      }
-      log.verbose('build dir', '"build" dir needed to be created?', isNew)
-      if (win) {
-        findVisualStudio(release.semver, gyp.opts.msvs_version,
-          createConfigFile)
-      } else {
-        createConfigFile()
-      }
-    })
-  }
-
-  function createConfigFile (err, vsInfo) {
-    if (err) {
-      return callback(err)
-    }
-
-    var configFilename = 'config.gypi'
-    var configPath = path.resolve(buildDir, configFilename)
-
-    log.verbose('build/' + configFilename, 'creating config file')
-
-    var config = process.config || {}
-    var defaults = config.target_defaults
-    var variables = config.variables
-
-    // default "config.variables"
-    if (!variables) {
-      variables = config.variables = {}
-    }
-
-    // default "config.defaults"
-    if (!defaults) {
-      defaults = config.target_defaults = {}
-    }
-
-    // don't inherit the "defaults" from node's `process.config` object.
-    // doing so could cause problems in cases where the `node` executable was
-    // compiled on a different machine (with different lib/include paths) than
-    // the machine where the addon is being built to
-    defaults.cflags = []
-    defaults.defines = []
-    defaults.include_dirs = []
-    defaults.libraries = []
-
-    // set the default_configuration prop
-    if ('debug' in gyp.opts) {
-      defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release'
-    }
-
-    if (!defaults.default_configuration) {
-      defaults.default_configuration = 'Release'
-    }
-
-    // set the target_arch variable
-    variables.target_arch = gyp.opts.arch || process.arch || 'ia32'
-    if (variables.target_arch === 'arm64') {
-      defaults.msvs_configuration_platform = 'ARM64'
-      defaults.xcode_configuration_platform = 'arm64'
-    }
-
-    // set the node development directory
-    variables.nodedir = nodeDir
-
-    // disable -T "thin" static archives by default
-    variables.standalone_static_library = gyp.opts.thin ? 0 : 1
-
-    if (win) {
-      process.env.GYP_MSVS_VERSION = Math.min(vsInfo.versionYear, 2015)
-      process.env.GYP_MSVS_OVERRIDE_PATH = vsInfo.path
-      defaults.msbuild_toolset = vsInfo.toolset
-      if (vsInfo.sdk) {
-        defaults.msvs_windows_target_platform_version = vsInfo.sdk
-      }
-      if (variables.target_arch === 'arm64') {
-        if (vsInfo.versionMajor > 15 ||
-            (vsInfo.versionMajor === 15 && vsInfo.versionMinor >= 9)) {
-          defaults.msvs_enable_marmasm = 1
-        } else {
-          log.warn('Compiling ARM64 assembly is only available in\n' +
-            'Visual Studio 2017 version 15.9 and above')
-        }
-      }
-      variables.msbuild_path = vsInfo.msBuild
-    }
-
-    // loop through the rest of the opts and add the unknown ones as variables.
- // this allows for module-specific configure flags like: - // - // $ node-gyp configure --shared-libxml2 - Object.keys(gyp.opts).forEach(function (opt) { - if (opt === 'argv') { - return - } - if (opt in gyp.configDefs) { - return - } - variables[opt.replace(/-/g, '_')] = gyp.opts[opt] - }) - - // ensures that any boolean values from `process.config` get stringified - function boolsToString (k, v) { - if (typeof v === 'boolean') { - return String(v) - } - return v - } - - log.silly('build/' + configFilename, config) - - // now write out the config.gypi file to the build/ dir - var prefix = '# Do not edit. File was generated by node-gyp\'s "configure" step' - - var json = JSON.stringify(config, boolsToString, 2) - log.verbose('build/' + configFilename, 'writing out config file: %s', configPath) - configs.push(configPath) - fs.writeFile(configPath, [prefix, json, ''].join('\n'), findConfigs) - } - - function findConfigs (err) { - if (err) { - return callback(err) - } - - var name = configNames.shift() - if (!name) { - return runGyp() - } - var fullPath = path.resolve(name) - - log.verbose(name, 'checking for gypi file: %s', fullPath) - fs.stat(fullPath, function (err) { - if (err) { - if (err.code === 'ENOENT') { - findConfigs() // check next gypi filename - } else { - callback(err) - } - } else { - log.verbose(name, 'found gypi file') - configs.push(fullPath) - findConfigs() - } - }) - } - - function runGyp (err) { - if (err) { - return callback(err) - } - - if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) { - if (win) { - log.verbose('gyp', 'gyp format was not specified; forcing "msvs"') - // force the 'make' target for non-Windows - argv.push('-f', 'msvs') - } else { - log.verbose('gyp', 'gyp format was not specified; forcing "make"') - // force the 'make' target for non-Windows - argv.push('-f', 'make') - } - } - - // include all the ".gypi" files that were found - configs.forEach(function (config) { - argv.push('-I', config) - }) - - // For AIX and z/OS we need to set up the path to the exports file - // which contains the symbols needed for linking. - var nodeExpFile - if (process.platform === 'aix' || process.platform === 'os390') { - var ext = process.platform === 'aix' ? 'exp' : 'x' - var nodeRootDir = findNodeDirectory() - var candidates - - if (process.platform === 'aix') { - candidates = [ - 'include/node/node', - 'out/Release/node', - 'out/Debug/node', - 'node' - ].map(function (file) { - return file + '.' + ext - }) - } else { - candidates = [ - 'out/Release/obj.target/libnode', - 'out/Debug/obj.target/libnode', - 'lib/libnode' - ].map(function (file) { - return file + '.' 
+ ext - }) - } - - var logprefix = 'find exports file' - nodeExpFile = findAccessibleSync(logprefix, nodeRootDir, candidates) - if (nodeExpFile !== undefined) { - log.verbose(logprefix, 'Found exports file: %s', nodeExpFile) - } else { - var msg = msgFormat('Could not find node.%s file in %s', ext, nodeRootDir) - log.error(logprefix, 'Could not find exports file') - return callback(new Error(msg)) - } - } - - // this logic ported from the old `gyp_addon` python file - var gypScript = path.resolve(__dirname, '..', 'gyp', 'gyp_main.py') - var addonGypi = path.resolve(__dirname, '..', 'addon.gypi') - var commonGypi = path.resolve(nodeDir, 'include/node/common.gypi') - fs.stat(commonGypi, function (err) { - if (err) { - commonGypi = path.resolve(nodeDir, 'common.gypi') - } - - var outputDir = 'build' - if (win) { - // Windows expects an absolute path - outputDir = buildDir - } - var nodeGypDir = path.resolve(__dirname, '..') - - var nodeLibFile = path.join(nodeDir, - !gyp.opts.nodedir ? '<(target_arch)' : '$(Configuration)', - release.name + '.lib') - - argv.push('-I', addonGypi) - argv.push('-I', commonGypi) - argv.push('-Dlibrary=shared_library') - argv.push('-Dvisibility=default') - argv.push('-Dnode_root_dir=' + nodeDir) - if (process.platform === 'aix' || process.platform === 'os390') { - argv.push('-Dnode_exp_file=' + nodeExpFile) - } - argv.push('-Dnode_gyp_dir=' + nodeGypDir) - - // Do this to keep Cygwin environments happy, else the unescaped '\' gets eaten up, - // resulting in bad paths, Ex c:parentFolderfolderanotherFolder instead of c:\parentFolder\folder\anotherFolder - if (win) { - nodeLibFile = nodeLibFile.replace(/\\/g, '\\\\') - } - argv.push('-Dnode_lib_file=' + nodeLibFile) - argv.push('-Dmodule_root_dir=' + process.cwd()) - argv.push('-Dnode_engine=' + - (gyp.opts.node_engine || process.jsEngine || 'v8')) - argv.push('--depth=.') - argv.push('--no-parallel') - - // tell gyp to write the Makefile/Solution files into output_dir - argv.push('--generator-output', outputDir) - - // tell make to write its output into the same dir - argv.push('-Goutput_dir=.') - - // enforce use of the "binding.gyp" file - argv.unshift('binding.gyp') - - // execute `gyp` from the current target nodedir - argv.unshift(gypScript) - - // make sure python uses files that came with this particular node package - var pypath = [path.join(__dirname, '..', 'gyp', 'pylib')] - if (process.env.PYTHONPATH) { - pypath.push(process.env.PYTHONPATH) - } - process.env.PYTHONPATH = pypath.join(win ? ';' : ':') - - var cp = gyp.spawn(python, argv) - cp.on('exit', onCpExit) - }) - } - - function onCpExit (code) { - if (code !== 0) { - callback(new Error('`gyp` failed with exit code: ' + code)) - } else { - // we're done - callback() - } - } -} - -/** - * Returns the first file or directory from an array of candidates that is - * readable by the current user, or undefined if none of the candidates are - * readable. 
- */ -function findAccessibleSync (logprefix, dir, candidates) { - for (var next = 0; next < candidates.length; next++) { - var candidate = path.resolve(dir, candidates[next]) - try { - var fd = fs.openSync(candidate, 'r') - } catch (e) { - // this candidate was not found or not readable, do nothing - log.silly(logprefix, 'Could not open %s: %s', candidate, e.message) - continue - } - fs.closeSync(fd) - log.silly(logprefix, 'Found readable %s', candidate) - return candidate - } - - return undefined -} - -module.exports = configure -module.exports.test = { - findAccessibleSync: findAccessibleSync -} -module.exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module' diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/find-node-directory.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/find-node-directory.js deleted file mode 100644 index 0dd781a6cfae8..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/find-node-directory.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict' - -const path = require('path') -const log = require('npmlog') - -function findNodeDirectory (scriptLocation, processObj) { - // set dirname and process if not passed in - // this facilitates regression tests - if (scriptLocation === undefined) { - scriptLocation = __dirname - } - if (processObj === undefined) { - processObj = process - } - - // Have a look to see what is above us, to try and work out where we are - var npmParentDirectory = path.join(scriptLocation, '../../../..') - log.verbose('node-gyp root', 'npm_parent_directory is ' + - path.basename(npmParentDirectory)) - var nodeRootDir = '' - - log.verbose('node-gyp root', 'Finding node root directory') - if (path.basename(npmParentDirectory) === 'deps') { - // We are in a build directory where this script lives in - // deps/npm/node_modules/node-gyp/lib - nodeRootDir = path.join(npmParentDirectory, '..') - log.verbose('node-gyp root', 'in build directory, root = ' + - nodeRootDir) - } else if (path.basename(npmParentDirectory) === 'node_modules') { - // We are in a node install directory where this script lives in - // lib/node_modules/npm/node_modules/node-gyp/lib or - // node_modules/npm/node_modules/node-gyp/lib depending on the - // platform - if (processObj.platform === 'win32') { - nodeRootDir = path.join(npmParentDirectory, '..') - } else { - nodeRootDir = path.join(npmParentDirectory, '../..') - } - log.verbose('node-gyp root', 'in install directory, root = ' + - nodeRootDir) - } else { - // We don't know where we are, try working it out from the location - // of the node binary - var nodeDir = path.dirname(processObj.execPath) - var directoryUp = path.basename(nodeDir) - if (directoryUp === 'bin') { - nodeRootDir = path.join(nodeDir, '..') - } else if (directoryUp === 'Release' || directoryUp === 'Debug') { - // If we are a recently built node, and the directory structure - // is that of a repository. If we are on Windows then we only need - // to go one level up, everything else, two - if (processObj.platform === 'win32') { - nodeRootDir = path.join(nodeDir, '..') - } else { - nodeRootDir = path.join(nodeDir, '../..') - } - } - // Else return the default blank, "". 
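findAccessibleSync above simply probes its candidates in order with fs.openSync and returns the first readable one, or undefined. A short usage sketch mirroring the AIX exports-file lookup from configure.js earlier in this diff, reached through the test export shown above (the root directory here is illustrative, and node-gyp is assumed to be resolvable):

// Sketch: how configure.js uses findAccessibleSync for the AIX exports file.
const { findAccessibleSync } = require('node-gyp/lib/configure').test
const candidates = [
  'include/node/node.exp',
  'out/Release/node.exp',
  'out/Debug/node.exp',
  'node.exp'
]
const found = findAccessibleSync('find exports file', '/opt/nodejs', candidates)
if (found === undefined) {
  throw new Error('Could not find node.exp file in /opt/nodejs')
}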
- } - return nodeRootDir -} - -module.exports = findNodeDirectory diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/find-python.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/find-python.js deleted file mode 100644 index af269de2fc6ca..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/find-python.js +++ /dev/null @@ -1,316 +0,0 @@ -'use strict' - -const path = require('path') -const log = require('npmlog') -const semver = require('semver') -const cp = require('child_process') -const extend = require('util')._extend // eslint-disable-line -const win = process.platform === 'win32' -const logWithPrefix = require('./util').logWithPrefix - -function PythonFinder (configPython, callback) { - this.callback = callback - this.configPython = configPython - this.errorLog = [] -} - -PythonFinder.prototype = { - log: logWithPrefix(log, 'find Python'), - argsExecutable: ['-c', 'import sys; print(sys.executable);'], - argsVersion: ['-c', 'import sys; print("%s.%s.%s" % sys.version_info[:3]);'], - semverRange: '2.7.x || >=3.5.0', - - // These can be overridden for testing: - execFile: cp.execFile, - env: process.env, - win: win, - pyLauncher: 'py.exe', - winDefaultLocations: [ - path.join(process.env.SystemDrive || 'C:', 'Python37', 'python.exe'), - path.join(process.env.SystemDrive || 'C:', 'Python27', 'python.exe') - ], - - // Logs a message at verbose level, but also saves it to be displayed later - // at error level if an error occurs. This should help diagnose the problem. - addLog: function addLog (message) { - this.log.verbose(message) - this.errorLog.push(message) - }, - - // Find Python by trying a sequence of possibilities. - // Ignore errors, keep trying until Python is found. - findPython: function findPython () { - const SKIP = 0; const FAIL = 1 - var toCheck = getChecks.apply(this) - - function getChecks () { - if (this.env.NODE_GYP_FORCE_PYTHON) { - return [{ - before: () => { - this.addLog( - 'checking Python explicitly set from NODE_GYP_FORCE_PYTHON') - this.addLog('- process.env.NODE_GYP_FORCE_PYTHON is ' + - `"${this.env.NODE_GYP_FORCE_PYTHON}"`) - }, - check: this.checkCommand, - arg: this.env.NODE_GYP_FORCE_PYTHON - }] - } - - var checks = [ - { - before: () => { - if (!this.configPython) { - this.addLog( - 'Python is not set from command line or npm configuration') - return SKIP - } - this.addLog('checking Python explicitly set from command line or ' + - 'npm configuration') - this.addLog('- "--python=" or "npm config get python" is ' + - `"${this.configPython}"`) - }, - check: this.checkCommand, - arg: this.configPython - }, - { - before: () => { - if (!this.env.PYTHON) { - this.addLog('Python is not set from environment variable ' + - 'PYTHON') - return SKIP - } - this.addLog('checking Python explicitly set from environment ' + - 'variable PYTHON') - this.addLog(`- process.env.PYTHON is "${this.env.PYTHON}"`) - }, - check: this.checkCommand, - arg: this.env.PYTHON - }, - { - before: () => { this.addLog('checking if "python3" can be used') }, - check: this.checkCommand, - arg: 'python3' - }, - { - before: () => { this.addLog('checking if "python" can be used') }, - check: this.checkCommand, - arg: 'python' - }, - { - before: () => { this.addLog('checking if "python2" can be used') }, - check: this.checkCommand, - arg: 'python2' - } - ] - - if (this.win) { - for (var i = 0; i < this.winDefaultLocations.length; ++i) { - const location = this.winDefaultLocations[i] - checks.push({ - before: () => { - this.addLog('checking if Python is ' + - 
`${location}`) - }, - check: this.checkExecPath, - arg: location - }) - } - checks.push({ - before: () => { - this.addLog( - 'checking if the py launcher can be used to find Python') - }, - check: this.checkPyLauncher - }) - } - - return checks - } - - function runChecks (err) { - this.log.silly('runChecks: err = %j', (err && err.stack) || err) - - const check = toCheck.shift() - if (!check) { - return this.fail() - } - - const before = check.before.apply(this) - if (before === SKIP) { - return runChecks.apply(this) - } - if (before === FAIL) { - return this.fail() - } - - const args = [runChecks.bind(this)] - if (check.arg) { - args.unshift(check.arg) - } - check.check.apply(this, args) - } - - runChecks.apply(this) - }, - - // Check if command is a valid Python to use. - // Will exit the Python finder on success. - // If on Windows, run in a CMD shell to support BAT/CMD launchers. - checkCommand: function checkCommand (command, errorCallback) { - var exec = command - var args = this.argsExecutable - var shell = false - if (this.win) { - // Arguments have to be manually quoted - exec = `"${exec}"` - args = args.map(a => `"${a}"`) - shell = true - } - - this.log.verbose(`- executing "${command}" to get executable path`) - this.run(exec, args, shell, function (err, execPath) { - // Possible outcomes: - // - Error: not in PATH, not executable or execution fails - // - Gibberish: the next command to check version will fail - // - Absolute path to executable - if (err) { - this.addLog(`- "${command}" is not in PATH or produced an error`) - return errorCallback(err) - } - this.addLog(`- executable path is "${execPath}"`) - this.checkExecPath(execPath, errorCallback) - }.bind(this)) - }, - - // Check if the py launcher can find a valid Python to use. - // Will exit the Python finder on success. - // Distributions of Python on Windows by default install with the "py.exe" - // Python launcher which is more likely to exist than the Python executable - // being in the $PATH. - checkPyLauncher: function checkPyLauncher (errorCallback) { - this.log.verbose( - `- executing "${this.pyLauncher}" to get Python executable path`) - this.run(this.pyLauncher, this.argsExecutable, false, - function (err, execPath) { - // Possible outcomes: same as checkCommand - if (err) { - this.addLog( - `- "${this.pyLauncher}" is not in PATH or produced an error`) - return errorCallback(err) - } - this.addLog(`- executable path is "${execPath}"`) - this.checkExecPath(execPath, errorCallback) - }.bind(this)) - }, - - // Check if a Python executable is the correct version to use. - // Will exit the Python finder on success. 
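checkExecPath below is the final gate: a candidate only survives if the version it reports satisfies semverRange ('2.7.x || >=3.5.0'). The core test, sketched with the same semver package node-gyp already depends on:

// Sketch of the version gate in checkExecPath.
const semver = require('semver')
const semverRange = '2.7.x || >=3.5.0'

function isSupportedPython (version) {
  try {
    return new semver.Range(semverRange).test(version)
  } catch (err) {
    return false // not a parseable version: not a usable Python
  }
}

// isSupportedPython('2.7.16') -> true
// isSupportedPython('3.4.0')  -> false
// isSupportedPython('3.8.2')  -> true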
- checkExecPath: function checkExecPath (execPath, errorCallback) { - this.log.verbose(`- executing "${execPath}" to get version`) - this.run(execPath, this.argsVersion, false, function (err, version) { - // Possible outcomes: - // - Error: executable can not be run (likely meaning the command wasn't - // a Python executable and the previous command produced gibberish) - // - Gibberish: somehow the last command produced an executable path, - // this will fail when verifying the version - // - Version of the Python executable - if (err) { - this.addLog(`- "${execPath}" could not be run`) - return errorCallback(err) - } - this.addLog(`- version is "${version}"`) - - const range = new semver.Range(this.semverRange) - var valid = false - try { - valid = range.test(version) - } catch (err) { - this.log.silly('range.test() threw:\n%s', err.stack) - this.addLog(`- "${execPath}" does not have a valid version`) - this.addLog('- is it a Python executable?') - return errorCallback(err) - } - - if (!valid) { - this.addLog(`- version is ${version} - should be ${this.semverRange}`) - this.addLog('- THIS VERSION OF PYTHON IS NOT SUPPORTED') - return errorCallback(new Error( - `Found unsupported Python version ${version}`)) - } - this.succeed(execPath, version) - }.bind(this)) - }, - - // Run an executable or shell command, trimming the output. - run: function run (exec, args, shell, callback) { - var env = extend({}, this.env) - env.TERM = 'dumb' - const opts = { env: env, shell: shell } - - this.log.silly('execFile: exec = %j', exec) - this.log.silly('execFile: args = %j', args) - this.log.silly('execFile: opts = %j', opts) - try { - this.execFile(exec, args, opts, execFileCallback.bind(this)) - } catch (err) { - this.log.silly('execFile: threw:\n%s', err.stack) - return callback(err) - } - - function execFileCallback (err, stdout, stderr) { - this.log.silly('execFile result: err = %j', (err && err.stack) || err) - this.log.silly('execFile result: stdout = %j', stdout) - this.log.silly('execFile result: stderr = %j', stderr) - if (err) { - return callback(err) - } - const execPath = stdout.trim() - callback(null, execPath) - } - }, - - succeed: function succeed (execPath, version) { - this.log.info(`using Python version ${version} found at "${execPath}"`) - process.nextTick(this.callback.bind(null, null, execPath)) - }, - - fail: function fail () { - const errorLog = this.errorLog.join('\n') - - const pathExample = this.win ? 'C:\\Path\\To\\python.exe' - : '/path/to/pythonexecutable' - // For Windows 80 col console, use up to the column before the one marked - // with X (total 79 chars including logger prefix, 58 chars usable here): - // X - const info = [ - '**********************************************************', - 'You need to install the latest version of Python.', - 'Node-gyp should be able to find and use Python. 
If not,', - 'you can try one of the following options:', - `- Use the switch --python="${pathExample}"`, - ' (accepted by both node-gyp and npm)', - '- Set the environment variable PYTHON', - '- Set the npm configuration variable python:', - ` npm config set python "${pathExample}"`, - 'For more information consult the documentation at:', - 'https://github.com/nodejs/node-gyp#installation', - '**********************************************************' - ].join('\n') - - this.log.error(`\n${errorLog}\n\n${info}\n`) - process.nextTick(this.callback.bind(null, new Error( - 'Could not find any Python installation to use'))) - } -} - -function findPython (configPython, callback) { - var finder = new PythonFinder(configPython, callback) - finder.findPython() -} - -module.exports = findPython -module.exports.test = { - PythonFinder: PythonFinder, - findPython: findPython -} diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/find-visualstudio.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/find-visualstudio.js deleted file mode 100644 index 9c6dad90f8ad9..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/find-visualstudio.js +++ /dev/null @@ -1,434 +0,0 @@ -'use strict' - -const log = require('npmlog') -const execFile = require('child_process').execFile -const path = require('path').win32 -const logWithPrefix = require('./util').logWithPrefix -const regSearchKeys = require('./util').regSearchKeys - -function findVisualStudio (nodeSemver, configMsvsVersion, callback) { - const finder = new VisualStudioFinder(nodeSemver, configMsvsVersion, - callback) - finder.findVisualStudio() -} - -function VisualStudioFinder (nodeSemver, configMsvsVersion, callback) { - this.nodeSemver = nodeSemver - this.configMsvsVersion = configMsvsVersion - this.callback = callback - this.errorLog = [] - this.validVersions = [] -} - -VisualStudioFinder.prototype = { - log: logWithPrefix(log, 'find VS'), - - regSearchKeys: regSearchKeys, - - // Logs a message at verbose level, but also saves it to be displayed later - // at error level if an error occurs. This should help diagnose the problem. 
- addLog: function addLog (message) { - this.log.verbose(message) - this.errorLog.push(message) - }, - - findVisualStudio: function findVisualStudio () { - this.configVersionYear = null - this.configPath = null - if (this.configMsvsVersion) { - this.addLog('msvs_version was set from command line or npm config') - if (this.configMsvsVersion.match(/^\d{4}$/)) { - this.configVersionYear = parseInt(this.configMsvsVersion, 10) - this.addLog( - `- looking for Visual Studio version ${this.configVersionYear}`) - } else { - this.configPath = path.resolve(this.configMsvsVersion) - this.addLog( - `- looking for Visual Studio installed in "${this.configPath}"`) - } - } else { - this.addLog('msvs_version not set from command line or npm config') - } - - if (process.env.VCINSTALLDIR) { - this.envVcInstallDir = - path.resolve(process.env.VCINSTALLDIR, '..') - this.addLog('running in VS Command Prompt, installation path is:\n' + - `"${this.envVcInstallDir}"\n- will only use this version`) - } else { - this.addLog('VCINSTALLDIR not set, not running in VS Command Prompt') - } - - this.findVisualStudio2017OrNewer((info) => { - if (info) { - return this.succeed(info) - } - this.findVisualStudio2015((info) => { - if (info) { - return this.succeed(info) - } - this.findVisualStudio2013((info) => { - if (info) { - return this.succeed(info) - } - this.fail() - }) - }) - }) - }, - - succeed: function succeed (info) { - this.log.info(`using VS${info.versionYear} (${info.version}) found at:` + - `\n"${info.path}"` + - '\nrun with --verbose for detailed information') - process.nextTick(this.callback.bind(null, null, info)) - }, - - fail: function fail () { - if (this.configMsvsVersion && this.envVcInstallDir) { - this.errorLog.push( - 'msvs_version does not match this VS Command Prompt or the', - 'installation cannot be used.') - } else if (this.configMsvsVersion) { - // If msvs_version was specified but finding VS failed, print what would - // have been accepted - this.errorLog.push('') - if (this.validVersions) { - this.errorLog.push('valid versions for msvs_version:') - this.validVersions.forEach((version) => { - this.errorLog.push(`- "${version}"`) - }) - } else { - this.errorLog.push('no valid versions for msvs_version were found') - } - } - - const errorLog = this.errorLog.join('\n') - - // For Windows 80 col console, use up to the column before the one marked - // with X (total 79 chars including logger prefix, 62 chars usable here): - // X - const infoLog = [ - '**************************************************************', - 'You need to install the latest version of Visual Studio', - 'including the "Desktop development with C++" workload.', - 'For more information consult the documentation at:', - 'https://github.com/nodejs/node-gyp#on-windows', - '**************************************************************' - ].join('\n') - - this.log.error(`\n${errorLog}\n\n${infoLog}\n`) - process.nextTick(this.callback.bind(null, new Error( - 'Could not find any Visual Studio installation to use'))) - }, - - // Invoke the PowerShell script to get information about Visual Studio 2017 - // or newer installations - findVisualStudio2017OrNewer: function findVisualStudio2017OrNewer (cb) { - var ps = path.join(process.env.SystemRoot, 'System32', - 'WindowsPowerShell', 'v1.0', 'powershell.exe') - var csFile = path.join(__dirname, 'Find-VisualStudio.cs') - var psArgs = [ - '-ExecutionPolicy', - 'Unrestricted', - '-NoProfile', - '-Command', - '&{Add-Type -Path \'' + csFile + '\';' + 
'[VisualStudioConfiguration.Main]::PrintJson()}' - ] - - this.log.silly('Running', ps, psArgs) - var child = execFile(ps, psArgs, { encoding: 'utf8' }, - (err, stdout, stderr) => { - this.parseData(err, stdout, stderr, cb) - }) - child.stdin.end() - }, - - // Parse the output of the PowerShell script and look for an installation - // of Visual Studio 2017 or newer to use - parseData: function parseData (err, stdout, stderr, cb) { - this.log.silly('PS stderr = %j', stderr) - - const failPowershell = () => { - this.addLog( - 'could not use PowerShell to find Visual Studio 2017 or newer, try re-running with \'--loglevel silly\' for more details') - cb(null) - } - - if (err) { - this.log.silly('PS err = %j', err && (err.stack || err)) - return failPowershell() - } - - var vsInfo - try { - vsInfo = JSON.parse(stdout) - } catch (e) { - this.log.silly('PS stdout = %j', stdout) - this.log.silly(e) - return failPowershell() - } - - if (!Array.isArray(vsInfo)) { - this.log.silly('PS stdout = %j', stdout) - return failPowershell() - } - - vsInfo = vsInfo.map((info) => { - this.log.silly(`processing installation: "${info.path}"`) - info.path = path.resolve(info.path) - var ret = this.getVersionInfo(info) - ret.path = info.path - ret.msBuild = this.getMSBuild(info, ret.versionYear) - ret.toolset = this.getToolset(info, ret.versionYear) - ret.sdk = this.getSDK(info) - return ret - }) - this.log.silly('vsInfo:', vsInfo) - - // Remove future versions or errors parsing version number - vsInfo = vsInfo.filter((info) => { - if (info.versionYear) { - return true - } - this.addLog(`unknown version "${info.version}" found at "${info.path}"`) - return false - }) - - // Sort to place newer versions first - vsInfo.sort((a, b) => b.versionYear - a.versionYear) - - for (var i = 0; i < vsInfo.length; ++i) { - const info = vsInfo[i] - this.addLog(`checking VS${info.versionYear} (${info.version}) found ` + - `at:\n"${info.path}"`) - - if (info.msBuild) { - this.addLog('- found "Visual Studio C++ core features"') - } else { - this.addLog('- "Visual Studio C++ core features" missing') - continue - } - - if (info.toolset) { - this.addLog(`- found VC++ toolset: ${info.toolset}`) - } else { - this.addLog('- missing any VC++ toolset') - continue - } - - if (info.sdk) { - this.addLog(`- found Windows SDK: ${info.sdk}`) - } else { - this.addLog('- missing any Windows SDK') - continue - } - - if (!this.checkConfigVersion(info.versionYear, info.path)) { - continue - } - - return cb(info) - } - - this.addLog( - 'could not find a version of Visual Studio 2017 or newer to use') - cb(null) - }, - - // Helper - process version information - getVersionInfo: function getVersionInfo (info) { - const match = /^(\d+)\.(\d+)\..*/.exec(info.version) - if (!match) { - this.log.silly('- failed to parse version:', info.version) - return {} - } - this.log.silly('- version match = %j', match) - var ret = { - version: info.version, - versionMajor: parseInt(match[1], 10), - versionMinor: parseInt(match[2], 10) - } - if (ret.versionMajor === 15) { - ret.versionYear = 2017 - return ret - } - if (ret.versionMajor === 16) { - ret.versionYear = 2019 - return ret - } - this.log.silly('- unsupported version:', ret.versionMajor) - return {} - }, - - // Helper - process MSBuild information - getMSBuild: function getMSBuild (info, versionYear) { - const pkg = 'Microsoft.VisualStudio.VC.MSBuild.Base' - if (info.packages.indexOf(pkg) !== -1) { - this.log.silly('- found VC.MSBuild.Base') - if (versionYear === 2017) { - return path.join(info.path, 'MSBuild', 
'15.0', 'Bin', 'MSBuild.exe') - } - if (versionYear === 2019) { - return path.join(info.path, 'MSBuild', 'Current', 'Bin', 'MSBuild.exe') - } - } - return null - }, - - // Helper - process toolset information - getToolset: function getToolset (info, versionYear) { - const pkg = 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64' - const express = 'Microsoft.VisualStudio.WDExpress' - - if (info.packages.indexOf(pkg) !== -1) { - this.log.silly('- found VC.Tools.x86.x64') - } else if (info.packages.indexOf(express) !== -1) { - this.log.silly('- found Visual Studio Express (looking for toolset)') - } else { - return null - } - - if (versionYear === 2017) { - return 'v141' - } else if (versionYear === 2019) { - return 'v142' - } - this.log.silly('- invalid versionYear:', versionYear) - return null - }, - - // Helper - process Windows SDK information - getSDK: function getSDK (info) { - const win8SDK = 'Microsoft.VisualStudio.Component.Windows81SDK' - const win10SDKPrefix = 'Microsoft.VisualStudio.Component.Windows10SDK.' - - var Win10SDKVer = 0 - info.packages.forEach((pkg) => { - if (!pkg.startsWith(win10SDKPrefix)) { - return - } - const parts = pkg.split('.') - if (parts.length > 5 && parts[5] !== 'Desktop') { - this.log.silly('- ignoring non-Desktop Win10SDK:', pkg) - return - } - const foundSdkVer = parseInt(parts[4], 10) - if (isNaN(foundSdkVer)) { - // Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb - this.log.silly('- failed to parse Win10SDK number:', pkg) - return - } - this.log.silly('- found Win10SDK:', foundSdkVer) - Win10SDKVer = Math.max(Win10SDKVer, foundSdkVer) - }) - - if (Win10SDKVer !== 0) { - return `10.0.${Win10SDKVer}.0` - } else if (info.packages.indexOf(win8SDK) !== -1) { - this.log.silly('- found Win8SDK') - return '8.1' - } - return null - }, - - // Find an installation of Visual Studio 2015 to use - findVisualStudio2015: function findVisualStudio2015 (cb) { - return this.findOldVS({ - version: '14.0', - versionMajor: 14, - versionMinor: 0, - versionYear: 2015, - toolset: 'v140' - }, cb) - }, - - // Find an installation of Visual Studio 2013 to use - findVisualStudio2013: function findVisualStudio2013 (cb) { - if (this.nodeSemver.major >= 9) { - this.addLog( - 'not looking for VS2013 as it is only supported up to Node.js 8') - return cb(null) - } - return this.findOldVS({ - version: '12.0', - versionMajor: 12, - versionMinor: 0, - versionYear: 2013, - toolset: 'v120' - }, cb) - }, - - // Helper - common code for VS2013 and VS2015 - findOldVS: function findOldVS (info, cb) { - const regVC7 = ['HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7', - 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7'] - const regMSBuild = 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions' - - this.addLog(`looking for Visual Studio ${info.versionYear}`) - this.regSearchKeys(regVC7, info.version, [], (err, res) => { - if (err) { - this.addLog('- not found') - return cb(null) - } - - const vsPath = path.resolve(res, '..') - this.addLog(`- found in "${vsPath}"`) - - const msBuildRegOpts = process.arch === 'ia32' ? 
[] : ['/reg:32'] - this.regSearchKeys([`${regMSBuild}\\${info.version}`], - 'MSBuildToolsPath', msBuildRegOpts, (err, res) => { - if (err) { - this.addLog( - '- could not find MSBuild in registry for this version') - return cb(null) - } - - const msBuild = path.join(res, 'MSBuild.exe') - this.addLog(`- MSBuild in "${msBuild}"`) - - if (!this.checkConfigVersion(info.versionYear, vsPath)) { - return cb(null) - } - - info.path = vsPath - info.msBuild = msBuild - info.sdk = null - cb(info) - }) - }) - }, - - // After finding a usable version of Visual Studio: - // - add it to validVersions to be displayed at the end if a specific - // version was requested and not found; - // - check if this is the version that was requested. - // - check if this matches the Visual Studio Command Prompt - checkConfigVersion: function checkConfigVersion (versionYear, vsPath) { - this.validVersions.push(versionYear) - this.validVersions.push(vsPath) - - if (this.configVersionYear && this.configVersionYear !== versionYear) { - this.addLog('- msvs_version does not match this version') - return false - } - if (this.configPath && - path.relative(this.configPath, vsPath) !== '') { - this.addLog('- msvs_version does not point to this installation') - return false - } - if (this.envVcInstallDir && - path.relative(this.envVcInstallDir, vsPath) !== '') { - this.addLog('- does not match this Visual Studio Command Prompt') - return false - } - - return true - } -} - -module.exports = findVisualStudio -module.exports.test = { - VisualStudioFinder: VisualStudioFinder, - findVisualStudio: findVisualStudio -} diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/install.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/install.js deleted file mode 100644 index f9fa2b34bd30c..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/install.js +++ /dev/null @@ -1,447 +0,0 @@ -'use strict' - -const fs = require('graceful-fs') -const os = require('os') -const tar = require('tar') -const path = require('path') -const crypto = require('crypto') -const log = require('npmlog') -const semver = require('semver') -const request = require('request') -const processRelease = require('./process-release') -const win = process.platform === 'win32' -const getProxyFromURI = require('./proxy') - -function install (fs, gyp, argv, callback) { - var release = processRelease(argv, gyp, process.version, process.release) - - // ensure no double-callbacks happen - function cb (err) { - if (cb.done) { - return - } - cb.done = true - if (err) { - log.warn('install', 'got an error, rolling back install') - // roll-back the install if anything went wrong - gyp.commands.remove([release.versionDir], function () { - callback(err) - }) - } else { - callback(null, release.version) - } - } - - // Determine which node dev files version we are installing - log.verbose('install', 'input version string %j', release.version) - - if (!release.semver) { - // could not parse the version string with semver - return callback(new Error('Invalid version number: ' + release.version)) - } - - if (semver.lt(release.version, '0.8.0')) { - return callback(new Error('Minimum target version is `0.8.0` or greater. Got: ' + release.version)) - } - - // 0.x.y-pre versions are not published yet and cannot be installed. Bail.
- if (release.semver.prerelease[0] === 'pre') { - log.verbose('detected "pre" node version', release.version) - if (gyp.opts.nodedir) { - log.verbose('--nodedir flag was passed; skipping install', gyp.opts.nodedir) - callback() - } else { - callback(new Error('"pre" versions of node cannot be installed, use the --nodedir flag instead')) - } - return - } - - // flatten version into String - log.verbose('install', 'installing version: %s', release.versionDir) - - // the directory where the dev files will be installed - var devDir = path.resolve(gyp.devDir, release.versionDir) - - // If '--ensure' was passed, then don't *always* install the version; - // check if it is already installed, and only install when needed - if (gyp.opts.ensure) { - log.verbose('install', '--ensure was passed, so won\'t reinstall if already installed') - fs.stat(devDir, function (err) { - if (err) { - if (err.code === 'ENOENT') { - log.verbose('install', 'version not already installed, continuing with install', release.version) - go() - } else if (err.code === 'EACCES') { - eaccesFallback(err) - } else { - cb(err) - } - return - } - log.verbose('install', 'version is already installed, need to check "installVersion"') - var installVersionFile = path.resolve(devDir, 'installVersion') - fs.readFile(installVersionFile, 'ascii', function (err, ver) { - if (err && err.code !== 'ENOENT') { - return cb(err) - } - var installVersion = parseInt(ver, 10) || 0 - log.verbose('got "installVersion"', installVersion) - log.verbose('needs "installVersion"', gyp.package.installVersion) - if (installVersion < gyp.package.installVersion) { - log.verbose('install', 'version is no good; reinstalling') - go() - } else { - log.verbose('install', 'version is good') - cb() - } - }) - }) - } else { - go() - } - - function getContentSha (res, callback) { - var shasum = crypto.createHash('sha256') - res.on('data', function (chunk) { - shasum.update(chunk) - }).on('end', function () { - callback(null, shasum.digest('hex')) - }) - } - - function go () { - log.verbose('ensuring nodedir is created', devDir) - - // first create the dir for the node dev files - fs.mkdir(devDir, { recursive: true }, function (err, created) { - if (err) { - if (err.code === 'EACCES') { - eaccesFallback(err) - } else { - cb(err) - } - return - } - - if (created) { - log.verbose('created nodedir', created) - } - - // now download the node tarball - var tarPath = gyp.opts.tarball - var badDownload = false - var extractCount = 0 - var contentShasums = {} - var expectShasums = {} - - // checks if a file to be extracted from the tarball is valid. - // only .h header files and the gyp files get extracted - function isValid (path) { - var isValid = valid(path) - if (isValid) { - log.verbose('extracted file from tarball', path) - extractCount++ - } else { - // invalid - log.silly('ignoring from tarball', path) - } - return isValid - } - - // download the tarball and extract! - if (tarPath) { - return tar.extract({ - file: tarPath, - strip: 1, - filter: isValid, - cwd: devDir - }).then(afterTarball, cb) - } - - try { - var req = download(gyp, process.env, release.tarballUrl) - } catch (e) { - return cb(e) - } - - // something went wrong downloading the tarball? - req.on('error', function (err) { - if (err.code === 'ENOTFOUND') { - return cb(new Error('This is most likely not a problem with node-gyp or the package itself and\n' + - 'is related to network connectivity. 
In most cases you are behind a proxy or have bad \n' + - 'network settings.')) - } - badDownload = true - cb(err) - }) - - req.on('close', function () { - if (extractCount === 0) { - cb(new Error('Connection closed while downloading tarball file')) - } - }) - - req.on('response', function (res) { - if (res.statusCode !== 200) { - badDownload = true - cb(new Error(res.statusCode + ' response downloading ' + release.tarballUrl)) - return - } - // content checksum - getContentSha(res, function (_, checksum) { - var filename = path.basename(release.tarballUrl).trim() - contentShasums[filename] = checksum - log.verbose('content checksum', filename, checksum) - }) - - // start unzipping and untaring - res.pipe(tar.extract({ - strip: 1, - cwd: devDir, - filter: isValid - }).on('close', afterTarball).on('error', cb)) - }) - - // invoked after the tarball has finished being extracted - function afterTarball () { - if (badDownload) { - return - } - if (extractCount === 0) { - return cb(new Error('There was a fatal problem while downloading/extracting the tarball')) - } - log.verbose('tarball', 'done parsing tarball') - var async = 0 - - if (win) { - // need to download node.lib - async++ - downloadNodeLib(deref) - } - - // write the "installVersion" file - async++ - var installVersionPath = path.resolve(devDir, 'installVersion') - fs.writeFile(installVersionPath, gyp.package.installVersion + '\n', deref) - - // Only download SHASUMS.txt if we downloaded something in need of SHA verification - if (!tarPath || win) { - // download SHASUMS.txt - async++ - downloadShasums(deref) - } - - if (async === 0) { - // no async tasks required - cb() - } - - function deref (err) { - if (err) { - return cb(err) - } - - async-- - if (!async) { - log.verbose('download contents checksum', JSON.stringify(contentShasums)) - // check content shasums - for (var k in contentShasums) { - log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k]) - if (contentShasums[k] !== expectShasums[k]) { - cb(new Error(k + ' local checksum ' + contentShasums[k] + ' not match remote ' + expectShasums[k])) - return - } - } - cb() - } - } - } - - function downloadShasums (done) { - log.verbose('check download content checksum, need to download `SHASUMS256.txt`...') - log.verbose('checksum url', release.shasumsUrl) - try { - var req = download(gyp, process.env, release.shasumsUrl) - } catch (e) { - return cb(e) - } - - req.on('error', done) - req.on('response', function (res) { - if (res.statusCode !== 200) { - done(new Error(res.statusCode + ' status code downloading checksum')) - return - } - - var chunks = [] - res.on('data', function (chunk) { - chunks.push(chunk) - }) - res.on('end', function () { - var lines = Buffer.concat(chunks).toString().trim().split('\n') - lines.forEach(function (line) { - var items = line.trim().split(/\s+/) - if (items.length !== 2) { - return - } - - // 0035d18e2dcf9aad669b1c7c07319e17abfe3762 ./node-v0.11.4.tar.gz - var name = items[1].replace(/^\.\//, '') - expectShasums[name] = items[0] - }) - - log.verbose('checksum data', JSON.stringify(expectShasums)) - done() - }) - }) - } - - function downloadNodeLib (done) { - log.verbose('on Windows; need to download `' + release.name + '.lib`...') - var archs = ['ia32', 'x64', 'arm64'] - var async = archs.length - archs.forEach(function (arch) { - var dir = path.resolve(devDir, arch) - var targetLibPath = path.resolve(dir, release.name + '.lib') - var libUrl = release[arch].libUrl - var libPath = release[arch].libPath 
- var name = arch + ' ' + release.name + '.lib' - log.verbose(name, 'dir', dir) - log.verbose(name, 'url', libUrl) - - fs.mkdir(dir, { recursive: true }, function (err) { - if (err) { - return done(err) - } - log.verbose('streaming', name, 'to:', targetLibPath) - - try { - var req = download(gyp, process.env, libUrl, cb) - } catch (e) { - return cb(e) - } - - req.on('error', done) - req.on('response', function (res) { - if (res.statusCode === 403 || res.statusCode === 404) { - if (arch === 'arm64') { - // Arm64 is a newer platform on Windows and not all node distributions provide it. - log.verbose(`${name} was not found in ${libUrl}`) - } else { - log.warn(`${name} was not found in ${libUrl}`) - } - return - } else if (res.statusCode !== 200) { - done(new Error(res.statusCode + ' status code downloading ' + name)) - return - } - - getContentSha(res, function (_, checksum) { - contentShasums[libPath] = checksum - log.verbose('content checksum', libPath, checksum) - }) - - var ws = fs.createWriteStream(targetLibPath) - ws.on('error', cb) - req.pipe(ws) - }) - req.on('end', function () { --async || done() }) - }) - }) - } // downloadNodeLib() - }) // mkdir() - } // go() - - /** - * Checks if a given filename is "valid" for this installation. - */ - - function valid (file) { - // header files - var extname = path.extname(file) - return extname === '.h' || extname === '.gypi' - } - - /** - * The EACCES fallback is a workaround for npm's `sudo` behavior, where - * it drops the permissions before invoking any child processes (like - * node-gyp). So what happens is the "nobody" user doesn't have - * permission to create the dev dir. As a fallback, make the tmpdir() be - * the dev dir for this installation. This is not ideal, but at least - * the compilation will succeed... - */ - - function eaccesFallback (err) { - var noretry = '--node_gyp_internal_noretry' - if (argv.indexOf(noretry) !== -1) { - return cb(err) - } - var tmpdir = os.tmpdir() - gyp.devDir = path.resolve(tmpdir, '.node-gyp') - var userString = '' - try { - // os.userInfo can fail on some systems, it's not critical here - userString = ` ("${os.userInfo().username}")` - } catch (e) {} - log.warn('EACCES', 'current user%s does not have permission to access the dev dir "%s"', userString, devDir) - log.warn('EACCES', 'attempting to reinstall using temporary dev dir "%s"', gyp.devDir) - if (process.cwd() === tmpdir) { - log.verbose('tmpdir == cwd', 'automatically will remove dev files after to save disk space') - gyp.todo.push({ name: 'remove', args: argv }) - } - gyp.commands.install([noretry].concat(argv), cb) - } -} - -function download (gyp, env, url) { - log.http('GET', url) - - var requestOpts = { - uri: url, - headers: { - 'User-Agent': 'node-gyp v' + gyp.version + ' (node ' + process.version + ')', - Connection: 'keep-alive' - } - } - - var cafile = gyp.opts.cafile - if (cafile) { - requestOpts.ca = readCAFile(cafile) - } - - // basic support for a proxy server - var proxyUrl = getProxyFromURI(gyp, env, url) - if (proxyUrl) { - if (/^https?:\/\//i.test(proxyUrl)) { - log.verbose('download', 'using proxy url: "%s"', proxyUrl) - requestOpts.proxy = proxyUrl - } else { - log.warn('download', 'ignoring invalid "proxy" config setting: "%s"', proxyUrl) - } - } - - var req = request(requestOpts) - req.on('response', function (res) { - log.http(res.statusCode, url) - }) - - return req -} - -function readCAFile (filename) { - // The CA file can contain multiple certificates so split on certificate - // boundaries. [\S\s]*? 
is used to match everything including newlines. - var ca = fs.readFileSync(filename, 'utf8') - var re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g - return ca.match(re) -} - -module.exports = function (gyp, argv, callback) { - return install(fs, gyp, argv, callback) -} -module.exports.test = { - download: download, - install: install, - readCAFile: readCAFile -} -module.exports.usage = 'Install node development files for the specified node version.' diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/list.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/list.js deleted file mode 100644 index 405ebc0d88959..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/list.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' - -const fs = require('graceful-fs') -const log = require('npmlog') - -function list (gyp, args, callback) { - var devDir = gyp.devDir - log.verbose('list', 'using node-gyp dir:', devDir) - - fs.readdir(devDir, onreaddir) - - function onreaddir (err, versions) { - if (err && err.code !== 'ENOENT') { - return callback(err) - } - - if (Array.isArray(versions)) { - versions = versions.filter(function (v) { return v !== 'current' }) - } else { - versions = [] - } - callback(null, versions) - } -} - -module.exports = list -module.exports.usage = 'Prints a listing of the currently installed node development files' diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/node-gyp.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/node-gyp.js deleted file mode 100644 index 81fc590919ba8..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/node-gyp.js +++ /dev/null @@ -1,210 +0,0 @@ -'use strict' - -const path = require('path') -const nopt = require('nopt') -const log = require('npmlog') -const childProcess = require('child_process') -const EE = require('events').EventEmitter -const inherits = require('util').inherits -const commands = [ - // Module build commands - 'build', - 'clean', - 'configure', - 'rebuild', - // Development Header File management commands - 'install', - 'list', - 'remove' -] -const aliases = { - ls: 'list', - rm: 'remove' -} - -// differentiate node-gyp's logs from npm's -log.heading = 'gyp' - -function gyp () { - return new Gyp() -} - -function Gyp () { - var self = this - - this.devDir = '' - this.commands = {} - - commands.forEach(function (command) { - self.commands[command] = function (argv, callback) { - log.verbose('command', command, argv) - return require('./' + command)(self, argv, callback) - } - }) -} -inherits(Gyp, EE) -exports.Gyp = Gyp -var proto = Gyp.prototype - -/** - * Export the contents of the package.json. 
- */ - -proto.package = require('../package.json') - -/** - * nopt configuration definitions - */ - -proto.configDefs = { - help: Boolean, // everywhere - arch: String, // 'configure' - cafile: String, // 'install' - debug: Boolean, // 'build' - directory: String, // bin - make: String, // 'build' - msvs_version: String, // 'configure' - ensure: Boolean, // 'install' - solution: String, // 'build' (windows only) - proxy: String, // 'install' - noproxy: String, // 'install' - devdir: String, // everywhere - nodedir: String, // 'configure' - loglevel: String, // everywhere - python: String, // 'configure' - 'dist-url': String, // 'install' - tarball: String, // 'install' - jobs: String, // 'build' - thin: String // 'configure' -} - -/** - * nopt shorthands - */ - -proto.shorthands = { - release: '--no-debug', - C: '--directory', - debug: '--debug', - j: '--jobs', - silly: '--loglevel=silly', - verbose: '--loglevel=verbose', - silent: '--loglevel=silent' -} - -/** - * expose the command aliases for the bin file to use. - */ - -proto.aliases = aliases - -/** - * Parses the given argv array and sets the 'opts', - * 'argv' and 'command' properties. - */ - -proto.parseArgv = function parseOpts (argv) { - this.opts = nopt(this.configDefs, this.shorthands, argv) - this.argv = this.opts.argv.remain.slice() - - var commands = this.todo = [] - - // create a copy of the argv array with aliases mapped - argv = this.argv.map(function (arg) { - // is this an alias? - if (arg in this.aliases) { - arg = this.aliases[arg] - } - return arg - }, this) - - // process the mapped args into "command" objects ("name" and "args" props) - argv.slice().forEach(function (arg) { - if (arg in this.commands) { - var args = argv.splice(0, argv.indexOf(arg)) - argv.shift() - if (commands.length > 0) { - commands[commands.length - 1].args = args - } - commands.push({ name: arg, args: [] }) - } - }, this) - if (commands.length > 0) { - commands[commands.length - 1].args = argv.splice(0) - } - - // support for inheriting config env variables from npm - var npmConfigPrefix = 'npm_config_' - Object.keys(process.env).forEach(function (name) { - if (name.indexOf(npmConfigPrefix) !== 0) { - return - } - var val = process.env[name] - if (name === npmConfigPrefix + 'loglevel') { - log.level = val - } else { - // add the user-defined options to the config - name = name.substring(npmConfigPrefix.length) - // gyp@741b7f1 enters an infinite loop when it encounters - // zero-length options so ensure those don't get through. - if (name) { - this.opts[name] = val - } - } - }, this) - - if (this.opts.loglevel) { - log.level = this.opts.loglevel - } - log.resume() -} - -/** - * Spawns a child process and emits a 'spawn' event. - */ - -proto.spawn = function spawn (command, args, opts) { - if (!opts) { - opts = {} - } - if (!opts.silent && !opts.stdio) { - opts.stdio = [0, 1, 2] - } - var cp = childProcess.spawn(command, args, opts) - log.info('spawn', command) - log.info('spawn args', args) - return cp -} - -/** - * Returns the usage instructions for node-gyp. - */ - -proto.usage = function usage () { - var str = [ - '', - ' Usage: node-gyp <command> [options]', - '', - ' where <command> is one of:', - commands.map(function (c) { - return ' - ' + c + ' - ' + require('./' + c).usage - }).join('\n'), - '', - 'node-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'), - 'node@' + process.versions.node - ].join('\n') - return str -} - -/** - * Version number getter.
- */ - -Object.defineProperty(proto, 'version', { - get: function () { - return this.package.version - }, - enumerable: true -}) - -module.exports = exports = gyp diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/process-release.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/process-release.js deleted file mode 100644 index 95b55e4426dee..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/process-release.js +++ /dev/null @@ -1,147 +0,0 @@ -/* eslint-disable node/no-deprecated-api */ - -'use strict' - -const semver = require('semver') -const url = require('url') -const path = require('path') -const log = require('npmlog') - -// versions where -headers.tar.gz started shipping -const headersTarballRange = '>= 3.0.0 || ~0.12.10 || ~0.10.42' -const bitsre = /\/win-(x86|x64|arm64)\// -const bitsreV3 = /\/win-(x86|ia32|x64)\// // io.js v3.x.x shipped with "ia32" but should -// have been "x86" - -// Captures all the logic required to determine download URLs, local directory and -// file names. Inputs come from command-line switches (--target, --dist-url), -// `process.version` and `process.release` where it exists. -function processRelease (argv, gyp, defaultVersion, defaultRelease) { - var version = (semver.valid(argv[0]) && argv[0]) || gyp.opts.target || defaultVersion - var versionSemver = semver.parse(version) - var overrideDistUrl = gyp.opts['dist-url'] || gyp.opts.disturl - var isDefaultVersion - var isNamedForLegacyIojs - var name - var distBaseUrl - var baseUrl - var libUrl32 - var libUrl64 - var libUrlArm64 - var tarballUrl - var canGetHeaders - - if (!versionSemver) { - // not a valid semver string, nothing we can do - return { version: version } - } - // flatten version into String - version = versionSemver.version - - // defaultVersion should come from process.version so ought to be valid semver - isDefaultVersion = version === semver.parse(defaultVersion).version - - // can't use process.release if we're using --target=x.y.z - if (!isDefaultVersion) { - defaultRelease = null - } - - if (defaultRelease) { - // v3 onward, has process.release - name = defaultRelease.name.replace(/io\.js/, 'iojs') // remove the '.' for directory naming purposes - } else { - // old node or alternative --target= - // semver.satisfies() doesn't like prerelease tags so test major directly - isNamedForLegacyIojs = versionSemver.major >= 1 && versionSemver.major < 4 - // isNamedForLegacyIojs is required to support Electron < 4 (in particular Electron 3) - // as previously this logic was used to ensure "iojs" was used to download iojs releases - // and "node" for node releases. Unfortunately the logic was broad enough that electron@3 - // published release assets as "iojs" so that the node-gyp logic worked. Once Electron@3 has - // been EOL for a while (late 2019) we should remove this hack. - name = isNamedForLegacyIojs ? 
'iojs' : 'node' - } - - // check for the nvm.sh standard mirror env variables - if (!overrideDistUrl && process.env.NODEJS_ORG_MIRROR) { - overrideDistUrl = process.env.NODEJS_ORG_MIRROR - } - - if (overrideDistUrl) { - log.verbose('download', 'using dist-url', overrideDistUrl) - } - - if (overrideDistUrl) { - distBaseUrl = overrideDistUrl.replace(/\/+$/, '') - } else { - distBaseUrl = 'https://nodejs.org/dist' - } - distBaseUrl += '/v' + version + '/' - - // new style, based on process.release so we have a lot of the data we need - if (defaultRelease && defaultRelease.headersUrl && !overrideDistUrl) { - baseUrl = url.resolve(defaultRelease.headersUrl, './') - libUrl32 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x86', versionSemver.major) - libUrl64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x64', versionSemver.major) - libUrlArm64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'arm64', versionSemver.major) - tarballUrl = defaultRelease.headersUrl - } else { - // older versions without process.release are captured here and we have to make - // a lot of assumptions, additionally if you --target=x.y.z then we can't use the - // current process.release - baseUrl = distBaseUrl - libUrl32 = resolveLibUrl(name, baseUrl, 'x86', versionSemver.major) - libUrl64 = resolveLibUrl(name, baseUrl, 'x64', versionSemver.major) - libUrlArm64 = resolveLibUrl(name, baseUrl, 'arm64', versionSemver.major) - - // making the bold assumption that anything with a version number >3.0.0 will - // have a *-headers.tar.gz file in its dist location, even some frankenstein - // custom version - canGetHeaders = semver.satisfies(versionSemver, headersTarballRange) - tarballUrl = url.resolve(baseUrl, name + '-v' + version + (canGetHeaders ? '-headers' : '') + '.tar.gz') - } - - return { - version: version, - semver: versionSemver, - name: name, - baseUrl: baseUrl, - tarballUrl: tarballUrl, - shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'), - versionDir: (name !== 'node' ? name + '-' : '') + version, - ia32: { - libUrl: libUrl32, - libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)) - }, - x64: { - libUrl: libUrl64, - libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path)) - }, - arm64: { - libUrl: libUrlArm64, - libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrlArm64).path)) - } - } -} - -function normalizePath (p) { - return path.normalize(p).replace(/\\/g, '/') -} - -function resolveLibUrl (name, defaultUrl, arch, versionMajor) { - var base = url.resolve(defaultUrl, './') - var hasLibUrl = bitsre.test(defaultUrl) || (versionMajor === 3 && bitsreV3.test(defaultUrl)) - - if (!hasLibUrl) { - // let's assume it's a baseUrl then - if (versionMajor >= 1) { - return url.resolve(base, 'win-' + arch + '/' + name + '.lib') - } - // prior to io.js@1.0.0 32-bit node.lib lives in /, 64-bit lives in /x64/ - return url.resolve(base, (arch === 'x86' ? '' : arch + '/') + name + '.lib') - } - - // else we have a proper url to a .lib, just make sure it's the right arch - return defaultUrl.replace(versionMajor === 3 ? 
bitsreV3 : bitsre, '/win-' + arch + '/') -} - -module.exports = processRelease diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/proxy.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/proxy.js deleted file mode 100644 index 92d9ed2f7f085..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/proxy.js +++ /dev/null @@ -1,92 +0,0 @@ -'use strict' -// Taken from https://github.com/request/request/blob/212570b/lib/getProxyFromURI.js - -const url = require('url') - -function formatHostname (hostname) { - // canonicalize the hostname, so that 'oogle.com' won't match 'google.com' - return hostname.replace(/^\.*/, '.').toLowerCase() -} - -function parseNoProxyZone (zone) { - zone = zone.trim().toLowerCase() - - var zoneParts = zone.split(':', 2) - var zoneHost = formatHostname(zoneParts[0]) - var zonePort = zoneParts[1] - var hasPort = zone.indexOf(':') > -1 - - return { hostname: zoneHost, port: zonePort, hasPort: hasPort } -} - -function uriInNoProxy (uri, noProxy) { - var port = uri.port || (uri.protocol === 'https:' ? '443' : '80') - var hostname = formatHostname(uri.hostname) - var noProxyList = noProxy.split(',') - - // iterate through the noProxyList until it finds a match. - return noProxyList.map(parseNoProxyZone).some(function (noProxyZone) { - var isMatchedAt = hostname.indexOf(noProxyZone.hostname) - var hostnameMatched = ( - isMatchedAt > -1 && - (isMatchedAt === hostname.length - noProxyZone.hostname.length) - ) - - if (noProxyZone.hasPort) { - return (port === noProxyZone.port) && hostnameMatched - } - - return hostnameMatched - }) -} - -function getProxyFromURI (gyp, env, uri) { - // If a string URI/URL was given, parse it into a URL object - if (typeof uri === 'string') { - // eslint-disable-next-line - uri = url.parse(uri) - } - - // Decide the proper request proxy to use based on the request URI object and the - // environmental variables (NO_PROXY, HTTP_PROXY, etc.) - // respect NO_PROXY environment variables (see: https://lynx.invisible-island.net/lynx2.8.7/breakout/lynx_help/keystrokes/environments.html) - - var noProxy = gyp.opts.noproxy || env.NO_PROXY || env.no_proxy || env.npm_config_noproxy || '' - - // if the noProxy is a wildcard then return null - - if (noProxy === '*') { - return null - } - - // if the noProxy is not empty and the uri is found return null - - if (noProxy !== '' && uriInNoProxy(uri, noProxy)) { - return null - } - - // Check for HTTP or HTTPS Proxy in environment Else default to null - - if (uri.protocol === 'http:') { - return gyp.opts.proxy || - env.HTTP_PROXY || - env.http_proxy || - env.npm_config_proxy || null - } - - if (uri.protocol === 'https:') { - return gyp.opts.proxy || - env.HTTPS_PROXY || - env.https_proxy || - env.HTTP_PROXY || - env.http_proxy || - env.npm_config_proxy || null - } - - // if none of that works, return null - // (What uri protocol are you using then?) 
- - return null -} - -module.exports = getProxyFromURI diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/rebuild.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/rebuild.js deleted file mode 100644 index a1c5b27cbe56a..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/rebuild.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -function rebuild (gyp, argv, callback) { - gyp.todo.push( - { name: 'clean', args: [] } - , { name: 'configure', args: argv } - , { name: 'build', args: [] } - ) - process.nextTick(callback) -} - -module.exports = rebuild -module.exports.usage = 'Runs "clean", "configure" and "build" all at once' diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/remove.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/remove.js deleted file mode 100644 index 8c945e5659001..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/remove.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict' - -const fs = require('fs') -const rm = require('rimraf') -const path = require('path') -const log = require('npmlog') -const semver = require('semver') - -function remove (gyp, argv, callback) { - var devDir = gyp.devDir - log.verbose('remove', 'using node-gyp dir:', devDir) - - // get the user-specified version to remove - var version = argv[0] || gyp.opts.target - log.verbose('remove', 'removing target version:', version) - - if (!version) { - return callback(new Error('You must specify a version number to remove. Ex: "' + process.version + '"')) - } - - var versionSemver = semver.parse(version) - if (versionSemver) { - // flatten the version Array into a String - version = versionSemver.version - } - - var versionPath = path.resolve(gyp.devDir, version) - log.verbose('remove', 'removing development files for version:', version) - - // first check if it's even installed - fs.stat(versionPath, function (err) { - if (err) { - if (err.code === 'ENOENT') { - callback(null, 'version was already uninstalled: ' + version) - } else { - callback(err) - } - return - } - // Go ahead and delete the dir - rm(versionPath, callback) - }) -} - -module.exports = exports = remove -module.exports.usage = 'Removes the node development files for the specified version' diff --git a/node_modules/libnpmexec/node_modules/node-gyp/lib/util.js b/node_modules/libnpmexec/node_modules/node-gyp/lib/util.js deleted file mode 100644 index 3e23c628e6ad7..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/lib/util.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict' - -const log = require('npmlog') -const execFile = require('child_process').execFile -const path = require('path') - -function logWithPrefix (log, prefix) { - function setPrefix (logFunction) { - return (...args) => logFunction.apply(null, [ prefix, ...args ]) // eslint-disable-line - } - return { - silly: setPrefix(log.silly), - verbose: setPrefix(log.verbose), - info: setPrefix(log.info), - warn: setPrefix(log.warn), - error: setPrefix(log.error) - } -} - -function regGetValue (key, value, addOpts, cb) { - const outReValue = value.replace(/\W/g, '.') - const outRe = new RegExp(`^\\s+${outReValue}\\s+REG_\\w+\\s+(\\S.*)$`, 'im') - const reg = path.join(process.env.SystemRoot, 'System32', 'reg.exe') - const regArgs = ['query', key, '/v', value].concat(addOpts) - - log.silly('reg', 'running', reg, regArgs) - const child = execFile(reg, regArgs, { encoding: 'utf8' }, - function (err, stdout, stderr) { - log.silly('reg', 'reg.exe stdout = %j', stdout) - if (err || stderr.trim() !== '') { -
log.silly('reg', 'reg.exe err = %j', err && (err.stack || err)) - log.silly('reg', 'reg.exe stderr = %j', stderr) - return cb(err, stderr) - } - - const result = outRe.exec(stdout) - if (!result) { - log.silly('reg', 'error parsing stdout') - return cb(new Error('Could not parse output of reg.exe')) - } - log.silly('reg', 'found: %j', result[1]) - cb(null, result[1]) - }) - child.stdin.end() -} - -function regSearchKeys (keys, value, addOpts, cb) { - var i = 0 - const search = () => { - log.silly('reg-search', 'looking for %j in %j', value, keys[i]) - regGetValue(keys[i], value, addOpts, (err, res) => { - ++i - if (err && i < keys.length) { return search() } - cb(err, res) - }) - } - search() -} - -module.exports = { - logWithPrefix: logWithPrefix, - regGetValue: regGetValue, - regSearchKeys: regSearchKeys -} diff --git a/node_modules/libnpmexec/node_modules/node-gyp/macOS_Catalina.md b/node_modules/libnpmexec/node_modules/node-gyp/macOS_Catalina.md deleted file mode 100644 index ca2fd234733dd..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/macOS_Catalina.md +++ /dev/null @@ -1,104 +0,0 @@ -# Installation notes for macOS Catalina (v10.15) - -_This document specifically refers to upgrades from previous versions of macOS to Catalina (10.15). It should be removed from the source repository when Catalina ceases to be the latest macOS version or when future Catalina versions no longer raise these issues._ - -**Both upgrading to macOS Catalina and running a Software Update in Catalina may cause normal `node-gyp` installations to fail. This might manifest as the following error during `npm install`:** - -```console -gyp: No Xcode or CLT version detected! -``` - -## node-gyp v7 - -The newest release of `node-gyp` should solve this problem. If you are using `node-gyp` directly then you should be able to install v7 and use it as-is. - -If you need to use `node-gyp` from within `npm` (e.g. through `npm install`), you will have to install `node-gyp` (either globally with `-g` or to a predictable location) and tell `npm` where the new version is. Either use: - -* `npm config set node_gyp <path to node-gyp>`; or -* run `npm` with an environment variable prefix: `npm_config_node_gyp=<path to node-gyp> npm install` - -Where "path to node-gyp" is to the `node-gyp` executable which may be a symlink in your global bin directory (e.g. `/usr/local/bin/node-gyp`), or a path to the `node-gyp` installation directory and the `bin/node-gyp.js` file within it (e.g. `/usr/local/lib/node_modules/node-gyp/bin/node-gyp.js`). - -**If you use `npm config set` to change your global `node_gyp` you are responsible for keeping it up to date and can't rely on `npm` to give you a newer version when available.** Use `npm config delete node_gyp` to unset this configuration option. - -## Fixing Catalina for older versions of `node-gyp` - -### Is my Mac running macOS Catalina? -Let's first make sure that your Mac is running Catalina: -``` -% sw_vers - ProductName: Mac OS X - ProductVersion: 10.15 - BuildVersion: 19A602 -``` -If `ProductVersion` is less than `10.15` then this document is not for you. Normal install docs for `node-gyp` on macOS can be found at https://github.com/nodejs/node-gyp#on-macos - - -### The acid test -To see if `Xcode Command Line Tools` is installed in a way that will work with `node-gyp`, run: -``` -curl -sL https://github.com/nodejs/node-gyp/raw/master/macOS_Catalina_acid_test.sh | bash -``` - -If the test succeeded, _you are done_! You should be ready to install `node-gyp`.
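For reference, a passing run simply reports the detected tools version (the `macOS_Catalina_acid_test.sh` script deleted later in this patch prints it via `pkgutil`); the version value shown here is illustrative only:

```console
# illustrative output of a successful acid test; the version string will differ per machine
% curl -sL https://github.com/nodejs/node-gyp/raw/master/macOS_Catalina_acid_test.sh | bash
Command Line Tools version: 11.0.0.0.1.1567737322
```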
- -If the test failed, there is a problem with your Xcode Command Line Tools installation. [Continue to Solutions](#Solutions). - -### Solutions -There are three ways to install the Xcode libraries `node-gyp` needs on macOS. People running Catalina have had success with some but not others in a way that has been unpredictable. - -1. With the full Xcode (~7.6 GB download) from the `App Store` app. -2. With the _much_ smaller Xcode Command Line Tools via `xcode-select --install` -3. With the _much_ smaller Xcode Command Line Tools via manual download. **For people running the latest version of Catalina (10.15.2 at the time of this writing), this has worked when the other two solutions haven't.** - -### Installing `node-gyp` using the full Xcode -1. `xcodebuild -version` should show `Xcode 11.1` or later. - * If not, then install/upgrade Xcode from the App Store app. -2. Open the Xcode app and... - * Under __Preferences > Locations__ select the tools if their location is empty. - * Allow Xcode app to do an essential install of the most recent compiler tools. -3. Once all installations are _complete_, quit out of Xcode. -4. `sudo xcodebuild -license accept` # If you agree with the licensing terms. -5. `softwareupdate -l` # No listing is a good sign. - * If Xcode or Tools upgrades are listed, use "Software Upgrade" to install them. -6. `xcode-select -version` # Should return `xcode-select version 2370` or later. -7. `xcode-select -print-path` # Should return `/Applications/Xcode.app/Contents/Developer` -8. Try the [_acid test_ steps above](#The-acid-test) to see if your Mac is ready. -9. If the _acid test_ does _not_ pass then... -10. `sudo xcode-select --reset` # Enter root password. No output is normal. -11. Repeat step 7 above. Is the path different this time? Repeat the _acid test_. - -### Installing `node-gyp` using the Xcode Command Line Tools via `xcode-select --install` -1. If the _acid test_ has not succeeded, then try `xcode-select --install` -2. If the installation command returns `xcode-select: error: command line tools are already installed, use "Software Update" to install updates`, continue to [remove and reinstall](#i-did-all-that-and-the-acid-test-still-does-not-pass--) -3. Wait until the install process is _complete_. -4. `softwareupdate -l` # No listing is a good sign. - * If Xcode or Tools upgrades are listed, use "Software Update" to install them. -5. `xcode-select -version` # Should return `xcode-select version 2370` or later. -6. `xcode-select -print-path` # Should return `/Library/Developer/CommandLineTools` -7. Try the [_acid test_ steps above](#The-acid-test) to see if your Mac is ready. -8. If the _acid test_ does _not_ pass then... -9. `sudo xcode-select --reset` # Enter root password. No output is normal. -10. Repeat step 6 above. Is the path different this time? Repeat the _acid test_. - -### Installing `node-gyp` using the Xcode Command Line Tools via manual download -1. Download the appropriate version of the "Command Line Tools for Xcode" for your version of Catalina from <https://developer.apple.com/download/more/>. As of macOS 10.15.5, that's [Command_Line_Tools_for_Xcode_11.5.dmg](https://download.developer.apple.com/Developer_Tools/Command_Line_Tools_for_Xcode_11.5/Command_Line_Tools_for_Xcode_11.5.dmg) -2. Install the package. -3. Run the [_acid test_ steps above](#The-acid-test). - -### I did all that and the acid test still does not pass :-( -1. `sudo rm -rf $(xcode-select -print-path)` # Enter root password. No output is normal. -2.
`sudo rm -rf /Library/Developer/CommandLineTools` # Enter root password. -3. `xcode-select --reset` -4. `xcode-select --install` -5. If the [_acid test_ steps above](#The-acid-test) still does _not_ pass then... -6. `npm explore npm -g -- npm install node-gyp@latest` -7. `npm explore npm -g -- npm explore npm-lifecycle -- npm install node-gyp@latest` -8. If the _acid test_ still does _not_ pass then... -9. Add a comment to https://github.com/nodejs/node-gyp/issues/1927 so we can improve. - -Lessons learned from: -* https://github.com/nodejs/node-gyp/issues/1779 -* https://github.com/nodejs/node-gyp/issues/1861 -* https://github.com/nodejs/node-gyp/issues/1927 and elsewhere -* Thanks to @rrrix for discovering Solution 3 diff --git a/node_modules/libnpmexec/node_modules/node-gyp/macOS_Catalina_acid_test.sh b/node_modules/libnpmexec/node_modules/node-gyp/macOS_Catalina_acid_test.sh deleted file mode 100644 index e1e98941a8e4c..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/macOS_Catalina_acid_test.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -pkgs=( - "com.apple.pkg.DeveloperToolsCLILeo" # standalone - "com.apple.pkg.DeveloperToolsCLI" # from Xcode - "com.apple.pkg.CLTools_Executables" # Mavericks -) - -for pkg in "${pkgs[@]}"; do - output=$(/usr/sbin/pkgutil --pkg-info "$pkg" 2>/dev/null) - if [ "$output" ]; then - version=$(echo "$output" | grep 'version' | cut -d' ' -f2) - break - fi -done - -if [ "$version" ]; then - echo "Command Line Tools version: $version" -else - echo >&2 'Command Line Tools not found' -fi diff --git a/node_modules/libnpmexec/node_modules/node-gyp/package.json b/node_modules/libnpmexec/node_modules/node-gyp/package.json deleted file mode 100644 index 8e256f01702f0..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "node-gyp", - "description": "Node.js native addon build tool", - "license": "MIT", - "keywords": [ - "native", - "addon", - "module", - "c", - "c++", - "bindings", - "gyp" - ], - "version": "7.1.2", - "installVersion": 9, - "author": "Nathan Rajlich <nathan@tootallnate.net> (http://tootallnate.net)", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/node-gyp.git" - }, - "preferGlobal": true, - "bin": "./bin/node-gyp.js", - "main": "./lib/node-gyp.js", - "dependencies": { - "env-paths": "^2.2.0", - "glob": "^7.1.4", - "graceful-fs": "^4.2.3", - "nopt": "^5.0.0", - "npmlog": "^4.1.2", - "request": "^2.88.2", - "rimraf": "^3.0.2", - "semver": "^7.3.2", - "tar": "^6.0.2", - "which": "^2.0.2" - }, - "engines": { - "node": ">= 10.12.0" - }, - "devDependencies": { - "bindings": "^1.5.0", - "nan": "^2.14.2", - "require-inject": "^1.4.4", - "standard": "^14.3.4", - "tap": "^12.7.0" - }, - "scripts": { - "lint": "standard */*.js test/**/*.js", - "test": "npm run lint && tap --timeout=120 test/test-*" - } -} diff --git a/node_modules/libnpmexec/node_modules/node-gyp/src/win_delay_load_hook.cc b/node_modules/libnpmexec/node_modules/node-gyp/src/win_delay_load_hook.cc deleted file mode 100644 index 169f8029f10fd..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/src/win_delay_load_hook.cc +++ /dev/null @@ -1,39 +0,0 @@ -/* - * When this file is linked to a DLL, it sets up a delay-load hook that - * intervenes when the DLL is trying to load the host executable - * dynamically. Instead of trying to locate the .exe file it'll just - * return a handle to the process image. - * - * This allows compiled addons to work when the host executable is renamed.
- */ - -#ifdef _MSC_VER - -#pragma managed(push, off) - -#ifndef WIN32_LEAN_AND_MEAN -#define WIN32_LEAN_AND_MEAN -#endif - -#include <windows.h> - -#include <delayimp.h> -#include <string.h> - -static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) { - HMODULE m; - if (event != dliNotePreLoadLibrary) - return NULL; - - if (_stricmp(info->szDll, HOST_BINARY) != 0) - return NULL; - - m = GetModuleHandle(NULL); - return (FARPROC) m; -} - -decltype(__pfnDliNotifyHook2) __pfnDliNotifyHook2 = load_exe_hook; - -#pragma managed(pop) - -#endif diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/common.js b/node_modules/libnpmexec/node_modules/node-gyp/test/common.js deleted file mode 100644 index b714ee29029d3..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/common.js +++ /dev/null @@ -1,3 +0,0 @@ -const envPaths = require('env-paths') - -module.exports.devDir = () => envPaths('node-gyp', { suffix: '' }).cache diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt deleted file mode 100644 index 244f6b0798240..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt +++ /dev/null @@ -1 +0,0 @@ -[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugg
er.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.PackageGroup.CoreEditor","PortableFacades","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Editors","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsof
t.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build"]}] diff --git 
a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt deleted file mode 100644 index dd5e77dafb9df..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt +++ /dev/null @@ -1 +0,0 @@ -[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb","Win10SDK_IpOverUsb","Microsoft.VisualStudio.Component.VC.ATL.ARM64","Microsoft.VisualCpp.ATL.ARM64","Microsoft.VisualStudio.Component.VC.ATL.ARM","Microsoft.VisualCpp.ATL.ARM","Microsoft.VisualStudio.Component.VC.Tools.ARM","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.Icecap.Analysis","Microsoft.VisualCpp.CRT.Redist.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.Store","Microsoft.VisualCpp.CRT.arm.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM.Resources","Microsoft.VisualCpp.Premium.Tools.ARM.Base","Microsoft.VisualCpp.Premium.Tools.ARM.Base.Resources","Microsoft.VisualCpp.PGO.ARM","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualStudio.Product.Community","Microsoft.VisualCpp.Tools.Hostx86.Targetarm","Microsoft.VisualStudio.Component.VC.Tools.ARM64","Microsoft.VisualStudio.VC.MSBuild.Arm64","Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ARM64.Store","Microsoft.VisualCpp.CRT.ARM64.Desktop","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.Icecap.Analysis.Resources","Microsoft.VisualCpp.VCTip.hostX86.targetARM","Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM64","Microsoft.VisualCpp.Tools.Core","Microsoft.VisualCpp.PGO.ARM64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm64","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64.Resources","Microsoft.VisualCpp.Premium.Tools.ARM64.Base","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.CRT.Redist.ARM64","Microsoft.VisualCpp.CRT.arm.OneCore.Desktop","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Component.WixToolset.VisualStudioExtension.Dev15","WixToolset.VisualStudioExtension.Dev15","Microsoft.VisualCpp.MFC.X64","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.Component.Windows10SDK.17763","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","MLGen","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Component.TestTools.Core","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.NuGet.Licenses","SQLCommon","Microsoft.VisualS
tudio.VC.MSBuild.X86","Microsoft.VisualCpp.Tools.HostX64.TargetARM","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.HTMLHelpWorkshop.Msi","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.VCTip.hostX64.targetARM","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualCpp.Tools.HostX64.TargetARM.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.Component.MSBuild","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualCpp.Tools.Hostx86.Targetarm64","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualCpp.VCTip.hostX86.targetARM64","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualCpp.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualStudio.RazorExtension","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualCpp.MFC.Source","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.MFC.Redist.X86","Microsoft.VisualStudio.WebToolsExtensions.Chip","Microsoft.DiagnosticsHub.Runtime.Resources","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualCpp.MFC.Redist.X64","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualCpp.MFC.MBCS","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Component.TextTemplating","Win10SDK_10.0.17763","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualCpp.MFC.MBCS.X64","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.Graphics.EnableTools","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualCpp.MFC.Headers","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualCpp.CRT.Headers","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetARM64","Microsoft.VisualCpp.VCTip.hostX64.targetARM64","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.Icecap.Collection.Msi","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.Tools.HostX64.TargetARM64.Resources
","Microsoft.VisualStudio.Component.VC.ATLMFC","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualStudio.Component.Graphics.Win81","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.MFC.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.CredentialProvider","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.Component.NuGet","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualCpp.PGO.Headers","Microsoft.DiagnosticsHub.Collection","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualStudio.Branding.Community","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.WebToolsExtensions.Common","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.NuGet.Core","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.DiaSymReader.Native","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Component.Roslyn.LanguageServices","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.InteractiveWindow","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft
.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.Debugger.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","sqlsysclrtypes","Microsoft.VisualStudio.ProTools","Component.Microsoft.VisualStudio.RazorExtension","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.Build.Dependencies","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.Debugger.Managed","Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","sqlsysclrtypes","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.Component.Windows10
SDK.17134","Microsoft.VisualStudio.PackageGroup.Core","PortableFacades","Microsoft.DiaSymReader","Microsoft.DiagnosticsHub.Runtime","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.Community","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.ServiceHub","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.TeamExplorer","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents.Resources","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.GraphProvider","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents","Microsoft.CodeAnalysis.VisualStudio.Setup.Interactive.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.NuGet.Build.Tasks","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.Build","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.MinShell.Interop","Microsoft.Build.FileTracker.Msi","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.CoreEditor","Win10SDK_10.0.17134","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.VC.Ide.MFC","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.Devenv","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.MefHosting","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.VisualStudio.PackageGroup.MinShell"
,"Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualCpp.MFC.X86","Microsoft.VisualStudio.Log.Resources","Microsoft.Icecap.Analysis.Targeted","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.VisualStudio.Initializer","Microsoft.Net.PackageGroup.4.6.Redist"]}] diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt deleted file mode 100644 index c4b3b5f2b0163..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt +++ /dev/null @@ -1 +0,0 @@ -[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress","version":"15.9.28307.858","packages":["Microsoft.VisualStudio.Product.WDExpress","Microsoft.VisualStudio.Workload.WDExpress","Microsoft.VisualStudio.Component.Windows10SDK.17763","MLGen","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.Windows10SDK.14393","Win10SDK_10.0.14393.795","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.Component.VC.Tools.ARM64","Microsoft.VisualStudio.VC.MSBuild.Arm64","Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.ARM64","Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ARM64.Store","Microsoft.VisualCpp.CRT.ARM64.Desktop","Microsoft.VisualCpp.Tools.Hostx86.Targetarm64","Microsoft.VisualCpp.VCTip.hostX86.targetARM64","Microsoft.VisualCpp.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualStudio.Component.VC.Tools.ARM","Microsoft.VisualCpp.Tools.Hostx86.Targetarm","Microsoft.VisualCpp.VCTip.hostX86.targetARM","Microsoft.VisualCpp.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.Store","Microsoft.VisualCpp.CRT.arm.Desktop","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Ba
se","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.Component.VC.CLI.Support","Microsoft.VisualCpp.CLI.X86","Microsoft.VisualCpp.CLI.X64","Microsoft.VisualCpp.CLI.Source","Microsoft.VisualCpp.CLI.ARM64","Microsoft.VisualCpp.CLI.ARM","Microsoft.VisualStudio.VC.Templates.CLR","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Templates.CLR.Resources","Microsoft.Component.VC.Runtime.OSSupport","Microsoft.Windows.UniversalCRT.Tools.Msi","Microsoft.Windows.UniversalCRT.Tools.Msi","Microsoft.Windows.UniversalCRT.ExtensionSDK.Msi","Microsoft.Windows.UniversalCRT.HeadersLibsSources.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.Component.HelpViewer","Microsoft.HelpViewer","Microsoft.VisualStudio.Help.Configuration.Msi","Microsoft.VisualStudio.Component.SQL.DataSources","Microsoft.VisualStudio.Component.SQL.SSDT","Microsoft.VisualStudio.Component.SQL.CMDUtils","sqlcmdlnutils","Microsoft.VisualStudio.Component.Common.Azure.Tools","Microsoft.VisualStudio.Azure.CommonAzureTools","SSDT","Microsoft.VisualStudio.Component.SQL.ADAL","sql_adalsql","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.VisualStudio.Component.SQL.LocalDB.Runtime","Microsoft.VisualStudio.Component.SQL.NCLI","sqllocaldb","sqlncli","Microsoft.VisualStudio.Component.EntityFramework","Microsoft.VisualStudio.PackageGroup.DslRuntime","Microsoft.VisualStudio.Dsl.Core","Microsoft.VisualStudio.Dsl.GraphObject","Microsoft.VisualStudio.Dsl.Core.Resources","Microsoft.VisualStudio.EntityFrameworkTools","Microsoft.VisualStudio.EntityFrameworkTools.Msi","Microsoft.VisualStudio.Component.Roslyn.LanguageServices","Microsoft.VisualStudio.InteractiveWindow","Microsoft.DiaSymReader.Native","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.CodeAnalysis.VisualStudio
.InteractiveComponents.Resources","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents","Microsoft.CodeAnalysis.VisualStudio.Setup.Interactive.Resources","Microsoft.Net.ComponentGroup.TargetingPacks.Common","Microsoft.Net.Component.4.6.TargetingPack","Microsoft.Net.4.6.TargetingPack","Microsoft.Net.Component.4.5.2.TargetingPack","Microsoft.Net.4.5.2.TargetingPack","Microsoft.Net.Component.4.5.1.TargetingPack","Microsoft.Net.4.5.1.TargetingPack","Microsoft.Net.Component.4.5.TargetingPack","Microsoft.Net.4.5.TargetingPack","Microsoft.Net.Component.4.TargetingPack","Microsoft.Net.4.TargetingPack","Microsoft.Net.ComponentGroup.DevelopmentPrerequisites","Microsoft.Net.Component.4.6.1.TargetingPack","Microsoft.Net.4.6.1.TargetingPack","Microsoft.Net.Cumulative.TargetingPack.Resources","Microsoft.Net.Component.4.6.1.SDK","Microsoft.Net.4.6.1.SDK","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.Component.VisualStudioData","Microsoft.VisualStudio.Component.SQL.CLR","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.XamlDiagnostics","Microsoft.VisualStudio.XamlDiagnostics.Resources","Microsoft.VisualStudio.XamlDesigner","Microsoft.VisualStudio.XamlDesigner.Resources","Microsoft.VisualStudio.XamlDesigner.Executables","Microsoft.VisualStudio.XamlShared","Microsoft.VisualStudio.XamlShared.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.Managed","Microsoft.VisualStudio.PackageGroup.IntelliTrace.Core","Microsoft.IntelliTrace.Core","Microsoft.IntelliTrace.Core.Targeted","Microsoft.IntelliTrace.ProfilerProxy.Msi.x64","Microsoft.IntelliTrace.ProfilerProxy.Msi","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.TestWindow.SourceBasedTestDiscovery","Microsoft.VisualStudio.TestWindow.Dotnet","Microsoft.VisualStudio.TestTools.TestGeneration","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.TestTools.Enterprise","Microsoft.VisualStudio.PackageGroup.TestTools.MSTestV2.Managed","Microsoft.VisualStudio.TestTools.MSTestV2.WizardExtension.UnitTest","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTool
s.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.Component.ClickOnce","Microsoft.VisualStudio.PackageGroup.ClickOnce.MSBuild","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.ClickOnce.SignTool.Msi","Microsoft.SQL.ClickOnceBootstrapper.Msi","Microsoft.Net.ClickOnceBootstrapper","Microsoft.ClickOnce.BootStrapper.Msi.Resources","Microsoft.ClickOnce.BootStrapper.Msi","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.ClickOnce.Resources","Microsoft.VisualStudio.ClickOnce","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.TemplateEngine","Microsoft.VisualStudio.WebToolsExtensions.Common","Microsoft.NET.Sdk","Microsoft.VisualStudio.PackageGroup.TestTools.Templates.Managed","Microsoft.VisualStudio.TestTools.Templates.Managed","Microsoft.VisualStudio
.TestTools.Templates.Managed.Resources","Microsoft.VisualStudio.Templates.VB.MSTestv2.Desktop.UnitTest","Microsoft.VisualStudio.Templates.CS.MSTestv2.Desktop.UnitTest","Microsoft.VisualStudio.Templates.VB.Wpf","Microsoft.VisualStudio.Templates.VB.Wpf.Resources","Microsoft.VisualStudio.Templates.VB.Winforms","Microsoft.VisualStudio.Templates.VB.ManagedCore","Microsoft.VisualStudio.Templates.VB.Shared","Microsoft.VisualStudio.Templates.VB.Shared.Resources","Microsoft.VisualStudio.Templates.VB.ManagedCore.Resources","Microsoft.VisualStudio.Templates.CS.GettingStarted.Desktop.Package","Microsoft.VisualStudio.Templates.GetStarted.Desktop.Setup","Microsoft.VisualStudio.Templates.CS.GettingStarted.Console.Package","Microsoft.VisualStudio.Templates.GetStarted.Resources","Microsoft.VisualStudio.Templates.GetStarted.Common.Setup","Microsoft.VisualStudio.Templates.GetStarted.Console.Setup","Microsoft.VisualStudio.Templates.CS.Wpf","Microsoft.VisualStudio.Templates.CS.Wpf.Resources","Microsoft.VisualStudio.Templates.CS.Winforms","Microsoft.VisualStudio.Templates.CS.ManagedCore","Microsoft.VisualStudio.Templates.CS.Shared","Microsoft.VisualStudio.Templates.Editorconfig.Wizard.Setup","Templates.Editorconfig.SolutionFile.Setup","Microsoft.VisualStudio.Templates.CS.Shared.Resources","Microsoft.VisualStudio.Templates.CS.ManagedCore.Resources","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","PortableFacades","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.WDExpress","Microsoft.VisualStudio.WDExpress.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.6.Redist","Microsoft.VisualStudio.Branding.WDExpress"]}] 
diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt deleted file mode 100644 index fc0a257f44783..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt +++ /dev/null @@ -1 +0,0 @@ -[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildToolsUnusable","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.
VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.Net.4.6.1.FullRedist.NonThreshold","Microsoft.Windows.UniversalCRT.Msu.81","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build"]}] diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt deleted file mode 100644 index f07d254164882..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt +++ /dev/null @@ -1 +0,0 @@ -[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools","version":"16.1.28922.388","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.OpenFolder.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources
","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VisualC.Logging","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.DiaSymReader","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.PerfLib","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.VisualStudio.Editors","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Mi
crosoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.Visua
lStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build","Microsoft.VisualStudio.PackageGroup.NuGet","Microsoft.VisualStudio.NuGet.BuildTools"]}] diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt deleted file mode 100644 index 50071c25f189e..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt +++ /dev/null @@ -1 +0,0 @@ -[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community","version":"16.1.28922.388","packages":["Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.VC.ExternalBuildFramework","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.VisualStudio.Component.Windows10SDK.17763","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Mic
rosoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Mic
rosoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Component.Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.LiveShare","Microsoft.Icecap.Analysis","Microsoft.Icecap.Analysis.Targeted","Microsoft.Icecap.Analysis.Resources","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.Icecap.Collection.Msi","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.DiagnosticsHub.Instrumentation","Microsoft.DiagnosticsHub.CpuSampling.ExternalDependencies","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.DiagnosticsHub.Runtime.ExternalDependencies","Microsoft.DiagnosticsHub.Runtime.ExternalDependencies.Targeted","Microsoft.DiagnosticsHub.Collection.ExternalDependencies.x64","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.DiagnosticsHub.Runtime","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.DiagnosticsHub.Collection","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.Component.IntelliCode","Microsoft.VisualStudio.IntelliCode","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.VisualStudio.LiveShareApi","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.DiagnosticsHub.KB2882822.Win7","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","M
icrosoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VisualC.Logging","Microsoft.WebTools.Shared","Microsoft.WebTools.DotNet.Core.ItemTemplates","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.Windows.UniversalCRT.Msu.7","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Microsoft.VisualStudio.NuGet.PowershellBindingRedirect","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.DbgHelp.Win8","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.DbgHelp.Win8","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.DbgHelp.Win8","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Concord","Microso
ft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.Product.Community","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.PackageGroup.NuGet","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.OpenFolder.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.Devenv","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.PerfLib","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.Devenv.Config","Microsof
t.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.Net.4.7.2.FullRedist","Microsoft.VisualStudio.Branding.Community"]}] diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt deleted file mode 100644 index 806509e7ce865..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt +++ /dev/null @@ -1 +0,0 @@ -[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview","version":"16.0.28608.199","packages":["Microsoft.VisualStudio.Product.Enterprise","Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.VC.ExternalBuildFramework","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualStudio.Graphics.EnableTools","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.Component.Windows10SDK.17763","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCp
p.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.ComponentGroup.ArchitectureTools.Native","Microsoft.VisualStudio.Component.ClassDesigner","Microsoft.VisualStudio.ClassDesigner","Microsoft.VisualStudio.ClassDesigner.Resources","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.Progression.Enterprise","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.Component.CodeMap","Microsoft.VisualStudio.Component.GraphDocument","Microsoft.VisualStudio.Vmp","Microsoft.VisualStudio.GraphDocument","Microsoft.VisualStudio.GraphDocument.Resources","Microsoft.VisualStudio.CodeMap","Microsoft.VisualStudio.Component.SQL.LocalDB.Runtime","Microsoft.VisualStudio.Component.SQL.NCLI","sqllocaldb","sqlncli","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualStudio.WebTools","M
icrosoft.VisualStudio.WebTools.Resources","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Component.Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.VisualStudio.Component.IntelliTrace.FrontEnd","Microsoft.IntelliTrace.DiagnosticsHubAgent.Targeted","Microsoft.IntelliTrace.Debugger","Microsoft.IntelliTrace.Debugger.Targeted","Microsoft.IntelliTrace.FrontEnd","Microsoft.DiagnosticsHub.Instrumentation","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.DiagnosticsHub.Runtime","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.DiagnosticsHub.Runtime.Resources","Microsoft.DiagnosticsHub.Collection","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.Dsl.GraphObject","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.WebTools.Shared","Microsoft.VisualStudio.WebToolsExtensions.DotNet.Core.ItemTemplates","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.IntelliTrace.Core","Microsoft.IntelliTrace.Core","Microsoft.IntelliTrace.Core.Concord","Microsoft.IntelliTrace.Core.Targeted","Microsoft.IntelliTrace.ProfilerProxy.Msi.x64","Microsoft.IntelliTrace.ProfilerProxy.Msi","Microsoft.VisualStudio.TestTools.DynamicCodeCoverage","Microsoft.VisualStudio.TestTools.CodeCoverage.Msi","Microsoft.VisualStudio.TestTools.CodeCoverage","Microsoft.Icecap.Analysis","Microsoft.Icecap.Analysis.Targeted","Microsoft.Icecap.Analysis.Resources","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.Icecap.Collection.Msi","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.Visu
alStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Remote","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Premium","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.VisualStudio.TestTools.NE.Msi.Targeted","Microsoft.VisualStudio.TestTools.NetworkEmulation","Microsoft.VisualStudio.TestTools.DataCollectors","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.Visu
alStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.ProfessionalCore","Microsoft.VisualStudio.Professional","Microsoft.VisualStudio.Professional.Msi","Microsoft.VisualStudio.PackageGroup.EnterpriseCore","Microsoft.VisualStudio.Enterprise.Msi","Microsoft.VisualStudio.Enterprise","Microsoft.ShDocVw","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.Devenv","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.Platform.Search","Microsoft.VisualStudio.MinShell.
Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.VisualStudio.Branding.Enterprise"]}] diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/ca-bundle.crt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/ca-bundle.crt deleted file mode 100644 index fb1dea98a78c8..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/ca-bundle.crt +++ /dev/null @@ -1,40 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDJjCCAg4CAhnOMA0GCSqGSIb3DQEBBQUAMH0xCzAJBgNVBAYTAlVTMQswCQYD -VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n -TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv -bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMBkxFzAV -BgNVBAMMDnN0cm9uZ2xvb3AuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAwOYI7OZ2FX/YjRgLZoDQlbPc5UZXU/j0e1wwiJNPtPEax9Y5Uoza0Pnt -Ikzkc2SfvQ+IJrhXo385tI0W5juuqbHnE7UrjUuPjUX6NHevkxcs/flmjan5wnZM -cPsGhH71WDuUEEflvZihf2Se2x+xgZtMhc5XGmVmRuZFYKvkgUhA2/w8/QrK+jPT -n9QRJxZjWNh2RBdC1B7u4jffSmOSUljYFH1I2eTeY+Rdi6YUIYSU9gEoZxsv3Tia -SomfMF5jt2Mouo6MzA+IhLvvFjcrcph1Qxgi9RkfdCMMd+Ipm9YWELkyG1bDRpQy -0iyHD4gvVsAqz1Y2KdRSdc3Kt+nTqwIDAQABoxkwFzAVBgNVHREEDjAMhwQAAAAA -hwR/AAABMA0GCSqGSIb3DQEBBQUAA4IBAQAhy4J0hML3NgmDRHdL5/iTucBe22Mf -jJjg2aifD1S187dHm+Il4qZNO2plWwAhN0h704f+8wpsaALxUvBIu6nvlvcMP5PH -jGN5JLe2Km3UaPvYOQU2SgacLilu+uBcIo2JSHLV6O7ziqUj5Gior6YxDLCtEZie -Ea8aX5/YjuACtEMJ1JjRqjgkM66XAoUe0E8onOK3FgTIO3tGoTJwRp0zS50pFuP0 -PsZtT04ck6mmXEXXknNoAyBCvPypfms9OHqcUIW9fiQnrGbS/Ri4QSQYj0DtFk/1 -na4fY1gf3zTHxH8259b/TOOaPfTnCEsOQtjUrWNR4xhmVZ+HJy4yytUW ------END CERTIFICATE----- ------BEGIN CERTIFICATE----- -MIIDbzCCAlcCAmm6MA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNVBAYTAlVTMQswCQYD -VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n -TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv -bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMH0xCzAJ -BgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZ -MBcGA1UECgwQU3Ryb25nTG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRow -GAYDVQQDDBFjYS5zdHJvbmdsb29wLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBANfj86jkvvYDjHBgiqWhk9Cj+bqiMq3MqnV0CBO4iuK33Fo6XssE -H+yVdXlIBFbFe6t655MdBVOR2Sfj7WqNh96vhu6PyDHiwcQlTaiLU6nhIed1J4Wv -lvnJHFmp8Wbtx5AgLT4UYu03ftvXEl2DLi3vhSL2tRM1ebXHB/KPbRWkb25DPX0P -foOHot3f2dgNe2x6kponf7E/QDmAu3s7Nlkfh+ryDhgGU7wocXEhXbprNqRqOGNo -xbXgUI+/9XDxYT/7Gn5LF/fPjtN+aB0SKMnTsDhprVlZie83mlqJ46fOOrR+vrsQ -mi/1m/TadrARtZoIExC/cQRdVM05EK4tUa8CAwEAATANBgkqhkiG9w0BAQsFAAOC -AQEAQ7k5WhyhDTIGYCNzRnrMHWSzGqa1y4tJMW06wafJNRqTm1cthq1ibc6Hfq5a -K10K0qMcgauRTfQ1MWrVCTW/KnJ1vkhiTOH+RvxapGn84gSaRmV6KZen0+gMsgae -KEGe/3Hn+PmDVV+PTamHgPACfpTww38WHIe/7Ce9gHfG7MZ8cKHNZhDy0IAYPln+ -YRwMLd7JNQffHAbWb2CE1mcea4H/12U8JZW5tHCF6y9V+7IuDzqwIrLKcW3lG17n -VUG6ODF/Ryqn3V5X+TL91YyXi6c34y34IpC7MQDV/67U7+5Bp5CfeDPWW2wVSrW+ -uGZtfEvhbNm6m2i4UNmpCXxUZQ== ------END CERTIFICATE----- diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/ca.crt 
b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/ca.crt deleted file mode 100644 index aaf97575b18b4..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/ca.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDZDCCAkwCCQCAzfCLqrJvuTANBgkqhkiG9w0BAQsFADB0MQswCQYDVQQGEwJV -UzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsMCG5vZGUt -Z3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1aWxkQG5v -ZGVqcy5vcmcwHhcNMTkwNjIyMDYyMjMzWhcNMjIwNDExMDYyMjMzWjB0MQswCQYD -VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsM -CG5vZGUtZ3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1 -aWxkQG5vZGVqcy5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDS -CHjvtVW4HdbbUwZ/ZV9s6U4x0KSoyNQrsCZjB8kRpFPe50DS5mfmu2SNBGYKRgzk -4QEEwFB9N2o8YTWsCefSRl6ti4ToPZqulU4hhRKYrEGtMJcRzi3IN7s200JaO3UH -01Su8ruO0NESb5zEU1Ykfh8Lub8TGEAINmgI61d/5d5Aq3kDjUHQJt1Ekw03Ylnu -juQyCGZxLxnngu0mIvwzyL/UeeUgsfQLzvppUk6In7tC1zzMjSPWo0c8qu6KvrW4 -bKYnkZkzdQifzbpO5ERMEsh5HWq0uHa6+dgcVHFvlhdqF4Uat87ygNplVf0txsZB -MNVqbz1k6xkZYMnzDoydAgMBAAEwDQYJKoZIhvcNAQELBQADggEBADspZGtKpWxy -J1W3FA1aeQhMvequQTcMRz4avkm4K4HfTdV1iVD4CbvdezBphouBlyLVLDFJP7RZ -m7dBJVgBwnxufoFLne8cR2MGqDRoySbFT1AtDJdxabE6Fg+QGUpgOQfeBJ6ANlSB -+qJ+HG4QA+Ouh5hxz9mgYwkIsMUABHiwENdZ/kT8Edw4xKgd3uH0YP4iiePMD66c -rzW3uXH5J1jnKgBlpxtog4P6dHCcoq+PZJ17W5bdXNyqC1LPzQqniZ2BNcEZ4ix3 -slAZAOWD1zLLGJhBPMV1fa0sHNBWc6oicr3YK/IDb0cp9kiLvnUu1pHy+LWQGqtC -rceJuGsnJEQ= ------END CERTIFICATE----- diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/server.crt b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/server.crt deleted file mode 100644 index 5d0c440e0788a..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/server.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDYjCCAkoCCQCSlmGR7KzZGTANBgkqhkiG9w0BAQsFADB0MQswCQYDVQQGEwJV -UzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsMCG5vZGUt -Z3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1aWxkQG5v -ZGVqcy5vcmcwHhcNMTkwNjIyMDYyNTU1WhcNMjkwNjE5MDYyNTU1WjByMQswCQYD -VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsM -CG5vZGUtZ3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHTAbBgkqhkiG9w0BCQEWDmJ1 -aWxkQGlvanMub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6S1E -2WchgmbJYqCnpN7310ZgHjIOqeJe6MpSue2u6z6mTNd5izgvQNaANmn3xLFCS5zs -uZaTvdPYPkcmSQzb1YcZSUYnAxZifjYARc6kb5GSBl3q+O70ELyFrimXfZ4JI+bd -IG9KiHY17DlvZZZj/csGYVWWg0mkeH3O5LPX6/DXQVh/9+gZ02/cdIBCAtZHQwqx -7tF/qZA/kD4GZNFpU1DYHzf9H6g9htoCqmNHQWrV2T9yFybt6mbZp9kglBmyKYCc -7hmQnb7N/mHn1yIuwhBsirCJTfKH86gN81u8M3+SVHA2VUHDllcNhpDWlmInXA+I -tHdGZHCp95ohqpCPgQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCdvYj6CD0ZLwT2 -3t1r+deC3TJuHlNVSeKeT7wIfFnh2FW5riGV0q/w6eXPLTHjuiS6YmpAAbdNUgX/ -sq64FqI2RLpX6pgY5yB0SKopMcJxMLKqmF4zHpIHxtYN5EmN3PR0vehneBR/nZ2T -3ikvWD5JeXlm7Dfw+tjijdxM/sEoDWErGup4mMKMd1s5s830p+ITJUa50d0DLFdH -mqPSbUZF8mMPwGJd+nu1Ht3gTLtK7+gYJgGtXMJmGC0Qg77EJHDB2NbotgDGNmSU -1H9BpAeFHHIcbh2Rr7kkTvnh/c03vFe+CsDZmezcmRpRzW1fKj3YbfqBxU4XwJrL -a5T/N9xU ------END CERTIFICATE----- diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/server.key b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/server.key deleted file mode 100644 index a8447391e02e2..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/server.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEA6S1E2WchgmbJYqCnpN7310ZgHjIOqeJe6MpSue2u6z6mTNd5 
-izgvQNaANmn3xLFCS5zsuZaTvdPYPkcmSQzb1YcZSUYnAxZifjYARc6kb5GSBl3q -+O70ELyFrimXfZ4JI+bdIG9KiHY17DlvZZZj/csGYVWWg0mkeH3O5LPX6/DXQVh/ -9+gZ02/cdIBCAtZHQwqx7tF/qZA/kD4GZNFpU1DYHzf9H6g9htoCqmNHQWrV2T9y -Fybt6mbZp9kglBmyKYCc7hmQnb7N/mHn1yIuwhBsirCJTfKH86gN81u8M3+SVHA2 -VUHDllcNhpDWlmInXA+ItHdGZHCp95ohqpCPgQIDAQABAoIBABW8R4ewalo6dJlB -+n6O3jFt+PW3mtBRLqGqgm2cb0q0a1IMX+MPWLBFjmwEErl+AH0F4rcmBx2Ryr17 -amEy1qcf0caXyHksNAApznqzWXag7iizxnxv4cZRnHBwphNqkNWM5p3oYd04j6w2 -amDg1O9KZozaKo6QZclpiMiezwjKG+PVZLT8p7afswjv+yDWPDByhlcGiye9QD1T -VuZ0QCoXp6N/8JxW0gdkLp9NqFvGeGFzJ5h6L+d7A6BWw8akXrBRHHcKkyvVYBfd -myhSzSK4FPFMaxaEY/65FlVSyAO6ezGm3Umx4g7mkFjLdwKWaIOjkBkPeFgl3Pp4 -7Lo5X3UCgYEA/FrrIwmEU5ayulBVScEMKeavu5eNY4r0Sqbpov2oyTdYe8G49Pzy -ryMXfunY43moLKpajGwgTKRGvdqFtK08AAkaCssiAPkP3rZuZvMTF4sLo/vlWrjP -3er+tUqj22BzXi5XV0BAvH8Y3TL8KQ3he/8JxDvkC811/DQ9Y/Da3U8CgYEA7Itw -UM37URma08Bj9VTMoL9ZCyURewX+ZLDb2+O8sXGXJs28i1RkE6PTBlnRmedn+Jjk -byzQ5Cs5wA5uMbhYTA7kgXOs1bvgQqmlLmyL6FfHkucoMhr2Di7VeGf4OxE26JZ8 -JdY4+1MOyI3A2rR8WU+GmHxy0ay4K2xe6W0vsi8CgYBoGLEKIPDe8jkDtgOYivOT -jT9MaLXALB+dc8DIpU4swpHTaxP6qyUIrbcReTEolJSU6Ci16BxiwRkVU8D3yMYJ -VbfSX/zE3fh37FUaToa/nXHN0SjJBZdpeXhcHE//PIgaf48zxKNvnhYJmPB/luQ+ -m/PRaMsnOzfCM2JniYEe7QKBgGwjnxhB4tgDtaWCue/pcZc3gzS2IJS2e8N6mzie -l6Ajhu+FdOHZldrotUuc+la61OxwsVYmDeWR4VftAPGYDj3PPSX1RRl9R5wSRGLB -2wBASQvew6CMdNqtDIh8N56BUzHnwh/mHKzBHuwO6hDSHFsUITtLAY7bwGKRq55Z -fUmfAoGBANOYFyoJoDLcl+Jd750lyqfCifcGtkRdmZMtrPXaYnD8ZGme9vz1vsK/ -4iUkV3mi7Z9s1LXMa/tPPfKdVhCM1PXost3/si0+u1Bz5yKqEPXlyy2ltpIVyGu8 -yiy7y75asp8Iii/1cgtwyp9+VeSif8wJ+MHQoGdGxvAQP80R3EjF ------END RSA PRIVATE KEY----- diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/test-charmap.py b/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/test-charmap.py deleted file mode 100644 index b338f915bc38f..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/fixtures/test-charmap.py +++ /dev/null @@ -1,30 +0,0 @@ -from __future__ import print_function -import sys -import locale - -try: - reload(sys) -except NameError: # Python 3 - pass - -def main(): - encoding = locale.getdefaultlocale()[1] - if not encoding: - return False - - try: - sys.setdefaultencoding(encoding) - except AttributeError: # Python 3 - pass - - textmap = { - 'cp936': u'\u4e2d\u6587', - 'cp1252': u'Lat\u012Bna', - 'cp932': u'\u306b\u307b\u3093\u3054' - } - if encoding in textmap: - print(textmap[encoding]) - return True - -if __name__ == '__main__': - print(main()) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/process-exec-sync.js b/node_modules/libnpmexec/node_modules/node-gyp/test/process-exec-sync.js deleted file mode 100644 index 21763bc26de10..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/process-exec-sync.js +++ /dev/null @@ -1,140 +0,0 @@ -'use strict' - -const fs = require('graceful-fs') -const childProcess = require('child_process') - -function startsWith (str, search, pos) { - if (String.prototype.startsWith) { - return str.startsWith(search, pos) - } - - return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search -} - -function processExecSync (file, args, options) { - var child, error, timeout, tmpdir, command - command = makeCommand(file, args) - - /* - this function emulates child_process.execSync for legacy node <= 0.10.x - derived from https://github.com/gvarsanyi/sync-exec/blob/master/js/sync-exec.js - */ - - options = options || {} - // init timeout - timeout = Date.now() + options.timeout - // init tmpdir - var osTempBase = '/tmp' - var os = determineOS() - osTempBase = '/tmp' - - if (process.env.TMP) { - osTempBase = process.env.TMP - } - - if (osTempBase[osTempBase.length - 1] !== '/') { - osTempBase += '/' - } - - tmpdir = osTempBase + 'processExecSync.' + Date.now() + Math.random() - fs.mkdirSync(tmpdir) - - // init command - if (os === 'linux') { - command = '(' + command + ' > ' + tmpdir + '/stdout 2> ' + tmpdir + - '/stderr); echo $? > ' + tmpdir + '/status' - } else { - command = '(' + command + ' > ' + tmpdir + '/stdout 2> ' + tmpdir + - '/stderr) | echo %errorlevel% > ' + tmpdir + '/status | exit' - } - - // init child - child = childProcess.exec(command, options) - - var maxTry = 100000 // increases the test time by 6 seconds on win-2016-node-0.10 - var tryCount = 0 - while (tryCount < maxTry) { - try { - var x = fs.readFileSync(tmpdir + '/status') - if (x.toString() === '0') { - break - } - } catch (ignore) {} - tryCount++ - if (Date.now() > timeout) { - error = child - break - } - } - - ['stdout', 'stderr', 'status'].forEach(function (file) { - child[file] = fs.readFileSync(tmpdir + '/' + file, options.encoding) - setTimeout(unlinkFile, 500, tmpdir + '/' + file) - }) - - child.status = Number(child.status) - if (child.status !== 0) { - error = child - } - - try { - fs.rmdirSync(tmpdir) - } catch (ignore) {} - if (error) { - throw error - } - return child.stdout -} - -function makeCommand (file, args) { - var command, quote - command = file - if (args.length > 0) { - for (var i in args) { - command = command + ' ' - if (args[i][0] === '-') { - command = command + args[i] - } else { - if (!quote) { - command = command + '"' - quote = true - } - command = command + args[i] - if (quote) { - if (args.length === (parseInt(i) + 1)) { - command = command + '"' - } - } - } - } - } - return command -} - -function determineOS () { - var os = '' - var tmpVar = '' - if (process.env.OSTYPE) { - tmpVar = process.env.OSTYPE - } else if (process.env.OS) { - tmpVar = process.env.OS - } else { - // default is linux - tmpVar = 'linux' - } - - if (startsWith(tmpVar, 'linux')) { - os = 'linux' - } - if (startsWith(tmpVar, 'win')) { - os = 'win' - } - - return os -} - -function unlinkFile (file) { - fs.unlinkSync(file) -} - -module.exports = processExecSync diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/simple-proxy.js b/node_modules/libnpmexec/node_modules/node-gyp/test/simple-proxy.js deleted file mode 100644 index cb0dfcfec7edc..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/simple-proxy.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' - -const http = require('http') -const https = require('https') -const server = http.createServer(handler) -const port = +process.argv[2] -const prefix = process.argv[3] -const upstream = process.argv[4] -var calls = 0 - -server.listen(port) - -function handler (req, res) { - if (req.url.indexOf(prefix) !== 0) { - throw new Error('request url [' + req.url + '] does not start with [' + prefix + ']') - } - - var upstreamUrl = upstream + req.url.substring(prefix.length) - https.get(upstreamUrl, 
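// The process-exec-sync.js shim deleted above emulated child_process.execSync
// for Node <= 0.10 by redirecting a command's stdout, stderr, and exit status
// into temp files and busy-polling the status file. On any Node new enough to
// run this test suite the built-in API covers the same need; a minimal sketch:
const { execSync } = require('child_process')
const out = execSync('node --version', { encoding: 'utf8', timeout: 5000 })
console.log(out.trim())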
function (ures) { - ures.on('end', function () { - if (++calls === 2) { - server.close() - } - }) - ures.pipe(res) - }) -} diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-addon.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-addon.js deleted file mode 100644 index f79eff73c169e..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-addon.js +++ /dev/null @@ -1,150 +0,0 @@ -'use strict' - -const test = require('tap').test -const path = require('path') -const fs = require('graceful-fs') -const childProcess = require('child_process') -const os = require('os') -const addonPath = path.resolve(__dirname, 'node_modules', 'hello_world') -const nodeGyp = path.resolve(__dirname, '..', 'bin', 'node-gyp.js') -const execFileSync = childProcess.execFileSync || require('./process-exec-sync') -const execFile = childProcess.execFile - -function runHello (hostProcess) { - if (!hostProcess) { - hostProcess = process.execPath - } - var testCode = "console.log(require('hello_world').hello())" - return execFileSync(hostProcess, ['-e', testCode], { cwd: __dirname }).toString() -} - -function getEncoding () { - var code = 'import locale;print(locale.getdefaultlocale()[1])' - return execFileSync('python', ['-c', code]).toString().trim() -} - -function checkCharmapValid () { - var data - try { - data = execFileSync('python', ['fixtures/test-charmap.py'], - { cwd: __dirname }) - } catch (err) { - return false - } - var lines = data.toString().trim().split('\n') - return lines.pop() === 'True' -} - -test('build simple addon', function (t) { - t.plan(3) - - // Set the loglevel otherwise the output disappears when run via 'npm test' - var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello().trim(), 'world') - }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') -}) - -test('build simple addon in path with non-ascii characters', function (t) { - t.plan(1) - - if (!checkCharmapValid()) { - return t.skip('python console app can\'t encode non-ascii character.') - } - - var testDirNames = { - cp936: '文件夹', - cp1252: 'Latīna', - cp932: 'フォルダ' - } - // Select non-ascii characters by current encoding - var testDirName = testDirNames[getEncoding()] - // If encoding is UTF-8 or other then no need to test - if (!testDirName) { - return t.skip('no need to test') - } - - t.plan(3) - - var data - var configPath = path.join(addonPath, 'build', 'config.gypi') - try { - data = fs.readFileSync(configPath, 'utf8') - } catch (err) { - t.error(err) - return - } - var config = JSON.parse(data.replace(/#.+\n/, '')) - var nodeDir = config.variables.nodedir - var testNodeDir = path.join(addonPath, testDirName) - // Create symbol link to path with non-ascii characters - try { - fs.symlinkSync(nodeDir, testNodeDir, 'dir') - } catch (err) { - switch (err.code) { - case 'EEXIST': break - case 'EPERM': - t.error(err, 'Please try to running console as an administrator') - return - default: - t.error(err) - return - } - } - - var cmd = [ - nodeGyp, - 'rebuild', - '-C', - addonPath, - '--loglevel=verbose', - '-nodedir=' + testNodeDir - ] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { - try { - fs.unlink(testNodeDir) - } catch (err) { - 
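// The simple-proxy.js helper deleted above forwarded each request to an
// upstream base URL after verifying and stripping a path prefix, which lets
// the proxy tests tell proxied responses from direct ones. Its essential
// shape as a standalone sketch ('/proxy' and the upstream URL are made up):
const http = require('http')
const https = require('https')
const prefix = '/proxy'
const upstream = 'https://example.com'
http.createServer(function (req, res) {
  if (req.url.indexOf(prefix) !== 0) throw new Error('unexpected url ' + req.url)
  https.get(upstream + req.url.slice(prefix.length), function (ures) { ures.pipe(res) })
}).listen(0)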
t.error(err) - } - - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello().trim(), 'world') - }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') -}) - -test('addon works with renamed host executable', function (t) { - // No `fs.copyFileSync` before node8. - if (process.version.substr(1).split('.')[0] < 8) { - t.skip('skipping test for old node version') - t.end() - return - } - - t.plan(3) - - var notNodePath = path.join(os.tmpdir(), 'notnode' + path.extname(process.execPath)) - fs.copyFileSync(process.execPath, notNodePath) - - var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello(notNodePath).trim(), 'world') - fs.unlinkSync(notNodePath) - }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-configure-python.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-configure-python.js deleted file mode 100644 index ac25f7972ea23..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-configure-python.js +++ /dev/null @@ -1,79 +0,0 @@ -'use strict' - -const test = require('tap').test -const path = require('path') -const devDir = require('./common').devDir() -const gyp = require('../lib/node-gyp') -const requireInject = require('require-inject') -const configure = requireInject('../lib/configure', { - 'graceful-fs': { - openSync: function () { return 0 }, - closeSync: function () { }, - writeFile: function (file, data, cb) { cb() }, - stat: function (file, cb) { cb(null, {}) }, - mkdir: function (dir, options, cb) { cb() } - } -}) - -const EXPECTED_PYPATH = path.join(__dirname, '..', 'gyp', 'pylib') -const SEPARATOR = process.platform === 'win32' ? 
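// test-addon.js, deleted above, rebuilt the hello_world fixture addon with
// node-gyp and then required it from a fresh process. A rough sketch of that
// round trip; it assumes node-gyp is on PATH and that a hello_world addon
// fixture sits next to the script, as it did in the deleted suite:
const { execFileSync } = require('child_process')
const path = require('path')
const addonPath = path.resolve(__dirname, 'node_modules', 'hello_world')
execFileSync('node-gyp', ['rebuild', '-C', addonPath, '--loglevel=verbose'])
const out = execFileSync(process.execPath,
  ['-e', "console.log(require('hello_world').hello())"], { cwd: __dirname })
console.log(out.toString().trim() === 'world' ? 'ok' : 'not ok')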
';' : ':' -const SPAWN_RESULT = { on: function () { } } - -require('npmlog').level = 'warn' - -test('configure PYTHONPATH with no existing env', function (t) { - t.plan(1) - - delete process.env.PYTHONPATH - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, EXPECTED_PYPATH) - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) -}) - -test('configure PYTHONPATH with existing env of one dir', function (t) { - t.plan(2) - - var existingPath = path.join('a', 'b') - process.env.PYTHONPATH = existingPath - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) - - var dirs = process.env.PYTHONPATH.split(SEPARATOR) - t.deepEqual(dirs, [EXPECTED_PYPATH, existingPath]) - - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) -}) - -test('configure PYTHONPATH with existing env of multiple dirs', function (t) { - t.plan(2) - - var pythonDir1 = path.join('a', 'b') - var pythonDir2 = path.join('b', 'c') - var existingPath = [pythonDir1, pythonDir2].join(SEPARATOR) - process.env.PYTHONPATH = existingPath - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) - - var dirs = process.env.PYTHONPATH.split(SEPARATOR) - t.deepEqual(dirs, [EXPECTED_PYPATH, pythonDir1, pythonDir2]) - - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-download.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-download.js deleted file mode 100644 index fe373e3280ce6..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-download.js +++ /dev/null @@ -1,268 +0,0 @@ -'use strict' - -const test = require('tap').test -const fs = require('fs') -const path = require('path') -const http = require('http') -const https = require('https') -const install = require('../lib/install') -const semver = require('semver') -const devDir = require('./common').devDir() -const rimraf = require('rimraf') -const gyp = require('../lib/node-gyp') -const log = require('npmlog') - -log.level = 'warn' - -test('download over http', function (t) { - t.plan(2) - - var server = http.createServer(function (req, res) { - t.strictEqual(req.headers['user-agent'], - 'node-gyp v42 (node ' + process.version + ')') - res.end('ok') - server.close() - }) - - var host = 'localhost' - server.listen(0, host, function () { - var port = this.address().port - var gyp = { - opts: {}, - version: '42' - } - var url = 'http://' + host + ':' + port - var req = install.test.download(gyp, {}, url) - req.on('response', function (res) { - var body = '' - res.setEncoding('utf8') - res.on('data', function (data) { - body += data - }) - res.on('end', function () { - t.strictEqual(body, 'ok') - }) - }) - }) -}) - -test('download over https with custom ca', function (t) { - t.plan(3) - - var cert = fs.readFileSync(path.join(__dirname, 'fixtures/server.crt'), 'utf8') - var key = fs.readFileSync(path.join(__dirname, 'fixtures/server.key'), 'utf8') - - var cafile = path.join(__dirname, '/fixtures/ca.crt') - var ca = install.test.readCAFile(cafile) - t.strictEqual(ca.length, 1) - - var options = { ca: ca, cert: cert, key: key } - var server = https.createServer(options, function (req, res) { - t.strictEqual(req.headers['user-agent'], - 'node-gyp v42 (node ' + 
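// The three PYTHONPATH tests deleted above assert a single invariant:
// configure() prepends node-gyp's bundled gyp/pylib directory to PYTHONPATH
// (so the spawned python can import gyp) while preserving existing entries.
// Sketched directly, with a hypothetical location for pylib:
const path = require('path')
const SEP = process.platform === 'win32' ? ';' : ':'
const pylib = path.join(__dirname, 'gyp', 'pylib')
const existing = process.env.PYTHONPATH
process.env.PYTHONPATH = existing ? [pylib, existing].join(SEP) : pylib
console.log(process.env.PYTHONPATH.split(SEP))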
process.version + ')') - res.end('ok') - server.close() - }) - - server.on('clientError', function (err) { - throw err - }) - - var host = 'localhost' - server.listen(8000, host, function () { - var port = this.address().port - var gyp = { - opts: { cafile: cafile }, - version: '42' - } - var url = 'https://' + host + ':' + port - var req = install.test.download(gyp, {}, url) - req.on('response', function (res) { - var body = '' - res.setEncoding('utf8') - res.on('data', function (data) { - body += data - }) - res.on('end', function () { - t.strictEqual(body, 'ok') - }) - }) - }) -}) - -test('download over http with proxy', function (t) { - t.plan(2) - - var server = http.createServer(function (req, res) { - t.strictEqual(req.headers['user-agent'], - 'node-gyp v42 (node ' + process.version + ')') - res.end('ok') - pserver.close(function () { - server.close() - }) - }) - - var pserver = http.createServer(function (req, res) { - t.strictEqual(req.headers['user-agent'], - 'node-gyp v42 (node ' + process.version + ')') - res.end('proxy ok') - server.close(function () { - pserver.close() - }) - }) - - var host = 'localhost' - server.listen(0, host, function () { - var port = this.address().port - pserver.listen(port + 1, host, function () { - var gyp = { - opts: { - proxy: 'http://' + host + ':' + (port + 1) - }, - version: '42' - } - var url = 'http://' + host + ':' + port - var req = install.test.download(gyp, {}, url) - req.on('response', function (res) { - var body = '' - res.setEncoding('utf8') - res.on('data', function (data) { - body += data - }) - res.on('end', function () { - t.strictEqual(body, 'proxy ok') - }) - }) - }) - }) -}) - -test('download over http with noproxy', function (t) { - t.plan(2) - - var server = http.createServer(function (req, res) { - t.strictEqual(req.headers['user-agent'], - 'node-gyp v42 (node ' + process.version + ')') - res.end('ok') - pserver.close(function () { - server.close() - }) - }) - - var pserver = http.createServer(function (req, res) { - t.strictEqual(req.headers['user-agent'], - 'node-gyp v42 (node ' + process.version + ')') - res.end('proxy ok') - server.close(function () { - pserver.close() - }) - }) - - var host = 'localhost' - server.listen(0, host, function () { - var port = this.address().port - pserver.listen(port + 1, host, function () { - var gyp = { - opts: { - proxy: 'http://' + host + ':' + (port + 1), - noproxy: 'localhost' - }, - version: '42' - } - var url = 'http://' + host + ':' + port - var req = install.test.download(gyp, {}, url) - req.on('response', function (res) { - var body = '' - res.setEncoding('utf8') - res.on('data', function (data) { - body += data - }) - res.on('end', function () { - t.strictEqual(body, 'ok') - }) - }) - }) - }) -}) - -test('download with missing cafile', function (t) { - t.plan(1) - var gyp = { - opts: { cafile: 'no.such.file' } - } - try { - install.test.download(gyp, {}, 'http://bad/') - } catch (e) { - t.ok(/no.such.file/.test(e.message)) - } -}) - -test('check certificate splitting', function (t) { - var cas = install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt')) - t.plan(2) - t.strictEqual(cas.length, 2) - t.notStrictEqual(cas[0], cas[1]) -}) - -// only run this test if we are running a version of Node with predictable version path behavior - -test('download headers (actual)', function (t) { - if (process.env.FAST_TEST || - process.release.name !== 'node' || - semver.prerelease(process.version) !== null || - semver.satisfies(process.version, '<10')) { - return t.skip('Skipping 
actual download of headers due to test environment configuration') - } - - t.plan(17) - - const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) - rimraf(expectedDir, (err) => { - t.ifError(err) - - const prog = gyp() - prog.parseArgv([]) - prog.devDir = devDir - log.level = 'warn' - install(prog, [], (err) => { - t.ifError(err) - - fs.readFile(path.join(expectedDir, 'installVersion'), 'utf8', (err, data) => { - t.ifError(err) - t.strictEqual(data, '9\n', 'correct installVersion') - }) - - fs.readdir(path.join(expectedDir, 'include/node'), (err, list) => { - t.ifError(err) - - t.ok(list.includes('common.gypi')) - t.ok(list.includes('config.gypi')) - t.ok(list.includes('node.h')) - t.ok(list.includes('node_version.h')) - t.ok(list.includes('openssl')) - t.ok(list.includes('uv')) - t.ok(list.includes('uv.h')) - t.ok(list.includes('v8-platform.h')) - t.ok(list.includes('v8.h')) - t.ok(list.includes('zlib.h')) - }) - - fs.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8', (err, contents) => { - t.ifError(err) - - const lines = contents.split('\n') - - // extract the 3 version parts from the defines to build a valid version string and - // and check them against our current env version - const version = ['major', 'minor', 'patch'].reduce((version, type) => { - const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`) - const line = lines.find((l) => re.test(l)) - const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR' - return `${version}${type !== 'major' ? '.' : 'v'}${i}` - }, '') - - t.strictEqual(version, process.version) - }) - }) - }) -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-accessible-sync.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-accessible-sync.js deleted file mode 100644 index 0a2e584c4fb33..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-accessible-sync.js +++ /dev/null @@ -1,84 +0,0 @@ -'use strict' - -const test = require('tap').test -const path = require('path') -const requireInject = require('require-inject') -const configure = requireInject('../lib/configure', { - 'graceful-fs': { - closeSync: function () { return undefined }, - openSync: function (path) { - if (readableFiles.some(function (f) { return f === path })) { - return 0 - } else { - var error = new Error('ENOENT - not found') - throw error - } - } - } -}) - -const dir = path.sep + 'testdir' -const readableFile = 'readable_file' -const anotherReadableFile = 'another_readable_file' -const readableFileInDir = 'somedir' + path.sep + readableFile -const readableFiles = [ - path.resolve(dir, readableFile), - path.resolve(dir, anotherReadableFile), - path.resolve(dir, readableFileInDir) -] - -test('find accessible - empty array', function (t) { - t.plan(1) - - var candidates = [] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - single item array, readable', function (t) { - t.plan(1) - - var candidates = [readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFile)) -}) - -test('find accessible - single item array, readable in subdir', function (t) { - t.plan(1) - - var candidates = [readableFileInDir] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFileInDir)) -}) - -test('find accessible - single item array, 
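// Among the download tests deleted above, 'check certificate splitting'
// expects readCAFile to split a PEM bundle into individual certificates.
// The function below is not node-gyp's implementation, just one plausible
// way to satisfy that test; for the ca-bundle.crt fixture deleted earlier
// it returns two entries:
const fs = require('fs')
function readCAFile (file) {
  const re = /-----BEGIN CERTIFICATE-----[\S\s]+?-----END CERTIFICATE-----/g
  return fs.readFileSync(file, 'utf8').match(re) || []
}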
unreadable', function (t) { - t.plan(1) - - var candidates = ['unreadable_file'] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - multi item array, no matches', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', 'unreadable_file'] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - multi item array, single match', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFile)) -}) - -test('find accessible - multi item array, return first match', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', anotherReadableFile, readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, anotherReadableFile)) -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-node-directory.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-node-directory.js deleted file mode 100644 index f1380d162ae7c..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-node-directory.js +++ /dev/null @@ -1,119 +0,0 @@ -'use strict' - -const test = require('tap').test -const path = require('path') -const findNodeDirectory = require('../lib/find-node-directory') - -const platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix'] - -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in a build tree where npm is installed in -// .... /deps/npm -test('test find-node-directory - node install', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - t.equal( - findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj), - path.join('/x')) - } -}) - -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in an installed tree where npm is installed in -// .... 
/lib/node_modules/npm or .../node_modules/npm -// depending on the platform -test('test find-node-directory - node build', function (t) { -  t.plan(platforms.length) -  for (var next = 0; next < platforms.length; next++) { -    var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } -    if (platforms[next] === 'win32') { -      t.equal( -        findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib', -          processObj), path.join('/y')) -    } else { -      t.equal( -        findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib', -          processObj), path.join('/y')) -    } -  } -}) - -// we should find the directory based on the execPath -// for node and match because it was in the bin directory -test('test find-node-directory - node in bin directory', function (t) { -  t.plan(platforms.length) -  for (var next = 0; next < platforms.length; next++) { -    var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } -    t.equal( -      findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), -      path.join('/x/y')) -  } -}) - -// we should find the directory based on the execPath -// for node and match because it was in the Release directory -test('test find-node-directory - node in build release dir', function (t) { -  t.plan(platforms.length) -  for (var next = 0; next < platforms.length; next++) { -    var processObj -    if (platforms[next] === 'win32') { -      processObj = { execPath: '/x/y/Release/node', platform: platforms[next] } -    } else { -      processObj = { -        execPath: '/x/y/out/Release/node', -        platform: platforms[next] -      } -    } - -    t.equal( -      findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), -      path.join('/x/y')) -  } -}) - -// we should find the directory based on the execPath -// for node and match because it was in the Debug directory -test('test find-node-directory - node in Debug release dir', function (t) { -  t.plan(platforms.length) -  for (var next = 0; next < platforms.length; next++) { -    var processObj -    if (platforms[next] === 'win32') { -      processObj = { execPath: '/a/b/Debug/node', platform: platforms[next] } -    } else { -      processObj = { execPath: '/a/b/out/Debug/node', platform: platforms[next] } -    } - -    t.equal( -      findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), -      path.join('/a/b')) -  } -}) - -// we should not find it as it will not match based on the execPath nor -// the directory from which the script is running -test('test find-node-directory - not found', function (t) { -  t.plan(platforms.length) -  for (var next = 0; next < platforms.length; next++) { -    var processObj = { execPath: '/x/y/z/y', platform: platforms[next] } -    t.equal(findNodeDirectory('/a/b/c/d', processObj), '') -  } -}) - -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in a build tree where npm is installed in -// ....
/deps/npm -// same test as above but make sure additional directory entries -// don't cause an issue -test('test find-node-directory - node install', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - t.equal( - findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib', - processObj), path.join('/x/y/z/a/b/c')) - } -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-python.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-python.js deleted file mode 100644 index 6be887f7eb3fc..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-python.js +++ /dev/null @@ -1,230 +0,0 @@ -'use strict' - -delete process.env.PYTHON - -const test = require('tap').test -const findPython = require('../lib/find-python') -const execFile = require('child_process').execFile -const PythonFinder = findPython.test.PythonFinder - -require('npmlog').level = 'warn' - -test('find python', function (t) { - t.plan(4) - - findPython.test.findPython(null, function (err, found) { - t.strictEqual(err, null) - var proc = execFile(found, ['-V'], function (err, stdout, stderr) { - t.strictEqual(err, null) - if (/Python 2/.test(stderr)) { - t.strictEqual(stdout, '') - t.ok(/Python 2/.test(stderr)) - } else { - t.ok(/Python 3/.test(stdout)) - t.strictEqual(stderr, '') - } - }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') - }) -}) - -function poison (object, property) { - function fail () { - console.error(Error(`Property ${property} should not have been accessed.`)) - process.abort() - } - var descriptor = { - configurable: false, - enumerable: false, - get: fail, - set: fail - } - Object.defineProperty(object, property, descriptor) -} - -function TestPythonFinder () { - PythonFinder.apply(this, arguments) -} -TestPythonFinder.prototype = Object.create(PythonFinder.prototype) -// Silence npmlog - remove for debugging -TestPythonFinder.prototype.log = { - silly: () => {}, - verbose: () => {}, - info: () => {}, - warn: () => {}, - error: () => {} -} -delete TestPythonFinder.prototype.env.NODE_GYP_FORCE_PYTHON - -test('find python - python', function (t) { - t.plan(6) - - var f = new TestPythonFinder('python', done) - f.execFile = function (program, args, opts, cb) { - f.execFile = function (program, args, opts, cb) { - poison(f, 'execFile') - t.strictEqual(program, '/path/python') - t.ok(/sys\.version_info/.test(args[1])) - cb(null, '2.7.15') - } - t.strictEqual(program, - process.platform === 'win32' ? 
'"python"' : 'python') - t.ok(/sys\.executable/.test(args[1])) - cb(null, '/path/python') - } - f.findPython() - - function done (err, python) { - t.strictEqual(err, null) - t.strictEqual(python, '/path/python') - } -}) - -test('find python - python too old', function (t) { - t.plan(2) - - var f = new TestPythonFinder(null, done) - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(null, '/path/python') - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(null, '2.3.4') - } else { - t.fail() - } - } - f.findPython() - - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not supported/i.test(f.errorLog)) - } -}) - -test('find python - no python', function (t) { - t.plan(2) - - var f = new TestPythonFinder(null, done) - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(new Error('not a Python executable')) - } else { - t.fail() - } - } - f.findPython() - - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } -}) - -test('find python - no python2, no python, unix', function (t) { - t.plan(2) - - var f = new TestPythonFinder(null, done) - f.checkPyLauncher = t.fail - f.win = false - - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else { - t.fail() - } - } - f.findPython() - - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } -}) - -test('find python - no python, use python launcher', function (t) { - t.plan(3) - - var f = new TestPythonFinder(null, done) - f.win = true - - f.execFile = function (program, args, opts, cb) { - if (program === 'py.exe') { - t.notEqual(args.indexOf('-c'), -1) - return cb(null, 'Z:\\snake.exe') - } - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (f.winDefaultLocations.includes(program)) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - if (program === 'Z:\\snake.exe') { - cb(null, '2.7.14') - } else { - t.fail() - } - } else { - t.fail() - } - } - f.findPython() - - function done (err, python) { - t.strictEqual(err, null) - t.strictEqual(python, 'Z:\\snake.exe') - } -}) - -test('find python - no python, no python launcher, good guess', function (t) { - t.plan(2) - - var re = /C:[\\/]Python37[\\/]python[.]exe/ - var f = new TestPythonFinder(null, done) - f.win = true - - f.execFile = function (program, args, opts, cb) { - if (program === 'py.exe') { - return cb(new Error('not found')) - } - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (re.test(program) && - /sys\.version_info/.test(args[args.length - 1])) { - cb(null, '3.7.3') - } else { - t.fail() - } - } - f.findPython() - - function done (err, python) { - t.strictEqual(err, null) - t.ok(re.test(python)) - } -}) - -test('find python - no python, no python launcher, bad guess', function (t) { - t.plan(2) - - var f = new TestPythonFinder(null, done) - f.win = true - - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(new Error('not a Python executable')) - } else { - 
t.fail() - } - } - f.findPython() - - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-visualstudio.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-visualstudio.js deleted file mode 100644 index 1327cf8841111..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-find-visualstudio.js +++ /dev/null @@ -1,676 +0,0 @@ -'use strict' - -const test = require('tap').test -const fs = require('fs') -const path = require('path') -const findVisualStudio = require('../lib/find-visualstudio') -const VisualStudioFinder = findVisualStudio.test.VisualStudioFinder - -const semverV1 = { major: 1, minor: 0, patch: 0 } - -delete process.env.VCINSTALLDIR - -function poison (object, property) { - function fail () { - console.error(Error(`Property ${property} should not have been accessed.`)) - process.abort() - } - var descriptor = { - configurable: false, - enumerable: false, - get: fail, - set: fail - } - Object.defineProperty(object, property, descriptor) -} - -function TestVisualStudioFinder () { VisualStudioFinder.apply(this, arguments) } -TestVisualStudioFinder.prototype = Object.create(VisualStudioFinder.prototype) -// Silence npmlog - remove for debugging -TestVisualStudioFinder.prototype.log = { - silly: () => {}, - verbose: () => {}, - info: () => {}, - warn: () => {}, - error: () => {} -} - -test('VS2013', function (t) { - t.plan(4) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\MSBuild12\\MSBuild.exe', - path: 'C:\\VS2013', - sdk: null, - toolset: 'v120', - version: '12.0', - versionMajor: 12, - versionMinor: 0, - versionYear: 2013 - }) - }) - - finder.findVisualStudio2017OrNewer = (cb) => { - finder.parseData(new Error(), '', '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - continue - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\VS2013\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\MSBuild12\\') - default: - t.fail(`unexpected search for registry value ${fullName}`) - } - } - return cb(new Error()) - } - finder.findVisualStudio() -}) - -test('VS2013 should not be found on new node versions', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder({ - major: 10, - minor: 0, - patch: 0 - }, null, (err, info) => { - t.ok(/find .* Visual Studio/i.test(err), 'expect error') - t.false(info, 'no data') - }) - - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - continue - 
default: - t.fail(`unexpected search for registry value ${fullName}`) - } - } - return cb(new Error()) - } - finder.findVisualStudio() -}) - -test('VS2015', function (t) { - t.plan(4) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\MSBuild14\\MSBuild.exe', - path: 'C:\\VS2015', - sdk: null, - toolset: 'v140', - version: '14.0', - versionMajor: 14, - versionMinor: 0, - versionYear: 2015 - }) - }) - - finder.findVisualStudio2017OrNewer = (cb) => { - finder.parseData(new Error(), '', '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\VS2015\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\MSBuild14\\') - default: - t.fail(`unexpected search for registry value ${fullName}`) - } - } - return cb(new Error()) - } - finder.findVisualStudio() -}) - -test('error from PowerShell', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(new Error(), '', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') - }) -}) - -test('empty output from PowerShell', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(null, '', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') - }) -}) - -test('output from PowerShell not JSON', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(null, 'AAAABBBB', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') - }) -}) - -test('wrong JSON from PowerShell', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(null, '{}', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') - }) -}) - -test('empty JSON from PowerShell', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(null, '[]', '', (info) => { - t.ok(/find .* Visual Studio/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') - }) -}) - -test('future version', function (t) { - t.plan(3) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(null, JSON.stringify([{ - packages: [ - 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64', - 'Microsoft.VisualStudio.Component.Windows10SDK.17763', - 'Microsoft.VisualStudio.VC.MSBuild.Base' - ], - path: 'C:\\VS', - version: '9999.9999.9999.9999' - }]), '', (info) => { - t.ok(/unknown version/i.test(finder.errorLog[0]), 'expect error') - t.ok(/find .* Visual Studio/i.test(finder.errorLog[1]), 'expect error') - t.false(info, 'no data') - }) -}) - -test('single unusable VS2017', function (t) { - t.plan(3) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', 
(info) => { - t.ok(/checking/i.test(finder.errorLog[0]), 'expect error') - t.ok(/find .* Visual Studio/i.test(finder.errorLog[2]), 'expect error') - t.false(info, 'no data') - }) -}) - -test('minimal VS2017 Build Tools', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + - 'BuildTools\\MSBuild\\15.0\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools', - sdk: '10.0.17134.0', - toolset: 'v141', - version: '15.9.28307.665', - versionMajor: 15, - versionMinor: 9, - versionYear: 2017 - }) - }) - - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2017_BuildTools_minimal.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) - -test('VS2017 Community with C++ workload', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + - 'Community\\MSBuild\\15.0\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', - sdk: '10.0.17763.0', - toolset: 'v141', - version: '15.9.28307.665', - versionMajor: 15, - versionMinor: 9, - versionYear: 2017 - }) - }) - - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2017_Community_workload.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) - -test('VS2017 Express', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + - 'WDExpress\\MSBuild\\15.0\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress', - sdk: '10.0.17763.0', - toolset: 'v141', - version: '15.9.28307.858', - versionMajor: 15, - versionMinor: 9, - versionYear: 2017 - }) - }) - - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', 'VS_2017_Express.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) - -test('VS2019 Preview with C++ workload', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + - 'Preview\\MSBuild\\Current\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview', - sdk: '10.0.17763.0', - toolset: 'v142', - version: '16.0.28608.199', - versionMajor: 16, - versionMinor: 0, - versionYear: 2019 - }) - }) - - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2019_Preview.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) - -test('minimal VS2019 Build Tools', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) 
=> { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + - 'BuildTools\\MSBuild\\Current\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', - sdk: '10.0.17134.0', - toolset: 'v142', - version: '16.1.28922.388', - versionMajor: 16, - versionMinor: 1, - versionYear: 2019 - }) - }) - - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2019_BuildTools_minimal.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) - -test('VS2019 Community with C++ workload', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + - 'Community\\MSBuild\\Current\\Bin\\MSBuild.exe', - path: - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community', - sdk: '10.0.17763.0', - toolset: 'v142', - version: '16.1.28922.388', - versionMajor: 16, - versionMinor: 1, - versionYear: 2019 - }) - }) - - poison(finder, 'regSearchKeys') - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', - 'VS_2019_Community_workload.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.findVisualStudio() -}) - -function allVsVersions (t, finder) { - finder.findVisualStudio2017OrNewer = (cb) => { - const data0 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2017_Unusable.txt'))) - const data1 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2017_BuildTools_minimal.txt'))) - const data2 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2017_Community_workload.txt'))) - const data3 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2017_Express.txt'))) - const data4 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2019_Preview.txt'))) - const data5 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2019_BuildTools_minimal.txt'))) - const data6 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', - 'VS_2019_Community_workload.txt'))) - const data = JSON.stringify(data0.concat(data1, data2, data3, data4, - data5, data6)) - finder.parseData(null, data, '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': - continue - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': - return cb(null, 'C:\\VS2013\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': - return cb(null, 'C:\\MSBuild12\\') - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - return cb(null, 'C:\\VS2015\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': - return cb(null, 'C:\\MSBuild14\\') - default: - t.fail(`unexpected search for registry value ${fullName}`) - } - } - return cb(new Error()) - } -} - -test('fail when looking for invalid path', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, 'AABB', (err, info) => { - t.ok(/find .* 
Visual Studio/i.test(err), 'expect error') - t.false(info, 'no data') - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2013 by version number', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, '2013', (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2013) - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2013 by installation path', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2013', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, 'C:\\VS2013') - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2015 by version number', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, '2015', (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2015) - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2015 by installation path', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, 'C:\\VS2015') - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2017 by version number', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, '2017', (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2017) - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2017 by installation path', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community') - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2019 by version number', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, '2019', (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2019) - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('look for VS2019 by installation path', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('msvs_version match should be case insensitive', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, - 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS', - (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.path, - 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('latest version should be found by default', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info.versionYear, 2019) - }) - - allVsVersions(t, finder) - finder.findVisualStudio() -}) - -test('run on a usable VS Command Prompt', function (t) { - t.plan(2) - - process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' - // VSINSTALLDIR is not defined on 
Visual C++ Build Tools 2015 -  delete process.env.VSINSTALLDIR - -  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { -    t.strictEqual(err, null) -    t.deepEqual(info.path, 'C:\\VS2015') -  }) - -  allVsVersions(t, finder) -  finder.findVisualStudio() -}) - -test('VCINSTALLDIR match should be case insensitive', function (t) { -  t.plan(2) - -  process.env.VCINSTALLDIR = -    'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS\\VC' - -  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { -    t.strictEqual(err, null) -    t.deepEqual(info.path, -      'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') -  }) - -  allVsVersions(t, finder) -  finder.findVisualStudio() -}) - -test('run on an unusable VS Command Prompt', function (t) { -  t.plan(2) - -  process.env.VCINSTALLDIR = -    'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildToolsUnusable\\VC' - -  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { -    t.ok(/find .* Visual Studio/i.test(err), 'expect error') -    t.false(info, 'no data') -  }) - -  allVsVersions(t, finder) -  finder.findVisualStudio() -}) - -test('run on a VS Command Prompt with matching msvs_version', function (t) { -  t.plan(2) - -  process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' - -  const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', -    (err, info) => { -      t.strictEqual(err, null) -      t.deepEqual(info.path, 'C:\\VS2015') -    }) - -  allVsVersions(t, finder) -  finder.findVisualStudio() -}) - -test('run on a VS Command Prompt with mismatched msvs_version', function (t) { -  t.plan(2) - -  process.env.VCINSTALLDIR = -    'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC' - -  const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', -    (err, info) => { -      t.ok(/find .* Visual Studio/i.test(err), 'expect error') -      t.false(info, 'no data') -    }) - -  allVsVersions(t, finder) -  finder.findVisualStudio() -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-install.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-install.js deleted file mode 100644 index c3317155e0f3d..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-install.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict' - -const test = require('tap').test -const install = require('../lib/install').test.install - -require('npmlog').level = 'error' // we expect a warning - -test('EACCES retry once', function (t) { -  t.plan(3) - -  var fs = {} -  fs.stat = function (path, cb) { -    var err = new Error() -    err.code = 'EACCES' -    cb(err) -    t.ok(true) -  } - -  var gyp = {} -  gyp.devDir = __dirname -  gyp.opts = {} -  gyp.opts.ensure = true -  gyp.commands = {} -  gyp.commands.install = function (argv, cb) { -    install(fs, gyp, argv, cb) -  } -  gyp.commands.remove = function (argv, cb) { -    cb() -  } - -  gyp.commands.install([], function (err) { -    t.ok(true) -    if (/"pre" versions of node cannot be installed/.test(err.message)) { -      t.ok(true) -      t.ok(true) -    } -  }) -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-options.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-options.js deleted file mode 100644 index b2ac62c874f06..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-options.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const test = require('tap').test -const gyp = require('../lib/node-gyp') - -test('options in environment', (t) => { -  t.plan(1) - -  // `npm test` dumps a ton of npm_config_* variables in the environment.
- Object.keys(process.env) - .filter((key) => /^npm_config_/.test(key)) - .forEach((key) => { delete process.env[key] }) - - // in some platforms, certain keys are stubborn and cannot be removed - const keys = Object.keys(process.env) - .filter((key) => /^npm_config_/.test(key)) - .map((key) => key.substring('npm_config_'.length)) - .concat('argv', 'x') - - // Zero-length keys should get filtered out. - process.env.npm_config_ = '42' - // Other keys should get added. - process.env.npm_config_x = '42' - // Except loglevel. - process.env.npm_config_loglevel = 'debug' - - const g = gyp() - g.parseArgv(['rebuild']) // Also sets opts.argv. - - t.deepEqual(Object.keys(g.opts).sort(), keys.sort()) -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/test/test-process-release.js b/node_modules/libnpmexec/node_modules/node-gyp/test/test-process-release.js deleted file mode 100644 index c3ee0703c532f..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/test/test-process-release.js +++ /dev/null @@ -1,434 +0,0 @@ -'use strict' - -const test = require('tap').test -const processRelease = require('../lib/process-release') - -test('test process release - process.version = 0.8.20', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.8.20', null) - - t.equal(release.semver.version, '0.8.20') - delete release.semver - - t.deepEqual(release, { - version: '0.8.20', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.8.20/', - tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt', - versionDir: '0.8.20', - ia32: { libUrl: 'https://nodejs.org/dist/v0.8.20/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.8.20/arm64/node.lib', libPath: 'arm64/node.lib' } - }) -}) - -test('test process release - process.version = 0.10.21', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.10.21', null) - - t.equal(release.semver.version, '0.10.21') - delete release.semver - - t.deepEqual(release, { - version: '0.10.21', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.10.21/', - tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt', - versionDir: '0.10.21', - ia32: { libUrl: 'https://nodejs.org/dist/v0.10.21/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.10.21/arm64/node.lib', libPath: 'arm64/node.lib' } - }) -}) - -// prior to -headers.tar.gz -test('test process release - process.version = 0.12.9', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.12.9', null) - - t.equal(release.semver.version, '0.12.9') - delete release.semver - - t.deepEqual(release, { - version: '0.12.9', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.12.9/', - tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt', - versionDir: '0.12.9', - ia32: { libUrl: 'https://nodejs.org/dist/v0.12.9/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.12.9/arm64/node.lib', libPath: 'arm64/node.lib' } - }) -}) - -// prior to -headers.tar.gz 
-test('test process release - process.version = 0.10.41', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.10.41', null) - - t.equal(release.semver.version, '0.10.41') - delete release.semver - - t.deepEqual(release, { - version: '0.10.41', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.10.41/', - tarballUrl: 'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt', - versionDir: '0.10.41', - ia32: { libUrl: 'https://nodejs.org/dist/v0.10.41/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.10.41/arm64/node.lib', libPath: 'arm64/node.lib' } - }) -}) - -// has -headers.tar.gz -test('test process release - process.release ~ node@0.10.42', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.10.42', null) - - t.equal(release.semver.version, '0.10.42') - delete release.semver - - t.deepEqual(release, { - version: '0.10.42', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.10.42/', - tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt', - versionDir: '0.10.42', - ia32: { libUrl: 'https://nodejs.org/dist/v0.10.42/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.10.42/arm64/node.lib', libPath: 'arm64/node.lib' } - }) -}) - -// has -headers.tar.gz -test('test process release - process.release ~ node@0.12.10', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v0.12.10', null) - - t.equal(release.semver.version, '0.12.10') - delete release.semver - - t.deepEqual(release, { - version: '0.12.10', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.12.10/', - tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt', - versionDir: '0.12.10', - ia32: { libUrl: 'https://nodejs.org/dist/v0.12.10/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.12.10/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.12.10/arm64/node.lib', libPath: 'arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@4.1.23', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v4.1.23/', - tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@4.1.23 / corp build', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'node', - headersUrl: 
'https://some.custom.location/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'node', - baseUrl: 'https://some.custom.location/', - tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'https://some.custom.location/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'https://some.custom.location/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://some.custom.location/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://some.custom.location/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@12.8.0 Windows', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v12.8.0', { - name: 'node', - sourceUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', - headersUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', - libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib' - }) - - t.equal(release.semver.version, '12.8.0') - delete release.semver - - t.deepEqual(release, { - version: '12.8.0', - name: 'node', - baseUrl: 'https://nodejs.org/download/release/v12.8.0/', - tarballUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/download/release/v12.8.0/SHASUMS256.txt', - versionDir: '12.8.0', - ia32: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@12.8.0 Windows ARM64', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v12.8.0', { - name: 'node', - sourceUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', - headersUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', - libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib' - }) - - t.equal(release.semver.version, '12.8.0') - delete release.semver - - t.deepEqual(release, { - version: '12.8.0', - name: 'node', - baseUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/', - tarballUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', - shasumsUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/SHASUMS256.txt', - versionDir: '12.8.0', - ia32: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@4.1.23 --target=0.10.40', function (t) { - t.plan(2) - - var release = processRelease([], { opts: { target: '0.10.40' } }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '0.10.40') - delete release.semver 
- - t.deepEqual(release, { - version: '0.10.40', - name: 'node', - baseUrl: 'https://nodejs.org/dist/v0.10.40/', - tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz', - shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt', - versionDir: '0.10.40', - ia32: { libUrl: 'https://nodejs.org/dist/v0.10.40/node.lib', libPath: 'node.lib' }, - x64: { libUrl: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', libPath: 'x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/dist/v0.10.40/arm64/node.lib', libPath: 'arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@4.1.23 --dist-url=https://foo.bar/baz', function (t) { - t.plan(2) - - var release = processRelease([], { opts: { 'dist-url': 'https://foo.bar/baz' } }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'node', - baseUrl: 'https://foo.bar/baz/v4.1.23/', - tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ frankenstein@4.1.23', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'frankenstein', - headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'frankenstein', - baseUrl: 'https://frankensteinjs.org/dist/v4.1.23/', - tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', - shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt', - versionDir: 'frankenstein-4.1.23', - ia32: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, - x64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, - arm64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } - }) -}) - -test('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function (t) { - t.plan(2) - - var release = processRelease([], { opts: { 'dist-url': 'http://foo.bar/baz/' } }, 'v4.1.23', { - name: 'frankenstein', - headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'frankenstein', - baseUrl: 'http://foo.bar/baz/v4.1.23/', - tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', - shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt', - versionDir: 'frankenstein-4.1.23', - ia32: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, - x64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, - arm64: { libUrl: 
'http://foo.bar/baz/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } - }) -}) - -test('test process release - process.release ~ node@4.0.0-rc.4', function (t) { - t.plan(2) - - var release = processRelease([], { opts: {} }, 'v4.0.0-rc.4', { - name: 'node', - headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.0.0-rc.4') - delete release.semver - - t.deepEqual(release, { - version: '4.0.0-rc.4', - name: 'node', - baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', - tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', - versionDir: '4.0.0-rc.4', - ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function (t) { - t.plan(2) - - // note the missing 'v' on the arg, it should normalise when checking - // whether we're on the default or not - var release = processRelease(['4.0.0-rc.4'], { opts: {} }, 'v4.0.0-rc.4', { - name: 'node', - headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.0.0-rc.4') - delete release.semver - - t.deepEqual(release, { - version: '4.0.0-rc.4', - name: 'node', - baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', - tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', - versionDir: '4.0.0-rc.4', - ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function (t) { - t.plan(2) - - // additional arguments can be passed in on the commandline that should be ignored if they - // are not specifying a valid version @ position 0 - var release = processRelease(['this is no version!'], { opts: {} }, 'v4.0.0-rc.4', { - name: 'node', - headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.0.0-rc.4') - delete release.semver - - t.deepEqual(release, { - version: '4.0.0-rc.4', - name: 'node', - baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', - tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', - shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', - versionDir: '4.0.0-rc.4', - ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) -}) - -test('test process release - NODEJS_ORG_MIRROR', 
function (t) { - t.plan(2) - - process.env.NODEJS_ORG_MIRROR = 'http://foo.bar' - - var release = processRelease([], { opts: {} }, 'v4.1.23', { - name: 'node', - headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' - }) - - t.equal(release.semver.version, '4.1.23') - delete release.semver - - t.deepEqual(release, { - version: '4.1.23', - name: 'node', - baseUrl: 'http://foo.bar/v4.1.23/', - tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'http://foo.bar/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'http://foo.bar/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'http://foo.bar/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) - - delete process.env.NODEJS_ORG_MIRROR -}) diff --git a/node_modules/libnpmexec/node_modules/node-gyp/update-gyp.py b/node_modules/libnpmexec/node_modules/node-gyp/update-gyp.py deleted file mode 100755 index aa2bcb9eb991f..0000000000000 --- a/node_modules/libnpmexec/node_modules/node-gyp/update-gyp.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import os -import shutil -import subprocess -import sys -import tarfile -import tempfile -import urllib.request - -BASE_URL = "https://github.com/nodejs/gyp-next/archive/" -CHECKOUT_PATH = os.path.dirname(os.path.realpath(__file__)) -CHECKOUT_GYP_PATH = os.path.join(CHECKOUT_PATH, 'gyp') - -parser = argparse.ArgumentParser() -parser.add_argument("tag", help="gyp tag to update to") -args = parser.parse_args() - -tar_url = BASE_URL + args.tag + ".tar.gz" - -changed_files = subprocess.check_output(["git", "diff", "--name-only"]).strip() -if changed_files: - raise Exception("Can't update gyp while you have uncommitted changes in node-gyp") - -with tempfile.TemporaryDirectory() as tmp_dir: - tar_file = os.path.join(tmp_dir, 'gyp.tar.gz') - unzip_target = os.path.join(tmp_dir, 'gyp') - with open(tar_file, 'wb') as f: - print("Downloading gyp-next@" + args.tag + " into temporary directory...") - print("From: " + tar_url) - with urllib.request.urlopen(tar_url) as in_file: - f.write(in_file.read()) - - print("Unzipping...") - with tarfile.open(tar_file, "r:gz") as tar_ref: - tar_ref.extractall(unzip_target) - - print("Moving to current checkout (" + CHECKOUT_PATH + ")...") - if os.path.exists(CHECKOUT_GYP_PATH): - shutil.rmtree(CHECKOUT_GYP_PATH) - shutil.move(os.path.join(unzip_target, os.listdir(unzip_target)[0]), CHECKOUT_GYP_PATH) - -subprocess.check_output(["git", "add", "gyp"], cwd=CHECKOUT_PATH) -subprocess.check_output(["git", "commit", "-m", "gyp: update gyp to " + args.tag]) diff --git a/node_modules/libnpmexec/node_modules/npmlog/LICENSE b/node_modules/libnpmexec/node_modules/npmlog/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/libnpmexec/node_modules/npmlog/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/libnpmexec/node_modules/npmlog/log.js b/node_modules/libnpmexec/node_modules/npmlog/log.js deleted file mode 100644 index 341f3313ab354..0000000000000 --- a/node_modules/libnpmexec/node_modules/npmlog/log.js +++ /dev/null @@ -1,309 +0,0 @@ -'use strict' -var Progress = require('are-we-there-yet') -var Gauge = require('gauge') -var EE = require('events').EventEmitter -var log = exports = module.exports = new EE() -var util = require('util') - -var setBlocking = require('set-blocking') -var consoleControl = require('console-control-strings') - -setBlocking(true) -var stream = process.stderr -Object.defineProperty(log, 'stream', { - set: function (newStream) { - stream = newStream - if (this.gauge) this.gauge.setWriteTo(stream, stream) - }, - get: function () { - return stream - } -}) - -// by default, decide based on tty-ness. -var colorEnabled -log.useColor = function () { - return colorEnabled != null ? colorEnabled : stream.isTTY -} - -log.enableColor = function () { - colorEnabled = true - this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) -} -log.disableColor = function () { - colorEnabled = false - this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) -} - -// default level -log.level = 'info' - -log.gauge = new Gauge(stream, { - enabled: false, // no progress bars unless asked - theme: {hasColor: log.useColor()}, - template: [ - {type: 'progressbar', length: 20}, - {type: 'activityIndicator', kerning: 1, length: 1}, - {type: 'section', default: ''}, - ':', - {type: 'logline', kerning: 1, default: ''} - ] -}) - -log.tracker = new Progress.TrackerGroup() - -// we track this separately as we may need to temporarily disable the -// display of the status bar for our own loggy purposes. 
-log.progressEnabled = log.gauge.isEnabled() - -var unicodeEnabled - -log.enableUnicode = function () { -  unicodeEnabled = true -  this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) -} - -log.disableUnicode = function () { -  unicodeEnabled = false -  this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) -} - -log.setGaugeThemeset = function (themes) { -  this.gauge.setThemeset(themes) -} - -log.setGaugeTemplate = function (template) { -  this.gauge.setTemplate(template) -} - -log.enableProgress = function () { -  if (this.progressEnabled) return -  this.progressEnabled = true -  this.tracker.on('change', this.showProgress) -  if (this._paused) return -  this.gauge.enable() -} - -log.disableProgress = function () { -  if (!this.progressEnabled) return -  this.progressEnabled = false -  this.tracker.removeListener('change', this.showProgress) -  this.gauge.disable() -} - -var trackerConstructors = ['newGroup', 'newItem', 'newStream'] - -var mixinLog = function (tracker) { -  // mixin the public methods from log into the tracker -  // (except: conflicts and ones we handle specially) -  Object.keys(log).forEach(function (P) { -    if (P[0] === '_') return -    if (trackerConstructors.filter(function (C) { return C === P }).length) return -    if (tracker[P]) return -    if (typeof log[P] !== 'function') return -    var func = log[P] -    tracker[P] = function () { -      return func.apply(log, arguments) -    } -  }) -  // if the new tracker is a group, make sure any subtrackers get -  // mixed in too -  if (tracker instanceof Progress.TrackerGroup) { -    trackerConstructors.forEach(function (C) { -      var func = tracker[C] -      tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } -    }) -  } -  return tracker -} - -// Add tracker constructors to the top level log object -trackerConstructors.forEach(function (C) { -  log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } -}) - -log.clearProgress = function (cb) { -  if (!this.progressEnabled) return cb && process.nextTick(cb) -  this.gauge.hide(cb) -} - -log.showProgress = function (name, completed) { -  if (!this.progressEnabled) return -  var values = {} -  if (name) values.section = name -  var last = log.record[log.record.length - 1] -  if (last) { -    values.subsection = last.prefix -    var disp = log.disp[last.level] || last.level -    var logline = this._format(disp, log.style[last.level]) -    if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) -    logline += ' ' + last.message.split(/\r?\n/)[0] -    values.logline = logline -  } -  values.completed = completed || this.tracker.completed() -  this.gauge.show(values) -}.bind(log) // bind for use in tracker's on-change listener - -// temporarily stop emitting, but don't drop -log.pause = function () { -  this._paused = true -  if (this.progressEnabled) this.gauge.disable() -} - -log.resume = function () { -  if (!this._paused) return -  this._paused = false - -  var b = this._buffer -  this._buffer = [] -  b.forEach(function (m) { -    this.emitLog(m) -  }, this) -  if (this.progressEnabled) this.gauge.enable() -} - -log._buffer = [] - -var id = 0 -log.record = [] -log.maxRecordSize = 10000 -log.log = function (lvl, prefix, message) { -  var l = this.levels[lvl] -  if (l === undefined) { -    return this.emit('error', new Error(util.format( -      'Undefined log level: %j', lvl))) -  } - -  var a = new Array(arguments.length - 2) -  var stack = null -  for (var i = 2; i < arguments.length; i++) { -    var arg = a[i - 2] = arguments[i] - -    // resolve stack traces to a
plain string. - if (typeof arg === 'object' && arg && - (arg instanceof Error) && arg.stack) { - - Object.defineProperty(arg, 'stack', { - value: stack = arg.stack + '', - enumerable: true, - writable: true - }) - } - } - if (stack) a.unshift(stack + '\n') - message = util.format.apply(util, a) - - var m = { id: id++, - level: lvl, - prefix: String(prefix || ''), - message: message, - messageRaw: a } - - this.emit('log', m) - this.emit('log.' + lvl, m) - if (m.prefix) this.emit(m.prefix, m) - - this.record.push(m) - var mrs = this.maxRecordSize - var n = this.record.length - mrs - if (n > mrs / 10) { - var newSize = Math.floor(mrs * 0.9) - this.record = this.record.slice(-1 * newSize) - } - - this.emitLog(m) -}.bind(log) - -log.emitLog = function (m) { - if (this._paused) { - this._buffer.push(m) - return - } - if (this.progressEnabled) this.gauge.pulse(m.prefix) - var l = this.levels[m.level] - if (l === undefined) return - if (l < this.levels[this.level]) return - if (l > 0 && !isFinite(l)) return - - // If 'disp' is null or undefined, use the lvl as a default - // Allows: '', 0 as valid disp - var disp = log.disp[m.level] != null ? log.disp[m.level] : m.level - this.clearProgress() - m.message.split(/\r?\n/).forEach(function (line) { - if (this.heading) { - this.write(this.heading, this.headingStyle) - this.write(' ') - } - this.write(disp, log.style[m.level]) - var p = m.prefix || '' - if (p) this.write(' ') - this.write(p, this.prefixStyle) - this.write(' ' + line + '\n') - }, this) - this.showProgress() -} - -log._format = function (msg, style) { - if (!stream) return - - var output = '' - if (this.useColor()) { - style = style || {} - var settings = [] - if (style.fg) settings.push(style.fg) - if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) - if (style.bold) settings.push('bold') - if (style.underline) settings.push('underline') - if (style.inverse) settings.push('inverse') - if (settings.length) output += consoleControl.color(settings) - if (style.beep) output += consoleControl.beep() - } - output += msg - if (this.useColor()) { - output += consoleControl.color('reset') - } - return output -} - -log.write = function (msg, style) { - if (!stream) return - - stream.write(this._format(msg, style)) -} - -log.addLevel = function (lvl, n, style, disp) { - // If 'disp' is null or undefined, use the lvl as a default - if (disp == null) disp = lvl - this.levels[lvl] = n - this.style[lvl] = style - if (!this[lvl]) { - this[lvl] = function () { - var a = new Array(arguments.length + 1) - a[0] = lvl - for (var i = 0; i < arguments.length; i++) { - a[i + 1] = arguments[i] - } - return this.log.apply(this, a) - }.bind(this) - } - this.disp[lvl] = disp -} - -log.prefixStyle = { fg: 'magenta' } -log.headingStyle = { fg: 'white', bg: 'black' } - -log.style = {} -log.levels = {} -log.disp = {} -log.addLevel('silly', -Infinity, { inverse: true }, 'sill') -log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') -log.addLevel('info', 2000, { fg: 'green' }) -log.addLevel('timing', 2500, { fg: 'green', bg: 'black' }) -log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) -log.addLevel('notice', 3500, { fg: 'blue', bg: 'black' }) -log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') -log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') -log.addLevel('silent', Infinity) - -// allow 'error' prefix -log.on('error', function () {}) diff --git a/node_modules/libnpmexec/node_modules/npmlog/package.json 
b/node_modules/libnpmexec/node_modules/npmlog/package.json deleted file mode 100644 index 7220f8e72a3c7..0000000000000 --- a/node_modules/libnpmexec/node_modules/npmlog/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", - "name": "npmlog", - "description": "logger for npm", - "version": "4.1.2", - "repository": { - "type": "git", - "url": "https://github.com/npm/npmlog.git" - }, - "main": "log.js", - "files": [ - "log.js" - ], - "scripts": { - "test": "standard && tap test/*.js" - }, - "dependencies": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - }, - "devDependencies": { - "standard": "~7.1.2", - "tap": "~5.7.3" - }, - "license": "ISC" -} diff --git a/node_modules/libnpmexec/node_modules/pacote/LICENSE b/node_modules/libnpmexec/node_modules/pacote/LICENSE deleted file mode 100644 index a03cd0ed0b338..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/bin.js b/node_modules/libnpmexec/node_modules/pacote/lib/bin.js deleted file mode 100755 index c0409be1ff085..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/bin.js +++ /dev/null @@ -1,149 +0,0 @@ -#!/usr/bin/env node - -const run = conf => { - const pacote = require('../') - switch (conf._[0]) { - case 'resolve': - if (conf.long) - return pacote.manifest(conf._[1], conf).then(mani => ({ - resolved: mani._resolved, - integrity: mani._integrity, - from: mani._from, - })) - case 'manifest': - case 'packument': - return pacote[conf._[0]](conf._[1], conf) - - case 'tarball': - if (!conf._[2] || conf._[2] === '-') { - return pacote.tarball.stream(conf._[1], stream => { - stream.pipe(conf.testStdout || - /* istanbul ignore next */ process.stdout) - // make sure it resolves something falsey - return stream.promise().then(() => {}) - }, conf) - } else - return pacote.tarball.file(conf._[1], conf._[2], conf) - - case 'extract': - return pacote.extract(conf._[1], conf._[2], conf) - - default: /* istanbul ignore next */ { - throw new Error(`bad command: ${conf._[0]}`) - } - } -} - -const version = require('../package.json').version -const usage = () => -`Pacote - The JavaScript Package Handler, v${version} - -Usage: - - pacote resolve <spec> - Resolve a specifier and output the fully resolved target - Returns integrity and from if '--long' flag is set. - - pacote manifest <spec> - Fetch a manifest and print to stdout - - pacote packument <spec> - Fetch a full packument and print to stdout - - pacote tarball <spec> [<filename>] - Fetch a package tarball and save to <filename> - If <filename> is missing or '-', the tarball will be streamed to stdout.
- - pacote extract <spec> <folder> - Extract a package to the destination folder. - -Configuration values all match the names of configs passed to npm, or -options passed to Pacote. Additional flags for this executable: - - --long Print an object from 'resolve', including integrity and spec. - --json Print result objects as JSON rather than node's default. - (This is the default if stdout is not a TTY.) - --help -h Print this helpful text. - -For example '--cache=/path/to/folder' will use that folder as the cache. -` - -const shouldJSON = (conf, result) => - conf.json || - !process.stdout.isTTY && - conf.json === undefined && - result && - typeof result === 'object' - -const pretty = (conf, result) => - shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result - -let addedLogListener = false - const main = args => { - const conf = parse(args) - if (conf.help || conf.h) - return console.log(usage()) - - if (!addedLogListener) { - process.on('log', console.error) - addedLogListener = true - } - - try { - return run(conf) - .then(result => result && console.log(pretty(conf, result))) - .catch(er => { - console.error(er) - process.exit(1) - }) - } catch (er) { - console.error(er.message) - console.error(usage()) - } -} - -const parseArg = arg => { - const split = arg.slice(2).split('=') - const k = split.shift() - const v = split.join('=') - const no = /^no-/.test(k) && !v - const key = (no ? k.substr(3) : k) - .replace(/^tag$/, 'defaultTag') - .replace(/-([a-z])/g, (_, c) => c.toUpperCase()) - const value = v ? v.replace(/^~/, process.env.HOME) : !no - return { key, value } -} - -const parse = args => { - const conf = { - _: [], - cache: process.env.HOME + '/.npm/_cacache', - } - let dashdash = false - args.forEach(arg => { - if (dashdash) - conf._.push(arg) - else if (arg === '--') - dashdash = true - else if (arg === '-h') - conf.help = true - else if (/^--/.test(arg)) { - const {key, value} = parseArg(arg) - conf[key] = value - } else { - conf._.push(arg) - } - }) - return conf -} - -if (module === require.main) - main(process.argv.slice(2)) -else - module.exports = { - main, - run, - usage, - parseArg, - parse, - } diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/dir.js b/node_modules/libnpmexec/node_modules/pacote/lib/dir.js deleted file mode 100644 index 0d3a00d95ae7c..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/dir.js +++ /dev/null @@ -1,95 +0,0 @@ -const Fetcher = require('./fetcher.js') -const FileFetcher = require('./file.js') -const cacache = require('cacache') -const Minipass = require('minipass') -const { promisify } = require('util') -const readPackageJson = require('read-package-json-fast') -const tarCreateOptions = require('./util/tar-create-options.js') -const packlist = require('npm-packlist') -const tar = require('tar') -const _prepareDir = Symbol('_prepareDir') -const { resolve } = require('path') - -const runScript = require('@npmcli/run-script') - -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -class DirFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - // just the fully resolved filename - this.resolved = this.spec.fetchSpec - } - - // exposes tarCreateOptions as public API - static tarCreateOptions (manifest) { - return tarCreateOptions(manifest) - } - - get types () { - return ['directory'] - } - - [_prepareDir] () { - return this.manifest().then(mani => { - if (!mani.scripts || !mani.scripts.prepare) - return - - // we *only* run prepare.
- // pre/post-pack is run by the npm CLI for publish and pack, - // but this function is *also* run when installing git deps - const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe' - - // hide the banner if loglevel is silent, or if prepare running - // in the background. - const banner = this.opts.log && this.opts.log.level === 'silent' ? false - : stdio === 'inherit' - - return runScript({ - pkg: mani, - event: 'prepare', - path: this.resolved, - stdioString: true, - stdio, - banner, - env: { - npm_package_resolved: this.resolved, - npm_package_integrity: this.integrity, - npm_package_json: resolve(this.resolved, 'package.json'), - }, - }) - }) - } - - [_tarballFromResolved] () { - const stream = new Minipass() - stream.resolved = this.resolved - stream.integrity = this.integrity - - // run the prepare script, get the list of files, and tar it up - // pipe to the stream, and proxy errors the chain. - this[_prepareDir]() - .then(() => packlist({ path: this.resolved })) - .then(files => tar.c(tarCreateOptions(this.package), files) - .on('error', er => stream.emit('error', er)).pipe(stream)) - .catch(er => stream.emit('error', er)) - return stream - } - - manifest () { - if (this.package) - return Promise.resolve(this.package) - - return readPackageJson(this.resolved + '/package.json') - .then(mani => this.package = { - ...mani, - _integrity: this.integrity && String(this.integrity), - _resolved: this.resolved, - _from: this.from, - }) - } - - packument () { - return FileFetcher.prototype.packument.apply(this) - } -} -module.exports = DirFetcher diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/fetcher.js b/node_modules/libnpmexec/node_modules/pacote/lib/fetcher.js deleted file mode 100644 index 69dd025b7bd98..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/fetcher.js +++ /dev/null @@ -1,509 +0,0 @@ -// This is the base class that the other fetcher types in lib -// all descend from. -// It handles the unpacking and retry logic that is shared among -// all of the other Fetcher types. - -const npa = require('npm-package-arg') -const ssri = require('ssri') -const { promisify } = require('util') -const { basename, dirname } = require('path') -const rimraf = promisify(require('rimraf')) -const tar = require('tar') -const procLog = require('./util/proc-log.js') -const retry = require('promise-retry') -const fsm = require('fs-minipass') -const cacache = require('cacache') -const isPackageBin = require('./util/is-package-bin.js') -const getContents = require('@npmcli/installed-package-contents') - -// we only change ownership on unix platforms, and only if uid is 0 -const selfOwner = process.getuid && process.getuid() === 0 ? { - uid: 0, - gid: process.getgid(), -} : null -const chownr = selfOwner ? promisify(require('chownr')) : null -const inferOwner = selfOwner ? require('infer-owner') : null -const mkdirp = require('mkdirp') -const cacheDir = require('./util/cache-dir.js') - -// Private methods. -// Child classes should not have to override these. -// Users should never call them. 
-const _chown = Symbol('_chown') -const _extract = Symbol('_extract') -const _mkdir = Symbol('_mkdir') -const _empty = Symbol('_empty') -const _toFile = Symbol('_toFile') -const _tarxOptions = Symbol('_tarxOptions') -const _entryMode = Symbol('_entryMode') -const _istream = Symbol('_istream') -const _assertType = Symbol('_assertType') -const _tarballFromCache = Symbol('_tarballFromCache') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') - -class FetcherBase { - constructor (spec, opts) { - if (!opts || typeof opts !== 'object') - throw new TypeError('options object is required') - this.spec = npa(spec, opts.where) - - this.allowGitIgnore = !!opts.allowGitIgnore - - // a bit redundant because presumably the caller already knows this, - // but it makes it easier to not have to keep track of the requested - // spec when we're dispatching thousands of these at once, and normalizing - // is nice. saveSpec is preferred if set, because it turns stuff like - // x/y#committish into github:x/y#committish. use name@rawSpec for - // registry deps so that we turn xyz and xyz@ -> xyz@ - this.from = this.spec.registry - ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec - - this[_assertType]() - // clone the opts object so that others aren't upset when we mutate it - // by adding/modifying the integrity value. - this.opts = {...opts} - - this.cache = opts.cache || cacheDir() - this.resolved = opts.resolved || null - - // default to caching/verifying with sha512, that's what we usually have - // need to change this default, or start overriding it, when sha512 - // is no longer strong enough. - this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512' - - if (typeof opts.integrity === 'string') - this.opts.integrity = ssri.parse(opts.integrity) - - this.package = null - this.type = this.constructor.name - this.fmode = opts.fmode || 0o666 - this.dmode = opts.dmode || 0o777 - // we don't need a default umask, because we don't chmod files coming - // out of package tarballs. they're forced to have a mode that is - // valid, regardless of what's in the tarball entry, and then we let - // the process's umask setting do its job. but if configured, we do - // respect it. - this.umask = opts.umask || 0 - this.log = opts.log || procLog - - this.preferOnline = !!opts.preferOnline - this.preferOffline = !!opts.preferOffline - this.offline = !!opts.offline - - this.before = opts.before - this.fullMetadata = this.before ? true : !!opts.fullMetadata - - this.defaultTag = opts.defaultTag || 'latest' - this.registry = (opts.registry || 'https://registry.npmjs.org') - .replace(/\/+$/, '') - - // command to run 'prepare' scripts on directories and git dirs - // To use pacote with yarn, for example, set npmBin to 'yarn' - // and npmCliConfig with yarn's equivalents. - this.npmBin = opts.npmBin || 'npm' - - // command to install deps for preparing - this.npmInstallCmd = opts.npmInstallCmd || [ 'install', '--force' ] - - // XXX fill more of this in based on what we know from this.opts - // we explicitly DO NOT fill in --tag, though, since we are often - // going to be packing in the context of a publish, which may set - // a dist-tag, but certainly wants to keep defaulting to latest. 
- this.npmCliConfig = opts.npmCliConfig || [ - `--cache=${dirname(this.cache)}`, - `--prefer-offline=${!!this.preferOffline}`, - `--prefer-online=${!!this.preferOnline}`, - `--offline=${!!this.offline}`, - ...(this.before ? [`--before=${this.before.toISOString()}`] : []), - '--no-progress', - '--no-save', - '--no-audit', - // override any omit settings from the environment - '--include=dev', - '--include=peer', - '--include=optional', - // we need the actual things, not just the lockfile - '--no-package-lock-only', - '--no-dry-run', - ] - } - - get integrity () { - return this.opts.integrity || null - } - set integrity (i) { - if (!i) - return - - i = ssri.parse(i) - const current = this.opts.integrity - - // do not ever update an existing hash value, but do - // merge in NEW algos and hashes that we don't already have. - if (current) - current.merge(i) - else - this.opts.integrity = i - } - - get notImplementedError () { - return new Error('not implemented in this fetcher type: ' + this.type) - } - - // override in child classes - // Returns a Promise that resolves to this.resolved string value - resolve () { - return this.resolved ? Promise.resolve(this.resolved) - : Promise.reject(this.notImplementedError) - } - - packument () { - return Promise.reject(this.notImplementedError) - } - - // override in child class - // returns a manifest containing: - // - name - // - version - // - _resolved - // - _integrity - // - plus whatever else was in there (corgi, full metadata, or pj file) - manifest () { - return Promise.reject(this.notImplementedError) - } - - // private, should be overridden. - // Note that they should *not* calculate or check integrity or cache, - // but *just* return the raw tarball data stream. - [_tarballFromResolved] () { - throw this.notImplementedError - } - - // public, should not be overridden - tarball () { - return this.tarballStream(stream => stream.concat().then(data => { - data.integrity = this.integrity && String(this.integrity) - data.resolved = this.resolved - data.from = this.from - return data - })) - } - - // private - // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match - [_tarballFromCache] () { - return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts) - } - - get [_cacheFetches] () { - return true - } - - [_istream] (stream) { - // everyone will need one of these, either for verifying or calculating - // We always set it, because we have might only have a weak legacy hex - // sha1 in the packument, and this MAY upgrade it to a stronger algo. - // If we had an integrity, and it doesn't match, then this does not - // override that error; the istream will raise the error before it - // gets to the point of re-setting the integrity. - const istream = ssri.integrityStream(this.opts) - istream.on('integrity', i => this.integrity = i) - stream.on('error', er => istream.emit('error', er)) - - // if not caching this, just pipe through to the istream and return it - if (!this.opts.cache || !this[_cacheFetches]) - return stream.pipe(istream) - - // we have to return a stream that gets ALL the data, and proxies errors, - // but then pipe from the original tarball stream into the cache as well. - // To do this without losing any data, and since the cacache put stream - // is not a passthrough, we have to pipe from the original stream into - // the cache AFTER we pipe into the istream. 
Since the cache stream - // has an asynchronous flush to write its contents to disk, we need to - // defer the istream end until the cache stream ends. - stream.pipe(istream, { end: false }) - const cstream = cacache.put.stream( - this.opts.cache, - `pacote:tarball:${this.from}`, - this.opts - ) - stream.pipe(cstream) - // defer istream end until after cstream - // cache write errors should not crash the fetch, this is best-effort. - cstream.promise().catch(() => {}).then(() => istream.end()) - - return istream - } - - pickIntegrityAlgorithm () { - return this.integrity ? this.integrity.pickAlgorithm(this.opts) - : this.defaultIntegrityAlgorithm - } - - // TODO: check error class, once those are rolled out to our deps - isDataCorruptionError (er) { - return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR' - } - - // override the types getter - get types () {} - [_assertType] () { - if (this.types && !this.types.includes(this.spec.type)) { - throw new TypeError(`Wrong spec type (${ - this.spec.type - }) for ${ - this.constructor.name - }. Supported types: ${this.types.join(', ')}`) - } - } - - // We allow ENOENTs from cacache, but not anywhere else. - // An ENOENT trying to read a tgz file, for example, is Right Out. - isRetriableError (er) { - // TODO: check error class, once those are rolled out to our deps - return this.isDataCorruptionError(er) || - er.code === 'ENOENT' || - er.code === 'EISDIR' - } - - // Mostly internal, but has some uses - // Pass in a function which returns a promise - // Function will be called 1 or more times with streams that may fail. - // Retries: - // Function MUST handle errors on the stream by rejecting the promise, - // so that retry logic can pick it up and either retry or fail whatever - // promise it was making (ie, failing extraction, etc.) - // - // The return value of this method is a Promise that resolves the same - // as whatever the streamHandler resolves to. - // - // This should never be overridden by child classes, but it is public. - tarballStream (streamHandler) { - // Only short-circuit via cache if we have everything else we'll need, - // and the user has not expressed a preference for checking online. - - const fromCache = ( - !this.preferOnline && - this.integrity && - this.resolved - ) ? streamHandler(this[_tarballFromCache]()).catch(er => { - if (this.isDataCorruptionError(er)) { - this.log.warn('tarball', `cached data for ${ - this.spec - } (${this.integrity}) seems to be corrupted. Refreshing cache.`) - return this.cleanupCached().then(() => { throw er }) - } else { - throw er - } - }) : null - - const fromResolved = er => { - if (er) { - if (!this.isRetriableError(er)) - throw er - this.log.silly('tarball', `no local data for ${ - this.spec - }. Extracting by manifest.`) - } - return this.resolve().then(() => retry(tryAgain => - streamHandler(this[_istream](this[_tarballFromResolved]())) - .catch(er => { - // Most likely data integrity. A cache ENOENT error is unlikely - // here, since we're definitely not reading from the cache, but it - // IS possible that the fetch subsystem accessed the cache, and the - // entry got blown away or something. Try one more time to be sure. - if (this.isRetriableError(er)) { - this.log.warn('tarball', `tarball data for ${ - this.spec - } (${this.integrity}) seems to be corrupted. Trying again.`) - return this.cleanupCached().then(() => tryAgain(er)) - } - throw er - }), { retries: 1, minTimeout: 0, maxTimeout: 0 })) - } - - return fromCache ? 
fromCache.catch(fromResolved) : fromResolved() - } - - cleanupCached () { - return cacache.rm.content(this.cache, this.integrity, this.opts) - } - - async [_chown] (path, uid, gid) { - return selfOwner && (selfOwner.gid !== gid || selfOwner.uid !== uid) - ? chownr(path, uid, gid) - : /* istanbul ignore next - we don't test in root-owned folders */ null - } - - [_empty] (path) { - return getContents({path, depth: 1}).then(contents => Promise.all( - contents.map(entry => rimraf(entry)))) - } - - [_mkdir] (dest) { - // if we're bothering to do owner inference, then do it. - // otherwise just make the dir, and return an empty object. - // always empty the dir dir to start with, but do so - // _after_ inferring the owner, in case there's an existing folder - // there that we would want to preserve which differs from the - // parent folder (rare, but probably happens sometimes). - return !inferOwner - ? this[_empty](dest).then(() => mkdirp(dest)).then(() => ({})) - : inferOwner(dest).then(({uid, gid}) => - this[_empty](dest) - .then(() => mkdirp(dest)) - .then(made => { - // ignore the || dest part in coverage. It's there to handle - // race conditions where the dir may be made by someone else - // after being removed by us. - const dir = made || /* istanbul ignore next */ dest - return this[_chown](dir, uid, gid) - }) - .then(() => ({uid, gid}))) - } - - // extraction is always the same. the only difference is where - // the tarball comes from. - extract (dest) { - return this[_mkdir](dest).then(({uid, gid}) => - this.tarballStream(tarball => this[_extract](dest, tarball, uid, gid))) - } - - [_toFile] (dest) { - return this.tarballStream(str => new Promise((res, rej) => { - const writer = new fsm.WriteStream(dest) - str.on('error', er => writer.emit('error', er)) - writer.on('error', er => rej(er)) - writer.on('close', () => res({ - integrity: this.integrity && String(this.integrity), - resolved: this.resolved, - from: this.from, - })) - str.pipe(writer) - })) - } - - // don't use this[_mkdir] because we don't want to rimraf anything - tarballFile (dest) { - const dir = dirname(dest) - return !inferOwner - ? mkdirp(dir).then(() => this[_toFile](dest)) - : inferOwner(dest).then(({uid, gid}) => - mkdirp(dir).then(made => this[_toFile](dest) - .then(res => this[_chown](made || dir, uid, gid) - .then(() => res)))) - } - - [_extract] (dest, tarball, uid, gid) { - const extractor = tar.x(this[_tarxOptions]({ cwd: dest, uid, gid })) - const p = new Promise((resolve, reject) => { - extractor.on('end', () => { - resolve({ - resolved: this.resolved, - integrity: this.integrity && String(this.integrity), - from: this.from, - }) - }) - - extractor.on('error', er => { - this.log.warn('tar', er.message) - this.log.silly('tar', er) - reject(er) - }) - - tarball.on('error', er => reject(er)) - }) - - tarball.pipe(extractor) - return p - } - - // always ensure that entries are at least as permissive as our configured - // dmode/fmode, but never more permissive than the umask allows. - [_entryMode] (path, mode, type) { - const m = /Directory|GNUDumpDir/.test(type) ? this.dmode - : /File$/.test(type) ? this.fmode - : /* istanbul ignore next - should never happen in a pkg */ 0 - - // make sure package bins are executable - const exe = isPackageBin(this.package, path) ? 
0o111 : 0 - // always ensure that files are read/writable by the owner - return ((mode | m) & ~this.umask) | exe | 0o600 - } - - [_tarxOptions] ({ cwd, uid, gid }) { - const sawIgnores = new Set() - return { - cwd, - noChmod: true, - noMtime: true, - filter: (name, entry) => { - if (/Link$/.test(entry.type)) - return false - entry.mode = this[_entryMode](entry.path, entry.mode, entry.type) - // this replicates the npm pack behavior where .gitignore files - // are treated like .npmignore files, but only if a .npmignore - // file is not present. - if (/File$/.test(entry.type)) { - const base = basename(entry.path) - if (base === '.npmignore') - sawIgnores.add(entry.path) - else if (base === '.gitignore' && !this.allowGitIgnore) { - // rename, but only if there's not already a .npmignore - const ni = entry.path.replace(/\.gitignore$/, '.npmignore') - if (sawIgnores.has(ni)) - return false - entry.path = ni - } - return true - } - }, - strip: 1, - onwarn: /* istanbul ignore next - we can trust that tar logs */ - (code, msg, data) => { - this.log.warn('tar', code, msg) - this.log.silly('tar', code, msg, data) - }, - uid, - gid, - umask: this.umask, - } - } -} - -module.exports = FetcherBase - -// Child classes -const GitFetcher = require('./git.js') -const RegistryFetcher = require('./registry.js') -const FileFetcher = require('./file.js') -const DirFetcher = require('./dir.js') -const RemoteFetcher = require('./remote.js') - -// Get an appropriate fetcher object from a spec and options -FetcherBase.get = (rawSpec, opts = {}) => { - const spec = npa(rawSpec, opts.where) - switch (spec.type) { - case 'git': - return new GitFetcher(spec, opts) - - case 'remote': - return new RemoteFetcher(spec, opts) - - case 'version': - case 'range': - case 'tag': - case 'alias': - return new RegistryFetcher(spec.subSpec || spec, opts) - - case 'file': - return new FileFetcher(spec, opts) - - case 'directory': - return new DirFetcher(spec, opts) - - default: - throw new TypeError('Unknown spec type: ' + spec.type) - } -} diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/file.js b/node_modules/libnpmexec/node_modules/pacote/lib/file.js deleted file mode 100644 index d5c601aabd873..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/file.js +++ /dev/null @@ -1,93 +0,0 @@ -const Fetcher = require('./fetcher.js') -const fsm = require('fs-minipass') -const cacache = require('cacache') -const { promisify } = require('util') -const readPackageJson = require('read-package-json-fast') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const _exeBins = Symbol('_exeBins') -const { resolve } = require('path') -const fs = require('fs') - -class FileFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - // just the fully resolved filename - this.resolved = this.spec.fetchSpec - } - - get types () { - return ['file'] - } - - manifest () { - if (this.package) - return Promise.resolve(this.package) - - // have to unpack the tarball for this. - return cacache.tmp.withTmp(this.cache, this.opts, dir => - this.extract(dir) - .then(() => readPackageJson(dir + '/package.json')) - .then(mani => this.package = { - ...mani, - _integrity: this.integrity && String(this.integrity), - _resolved: this.resolved, - _from: this.from, - })) - } - - [_exeBins] (pkg, dest) { - if (!pkg.bin) - return Promise.resolve() - - return Promise.all(Object.keys(pkg.bin).map(k => new Promise(res => { - const script = resolve(dest, pkg.bin[k]) - // Best effort. 
Ignore errors here, the only result is that - // a bin script is not executable. But if it's missing or - // something, we just leave it for a later stage to trip over - // when we can provide a more useful contextual error. - fs.stat(script, (er, st) => { - if (er) - return res() - const mode = st.mode | 0o111 - if (mode === st.mode) - return res() - fs.chmod(script, mode, res) - }) - }))) - } - - extract (dest) { - // if we've already loaded the manifest, then the super got it. - // but if not, read the unpacked manifest and chmod properly. - return super.extract(dest) - .then(result => this.package ? result - : readPackageJson(dest + '/package.json').then(pkg => - this[_exeBins](pkg, dest)).then(() => result)) - } - - [_tarballFromResolved] () { - // create a read stream and return it - return new fsm.ReadStream(this.resolved) - } - - packument () { - // simulate based on manifest - return this.manifest().then(mani => ({ - name: mani.name, - 'dist-tags': { - [this.defaultTag]: mani.version - }, - versions: { - [mani.version]: { - ...mani, - dist: { - tarball: `file:${this.resolved}`, - integrity: this.integrity && String(this.integrity), - } - } - } - })) - } -} - -module.exports = FileFetcher diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/git.js b/node_modules/libnpmexec/node_modules/pacote/lib/git.js deleted file mode 100644 index 18f42547bb3ac..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/git.js +++ /dev/null @@ -1,304 +0,0 @@ -const Fetcher = require('./fetcher.js') -const FileFetcher = require('./file.js') -const RemoteFetcher = require('./remote.js') -const DirFetcher = require('./dir.js') -const hashre = /^[a-f0-9]{40}$/ -const git = require('@npmcli/git') -const pickManifest = require('npm-pick-manifest') -const npa = require('npm-package-arg') -const url = require('url') -const Minipass = require('minipass') -const cacache = require('cacache') -const { promisify } = require('util') -const readPackageJson = require('read-package-json-fast') -const npm = require('./util/npm.js') - -const _resolvedFromRepo = Symbol('_resolvedFromRepo') -const _resolvedFromHosted = Symbol('_resolvedFromHosted') -const _resolvedFromClone = Symbol('_resolvedFromClone') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const _addGitSha = Symbol('_addGitSha') -const addGitSha = require('./util/add-git-sha.js') -const _clone = Symbol('_clone') -const _cloneHosted = Symbol('_cloneHosted') -const _cloneRepo = Symbol('_cloneRepo') -const _setResolvedWithSha = Symbol('_setResolvedWithSha') -const _prepareDir = Symbol('_prepareDir') - -// get the repository url. -// prefer https if there's auth, since ssh will drop that. -// otherwise, prefer ssh if available (more secure). -// We have to add the git+ back because npa suppresses it. -const repoUrl = (h, opts) => - h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) || - h.https && addGitPlus(h.https(opts)) - -// add git+ to the url, but only one time. 
-const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+') - -class GitFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - this.resolvedRef = null - if (this.spec.hosted) - this.from = this.spec.hosted.shortcut({ noCommittish: false }) - - // shortcut: avoid full clone when we can go straight to the tgz - // if we have the full sha and it's a hosted git platform - if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) { - this.resolvedSha = this.spec.gitCommittish - // use hosted.tarball() when we shell to RemoteFetcher later - this.resolved = this.spec.hosted - ? repoUrl(this.spec.hosted, { noCommittish: false }) - : this.spec.fetchSpec + '#' + this.spec.gitCommittish - } else - this.resolvedSha = '' - } - - // just exposed to make it easier to test all the combinations - static repoUrl (hosted, opts) { - return repoUrl(hosted, opts) - } - - get types () { - return ['git'] - } - - resolve () { - // likely a hosted git repo with a sha, so get the tarball url - // but in general, no reason to resolve() more than necessary! - if (this.resolved) - return super.resolve() - - // fetch the git repo and then look at the current hash - const h = this.spec.hosted - // try to use ssh, fall back to git. - return h ? this[_resolvedFromHosted](h) - : this[_resolvedFromRepo](this.spec.fetchSpec) - } - - // first try https, since that's faster and passphrase-less for - // public repos, and supports private repos when auth is provided. - // Fall back to SSH to support private repos - // NB: we always store the https url in resolved field if auth - // is present, otherwise ssh if the hosted type provides it - [_resolvedFromHosted] (hosted) { - return this[_resolvedFromRepo](hosted.https && hosted.https()) - .catch(er => { - // Throw early since we know pathspec errors will fail again if retried - if (er instanceof git.errors.GitPathspecError) - throw er - const ssh = hosted.sshurl && hosted.sshurl() - // no fallthrough if we can't fall through or have https auth - if (!ssh || hosted.auth) - throw er - return this[_resolvedFromRepo](ssh) - }) - } - - [_resolvedFromRepo] (gitRemote) { - // XXX make this a custom error class - if (!gitRemote) - return Promise.reject(new Error(`No git url for ${this.spec}`)) - const gitRange = this.spec.gitRange - const name = this.spec.name - return git.revs(gitRemote, this.opts).then(remoteRefs => { - return gitRange ? pickManifest({ - versions: remoteRefs.versions, - 'dist-tags': remoteRefs['dist-tags'], - name, - }, gitRange, this.opts) - : this.spec.gitCommittish ? - remoteRefs.refs[this.spec.gitCommittish] || - remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]] - : remoteRefs.refs.HEAD // no git committish, get default head - }).then(revDoc => { - // the committish provided isn't in the rev list - // things like HEAD~3 or @yesterday can land here. - if (!revDoc || !revDoc.sha) - return this[_resolvedFromClone]() - - this.resolvedRef = revDoc - this.resolvedSha = revDoc.sha - this[_addGitSha](revDoc.sha) - return this.resolved - }) - } - - [_setResolvedWithSha] (withSha) { - // we haven't cloned, so a tgz download is still faster - // of course, if it's not a known host, we can't do that. - this.resolved = !this.spec.hosted ? 
withSha - : repoUrl(npa(withSha).hosted, { noCommittish: false }) - } - - // when we get the git sha, we affix it to our spec to build up - // either a git url with a hash, or a tarball download URL - [_addGitSha] (sha) { - this[_setResolvedWithSha](addGitSha(this.spec, sha)) - } - - [_resolvedFromClone] () { - // do a full or shallow clone, then look at the HEAD - // kind of wasteful, but no other option, really - return this[_clone](dir => this.resolved) - } - - [_prepareDir] (dir) { - return readPackageJson(dir + '/package.json').then(mani => { - // no need if we aren't going to do any preparation. - const scripts = mani.scripts - if (!scripts || !( - scripts.postinstall || - scripts.build || - scripts.preinstall || - scripts.install || - scripts.prepare)) - return - - // to avoid cases where we have an cycle of git deps that depend - // on one another, we only ever do preparation for one instance - // of a given git dep along the chain of installations. - // Note that this does mean that a dependency MAY in theory end up - // trying to run its prepare script using a dependency that has not - // been properly prepared itself, but that edge case is smaller - // and less hazardous than a fork bomb of npm and git commands. - const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? [] - : process.env._PACOTE_NO_PREPARE_.split('\n') - if (noPrepare.includes(this.resolved)) { - this.log.info('prepare', 'skip prepare, already seen', this.resolved) - return - } - noPrepare.push(this.resolved) - - // the DirFetcher will do its own preparation to run the prepare scripts - // All we have to do is put the deps in place so that it can succeed. - return npm( - this.npmBin, - [].concat(this.npmInstallCmd).concat(this.npmCliConfig), - dir, - { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') }, - { message: 'git dep preparation failed' } - ) - }) - } - - [_tarballFromResolved] () { - const stream = new Minipass() - stream.resolved = this.resolved - stream.integrity = this.integrity - stream.from = this.from - - // check it out and then shell out to the DirFetcher tarball packer - this[_clone](dir => this[_prepareDir](dir) - .then(() => new Promise((res, rej) => { - const df = new DirFetcher(`file:${dir}`, { - ...this.opts, - resolved: null, - integrity: null, - }) - const dirStream = df[_tarballFromResolved]() - dirStream.on('error', rej) - dirStream.on('end', res) - dirStream.pipe(stream) - }))).catch( - /* istanbul ignore next: very unlikely and hard to test */ - er => stream.emit('error', er) - ) - return stream - } - - // clone a git repo into a temp folder (or fetch and unpack if possible) - // handler accepts a directory, and returns a promise that resolves - // when we're done with it, at which point, cacache deletes it - // - // TODO: after cloning, create a tarball of the folder, and add to the cache - // with cacache.put.stream(), using a key that's deterministic based on the - // spec and repo, so that we don't ever clone the same thing multiple times. 
- [_clone] (handler, tarballOk = true) { - const o = { tmpPrefix: 'git-clone' } - const ref = this.resolvedSha || this.spec.gitCommittish - const h = this.spec.hosted - const resolved = this.resolved - - // can be set manually to false to fall back to actual git clone - tarballOk = tarballOk && - h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball - - return cacache.tmp.withTmp(this.cache, o, tmp => { - // if we're resolved, and have a tarball url, shell out to RemoteFetcher - if (tarballOk) { - const nameat = this.spec.name ? `${this.spec.name}@` : '' - return new RemoteFetcher(h.tarball({ noCommittish: false }), { - ...this.opts, - allowGitIgnore: true, - pkgid: `git:${nameat}${this.resolved}`, - resolved: this.resolved, - integrity: null, // it'll always be different, if we have one - }).extract(tmp).then(() => handler(tmp), er => { - // fall back to ssh download if tarball fails - if (er.constructor.name.match(/^Http/)) - return this[_clone](handler, false) - else - throw er - }) - } - - return ( - h ? this[_cloneHosted](ref, tmp) - : this[_cloneRepo](this.spec.fetchSpec, ref, tmp) - ).then(sha => { - this.resolvedSha = sha - if (!this.resolved) - this[_addGitSha](sha) - }) - .then(() => handler(tmp)) - }) - } - - // first try https, since that's faster and passphrase-less for - // public repos, and supports private repos when auth is provided. - // Fall back to SSH to support private repos - // NB: we always store the https url in resolved field if auth - // is present, otherwise ssh if the hosted type provides it - [_cloneHosted] (ref, tmp) { - const hosted = this.spec.hosted - return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp) - .catch(er => { - // Throw early since we know pathspec errors will fail again if retried - if (er instanceof git.errors.GitPathspecError) - throw er - const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) - // no fallthrough if we can't fall through or have https auth - if (!ssh || hosted.auth) - throw er - return this[_cloneRepo](ssh, ref, tmp) - }) - } - - [_cloneRepo] (repo, ref, tmp) { - const { opts, spec } = this - return git.clone(repo, ref, tmp, { ...opts, spec }) - } - - manifest () { - if (this.package) - return Promise.resolve(this.package) - - return this.spec.hosted && this.resolved - ? 
FileFetcher.prototype.manifest.apply(this) - : this[_clone](dir => - readPackageJson(dir + '/package.json') - .then(mani => this.package = { - ...mani, - _integrity: this.integrity && String(this.integrity), - _resolved: this.resolved, - _from: this.from, - })) - } - - packument () { - return FileFetcher.prototype.packument.apply(this) - } -} -module.exports = GitFetcher diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/index.js b/node_modules/libnpmexec/node_modules/pacote/lib/index.js deleted file mode 100644 index cbcbd7c92d15f..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/index.js +++ /dev/null @@ -1,23 +0,0 @@ -const { get } = require('./fetcher.js') -const GitFetcher = require('./git.js') -const RegistryFetcher = require('./registry.js') -const FileFetcher = require('./file.js') -const DirFetcher = require('./dir.js') -const RemoteFetcher = require('./remote.js') - -module.exports = { - GitFetcher, - RegistryFetcher, - FileFetcher, - DirFetcher, - RemoteFetcher, - resolve: (spec, opts) => get(spec, opts).resolve(), - extract: (spec, dest, opts) => get(spec, opts).extract(dest), - manifest: (spec, opts) => get(spec, opts).manifest(), - tarball: (spec, opts) => get(spec, opts).tarball(), - packument: (spec, opts) => get(spec, opts).packument(), -} -module.exports.tarball.stream = (spec, handler, opts) => - get(spec, opts).tarballStream(handler) -module.exports.tarball.file = (spec, dest, opts) => - get(spec, opts).tarballFile(dest) diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/registry.js b/node_modules/libnpmexec/node_modules/pacote/lib/registry.js deleted file mode 100644 index e0a310717420d..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/registry.js +++ /dev/null @@ -1,182 +0,0 @@ -const Fetcher = require('./fetcher.js') -const RemoteFetcher = require('./remote.js') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const pacoteVersion = require('../package.json').version -const npa = require('npm-package-arg') -const rpj = require('read-package-json-fast') -const pickManifest = require('npm-pick-manifest') -const ssri = require('ssri') -const Minipass = require('minipass') - -// Corgis are cute. 🐕🐶 -const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' -const fullDoc = 'application/json' - -const fetch = require('npm-registry-fetch') - -// TODO: memoize reg requests, so we don't even have to check cache - -const _headers = Symbol('_headers') -class RegistryFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - - // you usually don't want to fetch the same packument multiple times in - // the span of a given script or command, no matter how many pacote calls - // are made, so this lets us avoid doing that. It's only relevant for - // registry fetchers, because other types simulate their packument from - // the manifest, which they memoize on this.package, so it's very cheap - // already. - this.packumentCache = this.opts.packumentCache || null - - // handle case when npm-package-arg guesses wrong. - if (this.spec.type === 'tag' && - this.spec.rawSpec === '' && - this.defaultTag !== 'latest') - this.spec = npa(`${this.spec.name}@${this.defaultTag}`) - this.registry = fetch.pickRegistry(spec, opts) - this.packumentUrl = this.registry.replace(/\/*$/, '/') + - this.spec.escapedName - - // XXX pacote <=9 has some logic to ignore opts.resolved if - // the resolved URL doesn't go to the same registry. 
- // Consider reproducing that here, to throw away this.resolved - // in that case. - } - - resolve () { - if (this.resolved) - return Promise.resolve(this.resolved) - - // fetching the manifest sets resolved and (usually) integrity - return this.manifest().then(() => { - if (this.resolved) - return this.resolved - - throw Object.assign( - new Error('Invalid package manifest: no `dist.tarball` field'), - { package: this.spec.toString() } - ) - }) - } - - [_headers] () { - return { - // npm will override UA, but ensure that we always send *something* - 'user-agent': this.opts.userAgent || - `pacote/${pacoteVersion} node/${process.version}`, - ...(this.opts.headers || {}), - 'pacote-version': pacoteVersion, - 'pacote-req-type': 'packument', - 'pacote-pkg-id': `registry:${this.spec.name}`, - accept: this.fullMetadata ? fullDoc : corgiDoc, - } - } - - async packument () { - // note this might be either an in-flight promise for a request, - // or the actual packument, but we never want to make more than - // one request at a time for the same thing regardless. - if (this.packumentCache && this.packumentCache.has(this.packumentUrl)) - return this.packumentCache.get(this.packumentUrl) - - // npm-registry-fetch the packument - // set the appropriate header for corgis if fullMetadata isn't set - // return the res.json() promise - const p = fetch(this.packumentUrl, { - ...this.opts, - headers: this[_headers](), - spec: this.spec, - // never check integrity for packuments themselves - integrity: null, - }).then(res => res.json().then(packument => { - packument._cached = res.headers.has('x-local-cache') - packument._contentLength = +res.headers.get('content-length') - if (this.packumentCache) - this.packumentCache.set(this.packumentUrl, packument) - return packument - })).catch(er => { - if (this.packumentCache) - this.packumentCache.delete(this.packumentUrl) - if (er.code === 'E404' && !this.fullMetadata) { - // possible that corgis are not supported by this registry - this.fullMetadata = true - return this.packument() - } - throw er - }) - if (this.packumentCache) - this.packumentCache.set(this.packumentUrl, p) - return p - } - - manifest () { - if (this.package) - return Promise.resolve(this.package) - - return this.packument() - .then(packument => pickManifest(packument, this.spec.fetchSpec, { - ...this.opts, - defaultTag: this.defaultTag, - before: this.before, - }) /* XXX add ETARGET and E403 revalidation of cached packuments here */) - .then(mani => { - // add _resolved and _integrity from dist object - const { dist } = mani - if (dist) { - this.resolved = mani._resolved = dist.tarball - mani._from = this.from - const distIntegrity = dist.integrity ? ssri.parse(dist.integrity) - : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', {...this.opts}) - : null - if (distIntegrity) { - if (!this.integrity) - this.integrity = distIntegrity - else if (!this.integrity.match(distIntegrity)) { - // only bork if they have algos in common. - // otherwise we end up breaking if we have saved a sha512 - // previously for the tarball, but the manifest only - // provides a sha1, which is possible for older publishes. - // Otherwise, this is almost certainly a case of holding it - // wrong, and will result in weird or insecure behavior - // later on when building package tree. 
- for (const algo of Object.keys(this.integrity)) { - if (distIntegrity[algo]) { - throw Object.assign(new Error( - `Integrity checksum failed when using ${algo}: `+ - `wanted ${this.integrity} but got ${distIntegrity}.` - ), { code: 'EINTEGRITY' }) - } - } - // made it this far, the integrity is worthwhile. accept it. - // the setter here will take care of merging it into what we - // already had. - this.integrity = distIntegrity - } - } - } - if (this.integrity) - mani._integrity = String(this.integrity) - this.package = rpj.normalize(mani) - return this.package - }) - } - - [_tarballFromResolved] () { - // we use a RemoteFetcher to get the actual tarball stream - return new RemoteFetcher(this.resolved, { - ...this.opts, - resolved: this.resolved, - pkgid: `registry:${this.spec.name}@${this.resolved}`, - })[_tarballFromResolved]() - } - - get types () { - return [ - 'tag', - 'version', - 'range', - ] - } -} -module.exports = RegistryFetcher diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/remote.js b/node_modules/libnpmexec/node_modules/pacote/lib/remote.js deleted file mode 100644 index 727a8bfc8e608..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/remote.js +++ /dev/null @@ -1,84 +0,0 @@ -const Fetcher = require('./fetcher.js') -const FileFetcher = require('./file.js') -const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') -const pacoteVersion = require('../package.json').version -const fetch = require('npm-registry-fetch') -const ssri = require('ssri') -const Minipass = require('minipass') -// The default registry URL is a string of great magic. -const magic = /^https?:\/\/registry\.npmjs\.org\// - -const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') -const _headers = Symbol('_headers') -class RemoteFetcher extends Fetcher { - constructor (spec, opts) { - super(spec, opts) - this.resolved = this.spec.fetchSpec - if (magic.test(this.resolved) && !magic.test(this.registry + '/')) - this.resolved = this.resolved.replace(magic, this.registry + '/') - - // nam is a fermented pork sausage that is good to eat - const nameat = this.spec.name ? `${this.spec.name}@` : '' - this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}` - } - - // Don't need to cache tarball fetches in pacote, because make-fetch-happen - // will write into cacache anyway. - get [_cacheFetches] () { - return false - } - - [_tarballFromResolved] () { - const stream = new Minipass() - const fetchOpts = { - ...this.opts, - headers: this[_headers](), - spec: this.spec, - integrity: this.integrity, - algorithms: [ this.pickIntegrityAlgorithm() ], - } - fetch(this.resolved, fetchOpts).then(res => { - const hash = res.headers.get('x-local-cache-hash') - if (hash) { - this.integrity = decodeURIComponent(hash) - } - - res.body.on('error', - /* istanbul ignore next - exceedingly rare and hard to simulate */ - er => stream.emit('error', er) - ).pipe(stream) - }).catch(er => stream.emit('error', er)) - - return stream - } - - [_headers] () { - return { - // npm will override this, but ensure that we always send *something* - 'user-agent': this.opts.userAgent || - `pacote/${pacoteVersion} node/${process.version}`, - ...(this.opts.headers || {}), - 'pacote-version': pacoteVersion, - 'pacote-req-type': 'tarball', - 'pacote-pkg-id': this.pkgid, - ...(this.integrity ? 
{ 'pacote-integrity': String(this.integrity) } - : {}), - ...(this.opts.headers || {}), - } - } - - get types () { - return ['remote'] - } - - // getting a packument and/or manifest is the same as with a file: spec. - // unpack the tarball stream, and then read from the package.json file. - packument () { - return FileFetcher.prototype.packument.apply(this) - } - - manifest () { - return FileFetcher.prototype.manifest.apply(this) - } -} -module.exports = RemoteFetcher diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/util/add-git-sha.js b/node_modules/libnpmexec/node_modules/pacote/lib/util/add-git-sha.js deleted file mode 100644 index 843fe5b600caf..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/util/add-git-sha.js +++ /dev/null @@ -1,15 +0,0 @@ -// add a sha to a git remote url spec -const addGitSha = (spec, sha) => { - if (spec.hosted) { - const h = spec.hosted - const opt = { noCommittish: true } - const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt) - - return `${base}#${sha}` - } else { - // don't use new URL for this, because it doesn't handle scp urls - return spec.rawSpec.replace(/#.*$/, '') + `#${sha}` - } -} - -module.exports = addGitSha diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/util/cache-dir.js b/node_modules/libnpmexec/node_modules/pacote/lib/util/cache-dir.js deleted file mode 100644 index abd2453232027..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/util/cache-dir.js +++ /dev/null @@ -1,12 +0,0 @@ -const os = require('os') -const {resolve} = require('path') - -module.exports = (fakePlatform = false) => { - const temp = os.tmpdir() - const uidOrPid = process.getuid ? process.getuid() : process.pid - const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid) - const platform = fakePlatform || process.platform - const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm' - const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home - return resolve(cacheRoot, cacheExtra, '_cacache') -} diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/libnpmexec/node_modules/pacote/lib/util/is-package-bin.js deleted file mode 100644 index 35cf0642703c7..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/util/is-package-bin.js +++ /dev/null @@ -1,24 +0,0 @@ -// Function to determine whether a path is in the package.bin set. -// Used to prevent issues when people publish a package from a -// windows machine, and then install with --no-bin-links. -// -// Note: this is not possible in remote or file fetchers, since -// we don't have the manifest until AFTER we've unpacked. But the -// main use case is registry fetching with git a distant second, -// so that's an acceptable edge case to not handle. - -const binObj = (name, bin) => - typeof bin === 'string' ? { [name]: bin } : bin - -const hasBin = (pkg, path) => { - const bin = binObj(pkg.name, pkg.bin) - const p = path.replace(/^[^\\\/]*\//, '') - for (const [k, v] of Object.entries(bin)) { - if (v === p) - return true - } - return false -} - -module.exports = (pkg, path) => - pkg && pkg.bin ? 
hasBin(pkg, path) : false diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/util/npm.js b/node_modules/libnpmexec/node_modules/pacote/lib/util/npm.js deleted file mode 100644 index f2f29bd0acbd1..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/util/npm.js +++ /dev/null @@ -1,15 +0,0 @@ -// run an npm command -const spawn = require('@npmcli/promise-spawn') -const {dirname} = require('path') - -module.exports = (npmBin, npmCommand, cwd, env, extra) => { - const isJS = npmBin.endsWith('.js') - const cmd = isJS ? process.execPath : npmBin - const args = (isJS ? [npmBin] : []).concat(npmCommand) - // when installing to run the `prepare` script for a git dep, we need - // to ensure that we don't run into a cycle of checking out packages - // in temp directories. this lets us link previously-seen repos that - // are also being prepared. - - return spawn(cmd, args, { cwd, stdioString: true, env }, extra) -} diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/util/proc-log.js b/node_modules/libnpmexec/node_modules/pacote/lib/util/proc-log.js deleted file mode 100644 index b2bdd9dc90205..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/util/proc-log.js +++ /dev/null @@ -1,21 +0,0 @@ -// default logger. -// emits 'log' events on the process -const LEVELS = [ - 'notice', - 'error', - 'warn', - 'info', - 'verbose', - 'http', - 'silly', - 'pause', - 'resume' -] - -const log = level => (...args) => process.emit('log', level, ...args) - -const logger = {} -for (const level of LEVELS) { - logger[level] = log(level) -} -module.exports = logger diff --git a/node_modules/libnpmexec/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/libnpmexec/node_modules/pacote/lib/util/tar-create-options.js deleted file mode 100644 index 31ab34c9d949f..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/lib/util/tar-create-options.js +++ /dev/null @@ -1,30 +0,0 @@ -const isPackageBin = require('./is-package-bin.js') - -const tarCreateOptions = manifest => ({ - cwd: manifest._resolved, - prefix: 'package/', - portable: true, - gzip: { - // forcing the level to 9 seems to avoid some - // platform specific optimizations that cause - // integrity mismatch errors due to differing - // end results after compression - level: 9 - }, - - // ensure that package bins are always executable - // Note that npm-packlist is already filtering out - // anything that is not a regular file, ignored by - // .npmignore or package.json "files", etc. - filter: (path, stat) => { - if (isPackageBin(manifest, path)) - stat.mode |= 0o111 - return true - }, - - // Provide a specific date in the 1980s for the benefit of zip, - // which is confounded by files dated at the Unix epoch 0. - mtime: new Date('1985-10-26T08:15:00.000Z'), -}) - -module.exports = tarCreateOptions diff --git a/node_modules/libnpmexec/node_modules/pacote/package.json b/node_modules/libnpmexec/node_modules/pacote/package.json deleted file mode 100644 index 437bb8f79e1d8..0000000000000 --- a/node_modules/libnpmexec/node_modules/pacote/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "name": "pacote", - "version": "11.3.5", - "description": "JavaScript package downloader", - "author": "Isaac Z. 
Schlueter (https://izs.me)", - "bin": { - "pacote": "lib/bin.js" - }, - "license": "ISC", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "tap": { - "timeout": 300, - "coverage-map": "map.js" - }, - "devDependencies": { - "mutate-fs": "^2.1.1", - "npm-registry-mock": "^1.3.1", - "tap": "^15.0.4" - }, - "files": [ - "lib/**/*.js" - ], - "keywords": [ - "packages", - "npm", - "git" - ], - "dependencies": { - "@npmcli/git": "^2.1.0", - "@npmcli/installed-package-contents": "^1.0.6", - "@npmcli/promise-spawn": "^1.2.0", - "@npmcli/run-script": "^1.8.2", - "cacache": "^15.0.5", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "infer-owner": "^1.0.4", - "minipass": "^3.1.3", - "mkdirp": "^1.0.3", - "npm-package-arg": "^8.0.1", - "npm-packlist": "^2.1.4", - "npm-pick-manifest": "^6.0.0", - "npm-registry-fetch": "^11.0.0", - "promise-retry": "^2.0.1", - "read-package-json-fast": "^2.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.0" - }, - "engines": { - "node": ">=10" - }, - "repository": "git@github.com:npm/pacote" -} diff --git a/node_modules/libnpmexec/node_modules/string-width/index.js b/node_modules/libnpmexec/node_modules/string-width/index.js deleted file mode 100644 index b9bec62440a46..0000000000000 --- a/node_modules/libnpmexec/node_modules/string-width/index.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; -var stripAnsi = require('strip-ansi'); -var codePointAt = require('code-point-at'); -var isFullwidthCodePoint = require('is-fullwidth-code-point'); - -// https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1345 -module.exports = function (str) { - if (typeof str !== 'string' || str.length === 0) { - return 0; - } - - var width = 0; - - str = stripAnsi(str); - - for (var i = 0; i < str.length; i++) { - var code = codePointAt(str, i); - - // ignore control characters - if (code <= 0x1f || (code >= 0x7f && code <= 0x9f)) { - continue; - } - - // surrogates - if (code >= 0x10000) { - i++; - } - - if (isFullwidthCodePoint(code)) { - width += 2; - } else { - width++; - } - } - - return width; -}; diff --git a/node_modules/libnpmexec/node_modules/string-width/license b/node_modules/libnpmexec/node_modules/string-width/license deleted file mode 100644 index 654d0bfe94343..0000000000000 --- a/node_modules/libnpmexec/node_modules/string-width/license +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/libnpmexec/node_modules/string-width/package.json b/node_modules/libnpmexec/node_modules/string-width/package.json deleted file mode 100644 index 5ba436166e9af..0000000000000 --- a/node_modules/libnpmexec/node_modules/string-width/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "string-width", - "version": "1.0.2", - "description": "Get the visual width of a string - the number of columns required to display it", - "license": "MIT", - "repository": "sindresorhus/string-width", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "xo && ava" - }, - "files": [ - "index.js" - ], - "keywords": [ - "string", - "str", - "character", - "char", - "unicode", - "width", - "visual", - "column", - "columns", - "fullwidth", - "full-width", - "full", - "ansi", - "escape", - "codes", - "cli", - "command-line", - "terminal", - "console", - "cjk", - "chinese", - "japanese", - "korean", - "fixed-width" - ], - "dependencies": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - }, - "devDependencies": { - "ava": "*", - "xo": "*" - } -} diff --git a/node_modules/libnpmexec/package.json b/node_modules/libnpmexec/package.json index 2668f11731e6f..067c39ae744fc 100644 --- a/node_modules/libnpmexec/package.json +++ b/node_modules/libnpmexec/package.json @@ -1,12 +1,12 @@ { "name": "libnpmexec", - "version": "2.0.1", + "version": "3.0.0", "files": [ "lib" ], "main": "lib/index.js", "engines": { - "node": ">=10" + "node": "^12.13.0 || ^14.15.0 || >=16" }, "description": "npm exec (npx) programmatic API", "repository": "https://github.com/npm/libnpmexec", @@ -37,6 +37,7 @@ "prepublishOnly": "git push origin --follow-tags" }, "tap": { + "color": true, "check-coverage": true }, "devDependencies": { @@ -49,13 +50,13 @@ "tap": "^15.0.6" }, "dependencies": { - "@npmcli/arborist": "^2.3.0", + "@npmcli/arborist": "^3.0.0", "@npmcli/ci-detect": "^1.3.0", - "@npmcli/run-script": "^1.8.4", + "@npmcli/run-script": "^2.0.0", "chalk": "^4.1.0", "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^8.1.2", - "pacote": "^11.3.1", + "pacote": "^12.0.0", "proc-log": "^1.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", diff --git a/package-lock.json b/package-lock.json index 6dd724893b73e..4c68baa605c17 100644 --- a/package-lock.json +++ b/package-lock.json @@ -110,7 +110,7 @@ "json-parse-even-better-errors": "^2.3.1", "libnpmaccess": "^4.0.2", "libnpmdiff": "^2.0.4", - "libnpmexec": "^2.0.1", + "libnpmexec": "^3.0.0", "libnpmfund": "^2.0.0", "libnpmhook": "^6.0.2", "libnpmorg": "^2.0.2", @@ -1115,7 +1115,7 @@ "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "inBundle": true, + "dev": true, "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -1286,7 +1286,7 @@ "version": "0.2.4", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", - "inBundle": true, + "dev": 
true, "dependencies": { "safer-buffer": "~2.1.0" } @@ -1295,7 +1295,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", - "inBundle": true, + "dev": true, "engines": { "node": ">=0.8" } @@ -1322,13 +1322,13 @@ "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "inBundle": true + "dev": true }, "node_modules/aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "inBundle": true, + "dev": true, "engines": { "node": "*" } @@ -1337,7 +1337,7 @@ "version": "1.11.0", "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", - "inBundle": true + "dev": true }, "node_modules/babel-plugin-apply-mdx-type-prop": { "version": "1.6.22", @@ -1421,7 +1421,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", - "inBundle": true, + "dev": true, "dependencies": { "tweetnacl": "^0.14.3" } @@ -1632,7 +1632,7 @@ "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "inBundle": true + "dev": true }, "node_modules/ccount": { "version": "1.1.0", @@ -1952,7 +1952,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "inBundle": true, + "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, @@ -2013,7 +2013,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "inBundle": true + "dev": true }, "node_modules/correct-license-metadata": { "version": "1.4.0", @@ -2085,7 +2085,7 @@ "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "inBundle": true, + "dev": true, "dependencies": { "assert-plus": "^1.0.0" }, @@ -2227,7 +2227,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "inBundle": true, + "dev": true, "engines": { "node": ">=0.4.0" } @@ -2340,7 +2340,7 @@ "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", - "inBundle": true, + "dev": true, "dependencies": { "jsbn": "~0.1.0", "safer-buffer": "^2.1.0" @@ -3029,28 +3029,28 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "inBundle": true + "dev": true }, "node_modules/extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "dev": true, "engines": [ "node >=0.6.0" - ], - "inBundle": true + ] }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": 
"sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "inBundle": true + "dev": true }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "inBundle": true + "dev": true }, "node_modules/fast-levenshtein": { "version": "2.0.6", @@ -3247,7 +3247,7 @@ "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "inBundle": true, + "dev": true, "engines": { "node": "*" } @@ -3426,7 +3426,7 @@ "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "inBundle": true, + "dev": true, "dependencies": { "assert-plus": "^1.0.0" } @@ -3488,7 +3488,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "inBundle": true, + "dev": true, "engines": { "node": ">=4" } @@ -3498,7 +3498,7 @@ "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", "deprecated": "this library is no longer supported", - "inBundle": true, + "dev": true, "dependencies": { "ajv": "^6.12.3", "har-schema": "^2.0.0" @@ -3738,7 +3738,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "inBundle": true, + "dev": true, "dependencies": { "assert-plus": "^1.0.0", "jsprim": "^1.2.2", @@ -4264,7 +4264,7 @@ "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "inBundle": true + "dev": true }, "node_modules/istanbul-lib-coverage": { "version": "3.0.0", @@ -4426,7 +4426,7 @@ "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "inBundle": true + "dev": true }, "node_modules/jsdom": { "version": "16.7.0", @@ -4520,13 +4520,13 @@ "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", - "inBundle": true + "dev": true }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "inBundle": true + "dev": true }, "node_modules/json-stable-stringify-without-jsonify": { "version": "1.0.1", @@ -4547,7 +4547,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", - "inBundle": true + "dev": true }, "node_modules/json5": { "version": "2.2.0", @@ -4577,10 +4577,10 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "dev": true, "engines": [ "node >=0.6.0" ], - "inBundle": true, "dependencies": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", @@ -4642,211 +4642,25 @@ "link": true }, 
"node_modules/libnpmexec": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-2.0.1.tgz", - "integrity": "sha512-4SqBB7eJvJWmUKNF42Q5qTOn20DRjEE4TgvEh2yneKlAiRlwlhuS9MNR45juWwmoURJlf2K43bozlVt7OZiIOw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-3.0.0.tgz", + "integrity": "sha512-qyt0gSMSHeHIqb/a+vcryfw3VXfNNgle8jK9QVnrNQAyoDvyVQ6auyoB3ycqWPIl2swTEXPEVremSUaDzOiEgw==", "inBundle": true, "dependencies": { - "@npmcli/arborist": "^2.3.0", + "@npmcli/arborist": "^3.0.0", "@npmcli/ci-detect": "^1.3.0", - "@npmcli/run-script": "^1.8.4", + "@npmcli/run-script": "^2.0.0", "chalk": "^4.1.0", "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^8.1.2", - "pacote": "^11.3.1", + "pacote": "^12.0.0", "proc-log": "^1.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", "walk-up-path": "^1.0.0" }, "engines": { - "node": ">=10" - } - }, - "node_modules/libnpmexec/node_modules/@npmcli/arborist": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.10.0.tgz", - "integrity": "sha512-CLnD+zXG9oijEEzViimz8fbOoFVb7hoypiaf7p6giJhvYtrxLAyY3cZAMPIFQvsG731+02eMDp3LqVBNo7BaZA==", - "inBundle": true, - "dependencies": { - "@isaacs/string-locale-compare": "^1.0.1", - "@npmcli/installed-package-contents": "^1.0.7", - "@npmcli/map-workspaces": "^1.0.2", - "@npmcli/metavuln-calculator": "^1.1.0", - "@npmcli/move-file": "^1.1.0", - "@npmcli/name-from-folder": "^1.0.1", - "@npmcli/node-gyp": "^1.0.1", - "@npmcli/package-json": "^1.0.1", - "@npmcli/run-script": "^1.8.2", - "bin-links": "^2.2.1", - "cacache": "^15.0.3", - "common-ancestor-path": "^1.0.1", - "json-parse-even-better-errors": "^2.3.1", - "json-stringify-nice": "^1.1.4", - "mkdirp": "^1.0.4", - "mkdirp-infer-owner": "^2.0.0", - "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.1.5", - "npm-pick-manifest": "^6.1.0", - "npm-registry-fetch": "^11.0.0", - "pacote": "^11.3.5", - "parse-conflict-json": "^1.1.1", - "proc-log": "^1.0.0", - "promise-all-reject-late": "^1.0.0", - "promise-call-limit": "^1.0.1", - "read-package-json-fast": "^2.0.2", - "readdir-scoped-modules": "^1.1.0", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "ssri": "^8.0.1", - "treeverse": "^1.0.4", - "walk-up-path": "^1.0.0" - }, - "bin": { - "arborist": "bin/index.js" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/libnpmexec/node_modules/@npmcli/metavuln-calculator": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.1.tgz", - "integrity": "sha512-9xe+ZZ1iGVaUovBVFI9h3qW+UuECUzhvZPxK9RaEA2mjU26o5D0JloGYWwLYvQELJNmBdQB6rrpuN8jni6LwzQ==", - "inBundle": true, - "dependencies": { - "cacache": "^15.0.5", - "pacote": "^11.1.11", - "semver": "^7.3.2" - } - }, - "node_modules/libnpmexec/node_modules/@npmcli/run-script": { - "version": "1.8.6", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.8.6.tgz", - "integrity": "sha512-e42bVZnC6VluBZBAFEr3YrdqSspG3bgilyg4nSLBJ7TRGNCzxHa92XAHxQBLYg0BmgwO4b2mf3h/l5EkEWRn3g==", - "inBundle": true, - "dependencies": { - "@npmcli/node-gyp": "^1.0.2", - "@npmcli/promise-spawn": "^1.3.2", - "node-gyp": "^7.1.0", - "read-package-json-fast": "^2.0.1" - } - }, - "node_modules/libnpmexec/node_modules/aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", - "inBundle": 
true - }, - "node_modules/libnpmexec/node_modules/gauge": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", - "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", - "inBundle": true, - "dependencies": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } - }, - "node_modules/libnpmexec/node_modules/is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "inBundle": true, - "dependencies": { - "number-is-nan": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/libnpmexec/node_modules/node-gyp": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-7.1.2.tgz", - "integrity": "sha512-CbpcIo7C3eMu3dL1c3d0xw449fHIGALIJsRP4DDPHpyiW8vcriNY7ubh9TE4zEKfSxscY7PjeFnshE7h75ynjQ==", - "inBundle": true, - "dependencies": { - "env-paths": "^2.2.0", - "glob": "^7.1.4", - "graceful-fs": "^4.2.3", - "nopt": "^5.0.0", - "npmlog": "^4.1.2", - "request": "^2.88.2", - "rimraf": "^3.0.2", - "semver": "^7.3.2", - "tar": "^6.0.2", - "which": "^2.0.2" - }, - "bin": { - "node-gyp": "bin/node-gyp.js" - }, - "engines": { - "node": ">= 10.12.0" - } - }, - "node_modules/libnpmexec/node_modules/npmlog": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", - "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", - "inBundle": true, - "dependencies": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, - "node_modules/libnpmexec/node_modules/pacote": { - "version": "11.3.5", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.5.tgz", - "integrity": "sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==", - "inBundle": true, - "dependencies": { - "@npmcli/git": "^2.1.0", - "@npmcli/installed-package-contents": "^1.0.6", - "@npmcli/promise-spawn": "^1.2.0", - "@npmcli/run-script": "^1.8.2", - "cacache": "^15.0.5", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "infer-owner": "^1.0.4", - "minipass": "^3.1.3", - "mkdirp": "^1.0.3", - "npm-package-arg": "^8.0.1", - "npm-packlist": "^2.1.4", - "npm-pick-manifest": "^6.0.0", - "npm-registry-fetch": "^11.0.0", - "promise-retry": "^2.0.1", - "read-package-json-fast": "^2.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.0" - }, - "bin": { - "pacote": "lib/bin.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/libnpmexec/node_modules/string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "inBundle": true, - "dependencies": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" + "node": "^12.13.0 || ^14.15.0 || >=16" } }, "node_modules/libnpmfund": { @@ -5269,7 +5083,7 @@ "version": "1.49.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==", - "inBundle": true, + "dev": true, "engines": 
{ "node": ">= 0.6" } @@ -5278,7 +5092,7 @@ "version": "2.1.32", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", "integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", - "inBundle": true, + "dev": true, "dependencies": { "mime-db": "1.49.0" }, @@ -5995,7 +5809,7 @@ "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", - "inBundle": true, + "dev": true, "engines": { "node": "*" } @@ -6334,7 +6148,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "inBundle": true + "dev": true }, "node_modules/picomatch": { "version": "2.3.0", @@ -6579,7 +6393,7 @@ "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", - "inBundle": true + "dev": true }, "node_modules/pump": { "version": "3.0.0", @@ -6595,7 +6409,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "inBundle": true, + "dev": true, "engines": { "node": ">=6" } @@ -6613,7 +6427,7 @@ "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", - "inBundle": true, + "dev": true, "engines": { "node": ">=0.6" } @@ -6991,7 +6805,7 @@ "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142", - "inBundle": true, + "dev": true, "dependencies": { "aws-sign2": "~0.7.0", "aws4": "^1.8.0", @@ -7022,7 +6836,7 @@ "version": "2.3.3", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "inBundle": true, + "dev": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.6", @@ -7036,7 +6850,7 @@ "version": "2.5.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "inBundle": true, + "dev": true, "dependencies": { "psl": "^1.1.28", "punycode": "^2.1.1" @@ -7162,6 +6976,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "devOptional": true, "inBundle": true }, "node_modules/saxes": { @@ -7467,7 +7282,7 @@ "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", - "inBundle": true, + "dev": true, "dependencies": { "asn1": "~0.2.3", "assert-plus": "^1.0.0", @@ -9926,7 +9741,7 @@ "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", 
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "inBundle": true, + "dev": true, "dependencies": { "safe-buffer": "^5.0.1" }, @@ -9938,7 +9753,7 @@ "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "inBundle": true + "dev": true }, "node_modules/type-check": { "version": "0.4.0", @@ -10166,7 +9981,7 @@ "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "inBundle": true, + "dev": true, "dependencies": { "punycode": "^2.1.0" } @@ -10191,7 +10006,7 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", - "inBundle": true, + "dev": true, "bin": { "uuid": "bin/uuid" } @@ -10225,10 +10040,10 @@ "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "dev": true, "engines": [ "node >=0.6.0" ], - "inBundle": true, "dependencies": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", @@ -11534,6 +11349,7 @@ "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, "requires": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -11656,6 +11472,7 @@ "version": "0.2.4", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "dev": true, "requires": { "safer-buffer": "~2.1.0" } @@ -11663,7 +11480,8 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "dev": true }, "astral-regex": { "version": "2.0.0", @@ -11680,17 +11498,20 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "dev": true }, "aws4": { "version": "1.11.0", "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", - "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", + "dev": true }, "babel-plugin-apply-mdx-type-prop": { "version": "1.6.22", @@ -11748,6 +11569,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "dev": true, "requires": { "tweetnacl": "^0.14.3" } @@ -11909,7 +11731,8 @@ "caseless": { "version": "0.12.0", "resolved": 
"https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true }, "ccount": { "version": "1.1.0", @@ -12127,6 +11950,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, "requires": { "delayed-stream": "~1.0.0" } @@ -12178,7 +12002,8 @@ "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "dev": true }, "correct-license-metadata": { "version": "1.4.0", @@ -12240,6 +12065,7 @@ "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dev": true, "requires": { "assert-plus": "^1.0.0" } @@ -12345,7 +12171,8 @@ "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true }, "delegates": { "version": "1.0.0", @@ -12432,6 +12259,7 @@ "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "dev": true, "requires": { "jsbn": "~0.1.0", "safer-buffer": "^2.1.0" @@ -12953,22 +12781,26 @@ "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true }, "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "dev": true }, "fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true }, "fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true }, "fast-levenshtein": { "version": "2.0.6", @@ -13120,7 +12952,8 @@ "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "dev": true }, "form-data": { "version": "3.0.1", @@ -13246,6 +13079,7 @@ "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dev": true, "requires": { "assert-plus": "^1.0.0" } @@ -13292,12 +13126,14 @@ 
"har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "dev": true }, "har-validator": { "version": "5.1.5", "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "dev": true, "requires": { "ajv": "^6.12.3", "har-schema": "^2.0.0" @@ -13470,6 +13306,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dev": true, "requires": { "assert-plus": "^1.0.0", "jsprim": "^1.2.2", @@ -13805,7 +13642,8 @@ "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", + "dev": true }, "istanbul-lib-coverage": { "version": "3.0.0", @@ -13936,7 +13774,8 @@ "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "dev": true }, "jsdom": { "version": "16.7.0", @@ -14007,12 +13846,14 @@ "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "dev": true }, "json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true }, "json-stable-stringify-without-jsonify": { "version": "1.0.1", @@ -14028,7 +13869,8 @@ "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "dev": true }, "json5": { "version": "2.2.0", @@ -14048,6 +13890,7 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "dev": true, "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", @@ -14112,175 +13955,21 @@ } }, "libnpmexec": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-2.0.1.tgz", - "integrity": "sha512-4SqBB7eJvJWmUKNF42Q5qTOn20DRjEE4TgvEh2yneKlAiRlwlhuS9MNR45juWwmoURJlf2K43bozlVt7OZiIOw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-3.0.0.tgz", + "integrity": "sha512-qyt0gSMSHeHIqb/a+vcryfw3VXfNNgle8jK9QVnrNQAyoDvyVQ6auyoB3ycqWPIl2swTEXPEVremSUaDzOiEgw==", "requires": { - "@npmcli/arborist": "^2.3.0", + "@npmcli/arborist": "^3.0.0", "@npmcli/ci-detect": "^1.3.0", - "@npmcli/run-script": "^1.8.4", + "@npmcli/run-script": "^2.0.0", "chalk": "^4.1.0", "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^8.1.2", - "pacote": "^11.3.1", + "pacote": "^12.0.0", "proc-log": "^1.0.0", "read": "^1.0.7", "read-package-json-fast": "^2.0.2", "walk-up-path": "^1.0.0" - }, - "dependencies": { - "@npmcli/arborist": { - 
"version": "2.10.0", - "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.10.0.tgz", - "integrity": "sha512-CLnD+zXG9oijEEzViimz8fbOoFVb7hoypiaf7p6giJhvYtrxLAyY3cZAMPIFQvsG731+02eMDp3LqVBNo7BaZA==", - "requires": { - "@isaacs/string-locale-compare": "^1.0.1", - "@npmcli/installed-package-contents": "^1.0.7", - "@npmcli/map-workspaces": "^1.0.2", - "@npmcli/metavuln-calculator": "^1.1.0", - "@npmcli/move-file": "^1.1.0", - "@npmcli/name-from-folder": "^1.0.1", - "@npmcli/node-gyp": "^1.0.1", - "@npmcli/package-json": "^1.0.1", - "@npmcli/run-script": "^1.8.2", - "bin-links": "^2.2.1", - "cacache": "^15.0.3", - "common-ancestor-path": "^1.0.1", - "json-parse-even-better-errors": "^2.3.1", - "json-stringify-nice": "^1.1.4", - "mkdirp": "^1.0.4", - "mkdirp-infer-owner": "^2.0.0", - "npm-install-checks": "^4.0.0", - "npm-package-arg": "^8.1.5", - "npm-pick-manifest": "^6.1.0", - "npm-registry-fetch": "^11.0.0", - "pacote": "^11.3.5", - "parse-conflict-json": "^1.1.1", - "proc-log": "^1.0.0", - "promise-all-reject-late": "^1.0.0", - "promise-call-limit": "^1.0.1", - "read-package-json-fast": "^2.0.2", - "readdir-scoped-modules": "^1.1.0", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "ssri": "^8.0.1", - "treeverse": "^1.0.4", - "walk-up-path": "^1.0.0" - } - }, - "@npmcli/metavuln-calculator": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.1.tgz", - "integrity": "sha512-9xe+ZZ1iGVaUovBVFI9h3qW+UuECUzhvZPxK9RaEA2mjU26o5D0JloGYWwLYvQELJNmBdQB6rrpuN8jni6LwzQ==", - "requires": { - "cacache": "^15.0.5", - "pacote": "^11.1.11", - "semver": "^7.3.2" - } - }, - "@npmcli/run-script": { - "version": "1.8.6", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.8.6.tgz", - "integrity": "sha512-e42bVZnC6VluBZBAFEr3YrdqSspG3bgilyg4nSLBJ7TRGNCzxHa92XAHxQBLYg0BmgwO4b2mf3h/l5EkEWRn3g==", - "requires": { - "@npmcli/node-gyp": "^1.0.2", - "@npmcli/promise-spawn": "^1.3.2", - "node-gyp": "^7.1.0", - "read-package-json-fast": "^2.0.1" - } - }, - "aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" - }, - "gauge": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", - "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", - "requires": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "node-gyp": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-7.1.2.tgz", - "integrity": "sha512-CbpcIo7C3eMu3dL1c3d0xw449fHIGALIJsRP4DDPHpyiW8vcriNY7ubh9TE4zEKfSxscY7PjeFnshE7h75ynjQ==", - "requires": { - "env-paths": "^2.2.0", - "glob": "^7.1.4", - "graceful-fs": "^4.2.3", - "nopt": "^5.0.0", - "npmlog": "^4.1.2", - "request": "^2.88.2", - "rimraf": "^3.0.2", - "semver": "^7.3.2", - "tar": "^6.0.2", - "which": "^2.0.2" - } - }, - "npmlog": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", - "integrity": 
"sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", - "requires": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, - "pacote": { - "version": "11.3.5", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.5.tgz", - "integrity": "sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==", - "requires": { - "@npmcli/git": "^2.1.0", - "@npmcli/installed-package-contents": "^1.0.6", - "@npmcli/promise-spawn": "^1.2.0", - "@npmcli/run-script": "^1.8.2", - "cacache": "^15.0.5", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "infer-owner": "^1.0.4", - "minipass": "^3.1.3", - "mkdirp": "^1.0.3", - "npm-package-arg": "^8.0.1", - "npm-packlist": "^2.1.4", - "npm-pick-manifest": "^6.0.0", - "npm-registry-fetch": "^11.0.0", - "promise-retry": "^2.0.1", - "read-package-json-fast": "^2.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.1.0" - } - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - } } }, "libnpmfund": { @@ -14605,12 +14294,14 @@ "mime-db": { "version": "1.49.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", - "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==" + "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==", + "dev": true }, "mime-types": { "version": "2.1.32", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", "integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", + "dev": true, "requires": { "mime-db": "1.49.0" } @@ -15151,7 +14842,8 @@ "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "dev": true }, "object-assign": { "version": "4.1.1", @@ -15400,7 +15092,8 @@ "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", + "dev": true }, "picomatch": { "version": "2.3.0", @@ -15591,7 +15284,8 @@ "psl": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", - "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", + "dev": true }, "pump": { "version": "3.0.0", @@ -15606,7 +15300,8 @@ "punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true }, "qrcode-terminal": { "version": "0.12.0", @@ -15616,7 +15311,8 
@@ "qs": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", + "dev": true }, "queue-microtask": { "version": "1.2.3", @@ -15914,6 +15610,7 @@ "version": "2.88.2", "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "dev": true, "requires": { "aws-sign2": "~0.7.0", "aws4": "^1.8.0", @@ -15941,6 +15638,7 @@ "version": "2.3.3", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.6", @@ -15951,6 +15649,7 @@ "version": "2.5.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "dev": true, "requires": { "psl": "^1.1.28", "punycode": "^2.1.1" @@ -16022,7 +15721,8 @@ "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "devOptional": true }, "saxes": { "version": "5.0.1", @@ -16256,6 +15956,7 @@ "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "dev": true, "requires": { "asn1": "~0.2.3", "assert-plus": "^1.0.0", @@ -18010,6 +17711,7 @@ "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dev": true, "requires": { "safe-buffer": "^5.0.1" } @@ -18017,7 +17719,8 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "dev": true }, "type-check": { "version": "0.4.0", @@ -18186,6 +17889,7 @@ "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, "requires": { "punycode": "^2.1.0" } @@ -18207,7 +17911,8 @@ "uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "dev": true }, "v8-compile-cache": { "version": "2.3.0", @@ -18236,6 +17941,7 @@ "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "dev": true, "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", diff --git a/package.json b/package.json index c41cca768155b..e53f030be24c4 100644 --- a/package.json 
+++ b/package.json @@ -79,7 +79,7 @@ "json-parse-even-better-errors": "^2.3.1", "libnpmaccess": "^4.0.2", "libnpmdiff": "^2.0.4", - "libnpmexec": "^2.0.1", + "libnpmexec": "^3.0.0", "libnpmfund": "^2.0.0", "libnpmhook": "^6.0.2", "libnpmorg": "^2.0.2",