diff --git a/.eslintrc.json b/.eslintrc.json index 8566613e748a7..b39431d2cb074 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,214 +1,3 @@ { - "parserOptions": { - "ecmaVersion": 2018, - "ecmaFeatures": {}, - "sourceType": "script" - }, - - "env": { - "es6": true, - "node": true - }, - - "plugins": [ - "import", - "node", - "promise", - "standard" - ], - - "globals": { - "document": "readonly", - "navigator": "readonly", - "window": "readonly" - }, - - "rules": { - "accessor-pairs": "error", - "array-bracket-spacing": ["error", "never"], - "arrow-spacing": ["error", { "before": true, "after": true }], - "block-spacing": ["error", "always"], - "brace-style": ["error", "1tbs", { "allowSingleLine": false }], - "camelcase": ["error", { "properties": "never" }], - "comma-dangle": ["error", { - "arrays": "always-multiline", - "objects": "always-multiline", - "imports": "always-multiline", - "exports": "always-multiline", - "functions": "never" - }], - "comma-spacing": ["error", { "before": false, "after": true }], - "comma-style": ["error", "last"], - "computed-property-spacing": ["error", "never"], - "constructor-super": "error", - "curly": ["error", "multi-or-nest"], - "dot-location": ["error", "property"], - "dot-notation": ["error", { "allowKeywords": true }], - "eol-last": "error", - "eqeqeq": ["error", "always", { "null": "ignore" }], - "func-call-spacing": ["error", "never"], - "generator-star-spacing": ["error", { "before": true, "after": true }], - "handle-callback-err": ["error", "^(err|error)$" ], - "indent": ["error", 2, { - "SwitchCase": 1, - "VariableDeclarator": 1, - "outerIIFEBody": 1, - "MemberExpression": 1, - "FunctionDeclaration": { "parameters": 1, "body": 1 }, - "FunctionExpression": { "parameters": 1, "body": 1 }, - "CallExpression": { "arguments": 1 }, - "ArrayExpression": 1, - "ObjectExpression": 1, - "ImportDeclaration": 1, - "flatTernaryExpressions": true, - "ignoreComments": false, - "ignoredNodes": ["TemplateLiteral *"] - }], - "key-spacing": ["error", { "beforeColon": false, "afterColon": true }], - "keyword-spacing": ["error", { "before": true, "after": true }], - "lines-between-class-members": ["error", "always", { "exceptAfterSingleLine": true }], - "max-len": ["error", 80, { - "ignoreUrls": true, - "ignoreComments": false, - "ignoreRegExpLiterals": true, - "ignoreStrings": true, - "ignoreTemplateLiterals": true - }], - "new-cap": ["error", { "newIsCap": true, "capIsNew": false, "properties": true }], - "new-parens": "error", - "no-array-constructor": "error", - "no-async-promise-executor": "error", - "no-caller": "error", - "no-case-declarations": "error", - "no-class-assign": "error", - "no-compare-neg-zero": "error", - "no-cond-assign": "off", - "no-const-assign": "error", - "no-constant-condition": ["error", { "checkLoops": false }], - "no-control-regex": "error", - "no-debugger": "error", - "no-delete-var": "error", - "no-dupe-args": "error", - "no-dupe-class-members": "error", - "no-dupe-keys": "error", - "no-duplicate-case": "error", - "no-empty-character-class": "error", - "no-empty-pattern": "error", - "no-eval": "error", - "no-ex-assign": "error", - "no-extend-native": "error", - "no-extra-bind": "error", - "no-extra-boolean-cast": "error", - "no-extra-parens": ["error", "functions"], - "no-fallthrough": "error", - "no-floating-decimal": "error", - "no-func-assign": "error", - "no-global-assign": "error", - "no-implied-eval": "error", - "no-inner-declarations": ["error", "functions"], - "no-invalid-regexp": "error", - "no-irregular-whitespace": 
"error", - "no-iterator": "error", - "no-labels": ["error", { "allowLoop": true, "allowSwitch": false }], - "no-lone-blocks": "error", - "no-misleading-character-class": "error", - "no-prototype-builtins": "error", - "no-useless-catch": "error", - "no-mixed-operators": "off", - "no-mixed-spaces-and-tabs": "error", - "no-multi-spaces": "error", - "no-multi-str": "error", - "no-multiple-empty-lines": ["error", { "max": 1, "maxEOF": 0 }], - "no-negated-in-lhs": "error", - "no-new": "off", - "no-new-func": "error", - "no-new-object": "error", - "no-new-require": "error", - "no-new-symbol": "error", - "no-new-wrappers": "error", - "no-obj-calls": "error", - "no-octal": "error", - "no-octal-escape": "error", - "no-path-concat": "error", - "no-proto": "error", - "no-redeclare": ["error", { "builtinGlobals": false }], - "no-regex-spaces": "error", - "no-return-assign": "off", - "no-self-assign": "off", - "no-self-compare": "error", - "no-sequences": "error", - "no-shadow-restricted-names": "error", - "no-sparse-arrays": "error", - "no-tabs": "error", - "no-template-curly-in-string": "off", - "no-this-before-super": "error", - "no-throw-literal": "off", - "no-trailing-spaces": "error", - "no-undef": "error", - "no-undef-init": "error", - "no-unexpected-multiline": "error", - "no-unmodified-loop-condition": "error", - "no-unneeded-ternary": ["error", { "defaultAssignment": false }], - "no-unreachable": "error", - "no-unsafe-finally": 0, - "no-unsafe-negation": "error", - "no-unused-expressions": ["error", { "allowShortCircuit": true, "allowTernary": true, "allowTaggedTemplates": true }], - "no-unused-vars": ["error", { "vars": "all", "args": "none", "ignoreRestSiblings": true }], - "no-use-before-define": ["error", { "functions": false, "classes": false, "variables": false }], - "no-useless-call": "error", - "no-useless-computed-key": "error", - "no-useless-constructor": "error", - "no-useless-escape": "error", - "no-useless-rename": "error", - "no-useless-return": "error", - "no-void": "error", - "no-whitespace-before-property": "error", - "no-with": "error", - "nonblock-statement-body-position": [2, "below"], - "object-curly-newline": "off", - "object-curly-spacing": "off", - "object-property-newline": ["error", { "allowMultiplePropertiesPerLine": true }], - "one-var": ["error", { "initialized": "never" }], - "operator-linebreak": "off", - "padded-blocks": ["error", { "blocks": "never", "switches": "never", "classes": "never" }], - "prefer-const": ["error", {"destructuring": "all"}], - "prefer-promise-reject-errors": "error", - "quote-props": ["error", "as-needed"], - "quotes": ["error", "single", { "avoidEscape": true, "allowTemplateLiterals": true }], - "rest-spread-spacing": ["error", "never"], - "semi": ["error", "never"], - "semi-spacing": ["error", { "before": false, "after": true }], - "space-before-blocks": ["error", "always"], - "space-before-function-paren": ["error", "always"], - "space-in-parens": ["error", "never"], - "space-infix-ops": "error", - "space-unary-ops": ["error", { "words": true, "nonwords": false }], - "spaced-comment": ["error", "always", { - "line": { "markers": ["*package", "!", "/", ",", "="] }, - "block": { "balanced": true, "markers": ["*package", "!", ",", ":", "::", "flow-include"], "exceptions": ["*"] } - }], - "symbol-description": "error", - "template-curly-spacing": ["error", "never"], - "template-tag-spacing": ["error", "never"], - "unicode-bom": ["error", "never"], - "use-isnan": "error", - "valid-typeof": ["error", { "requireStringLiterals": true }], - 
"wrap-iife": ["error", "any", { "functionPrototypeMethods": true }], - "yield-star-spacing": ["error", "both"], - "yoda": ["error", "never"], - - "import/export": "error", - "import/first": "error", - "import/no-absolute-path": ["error", { "esmodule": true, "commonjs": true, "amd": false }], - "import/no-duplicates": "error", - "import/no-named-default": "error", - "import/no-webpack-loader-syntax": "error", - - "node/no-deprecated-api": "error", - "node/process-exit-as-throw": "error", - - "promise/param-names": "off", - - "standard/no-callback-literal": "error" - } + "extends": ["@npmcli"] } diff --git a/bin/npx-cli.js b/bin/npx-cli.js index 7a3fb39837d27..cb05e1cb706c6 100755 --- a/bin/npx-cli.js +++ b/bin/npx-cli.js @@ -26,7 +26,7 @@ const removed = new Set([ const { definitions, shorthands } = require('../lib/utils/config/index.js') const npmSwitches = Object.entries(definitions) - .filter(([key, {type}]) => type === Boolean || + .filter(([key, { type }]) => type === Boolean || (Array.isArray(type) && type.includes(Boolean))) .map(([key]) => key) @@ -65,9 +65,9 @@ let i let sawRemovedFlags = false for (i = 3; i < process.argv.length; i++) { const arg = process.argv[i] - if (arg === '--') + if (arg === '--') { break - else if (/^-/.test(arg)) { + } else if (/^-/.test(arg)) { const [key, ...v] = arg.replace(/^-+/, '').split('=') switch (key) { @@ -87,8 +87,9 @@ for (i = 3; i < process.argv.length; i++) { // resolve shorthands and run again if (shorthands[key] && !removed.has(key)) { const a = [...shorthands[key]] - if (v.length) + if (v.length) { a.push(v.join('=')) + } process.argv.splice(i, 1, ...a) i-- continue @@ -109,8 +110,9 @@ for (i = 3; i < process.argv.length; i++) { if (removed.has(key)) { // also remove the value for the cut key. process.argv.splice(i + 1, 1) - } else + } else { i++ + } } } else { // found a positional arg, put -- in front of it, and we're done @@ -119,7 +121,8 @@ for (i = 3; i < process.argv.length; i++) { } } -if (sawRemovedFlags) +if (sawRemovedFlags) { console.error('See `npm help exec` for more information') +} cli(process) diff --git a/docs/dockhand.js b/docs/dockhand.js index 77a20f7de1357..e2b9111ed4341 100644 --- a/docs/dockhand.js +++ b/docs/dockhand.js @@ -22,8 +22,9 @@ const run = async function () { const navPaths = await getNavigationPaths() const fsPaths = await renderFilesystemPaths() - if (!ensureNavigationComplete(navPaths, fsPaths)) + if (!ensureNavigationComplete(navPaths, fsPaths)) { process.exit(1) + } } catch (error) { console.error(error) } @@ -32,19 +33,21 @@ const run = async function () { run() function ensureNavigationComplete (navPaths, fsPaths) { - const unmatchedNav = { }; const unmatchedFs = { } + const unmatchedNav = {} + const unmatchedFs = {} - for (const navPath of navPaths) + for (const navPath of navPaths) { unmatchedNav[navPath] = true + } for (let fsPath of fsPaths) { fsPath = '/' + fsPath.replace(/\.md$/, '') - if (unmatchedNav[fsPath]) + if (unmatchedNav[fsPath]) { delete unmatchedNav[fsPath] - - else + } else { unmatchedFs[fsPath] = true + } } const missingNav = Object.keys(unmatchedNav).sort() @@ -56,15 +59,17 @@ function ensureNavigationComplete (navPaths, fsPaths) { if (missingNav.length > 0) { message += '\nThe following path(s) exist on disk but are not present in nav.yml:\n\n' - for (const nav of missingNav) + for (const nav of missingNav) { message += ` ${nav}\n` + } } if (missingNav.length > 0 && missingFs.length > 0) { message += '\nThe following path(s) exist in nav.yml but are not present on disk:\n\n' - 
for (const fs of missingFs) + for (const fs of missingFs) { message += ` ${fs}\n` + } } message += '\nUpdate nav.yml to ensure that all files are listed in the appropriate place.' @@ -88,11 +93,11 @@ function walkNavigation (entries) { const paths = [] for (const entry of entries) { - if (entry.children) + if (entry.children) { paths.push(...walkNavigation(entry.children)) - - else + } else { paths.push(entry.url) + } } return paths @@ -109,15 +114,12 @@ async function walkFilesystem (root, dirRelative) { const children = fs.readdirSync(dirPath) for (const childFilename of children) { - const childRelative = dirRelative ? - path.join(dirRelative, childFilename) : - childFilename + const childRelative = dirRelative ? path.join(dirRelative, childFilename) : childFilename const childPath = path.join(root, childRelative) - if (fs.lstatSync(childPath).isDirectory()) - paths.push(...await walkFilesystem(root, childRelative)) - - else { + if (fs.lstatSync(childPath).isDirectory()) { + paths.push(...(await walkFilesystem(root, childRelative))) + } else { await renderFile(childRelative) paths.push(childRelative) } @@ -137,7 +139,7 @@ async function renderFile (childPath) { const outputPath = path.join(outputRoot, childPath.replace(/\.md$/, '.html')) let md = fs.readFileSync(inputPath).toString() - let frontmatter = { } + let frontmatter = {} // Take the leading frontmatter out of the markdown md = md.replace(/^---\n([\s\S]+)\n---\n/, (header, fm) => { @@ -222,8 +224,9 @@ async function renderFile (childPath) { url = url.replace(/^\//, prefix) - if (linktype.suffix) + if (linktype.suffix) { url += linktype.suffix + } tag.setAttribute(linktype.attr, url) } @@ -238,12 +241,16 @@ async function renderFile (childPath) { continue } - const headerText = header.textContent.replace(/[A-Z]/g, x => x.toLowerCase()).replace(/ /g, '-').replace(/[^a-z0-9-]/g, '') + const headerText = header.textContent + .replace(/[A-Z]/g, x => x.toLowerCase()) + .replace(/ /g, '-') + .replace(/[^a-z0-9-]/g, '') let headerId = headerText let headerIncrement = 1 - while (document.getElementById(headerId) !== null) - headerId = headerText + (++headerIncrement) + while (document.getElementById(headerId) !== null) { + headerId = headerText + ++headerIncrement + } headerIds.push(headerId) header.setAttribute('id', headerId) @@ -252,8 +259,9 @@ async function renderFile (childPath) { // Walk the dom and build a table of contents const toc = document.getElementById('_table_of_contents') - if (toc) + if (toc) { toc.appendChild(generateTableOfContents(document)) + } // Write the final output const output = dom.serialize() @@ -272,21 +280,17 @@ function generateTableOfContents (document) { for (const header of headers) { const level = headerLevel(header) - while ( - hierarchy.length && - hierarchy[hierarchy.length - 1].headerLevel > level - ) + while (hierarchy.length && hierarchy[hierarchy.length - 1].headerLevel > level) { hierarchy.pop() + } - if ( - !hierarchy.length || - hierarchy[hierarchy.length - 1].headerLevel < level - ) { + if (!hierarchy.length || hierarchy[hierarchy.length - 1].headerLevel < level) { const newList = document.createElement('ul') newList.headerLevel = level - if (hierarchy.length) + if (hierarchy.length) { hierarchy[hierarchy.length - 1].appendChild(newList) + } hierarchy.push(newList) } @@ -307,8 +311,9 @@ function generateTableOfContents (document) { function walkHeaders (element, headers) { for (const child of element.childNodes) { - if (headerLevel(child)) + if (headerLevel(child)) { headers.push(child) + } 

     walkHeaders(child, headers)
   }
diff --git a/index.js b/index.js
index 570df352b9d4b..5447643e04b66 100644
--- a/index.js
+++ b/index.js
@@ -1,4 +1,5 @@
-if (require.main === module)
+if (require.main === module) {
   require('./lib/cli.js')(process)
-else
+} else {
   throw new Error('The programmatic API was removed in npm v8.0.0')
+}
diff --git a/lib/auth/legacy.js b/lib/auth/legacy.js
index 8659446dc4c02..2da82e361db40 100644
--- a/lib/auth/legacy.js
+++ b/lib/auth/legacy.js
@@ -40,10 +40,11 @@ const login = async (npm, opts) => {
       opts
     )
   } catch (err) {
-    if (err.code === 'EOTP')
+    if (err.code === 'EOTP') {
       newUser = await requestOTP()
-    else
+    } else {
       throw err
+    }
   }

   return newUser
@@ -58,20 +59,20 @@ const login = async (npm, opts) => {
       opts.creds.username && opts.creds.password && opts.creds.email)

-    if (err.code === 'EOTP')
+    if (err.code === 'EOTP') {
       res = await requestOTP()
-    else if (needsMoreInfo)
+    } else if (needsMoreInfo) {
       throw err
-    else {
+    } else {
       // TODO: maybe this needs to check for err.code === 'E400' instead?
       res = await addNewUser()
     }
   }

   const newCreds = {}
-  if (res && res.token)
+  if (res && res.token) {
     newCreds.token = res.token
-  else {
+  } else {
     newCreds.username = opts.creds.username
     newCreds.password = opts.creds.password
     newCreds.email = opts.creds.email
diff --git a/lib/auth/sso.js b/lib/auth/sso.js
index 56cff3c06e292..6fcfc30e5d3a8 100644
--- a/lib/auth/sso.js
+++ b/lib/auth/sso.js
@@ -25,8 +25,9 @@ const pollForSession = ({ registry, token, opts }) => {
         return sleep(opts.ssoPollFrequency).then(() => {
           return pollForSession({ registry, token, opts })
         })
-      } else
+      } else {
         throw err
+      }
     }
   )
 }
@@ -41,8 +42,9 @@ const login = async (npm, { creds, registry, scope }) => {
   const opts = { ...npm.flatOptions, creds, registry, scope }
   const { ssoType } = opts

-  if (!ssoType)
+  if (!ssoType) {
     throw new Error('Missing option: sso-type')
+  }

   // We're reusing the legacy login endpoint, so we need some dummy
   // stuff here to pass validation. They're never used.
@@ -57,10 +59,12 @@ const login = async (npm, { creds, registry, scope }) => {
     opts => profile.loginCouch(auth.username, auth.password, opts)
   )

-  if (!token)
+  if (!token) {
     throw new Error('no SSO token returned')
-  if (!sso)
+  }
+  if (!sso) {
     throw new Error('no SSO URL returned by services')
+  }

   await openUrl(npm, sso, 'to complete your login please visit')

diff --git a/lib/base-command.js b/lib/base-command.js
index 9657110f83291..7c6404a0f1a6f 100644
--- a/lib/base-command.js
+++ b/lib/base-command.js
@@ -19,17 +19,22 @@ class BaseCommand {
   get usage () {
     let usage = `npm ${this.constructor.name}\n\n`
-    if (this.constructor.description)
+    if (this.constructor.description) {
       usage = `${usage}${this.constructor.description}\n\n`
+    }

     usage = `${usage}Usage:\n`
-    if (!this.constructor.usage)
+    if (!this.constructor.usage) {
       usage = `${usage}npm ${this.constructor.name}`
-    else
-      usage = `${usage}${this.constructor.usage.map(u => `npm ${this.constructor.name} ${u}`).join('\n')}`
+    } else {
+      usage = `${usage}${this.constructor.usage
+        .map(u => `npm ${this.constructor.name} ${u}`)
+        .join('\n')}`
+    }

-    if (this.constructor.params)
+    if (this.constructor.params) {
       usage = `${usage}\n\nOptions:\n${this.wrappedParams}`
+    }

     // Mostly this just appends aliases, this could be more clear
     usage = usageUtil(this.constructor.name, usage)
@@ -43,7 +48,7 @@ class BaseCommand {
     for (const param of this.constructor.params) {
       const usage = `[${ConfigDefinitions[param].usage}]`
-      if (line.length && (line.length + usage.length) > this.wrapWidth) {
+      if (line.length && line.length + usage.length > this.wrapWidth) {
         results = [results, line].filter(Boolean).join('\n')
         line = ''
       }
@@ -66,15 +71,15 @@ class BaseCommand {
   }

   async execWorkspaces (args, filters) {
-    throw Object.assign(
-      new Error('This command does not support workspaces.'),
-      { code: 'ENOWORKSPACES' }
-    )
+    throw Object.assign(new Error('This command does not support workspaces.'), {
+      code: 'ENOWORKSPACES',
+    })
   }

   async setWorkspaces (filters) {
-    if (this.isArboristCmd)
+    if (this.isArboristCmd) {
       this.includeWorkspaceRoot = false
+    }

     const ws = await getWorkspaces(filters, {
       path: this.npm.localPrefix,
diff --git a/lib/cli.js b/lib/cli.js
index 0e6301517f445..9dcd9d04d2ff2 100644
--- a/lib/cli.js
+++ b/lib/cli.js
@@ -1,13 +1,10 @@
 // Separated out for easier unit testing
-module.exports = async (process) => {
+module.exports = async process => {
   // set it here so that regardless of what happens later, we don't
   // leak any private CLI configs to other programs
   process.title = 'npm'

-  const {
-    checkForBrokenNode,
-    checkForUnsupportedNode,
-  } = require('../lib/utils/unsupported.js')
+  const { checkForBrokenNode, checkForUnsupportedNode } = require('../lib/utils/unsupported.js')

   checkForBrokenNode()

@@ -25,8 +22,9 @@ module.exports = async (process) => {
   // if npm is called as "npmg" or "npm_g", then
   // run in global mode.
- if (process.argv[1][process.argv[1].length - 1] === 'g') + if (process.argv[1][process.argv[1].length - 1] === 'g') { process.argv.splice(1, 1, 'npm', '-g') + } const replaceInfo = require('../lib/utils/replace-info.js') log.verbose('cli', replaceInfo(process.argv)) @@ -70,7 +68,8 @@ module.exports = async (process) => { if (err.code === 'EUNKNOWNCOMMAND') { const didYouMean = require('./utils/did-you-mean.js') const suggestions = await didYouMean(npm, npm.localPrefix, cmd) - npm.output(`Unknown command: "${cmd}"${suggestions}\n\nTo see a list of supported npm commands, run:\n npm help`) + npm.output(`Unknown command: "${cmd}"${suggestions}\n`) + npm.output('To see a list of supported npm commands, run:\n npm help') process.exitCode = 1 return exitHandler() } diff --git a/lib/commands/access.js b/lib/commands/access.js index 15e51a450aa2a..df783c35fe0f7 100644 --- a/lib/commands/access.js +++ b/lib/commands/access.js @@ -52,15 +52,17 @@ class Access extends BaseCommand { async completion (opts) { const argv = opts.conf.argv.remain - if (argv.length === 2) + if (argv.length === 2) { return subcommands + } switch (argv[2]) { case 'grant': - if (argv.length === 3) + if (argv.length === 3) { return ['read-only', 'read-write'] - else + } else { return [] + } case 'public': case 'restricted': @@ -77,11 +79,13 @@ class Access extends BaseCommand { } async exec ([cmd, ...args]) { - if (!cmd) + if (!cmd) { throw this.usageError('Subcommand is required.') + } - if (!subcommands.includes(cmd) || !this[cmd]) + if (!subcommands.includes(cmd) || !this[cmd]) { throw this.usageError(`${cmd} is not a recognized subcommand.`) + } return this[cmd](args, this.npm.flatOptions) } @@ -95,11 +99,13 @@ class Access extends BaseCommand { } async grant ([perms, scopeteam, pkg], opts) { - if (!perms || (perms !== 'read-only' && perms !== 'read-write')) + if (!perms || (perms !== 'read-only' && perms !== 'read-write')) { throw this.usageError('First argument must be either `read-only` or `read-write`.') + } - if (!scopeteam) + if (!scopeteam) { throw this.usageError('`` argument is required.') + } const [, scope, team] = scopeteam.match(/^@?([^:]+):(.*)$/) || [] @@ -115,8 +121,9 @@ class Access extends BaseCommand { } async revoke ([scopeteam, pkg], opts) { - if (!scopeteam) + if (!scopeteam) { throw this.usageError('`` argument is required.') + } const [, scope, team] = scopeteam.match(/^@?([^:]+):(.*)$/) || [] @@ -152,8 +159,9 @@ class Access extends BaseCommand { } async lsPackages ([owner], opts) { - if (!owner) + if (!owner) { owner = await getIdentity(this.npm, opts) + } const pkgs = await libaccess.lsPackages(owner, opts) @@ -183,9 +191,9 @@ class Access extends BaseCommand { } async getPackage (name, requireScope) { - if (name && name.trim()) + if (name && name.trim()) { return name.trim() - else { + } else { try { const pkg = await readPackageJson(path.resolve(this.npm.prefix, 'package.json')) name = pkg.name @@ -194,14 +202,16 @@ class Access extends BaseCommand { throw new Error( 'no package name passed to command and no package.json found' ) - } else + } else { throw err + } } - if (requireScope && !name.match(/^@[^/]+\/.*$/)) + if (requireScope && !name.match(/^@[^/]+\/.*$/)) { throw this.usageError('This command is only available for scoped packages.') - else + } else { return name + } } } } diff --git a/lib/commands/adduser.js b/lib/commands/adduser.js index 6136eb726fa7e..aa3d8a336a51b 100644 --- a/lib/commands/adduser.js +++ b/lib/commands/adduser.js @@ -54,8 +54,9 @@ class AddUser extends 
BaseCommand { if (scope) { const scopedRegistry = this.npm.config.get(`${scope}:registry`) const cliRegistry = this.npm.config.get('registry', 'cli') - if (scopedRegistry && !cliRegistry) + if (scopedRegistry && !cliRegistry) { return scopedRegistry + } } return registry } @@ -63,8 +64,9 @@ class AddUser extends BaseCommand { getAuthType ({ authType }) { const type = authTypes[authType] - if (!type) + if (!type) { throw new Error('no such auth module') + } return type } @@ -72,8 +74,9 @@ class AddUser extends BaseCommand { async updateConfig ({ newCreds, registry, scope }) { this.npm.config.delete('_token', 'user') // prevent legacy pollution this.npm.config.setCredentialsByURI(registry, newCreds) - if (scope) + if (scope) { this.npm.config.set(scope + ':registry', registry, 'user') + } await this.npm.config.save('user') } } diff --git a/lib/commands/audit.js b/lib/commands/audit.js index d05633ab0fe09..ebc9f65c58e1f 100644 --- a/lib/commands/audit.js +++ b/lib/commands/audit.js @@ -36,8 +36,9 @@ class Audit extends ArboristWorkspaceCmd { async completion (opts) { const argv = opts.conf.argv.remain - if (argv.length === 2) + if (argv.length === 2) { return ['fix'] + } switch (argv[2]) { case 'fix': @@ -60,9 +61,9 @@ class Audit extends ArboristWorkspaceCmd { const arb = new Arborist(opts) const fix = args[0] === 'fix' await arb.audit({ fix }) - if (fix) + if (fix) { await reifyFinish(this.npm, arb) - else { + } else { // will throw if there's an error, because this is an audit command auditError(this.npm, arb.auditReport) const result = auditReport(arb.auditReport, opts) diff --git a/lib/commands/bin.js b/lib/commands/bin.js index 9a894f3bb58f3..68559e4133e90 100644 --- a/lib/commands/bin.js +++ b/lib/commands/bin.js @@ -17,8 +17,9 @@ class Bin extends BaseCommand { async exec (args) { const b = this.npm.bin this.npm.output(b) - if (this.npm.config.get('global') && !envPath.includes(b)) + if (this.npm.config.get('global') && !envPath.includes(b)) { console.error('(not in PATH env variable)') + } } } module.exports = Bin diff --git a/lib/commands/bugs.js b/lib/commands/bugs.js index 863a7ffeca56b..ecf50f32e35f9 100644 --- a/lib/commands/bugs.js +++ b/lib/commands/bugs.js @@ -23,8 +23,9 @@ class Bugs extends BaseCommand { } async exec (args) { - if (!args || !args.length) + if (!args || !args.length) { args = ['.'] + } await Promise.all(args.map(pkg => this.getBugs(pkg))) } @@ -39,20 +40,24 @@ class Bugs extends BaseCommand { getBugsUrl (mani) { if (mani.bugs) { - if (typeof mani.bugs === 'string') + if (typeof mani.bugs === 'string') { return mani.bugs + } - if (typeof mani.bugs === 'object' && mani.bugs.url) + if (typeof mani.bugs === 'object' && mani.bugs.url) { return mani.bugs.url + } - if (typeof mani.bugs === 'object' && mani.bugs.email) + if (typeof mani.bugs === 'object' && mani.bugs.email) { return `mailto:${mani.bugs.email}` + } } // try to get it from the repo, if possible const info = hostedFromMani(mani) - if (info) + if (info) { return info.bugs() + } // just send them to the website, hopefully that has some info! 
return `https://www.npmjs.com/package/${mani.name}` diff --git a/lib/commands/cache.js b/lib/commands/cache.js index 43f52e4e95e68..af311f36d1d2a 100644 --- a/lib/commands/cache.js +++ b/lib/commands/cache.js @@ -12,9 +12,13 @@ const localeCompare = require('@isaacs/string-locale-compare')('en') const searchCachePackage = async (path, spec, cacheKeys) => { const parsed = npa(spec) - if (parsed.rawSpec !== '' && parsed.type === 'tag') + if (parsed.rawSpec !== '' && parsed.type === 'tag') { throw new Error(`Cannot list cache keys for a tagged package.`) - const searchMFH = new RegExp(`^make-fetch-happen:request-cache:.*(? { const noExt = filename.slice(0, -4) const noScope = `${parsed.name.split('/').pop()}-` const ver = noExt.slice(noScope.length) - if (semver.satisfies(ver, parsed.rawSpec)) + if (semver.satisfies(ver, parsed.rawSpec)) { results.add(key) + } continue } // is this key a packument? - if (!searchPack.test(key)) + if (!searchPack.test(key)) { continue + } results.add(key) let packument, details @@ -43,16 +49,20 @@ const searchCachePackage = async (path, spec, cacheKeys) => { // if we couldn't parse the packument, abort continue } - if (!packument.versions || typeof packument.versions !== 'object') + if (!packument.versions || typeof packument.versions !== 'object') { continue + } // assuming this is a packument for (const ver of Object.keys(packument.versions)) { if (semver.satisfies(ver, parsed.rawSpec)) { - if (packument.versions[ver].dist - && typeof packument.versions[ver].dist === 'object' - && packument.versions[ver].dist.tarball !== undefined - && cacheKeys.has(`make-fetch-happen:request-cache:${packument.versions[ver].dist.tarball}`)) + if ( + packument.versions[ver].dist && + typeof packument.versions[ver].dist === 'object' && + packument.versions[ver].dist.tarball !== undefined && + cacheKeys.has(`make-fetch-happen:request-cache:${packument.versions[ver].dist.tarball}`) + ) { results.add(`make-fetch-happen:request-cache:${packument.versions[ver].dist.tarball}`) + } } } } @@ -90,8 +100,9 @@ class Cache extends BaseCommand { async completion (opts) { const argv = opts.conf.argv.remain - if (argv.length === 2) + if (argv.length === 2) { return ['add', 'clean', 'verify', 'ls', 'delete'] + } // TODO - eventually... switch (argv[2]) { @@ -107,11 +118,14 @@ class Cache extends BaseCommand { async exec (args) { const cmd = args.shift() switch (cmd) { - case 'rm': case 'clear': case 'clean': + case 'rm': + case 'clear': + case 'clean': return await this.clean(args) case 'add': return await this.add(args) - case 'verify': case 'check': + case 'verify': + case 'check': return await this.verify() case 'ls': return await this.ls(args) @@ -161,37 +175,47 @@ class Cache extends BaseCommand { // npm cache add ... // npm cache add ... async add (args) { - const usage = 'Usage:\n' + + const usage = + 'Usage:\n' + ' npm cache add ...\n' + ' npm cache add @...\n' + ' npm cache add ...\n' + ' npm cache add ...\n' log.silly('cache add', 'args', args) - if (args.length === 0) + if (args.length === 0) { throw Object.assign(new Error(usage), { code: 'EUSAGE' }) + } - return Promise.all(args.map(spec => { - log.silly('cache add', 'spec', spec) - // we ask pacote for the thing, and then just throw the data - // away so that it tee-pipes it into the cache like it does - // for a normal request. 
- return pacote.tarball.stream(spec, stream => { - stream.resume() - return stream.promise() - }, this.npm.flatOptions) - })) + return Promise.all( + args.map(spec => { + log.silly('cache add', 'spec', spec) + // we ask pacote for the thing, and then just throw the data + // away so that it tee-pipes it into the cache like it does + // for a normal request. + return pacote.tarball.stream( + spec, + stream => { + stream.resume() + return stream.promise() + }, + this.npm.flatOptions + ) + }) + ) } async verify () { const cache = path.join(this.npm.cache, '_cacache') - const prefix = cache.indexOf(process.env.HOME) === 0 - ? `~${cache.substr(process.env.HOME.length)}` - : cache + const prefix = + cache.indexOf(process.env.HOME) === 0 ? `~${cache.substr(process.env.HOME.length)}` : cache const stats = await cacache.verify(cache) this.npm.output(`Cache verified and compressed (${prefix})`) this.npm.output(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`) stats.badContentCount && this.npm.output(`Corrupted content removed: ${stats.badContentCount}`) - stats.reclaimedCount && this.npm.output(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`) + stats.reclaimedCount && + this.npm.output( + `Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)` + ) stats.missingContent && this.npm.output(`Missing content: ${stats.missingContent}`) this.npm.output(`Index entries: ${stats.totalEntries}`) this.npm.output(`Finished in ${stats.runTime.total / 1000}s`) @@ -206,8 +230,9 @@ class Cache extends BaseCommand { const results = new Set() for (const spec of specs) { const keySet = await searchCachePackage(cachePath, spec, cacheKeys) - for (const key of keySet) + for (const key of keySet) { results.add(key) + } } [...results].sort(localeCompare).forEach(key => this.npm.output(key)) return diff --git a/lib/commands/completion.js b/lib/commands/completion.js index fbbde0df70ea7..bce6c3619ccc4 100644 --- a/lib/commands/completion.js +++ b/lib/commands/completion.js @@ -57,8 +57,9 @@ class Completion extends BaseCommand { // completion for the completion command async completion (opts) { - if (opts.w > 2) + if (opts.w > 2) { return + } const { resolve } = require('path') const [bashExists, zshExists] = await Promise.all([ @@ -66,11 +67,13 @@ class Completion extends BaseCommand { fileExists(resolve(process.env.HOME, '.zshrc')), ]) const out = [] - if (zshExists) + if (zshExists) { out.push(['>>', '~/.zshrc']) + } - if (bashExists) + if (bashExists) { out.push(['>>', '~/.bashrc']) + } return out } @@ -88,8 +91,9 @@ class Completion extends BaseCommand { // if the COMP_* isn't in the env, then just dump the script. if (COMP_CWORD === undefined || COMP_LINE === undefined || - COMP_POINT === undefined) + COMP_POINT === undefined) { return dumpScript() + } // ok we're actually looking at the envs and outputting the suggestions // get the partial line and partial word, @@ -106,8 +110,9 @@ class Completion extends BaseCommand { // figure out where in that last word the point is. 
const partialWordRaw = args[w] let i = partialWordRaw.length - while (partialWordRaw.substr(0, i) !== partialLine.substr(-1 * i) && i > 0) + while (partialWordRaw.substr(0, i) !== partialLine.substr(-1 * i) && i > 0) { i-- + } const partialWord = unescape(partialWordRaw.substr(0, i)) partialWords.push(partialWord) @@ -126,8 +131,9 @@ class Completion extends BaseCommand { } if (partialWords.slice(0, -1).indexOf('--') === -1) { - if (word.charAt(0) === '-') + if (word.charAt(0) === '-') { return this.wrap(opts, configCompl(opts)) + } if (words[w - 1] && words[w - 1].charAt(0) === '-' && @@ -151,8 +157,9 @@ class Completion extends BaseCommand { nopt(types, shorthands, partialWords.slice(0, -1), 0) // check if there's a command already. const cmd = parsed.argv.remain[1] - if (!cmd) + if (!cmd) { return this.wrap(opts, cmdCompl(opts)) + } Object.keys(parsed).forEach(k => this.npm.config.set(k, parsed[k])) @@ -173,17 +180,20 @@ class Completion extends BaseCommand { // Ie, returning ['a', 'b c', ['d', 'e']] would allow it to expand // to: 'a', 'b c', or 'd' 'e' wrap (opts, compls) { - if (!Array.isArray(compls)) + if (!Array.isArray(compls)) { compls = compls ? [compls] : [] + } compls = compls.map(c => Array.isArray(c) ? c.map(escape).join(' ') : escape(c)) - if (opts.partialWord) + if (opts.partialWord) { compls = compls.filter(c => c.startsWith(opts.partialWord)) + } - if (compls.length > 0) + if (compls.length > 0) { this.npm.output(compls.join('\n')) + } } } @@ -197,8 +207,9 @@ const dumpScript = async () => { await new Promise((res, rej) => { let done = false process.stdout.on('error', er => { - if (done) + if (done) { return + } done = true @@ -214,15 +225,17 @@ const dumpScript = async () => { // can never ever work on OS X. // TODO Ignoring coverage, see 'non EPIPE errors cause failures' test. /* istanbul ignore next */ - if (er.errno === 'EPIPE') + if (er.errno === 'EPIPE') { res() - else + } else { rej(er) + } }) process.stdout.write(d, () => { - if (done) + if (done) { return + } done = true res() @@ -258,7 +271,7 @@ const isFlag = word => { const split = word.match(/^(-*)((?:no-)+)?(.*)$/) const no = split[2] const conf = split[3] - const {type} = definitions[conf] + const { type } = definitions[conf] return no || type === Boolean || (Array.isArray(type) && type.includes(Boolean)) || @@ -269,12 +282,14 @@ const isFlag = word => { // if they all resolve to the same thing, just return the thing it already is const cmdCompl = opts => { const matches = fullList.filter(c => c.startsWith(opts.partialWord)) - if (!matches.length) + if (!matches.length) { return matches + } const derefs = new Set([...matches.map(c => deref(c))]) - if (derefs.size === 1) + if (derefs.size === 1) { return [...derefs] + } return fullList } diff --git a/lib/commands/config.js b/lib/commands/config.js index fc482edb6a688..0e92f6f3e50ba 100644 --- a/lib/commands/config.js +++ b/lib/commands/config.js @@ -64,13 +64,15 @@ class Config extends BaseCommand { async completion (opts) { const argv = opts.conf.argv.remain - if (argv[1] !== 'config') + if (argv[1] !== 'config') { argv.unshift('config') + } if (argv.length === 2) { const cmds = ['get', 'set', 'delete', 'ls', 'rm', 'edit'] - if (opts.partialWord !== 'l') + if (opts.partialWord !== 'l') { cmds.push('list') + } return cmds } @@ -79,8 +81,9 @@ class Config extends BaseCommand { switch (action) { case 'set': // todo: complete with valid values, if possible. 
- if (argv.length > 3) + if (argv.length > 3) { return [] + } // fallthrough /* eslint no-fallthrough:0 */ @@ -132,28 +135,32 @@ class Config extends BaseCommand { } async set (args) { - if (!args.length) + if (!args.length) { throw this.usageError() + } const where = this.npm.flatOptions.location for (const [key, val] of Object.entries(keyValues(args))) { this.npm.log.info('config', 'set %j %j', key, val) this.npm.config.set(key, val || '', where) - if (!this.npm.config.validate(where)) + if (!this.npm.config.validate(where)) { this.npm.log.warn('config', 'omitting invalid config values') + } } await this.npm.config.save(where) } async get (keys) { - if (!keys.length) + if (!keys.length) { return this.list() + } const out = [] for (const key of keys) { - if (!publicVar(key)) + if (!publicVar(key)) { throw `The ${key} option is protected, and cannot be retrieved in this way` + } const pref = keys.length > 1 ? `${key}=` : '' out.push(pref + this.npm.config.get(key)) @@ -162,12 +169,14 @@ class Config extends BaseCommand { } async del (keys) { - if (!keys.length) + if (!keys.length) { throw this.usageError() + } const where = this.npm.flatOptions.location - for (const key of keys) + for (const key of keys) { this.npm.config.delete(key, where) + } await this.npm.config.save(where) } @@ -220,8 +229,9 @@ ${defData} const [bin, ...args] = e.split(/\s+/) const editor = spawn(bin, [...args, file], { stdio: 'inherit' }) editor.on('exit', (code) => { - if (code) + if (code) { return reject(new Error(`editor process exited with code: ${code}`)) + } return resolve() }) }) @@ -232,12 +242,14 @@ ${defData} // long does not have a flattener const long = this.npm.config.get('long') for (const [where, { data, source }] of this.npm.config.data.entries()) { - if (where === 'default' && !long) + if (where === 'default' && !long) { continue + } const keys = Object.keys(data).sort(localeCompare) - if (!keys.length) + if (!keys.length) { continue + } msg.push(`; "${where}" config from ${source}`, '') for (const k of keys) { @@ -265,8 +277,9 @@ ${defData} async listJson () { const publicConf = {} for (const key in this.npm.config.list[0]) { - if (!publicVar(key)) + if (!publicVar(key)) { continue + } publicConf[key] = this.npm.config.get(key) } diff --git a/lib/commands/deprecate.js b/lib/commands/deprecate.js index 37b9d2dc27b66..1e33b98bf312f 100644 --- a/lib/commands/deprecate.js +++ b/lib/commands/deprecate.js @@ -30,8 +30,9 @@ class Deprecate extends BaseCommand { } async completion (opts) { - if (opts.conf.argv.remain.length > 1) + if (opts.conf.argv.remain.length > 1) { return [] + } const username = await getIdentity(this.npm, this.npm.flatOptions) const packages = await libaccess.lsPackages(username, this.npm.flatOptions) @@ -44,8 +45,9 @@ class Deprecate extends BaseCommand { async exec ([pkg, msg]) { // msg == null because '' is a valid value, it indicates undeprecate - if (!pkg || msg == null) + if (!pkg || msg == null) { throw this.usageError() + } // fetch the data and make sure it exists. const p = npa(pkg) @@ -53,8 +55,9 @@ class Deprecate extends BaseCommand { // "*" is the appropriate default. const spec = p.rawSpec === '' ? 
'*' : p.fetchSpec - if (semver.validRange(spec, true) === null) + if (semver.validRange(spec, true) === null) { throw new Error(`invalid version range: ${spec}`) + } const uri = '/' + p.escapedName const packument = await fetch.json(uri, { diff --git a/lib/commands/diff.js b/lib/commands/diff.js index da9b9f5d2c655..339cfcf971082 100644 --- a/lib/commands/diff.js +++ b/lib/commands/diff.js @@ -23,9 +23,7 @@ class Diff extends BaseCommand { /* istanbul ignore next - see test/lib/load-all-commands.js */ static get usage () { - return [ - '[...]', - ] + return ['[...]'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -51,24 +49,25 @@ class Diff extends BaseCommand { const specs = this.npm.config.get('diff').filter(d => d) if (specs.length > 2) { throw new TypeError( - 'Can\'t use more than two --diff arguments.\n\n' + - `Usage:\n${this.usage}` + "Can't use more than two --diff arguments.\n\n" + `Usage:\n${this.usage}` ) } // execWorkspaces may have set this already - if (!this.prefix) + if (!this.prefix) { this.prefix = this.npm.prefix + } // this is the "top" directory, one up from node_modules // in global mode we have to walk one up from globalDir because our // node_modules is sometimes under ./lib, and in global mode we're only ever // walking through node_modules (because we will have been given a package // name already) - if (this.npm.config.get('global')) + if (this.npm.config.get('global')) { this.top = resolve(this.npm.globalDir, '..') - else + } else { this.top = this.prefix + } const [a, b] = await this.retrieveSpecs(specs) npmlog.info('diff', { src: a, dst: b }) @@ -100,8 +99,9 @@ class Diff extends BaseCommand { npmlog.verbose('diff', 'could not read project dir package.json') } - if (!name) + if (!name) { throw this.usageError('Needs multiple arguments to compare or run from a project dir.\n') + } return name } @@ -116,10 +116,7 @@ class Diff extends BaseCommand { // to its latest published registry version if (!a) { const pkgName = await this.packageName(this.prefix) - return [ - `${pkgName}@${this.npm.config.get('tag')}`, - `file:${this.prefix}`, - ] + return [`${pkgName}@${this.npm.config.get('tag')}`, `file:${this.prefix}`] } // single argument, used to compare wanted versions of an @@ -133,18 +130,18 @@ class Diff extends BaseCommand { noPackageJson = true } - const missingPackageJson = this.usageError('Needs multiple arguments to compare or run from a project dir.\n') + const missingPackageJson = this.usageError( + 'Needs multiple arguments to compare or run from a project dir.\n' + ) // using a valid semver range, that means it should just diff // the cwd against a published version to the registry using the // same project name and the provided semver range if (semver.validRange(a)) { - if (!pkgName) + if (!pkgName) { throw missingPackageJson - return [ - `${pkgName}@${a}`, - `file:${this.prefix}`, - ] + } + return [`${pkgName}@${a}`, `file:${this.prefix}`] } // when using a single package name as arg and it's part of the current @@ -161,28 +158,24 @@ class Diff extends BaseCommand { } const arb = new Arborist(opts) actualTree = await arb.loadActual(opts) - node = actualTree && - actualTree.inventory.query('name', spec.name) - .values().next().value + node = actualTree && actualTree.inventory.query('name', spec.name).values().next().value } catch (e) { npmlog.verbose('diff', 'failed to load actual install tree') } if (!node || !node.name || !node.package || !node.package.version) { - if (noPackageJson) + if (noPackageJson) { throw missingPackageJson 
- return [ - `${spec.name}@${spec.fetchSpec}`, - `file:${this.prefix}`, - ] + } + return [`${spec.name}@${spec.fetchSpec}`, `file:${this.prefix}`] } - const tryRootNodeSpec = () => - (actualTree && actualTree.edgesOut.get(spec.name) || {}).spec + const tryRootNodeSpec = () => ((actualTree && actualTree.edgesOut.get(spec.name)) || {}).spec const tryAnySpec = () => { - for (const edge of node.edgesIn) + for (const edge of node.edgesIn) { return edge.spec + } } const aSpec = `file:${node.realpath}` @@ -192,12 +185,10 @@ class Diff extends BaseCommand { // work from the top of the arborist tree to find the original semver // range declared in the package that depends on the package. let bSpec - if (spec.rawSpec) + if (spec.rawSpec) { bSpec = spec.rawSpec - else { - const bTargetVersion = - tryRootNodeSpec() - || tryAnySpec() + } else { + const bTargetVersion = tryRootNodeSpec() || tryAnySpec() // figure out what to compare against, // follows same logic to npm outdated "Wanted" results @@ -205,24 +196,15 @@ class Diff extends BaseCommand { ...this.npm.flatOptions, preferOnline: true, }) - bSpec = pickManifest( - packument, - bTargetVersion, - { ...this.npm.flatOptions } - ).version + bSpec = pickManifest(packument, bTargetVersion, { ...this.npm.flatOptions }).version } - return [ - `${spec.name}@${aSpec}`, - `${spec.name}@${bSpec}`, - ] + return [`${spec.name}@${aSpec}`, `${spec.name}@${bSpec}`] } else if (spec.type === 'directory') { - return [ - `file:${spec.fetchSpec}`, - `file:${this.prefix}`, - ] - } else + return [`file:${spec.fetchSpec}`, `file:${this.prefix}`] + } else { throw this.usageError(`Spec type ${spec.type} not supported.\n`) + } } async convertVersionsToSpecs ([a, b]) { @@ -238,19 +220,24 @@ class Diff extends BaseCommand { npmlog.verbose('diff', 'could not read project dir package.json') } - if (!pkgName) - throw this.usageError('Needs to be run from a project dir in order to diff two versions.\n') + if (!pkgName) { + throw this.usageError( + 'Needs to be run from a project dir in order to diff two versions.\n' + ) + } return [`${pkgName}@${a}`, `${pkgName}@${b}`] } // otherwise uses the name from the other arg to // figure out the spec.name of what to compare - if (!semverA && semverB) + if (!semverA && semverB) { return [a, `${npa(a).name}@${b}`] + } - if (semverA && !semverB) + if (semverA && !semverB) { return [`${npa(b).name}@${a}`, b] + } // no valid semver ranges used return [a, b] @@ -271,16 +258,15 @@ class Diff extends BaseCommand { return specs.map(i => { const spec = npa(i) - if (spec.rawSpec) + if (spec.rawSpec) { return i + } - const node = actualTree - && actualTree.inventory.query('name', spec.name) - .values().next().value + const node = + actualTree && actualTree.inventory.query('name', spec.name).values().next().value - const res = !node || !node.package || !node.package.version - ? spec.fetchSpec - : `file:${node.realpath}` + const res = + !node || !node.package || !node.package.version ? 
spec.fetchSpec : `file:${node.realpath}` return `${spec.name}@${res}` }) diff --git a/lib/commands/dist-tag.js b/lib/commands/dist-tag.js index b7baa3d463e5d..9c938851afd83 100644 --- a/lib/commands/dist-tag.js +++ b/lib/commands/dist-tag.js @@ -33,8 +33,9 @@ class DistTag extends BaseCommand { async completion (opts) { const argv = opts.conf.argv.remain - if (argv.length === 2) + if (argv.length === 2) { return ['add', 'rm', 'ls'] + } switch (argv[2]) { default: @@ -45,21 +46,25 @@ class DistTag extends BaseCommand { async exec ([cmdName, pkg, tag]) { const opts = this.npm.flatOptions - if (['add', 'a', 'set', 's'].includes(cmdName)) + if (['add', 'a', 'set', 's'].includes(cmdName)) { return this.add(pkg, tag, opts) + } - if (['rm', 'r', 'del', 'd', 'remove'].includes(cmdName)) + if (['rm', 'r', 'del', 'd', 'remove'].includes(cmdName)) { return this.remove(pkg, tag, opts) + } - if (['ls', 'l', 'sl', 'list'].includes(cmdName)) + if (['ls', 'l', 'sl', 'list'].includes(cmdName)) { return this.list(pkg, opts) + } if (!pkg) { // when only using the pkg name the default behavior // should be listing the existing tags return this.list(cmdName, opts) - } else + } else { throw this.usageError() + } } async execWorkspaces ([cmdName, pkg, tag], filters) { @@ -68,16 +73,18 @@ class DistTag extends BaseCommand { // - unset // - . // - .@version - if (['ls', 'l', 'sl', 'list'].includes(cmdName) && (!pkg || pkg === '.' || /^\.@/.test(pkg))) + if (['ls', 'l', 'sl', 'list'].includes(cmdName) && (!pkg || pkg === '.' || /^\.@/.test(pkg))) { return this.listWorkspaces(filters) + } // pkg is unset // cmdName is one of: // - unset // - . // - .@version - if (!pkg && (!cmdName || cmdName === '.' || /^\.@/.test(cmdName))) + if (!pkg && (!cmdName || cmdName === '.' || /^\.@/.test(cmdName))) { return this.listWorkspaces(filters) + } // anything else is just a regular dist-tag command // so we fallback to the non-workspaces implementation @@ -92,13 +99,15 @@ class DistTag extends BaseCommand { log.verbose('dist-tag add', defaultTag, 'to', spec.name + '@' + version) - if (!spec.name || !version || !defaultTag) + if (!spec.name || !version || !defaultTag) { throw this.usageError() + } const t = defaultTag.trim() - if (semver.validRange(t)) + if (semver.validRange(t)) { throw new Error('Tag name must not be a valid SemVer range: ' + t) + } const tags = await this.fetchTags(spec, opts) if (tags[t] === version) { @@ -125,8 +134,9 @@ class DistTag extends BaseCommand { spec = npa(spec || '') log.verbose('dist-tag del', tag, 'from', spec.name) - if (!spec.name) + if (!spec.name) { throw this.usageError() + } const tags = await this.fetchTags(spec, opts) if (!tags[tag]) { @@ -148,11 +158,13 @@ class DistTag extends BaseCommand { async list (spec, opts) { if (!spec) { - if (this.npm.config.get('global')) + if (this.npm.config.get('global')) { throw this.usageError() + } const pkg = await readPackageName(this.npm.prefix) - if (!pkg) + if (!pkg) { throw this.usageError() + } return this.list(pkg, opts) } @@ -190,10 +202,12 @@ class DistTag extends BaseCommand { `/-/package/${spec.escapedName}/dist-tags`, { ...opts, 'prefer-online': true, spec } ) - if (data && typeof data === 'object') + if (data && typeof data === 'object') { delete data._etag - if (!data || !Object.keys(data).length) + } + if (!data || !Object.keys(data).length) { throw new Error('No dist-tags found for ' + spec.name) + } return data } diff --git a/lib/commands/docs.js b/lib/commands/docs.js index 4482678ea7f0c..e73528d180993 100644 --- 
a/lib/commands/docs.js +++ b/lib/commands/docs.js @@ -32,8 +32,9 @@ class Docs extends BaseCommand { } async exec (args) { - if (!args || !args.length) + if (!args || !args.length) { args = ['.'] + } await Promise.all(args.map(pkg => this.getDocs(pkg))) } @@ -52,12 +53,14 @@ class Docs extends BaseCommand { } getDocsUrl (mani) { - if (mani.homepage) + if (mani.homepage) { return mani.homepage + } const info = hostedFromMani(mani) - if (info) + if (info) { return info.docs() + } return 'https://www.npmjs.com/package/' + mani.name } diff --git a/lib/commands/doctor.js b/lib/commands/doctor.js index b6363467c6487..8cb774ee33e57 100644 --- a/lib/commands/doctor.js +++ b/lib/commands/doctor.js @@ -11,21 +11,26 @@ const { promisify } = require('util') const ansiTrim = require('../utils/ansi-trim.js') const isWindows = require('../utils/is-windows.js') const ping = require('../utils/ping.js') -const { registry: { default: defaultRegistry } } = require('../utils/config/definitions.js') +const { + registry: { default: defaultRegistry }, +} = require('../utils/config/definitions.js') const lstat = promisify(fs.lstat) const readdir = promisify(fs.readdir) const access = promisify(fs.access) const { R_OK, W_OK, X_OK } = fs.constants const maskLabel = mask => { const label = [] - if (mask & R_OK) + if (mask & R_OK) { label.push('readable') + } - if (mask & W_OK) + if (mask & W_OK) { label.push('writable') + } - if (mask & X_OK) + if (mask & X_OK) { label.push('executable') + } return label.join(', ') } @@ -59,13 +64,31 @@ class Doctor extends BaseCommand { ['node -v', 'getLatestNodejsVersion', []], ['npm config get registry', 'checkNpmRegistry', []], ['which git', 'getGitPath', []], - ...(isWindows ? [] : [ - ['Perms check on cached files', 'checkFilesPermission', [this.npm.cache, true, R_OK]], - ['Perms check on local node_modules', 'checkFilesPermission', [this.npm.localDir, true]], - ['Perms check on global node_modules', 'checkFilesPermission', [this.npm.globalDir, false]], - ['Perms check on local bin folder', 'checkFilesPermission', [this.npm.localBin, false, R_OK | W_OK | X_OK]], - ['Perms check on global bin folder', 'checkFilesPermission', [this.npm.globalBin, false, X_OK]], - ]), + ...(isWindows + ? [] + : [ + ['Perms check on cached files', 'checkFilesPermission', [this.npm.cache, true, R_OK]], + [ + 'Perms check on local node_modules', + 'checkFilesPermission', + [this.npm.localDir, true], + ], + [ + 'Perms check on global node_modules', + 'checkFilesPermission', + [this.npm.globalDir, false], + ], + [ + 'Perms check on local bin folder', + 'checkFilesPermission', + [this.npm.localBin, false, R_OK | W_OK | X_OK], + ], + [ + 'Perms check on global bin folder', + 'checkFilesPermission', + [this.npm.globalBin, false, X_OK], + ], + ]), ['Verify cache contents', 'verifyCachedFiles', [this.npm.flatOptions.cache]], // TODO: // - ensure arborist.loadActual() runs without errors and no invalid edges @@ -85,39 +108,43 @@ class Doctor extends BaseCommand { messages.push(line) } - const outHead = ['Check', 'Value', 'Recommendation/Notes'] - .map(!this.npm.color ? h => h : h => chalk.underline(h)) + const outHead = ['Check', 'Value', 'Recommendation/Notes'].map( + !this.npm.color ? h => h : h => chalk.underline(h) + ) let allOk = true - const outBody = messages.map(!this.npm.color - ? item => { - allOk = allOk && item[1] - item[1] = item[1] ? 
'ok' : 'not ok' - item[2] = String(item[2]) - return item - } - : item => { - allOk = allOk && item[1] - if (!item[1]) { - item[0] = chalk.red(item[0]) - item[2] = chalk.magenta(String(item[2])) + const outBody = messages.map( + !this.npm.color + ? item => { + allOk = allOk && item[1] + item[1] = item[1] ? 'ok' : 'not ok' + item[2] = String(item[2]) + return item } - item[1] = item[1] ? chalk.green('ok') : chalk.red('not ok') - return item - }) + : item => { + allOk = allOk && item[1] + if (!item[1]) { + item[0] = chalk.red(item[0]) + item[2] = chalk.magenta(String(item[2])) + } + item[1] = item[1] ? chalk.green('ok') : chalk.red('not ok') + return item + } + ) const outTable = [outHead, ...outBody] const tableOpts = { stringLength: s => ansiTrim(s).length, } - const silent = this.npm.log.levels[this.npm.log.level] > - this.npm.log.levels.error + const silent = this.npm.log.levels[this.npm.log.level] > this.npm.log.levels.error if (!silent) { this.npm.output(table(outTable, tableOpts)) - if (!allOk) + if (!allOk) { console.error('') + } } - if (!allOk) + if (!allOk) { throw new Error('Some problems found. See above for recommendations.') + } } async checkPing () { @@ -127,10 +154,11 @@ class Doctor extends BaseCommand { await ping(this.npm.flatOptions) return '' } catch (er) { - if (/^E\d{3}$/.test(er.code || '')) + if (/^E\d{3}$/.test(er.code || '')) { throw er.code.substr(1) + ' ' + er.message - else + } else { throw er.message + } } finally { tracker.finish() } @@ -141,10 +169,11 @@ class Doctor extends BaseCommand { tracker.info('getLatestNpmVersion', 'Getting npm package information') try { const latest = (await pacote.manifest('npm@latest', this.npm.flatOptions)).version - if (semver.gte(this.npm.version, latest)) + if (semver.gte(this.npm.version, latest)) { return `current: v${this.npm.version}, latest: v${latest}` - else + } else { throw `Use npm v${latest}` + } } finally { tracker.finish() } @@ -163,26 +192,29 @@ class Doctor extends BaseCommand { let maxCurrent = '0.0.0' let maxLTS = '0.0.0' for (const { lts, version } of data) { - if (lts && semver.gt(version, maxLTS)) + if (lts && semver.gt(version, maxLTS)) { maxLTS = version + } - if (semver.satisfies(version, currentRange) && - semver.gt(version, maxCurrent)) + if (semver.satisfies(version, currentRange) && semver.gt(version, maxCurrent)) { maxCurrent = version + } } const recommended = semver.gt(maxCurrent, maxLTS) ? maxCurrent : maxLTS - if (semver.gte(process.version, recommended)) + if (semver.gte(process.version, recommended)) { return `current: ${current}, recommended: ${recommended}` - else + } else { throw `Use node ${recommended} (current: ${current})` + } } finally { tracker.finish() } } async checkFilesPermission (root, shouldOwn, mask = null) { - if (mask === null) + if (mask === null) { mask = shouldOwn ? 
R_OK | W_OK : R_OK + } let ok = true @@ -194,24 +226,25 @@ class Doctor extends BaseCommand { const files = new Set([root]) for (const f of files) { tracker.silly('checkFilesPermission', f.substr(root.length + 1)) - const st = await lstat(f) - .catch(er => { - ok = false - tracker.warn('checkFilesPermission', 'error getting info for ' + f) - }) + const st = await lstat(f).catch(er => { + ok = false + tracker.warn('checkFilesPermission', 'error getting info for ' + f) + }) tracker.completeWork(1) - if (!st) + if (!st) { continue + } if (shouldOwn && (uid !== st.uid || gid !== st.gid)) { tracker.warn('checkFilesPermission', 'should be owner of ' + f) ok = false } - if (!st.isDirectory() && !st.isFile()) + if (!st.isDirectory() && !st.isFile()) { continue + } try { await access(f, mask) @@ -223,23 +256,26 @@ class Doctor extends BaseCommand { } if (st.isDirectory()) { - const entries = await readdir(f) - .catch(er => { - ok = false - tracker.warn('checkFilesPermission', 'error reading directory ' + f) - return [] - }) - for (const entry of entries) + const entries = await readdir(f).catch(er => { + ok = false + tracker.warn('checkFilesPermission', 'error reading directory ' + f) + return [] + }) + for (const entry of entries) { files.add(resolve(f, entry)) + } } } } finally { tracker.finish() if (!ok) { - throw `Check the permissions of files in ${root}` + + throw ( + `Check the permissions of files in ${root}` + (shouldOwn ? ' (should be owned by current user)' : '') - } else + ) + } else { return '' + } } } @@ -261,27 +297,29 @@ class Doctor extends BaseCommand { tracker.info('verifyCachedFiles', 'Verifying the npm cache') try { const stats = await cacache.verify(this.npm.flatOptions.cache) - const { - badContentCount, - reclaimedCount, - missingContent, - reclaimedSize, - } = stats + const { badContentCount, reclaimedCount, missingContent, reclaimedSize } = stats if (badContentCount || reclaimedCount || missingContent) { - if (badContentCount) + if (badContentCount) { tracker.warn('verifyCachedFiles', `Corrupted content removed: ${badContentCount}`) + } - if (reclaimedCount) - tracker.warn('verifyCachedFiles', `Content garbage-collected: ${reclaimedCount} (${reclaimedSize} bytes)`) + if (reclaimedCount) { + tracker.warn( + 'verifyCachedFiles', + `Content garbage-collected: ${reclaimedCount} (${reclaimedSize} bytes)` + ) + } - if (missingContent) + if (missingContent) { tracker.warn('verifyCachedFiles', `Missing content: ${missingContent}`) + } tracker.warn('verifyCachedFiles', 'Cache issues have been fixed') } - tracker.info('verifyCachedFiles', `Verification complete. Stats: ${ - JSON.stringify(stats, null, 2) - }`) + tracker.info( + 'verifyCachedFiles', + `Verification complete. 
Stats: ${JSON.stringify(stats, null, 2)}` + ) return `verified ${stats.verifiedContent} tarballs` } finally { tracker.finish() @@ -289,10 +327,11 @@ class Doctor extends BaseCommand { } async checkNpmRegistry () { - if (this.npm.flatOptions.registry !== defaultRegistry) + if (this.npm.flatOptions.registry !== defaultRegistry) { throw `Try \`npm config set registry=${defaultRegistry}\`` - else + } else { return `using default registry (${defaultRegistry})` + } } } diff --git a/lib/commands/edit.js b/lib/commands/edit.js index db9be4a267bfa..5aa3ecaca512d 100644 --- a/lib/commands/edit.js +++ b/lib/commands/edit.js @@ -34,8 +34,9 @@ class Edit extends BaseCommand { } async exec (args) { - if (args.length !== 1) + if (args.length !== 1) { throw new Error(this.usage) + } const path = splitPackageNames(args[0]) const dir = resolve(this.npm.dir, path) @@ -43,13 +44,15 @@ class Edit extends BaseCommand { // graceful-fs does not promisify await new Promise((resolve, reject) => { fs.lstat(dir, (err) => { - if (err) + if (err) { return reject(err) + } const [bin, ...args] = this.npm.config.get('editor').split(/\s+/) const editor = spawn(bin, [...args, dir], { stdio: 'inherit' }) editor.on('exit', (code) => { - if (code) + if (code) { return reject(new Error(`editor process exited with code: ${code}`)) + } this.npm.exec('rebuild', [dir]).catch(reject).then(resolve) }) }) diff --git a/lib/commands/exec.js b/lib/commands/exec.js index 8f7f3c3e58bfd..dc914d79c5ce8 100644 --- a/lib/commands/exec.js +++ b/lib/commands/exec.js @@ -60,11 +60,13 @@ class Exec extends BaseCommand { } async exec (_args, { locationMsg, path, runPath } = {}) { - if (!path) + if (!path) { path = this.npm.localPrefix + } - if (!runPath) + if (!runPath) { runPath = process.cwd() + } const args = [..._args] const call = this.npm.config.get('call') @@ -79,8 +81,9 @@ class Exec extends BaseCommand { const packages = this.npm.config.get('package') const yes = this.npm.config.get('yes') - if (call && _args.length) + if (call && _args.length) { throw this.usage + } return libexec({ ...flatOptions, diff --git a/lib/commands/explain.js b/lib/commands/explain.js index 0ef41559f7a9e..03930c2f71777 100644 --- a/lib/commands/explain.js +++ b/lib/commands/explain.js @@ -36,8 +36,9 @@ class Explain extends ArboristWorkspaceCmd { } async exec (args) { - if (!args.length) + if (!args.length) { throw this.usageError() + } const arb = new Arborist({ path: this.npm.prefix, ...this.npm.flatOptions }) const tree = await arb.loadActual() @@ -45,9 +46,9 @@ class Explain extends ArboristWorkspaceCmd { if (this.npm.flatOptions.workspacesEnabled && this.workspaceNames && this.workspaceNames.length - ) + ) { this.filterSet = arb.workspaceDependencySet(tree, this.workspaceNames) - else if (!this.npm.flatOptions.workspacesEnabled) { + } else if (!this.npm.flatOptions.workspacesEnabled) { this.filterSet = arb.excludeWorkspacesDependencySet(tree) } @@ -58,20 +59,22 @@ class Explain extends ArboristWorkspaceCmd { const filteredOut = this.filterSet && this.filterSet.size > 0 && !this.filterSet.has(node) - if (!filteredOut) + if (!filteredOut) { nodes.add(node) + } } } - if (nodes.size === 0) + if (nodes.size === 0) { throw new Error(`No dependencies found matching ${args.join(', ')}`) + } const expls = [] for (const node of nodes) { const { extraneous, dev, optional, devOptional, peer, inBundle } = node const expl = node.explain() - if (extraneous) + if (extraneous) { expl.extraneous = true - else { + } else { expl.dev = dev expl.optional = optional 
expl.devOptional = devOptional @@ -81,9 +84,9 @@ class Explain extends ArboristWorkspaceCmd { expls.push(expl) } - if (this.npm.flatOptions.json) + if (this.npm.flatOptions.json) { this.npm.output(JSON.stringify(expls, null, 2)) - else { + } else { this.npm.output(expls.map(expl => { return explainNode(expl, Infinity, this.npm.color) }).join('\n\n')) @@ -93,21 +96,24 @@ class Explain extends ArboristWorkspaceCmd { getNodes (tree, arg) { // if it's just a name, return packages by that name const { validForOldPackages: valid } = validName(arg) - if (valid) + if (valid) { return tree.inventory.query('packageName', arg) + } // if it's a location, get that node const maybeLoc = arg.replace(/\\/g, '/').replace(/\/+$/, '') const nodeByLoc = tree.inventory.get(maybeLoc) - if (nodeByLoc) + if (nodeByLoc) { return [nodeByLoc] + } // maybe a path to a node_modules folder const maybePath = relative(this.npm.prefix, resolve(maybeLoc)) .replace(/\\/g, '/').replace(/\/+$/, '') const nodeByPath = tree.inventory.get(maybePath) - if (nodeByPath) + if (nodeByPath) { return [nodeByPath] + } // otherwise, try to select all matching nodes try { @@ -119,8 +125,9 @@ class Explain extends ArboristWorkspaceCmd { getNodesByVersion (tree, arg) { const spec = npa(arg, this.npm.prefix) - if (spec.type !== 'version' && spec.type !== 'range') + if (spec.type !== 'version' && spec.type !== 'range') { return [] + } return tree.inventory.filter(node => { return node.package.name === spec.name && diff --git a/lib/commands/explore.js b/lib/commands/explore.js index 8ff88ddf67a23..251877da08220 100644 --- a/lib/commands/explore.js +++ b/lib/commands/explore.js @@ -33,15 +33,17 @@ class Explore extends BaseCommand { } async exec (args) { - if (args.length < 1 || !args[0]) + if (args.length < 1 || !args[0]) { throw this.usage + } const pkgname = args.shift() // detect and prevent any .. 
shenanigans const path = join(this.npm.dir, join('/', pkgname)) - if (relative(path, this.npm.dir) === '') + if (relative(path, this.npm.dir) === '') { throw this.usage + } // run as if running a script named '_explore', which we set to either // the set of arguments, or the shell config, and let @npmcli/run-script @@ -58,8 +60,9 @@ class Explore extends BaseCommand { _explore: args.join(' ').trim() || shell, } - if (!args.length) + if (!args.length) { this.npm.output(`\nExploring ${path}\nType 'exit' or ^D when finished\n`) + } this.npm.log.disableProgress() try { return await runScript({ @@ -76,8 +79,9 @@ class Explore extends BaseCommand { // if it's not an exit error, or non-interactive, throw it const isProcExit = er.message === 'command failed' && (typeof er.code === 'number' || /^SIG/.test(er.signal || '')) - if (args.length || !isProcExit) + if (args.length || !isProcExit) { throw er + } }) } finally { this.npm.log.enableProgress() diff --git a/lib/commands/fund.js b/lib/commands/fund.js index fbf78051d97a9..e2a158bd4cce9 100644 --- a/lib/commands/fund.js +++ b/lib/commands/fund.js @@ -5,11 +5,7 @@ const pacote = require('pacote') const semver = require('semver') const npa = require('npm-package-arg') const { depth } = require('treeverse') -const { - readTree: getFundingInfo, - normalizeFunding, - isValidFunding, -} = require('libnpmfund') +const { readTree: getFundingInfo, normalizeFunding, isValidFunding } = require('libnpmfund') const completion = require('../utils/completion/installed-deep.js') const openUrl = require('../utils/open-url.js') @@ -33,13 +29,7 @@ class Fund extends ArboristWorkspaceCmd { /* istanbul ignore next - see test/lib/load-all-commands.js */ static get params () { - return [ - 'json', - 'browser', - 'unicode', - 'workspace', - 'which', - ] + return ['json', 'browser', 'unicode', 'workspace', 'which'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -59,11 +49,12 @@ class Fund extends ArboristWorkspaceCmd { const fundingSourceNumber = numberArg && parseInt(numberArg, 10) const badFundingSourceNumber = - numberArg !== null && - (String(fundingSourceNumber) !== numberArg || fundingSourceNumber < 1) + numberArg !== null && (String(fundingSourceNumber) !== numberArg || fundingSourceNumber < 1) if (badFundingSourceNumber) { - const err = new Error('`npm fund [<@scope>/] [--which=fundingSourceNumber]` must be given a positive integer') + const err = new Error( + '`npm fund [<@scope>/] [--which=fundingSourceNumber]` must be given a positive integer' + ) err.code = 'EFUNDNUMBER' throw err } @@ -95,10 +86,11 @@ class Fund extends ArboristWorkspaceCmd { workspaces: this.workspaceNames, }) - if (this.npm.config.get('json')) + if (this.npm.config.get('json')) { this.npm.output(this.printJSON(fundingInfo)) - else + } else { this.npm.output(this.printHuman(fundingInfo)) + } } printJSON (fundingInfo) { @@ -110,8 +102,7 @@ class Fund extends ArboristWorkspaceCmd { const unicode = this.npm.config.get('unicode') const seenUrls = new Map() - const tree = obj => - archy(obj, '', { unicode }) + const tree = obj => archy(obj, '', { unicode }) const result = depth({ tree: fundingInfo, @@ -119,9 +110,7 @@ class Fund extends ArboristWorkspaceCmd { // composes human readable package name // and creates a new archy item for readable output visit: ({ name, version, funding }) => { - const [fundingSource] = [] - .concat(normalizeFunding(funding)) - .filter(isValidFunding) + const [fundingSource] = [].concat(normalizeFunding(funding)).filter(isValidFunding) const { 
url } = fundingSource || {} const pkgRef = getPrintableName({ name, version }) let item = { @@ -139,8 +128,9 @@ class Fund extends ArboristWorkspaceCmd { item = seenUrls.get(url) item.label += `, ${pkgRef}` return null - } else + } else { seenUrls.set(url, item) + } } return item @@ -149,20 +139,20 @@ class Fund extends ArboristWorkspaceCmd { // puts child nodes back into returned archy // output while also filtering out missing items leave: (item, children) => { - if (item) + if (item) { item.nodes = children.filter(Boolean) + } return item }, // turns tree-like object return by libnpmfund // into children to be properly read by treeverse - getChildren: (node) => - Object.keys(node.dependencies || {}) - .map(key => ({ - name: key, - ...node.dependencies[key], - })), + getChildren: node => + Object.keys(node.dependencies || {}).map(key => ({ + name: key, + ...node.dependencies[key], + })), }) const res = tree(result) @@ -179,8 +169,9 @@ class Fund extends ArboristWorkspaceCmd { } else { // matches any file path within current arborist inventory for (const item of tree.inventory.values()) { - if (item.path === arg.fetchSpec) + if (item.path === arg.fetchSpec) { return item.package + } } } } else { @@ -190,17 +181,17 @@ class Fund extends ArboristWorkspaceCmd { .filter(i => semver.valid(i.package.version)) .sort((a, b) => semver.rcompare(a.package.version, b.package.version)) - if (item) + if (item) { return item.package + } } } - const { funding } = retrievePackageMetadata() || - await pacote.manifest(arg, this.npm.flatOptions).catch(() => ({})) + const { funding } = + retrievePackageMetadata() || + (await pacote.manifest(arg, this.npm.flatOptions).catch(() => ({}))) - const validSources = [] - .concat(normalizeFunding(funding)) - .filter(isValidFunding) + const validSources = [].concat(normalizeFunding(funding)).filter(isValidFunding) const matchesValidSource = validSources.length === 1 || @@ -218,7 +209,10 @@ class Fund extends ArboristWorkspaceCmd { const msg = `${typePrefix} available at the following URL` this.npm.output(`${i + 1}: ${msg}: ${url}`) }) - this.npm.output('Run `npm fund [<@scope>/] --which=1`, for example, to open the first funding URL listed in that package') + this.npm.output( + /* eslint-disable-next-line max-len */ + 'Run `npm fund [<@scope>/] --which=1`, for example, to open the first funding URL listed in that package' + ) } else { const noFundingError = new Error(`No valid funding method available for: ${spec}`) noFundingError.code = 'ENOFUND' diff --git a/lib/commands/help-search.js b/lib/commands/help-search.js index 4ab4a0896af44..6c1536e0932c0 100644 --- a/lib/commands/help-search.js +++ b/lib/commands/help-search.js @@ -27,18 +27,20 @@ class HelpSearch extends BaseCommand { } async exec (args) { - if (!args.length) + if (!args.length) { return this.npm.output(this.usage) + } const docPath = path.resolve(__dirname, '..', '..', 'docs/content') const files = await glob(`${docPath}/*/*.md`) const data = await this.readFiles(files) const results = await this.searchFiles(args, data, files) const formatted = this.formatResults(args, results) - if (!formatted.trim()) + if (!formatted.trim()) { this.npm.output(`No matches in help for: ${args.join(' ')}\n`) - else + } else { this.npm.output(formatted) + } } async readFiles (files) { @@ -55,8 +57,9 @@ class HelpSearch extends BaseCommand { for (const [file, content] of Object.entries(data)) { const lowerCase = content.toLowerCase() // skip if no matches at all - if (!args.some(a => lowerCase.includes(a.toLowerCase()))) + 
if (!args.some(a => lowerCase.includes(a.toLowerCase()))) { continue + } const lines = content.split(/\n+/) @@ -90,17 +93,20 @@ class HelpSearch extends BaseCommand { // now squish any string of nulls into a single null const pruned = lines.reduce((l, r) => { - if (!(r === null && l[l.length - 1] === null)) + if (!(r === null && l[l.length - 1] === null)) { l.push(r) + } return l }, []) - if (pruned[pruned.length - 1] === null) + if (pruned[pruned.length - 1] === null) { pruned.pop() + } - if (pruned[0] === null) + if (pruned[0] === null) { pruned.shift() + } // now count how many args were found const found = {} @@ -157,15 +163,17 @@ class HelpSearch extends BaseCommand { out.push(' '.repeat((Math.max(1, cols - out.join(' ').length - r.length - 1)))) out.push(r) - if (!this.npm.config.get('long')) + if (!this.npm.config.get('long')) { return out.join('') + } out.unshift('\n\n') out.push('\n') out.push('-'.repeat(cols - 1) + '\n') res.lines.forEach((line, i) => { - if (line === null || i > 3) + if (line === null || i > 3) { return + } if (!this.npm.color) { out.push(line + '\n') diff --git a/lib/commands/help.js b/lib/commands/help.js index bfc7f8b60e5b3..0de047615711a 100644 --- a/lib/commands/help.js +++ b/lib/commands/help.js @@ -34,8 +34,9 @@ class Help extends BaseCommand { } async completion (opts) { - if (opts.conf.argv.remain.length > 2) + if (opts.conf.argv.remain.length > 2) { return [] + } const g = path.resolve(__dirname, '../../man/man[0-9]/*.[0-9]') const files = await glob(g) @@ -51,15 +52,18 @@ class Help extends BaseCommand { // By default we search all of our man subdirectories, but if the user has // asked for a specific one we limit the search to just there let manSearch = 'man*' - if (/^\d+$/.test(args[0])) + if (/^\d+$/.test(args[0])) { manSearch = `man${args.shift()}` + } - if (!args.length) + if (!args.length) { return this.npm.output(await this.npm.usage) + } // npm help foo bar baz: search topics - if (args.length > 1) + if (args.length > 1) { return this.helpSearch(args) + } let section = this.npm.deref(args[0]) || args[0] @@ -76,17 +80,19 @@ class Help extends BaseCommand { const bManNumber = b.match(manNumberRegex)[1] // man number sort first so that 1 aka commands are preferred - if (aManNumber !== bManNumber) + if (aManNumber !== bManNumber) { return aManNumber - bManNumber + } return localeCompare(a, b) }) const man = mans[0] - if (man) + if (man) { await this.viewMan(man) - else + } else { return this.helpSearch(args) + } } helpSearch (args) { @@ -125,8 +131,9 @@ class Help extends BaseCommand { const proc = spawn(bin, args, opts) return new Promise((resolve, reject) => { proc.on('exit', (code) => { - if (code) + if (code) { return reject(new Error(`help process exited with code: ${code}`)) + } return resolve() }) diff --git a/lib/commands/hook.js b/lib/commands/hook.js index 96b6d96264ad3..78ce9834d5fe2 100644 --- a/lib/commands/hook.js +++ b/lib/commands/hook.js @@ -50,9 +50,9 @@ class Hook extends BaseCommand { async add (pkg, uri, secret, opts) { const hook = await hookApi.add(pkg, uri, secret, opts) - if (opts.json) + if (opts.json) { this.npm.output(JSON.stringify(hook, null, 2)) - else if (opts.parseable) { + } else if (opts.parseable) { this.npm.output(Object.keys(hook).join('\t')) this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!opts.silent && opts.loglevel !== 'silent') { @@ -64,20 +64,21 @@ class Hook extends BaseCommand { async ls (pkg, opts) { const hooks = await hookApi.ls({ ...opts, package: pkg }) - if (opts.json) 
+ if (opts.json) { this.npm.output(JSON.stringify(hooks, null, 2)) - else if (opts.parseable) { + } else if (opts.parseable) { this.npm.output(Object.keys(hooks[0]).join('\t')) hooks.forEach(hook => { this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) }) - } else if (!hooks.length) + } else if (!hooks.length) { this.npm.output("You don't have any hooks configured yet.") - else if (!opts.silent && opts.loglevel !== 'silent') { - if (hooks.length === 1) + } else if (!opts.silent && opts.loglevel !== 'silent') { + if (hooks.length === 1) { this.npm.output('You have one hook configured.') - else + } else { this.npm.output(`You have ${hooks.length} hooks configured.`) + } const table = new Table({ head: ['id', 'target', 'endpoint'] }) hooks.forEach((hook) => { @@ -94,8 +95,9 @@ class Hook extends BaseCommand { }, hook.response_code, ]) - } else + } else { table.push([{ colSpan: 2, content: 'never triggered' }]) + } }) this.npm.output(table.toString()) } @@ -103,9 +105,9 @@ class Hook extends BaseCommand { async rm (id, opts) { const hook = await hookApi.rm(id, opts) - if (opts.json) + if (opts.json) { this.npm.output(JSON.stringify(hook, null, 2)) - else if (opts.parseable) { + } else if (opts.parseable) { this.npm.output(Object.keys(hook).join('\t')) this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!opts.silent && opts.loglevel !== 'silent') { @@ -117,9 +119,9 @@ class Hook extends BaseCommand { async update (id, uri, secret, opts) { const hook = await hookApi.update(id, uri, secret, opts) - if (opts.json) + if (opts.json) { this.npm.output(JSON.stringify(hook, null, 2)) - else if (opts.parseable) { + } else if (opts.parseable) { this.npm.output(Object.keys(hook).join('\t')) this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!opts.silent && opts.loglevel !== 'silent') { @@ -131,10 +133,12 @@ class Hook extends BaseCommand { hookName (hook) { let target = hook.name - if (hook.type === 'scope') + if (hook.type === 'scope') { target = '@' + target - if (hook.type === 'owner') + } + if (hook.type === 'owner') { target = '~' + target + } return target } } diff --git a/lib/commands/init.js b/lib/commands/init.js index b88b38436e910..1eb6fbf24a52b 100644 --- a/lib/commands/init.js +++ b/lib/commands/init.js @@ -38,8 +38,9 @@ class Init extends BaseCommand { async exec (args) { // npm exec style - if (args.length) + if (args.length) { return (await this.execCreate({ args, path: process.cwd() })) + } // no args, uses classic init-package-json boilerplate await this.template() @@ -47,8 +48,9 @@ class Init extends BaseCommand { async execWorkspaces (args, filters) { // if the root package is uninitiated, take care of it first - if (this.npm.flatOptions.includeWorkspaceRoot) + if (this.npm.flatOptions.includeWorkspaceRoot) { await this.exec(args) + } // reads package.json for the top-level folder first, by doing this we // ensure the command throw if no package.json is found before trying @@ -80,9 +82,9 @@ class Init extends BaseCommand { const [initerName, ...otherArgs] = args let packageName = initerName - if (/^@[^/]+$/.test(initerName)) + if (/^@[^/]+$/.test(initerName)) { packageName = initerName + '/create' - else { + } else { const req = npa(initerName) if (req.type === 'git' && req.hosted) { const { user, project } = req.hosted @@ -90,8 +92,9 @@ class Init extends BaseCommand { .replace(user + '/' + project, user + '/create-' + project) } else if (req.registry) { packageName = req.name.replace(/^(@[^/]+\/)?/, '$1create-') - if 
(req.rawSpec) + if (req.rawSpec) { packageName += '@' + req.rawSpec + } } else { throw Object.assign(new Error( 'Unrecognized initializer: ' + initerName + @@ -166,9 +169,9 @@ class Init extends BaseCommand { this.npm.log.warn('init', 'canceled') return res() } - if (er) + if (er) { rej(er) - else { + } else { this.npm.log.info('init', 'written successfully') res(data) } @@ -181,8 +184,9 @@ class Init extends BaseCommand { // skip setting workspace if current package.json glob already satisfies it for (const wPath of workspaces.values()) { - if (wPath === workspacePath) + if (wPath === workspacePath) { return + } } // if a create-pkg didn't generate a package.json at the workspace diff --git a/lib/commands/install.js b/lib/commands/install.js index ea3bbcee3fca0..95b5a5bac1d38 100644 --- a/lib/commands/install.js +++ b/lib/commands/install.js @@ -1,5 +1,4 @@ /* eslint-disable camelcase */ -/* eslint-disable standard/no-callback-literal */ const fs = require('fs') const util = require('util') const readdir = util.promisify(fs.readdir) @@ -78,10 +77,11 @@ class Install extends ArboristWorkspaceCmd { const partialName = partialWord.slice(lastSlashIdx + 1) const partialPath = partialWord.slice(0, lastSlashIdx) || '/' - const annotatePackageDirMatch = async (sibling) => { + const annotatePackageDirMatch = async sibling => { const fullPath = join(partialPath, sibling) - if (sibling.slice(0, partialName.length) !== partialName) - return null // not name match + if (sibling.slice(0, partialName.length) !== partialName) { + return null + } // not name match try { const contents = await readdir(fullPath) @@ -136,10 +136,15 @@ class Install extends ArboristWorkspaceCmd { try { checks.checkEngine(npmManifest, npmManifest.version, process.version) } catch (e) { - if (forced) - this.npm.log.warn('install', `Forcing global npm install with incompatible version ${npmManifest.version} into node ${process.version}`) - else + if (forced) { + this.npm.log.warn( + 'install', + /* eslint-disable-next-line max-len */ + `Forcing global npm install with incompatible version ${npmManifest.version} into node ${process.version}` + ) + } else { throw e + } } } @@ -147,12 +152,17 @@ class Install extends ArboristWorkspaceCmd { args = args.filter(a => resolve(a) !== this.npm.prefix) // `npm i -g` => "install this package globally" - if (where === globalTop && !args.length) + if (where === globalTop && !args.length) { args = ['.'] + } // TODO: Add warnings for other deprecated flags? or remove this one? - if (isDev) - log.warn('install', 'Usage of the `--dev` option is deprecated. Use `--include=dev` instead.') + if (isDev) { + log.warn( + 'install', + 'Usage of the `--dev` option is deprecated. Use `--include=dev` instead.' 
+ ) + } const opts = { ...this.npm.flatOptions, diff --git a/lib/commands/link.js b/lib/commands/link.js index 4a800d7c60242..b4f4a1289ddb1 100644 --- a/lib/commands/link.js +++ b/lib/commands/link.js @@ -169,8 +169,9 @@ class Link extends ArboristWorkspaceCmd { // Returns a list of items that can't be fulfilled by // things found in the current arborist inventory missingArgsFromTree (tree, args) { - if (tree.isLink) + if (tree.isLink) { return this.missingArgsFromTree(tree.target, args) + } const foundNodes = [] const missing = args.filter(a => { @@ -193,8 +194,9 @@ class Link extends ArboristWorkspaceCmd { // remote nodes from the loaded tree in order // to avoid dropping them later when reifying - for (const node of foundNodes) + for (const node of foundNodes) { node.parent = null + } return missing } diff --git a/lib/commands/logout.js b/lib/commands/logout.js index 3c0bdc756508c..43a04bb577cd8 100644 --- a/lib/commands/logout.js +++ b/lib/commands/logout.js @@ -37,15 +37,16 @@ class Logout extends BaseCommand { method: 'DELETE', ignoreBody: true, }) - } else if (auth.isBasicAuth) + } else if (auth.isBasicAuth) { log.verbose('logout', `clearing user credentials for ${reg}`) - else { + } else { const msg = `not logged in to ${reg}, so can't log out!` throw Object.assign(new Error(msg), { code: 'ENEEDAUTH' }) } - if (scope) + if (scope) { this.npm.config.delete(regRef, 'user') + } this.npm.config.clearCredentialsByURI(reg) diff --git a/lib/commands/ls.js b/lib/commands/ls.js index af7d44ab41800..b8c1f0ef9c221 100644 --- a/lib/commands/ls.js +++ b/lib/commands/ls.js @@ -88,23 +88,26 @@ class LS extends ArboristWorkspaceCmd { legacyPeerDeps: false, path, }) - const tree = await this.initTree({arb, args, packageLockOnly }) + const tree = await this.initTree({ arb, args, packageLockOnly }) // filters by workspaces nodes when using -w // We only have to filter the first layer of edges, so we don't // explore anything that isn't part of the selected workspace set. 
let wsNodes - if (this.workspaceNames && this.workspaceNames.length) + if (this.workspaceNames && this.workspaceNames.length) { wsNodes = arb.workspaceNodes(tree, this.workspaceNames) + } const filterBySelectedWorkspaces = edge => { if (!workspacesEnabled && edge.from.isProjectRoot && edge.to.isWorkspace - ) + ) { return false + } - if (!wsNodes || !wsNodes.length) + if (!wsNodes || !wsNodes.length) { return true + } if (edge.from.isProjectRoot) { return edge.to && @@ -176,8 +179,9 @@ class LS extends ArboristWorkspaceCmd { // loop through list of node problems to add them to global list if (node[_include]) { - for (const problem of node[_problems]) + for (const problem of node[_problems]) { problems.add(problem) + } } seenItems.add(item) @@ -200,8 +204,9 @@ class LS extends ArboristWorkspaceCmd { ) // if filtering items, should exit with error code on no results - if (result && !result[_include] && args.length) + if (result && !result[_include] && args.length) { process.exitCode = 1 + } if (rootError) { throw Object.assign( @@ -237,8 +242,9 @@ class LS extends ArboristWorkspaceCmd { module.exports = LS const isGitNode = (node) => { - if (!node.resolved) + if (!node.resolved) { return + } try { const { type } = npa(node.resolved) @@ -257,14 +263,17 @@ const isExtraneous = (node, { global }) => const getProblems = (node, { global }) => { const problems = new Set() - if (node[_missing] && !isOptional(node)) + if (node[_missing] && !isOptional(node)) { problems.add(`missing: ${node.pkgid}, required by ${node[_missing]}`) + } - if (node[_invalid]) + if (node[_invalid]) { problems.add(`invalid: ${node.pkgid} ${node.path}`) + } - if (isExtraneous(node, { global })) + if (isExtraneous(node, { global })) { problems.add(`extraneous: ${node.pkgid} ${node.path}`) + } return problems } @@ -297,10 +306,11 @@ const getHumanOutputItem = (node, { args, color, global, long }) => { // special formatting for top-level package name if (node.isRoot) { const hasNoPackageJson = !Object.keys(node.package).length - if (hasNoPackageJson || global) + if (hasNoPackageJson || global) { printable = path - else + } else { printable += `${long ? 
EOL : ' '}${path}` + } } const highlightDepName = @@ -347,19 +357,22 @@ const getHumanOutputItem = (node, { args, color, global, long }) => { const getJsonOutputItem = (node, { global, long }) => { const item = {} - if (node.version) + if (node.version) { item.version = node.version + } - if (node.resolved) + if (node.resolved) { item.resolved = node.resolved + } item[_name] = node.name // special formatting for top-level package name const hasPackageJson = node && node.package && Object.keys(node.package).length - if (node.isRoot && hasPackageJson) + if (node.isRoot && hasPackageJson) { item.name = node.package.name || node.name + } if (long && !node[_missing]) { item.name = item[_name] @@ -376,18 +389,21 @@ const getJsonOutputItem = (node, { global, long }) => { } // augment json output items with extra metadata - if (isExtraneous(node, { global })) + if (isExtraneous(node, { global })) { item.extraneous = true + } - if (node[_invalid]) + if (node[_invalid]) { item.invalid = node[_invalid] + } if (node[_missing] && !isOptional(node)) { item.required = node[_required] item.missing = true } - if (node[_include] && node[_problems] && node[_problems].size) + if (node[_include] && node[_problems] && node[_problems].size) { item.problems = [...node[_problems]] + } return augmentItemWithIncludeMetadata(node, item) } @@ -436,8 +452,9 @@ const mapEdgesToNodes = ({ seenPaths }) => (edge) => { // item would appear twice given that it's a children of an extraneous item, // so it's marked extraneous but it will ALSO show up in edgesOuts of // its parent so it ends up as two diff nodes if we don't track it - if (node.path) + if (node.path) { seenPaths.add(node.path) + } node[_required] = edge.spec || '*' node[_type] = edge.type @@ -515,20 +532,23 @@ const humanOutput = ({ color, result, seenItems, unicode }) => { // so that all its ancestors should be displayed) // here is where we put items in their expected place for archy output for (const item of seenItems) { - if (item[_include] && item[_parent]) + if (item[_include] && item[_parent]) { item[_parent].nodes.push(item) + } } - if (!result.nodes.length) + if (!result.nodes.length) { result.nodes = ['(empty)'] + } const archyOutput = archy(result, '', { unicode }) return color ? 
chalk.reset(archyOutput) : archyOutput } const jsonOutput = ({ path, problems, result, rootError, seenItems }) => { - if (problems.size) + if (problems.size) { result.problems = [...problems] + } if (rootError) { result.problems = [ @@ -546,8 +566,9 @@ const jsonOutput = ({ path, problems, result, rootError, seenItems }) => { // append current item to its parent item.dependencies obj in order // to provide a json object structure that represents the installed tree if (item[_include] && item[_parent]) { - if (!item[_parent].dependencies) + if (!item[_parent].dependencies) { item[_parent].dependencies = {} + } item[_parent].dependencies[item[_name]] = item } diff --git a/lib/commands/org.js b/lib/commands/org.js index 6d0b8cd505758..18f73cb59f017 100644 --- a/lib/commands/org.js +++ b/lib/commands/org.js @@ -24,18 +24,14 @@ class Org extends BaseCommand { /* istanbul ignore next - see test/lib/load-all-commands.js */ static get params () { - return [ - 'registry', - 'otp', - 'json', - 'parseable', - ] + return ['registry', 'otp', 'json', 'parseable'] } async completion (opts) { const argv = opts.conf.argv.remain - if (argv.length === 2) + if (argv.length === 2) { return ['set', 'rm', 'ls'] + } switch (argv[2]) { case 'ls': @@ -66,85 +62,109 @@ class Org extends BaseCommand { set (org, user, role, opts) { role = role || 'developer' - if (!org) + if (!org) { throw new Error('First argument `orgname` is required.') + } - if (!user) + if (!user) { throw new Error('Second argument `username` is required.') + } - if (!['owner', 'admin', 'developer'].find(x => x === role)) - throw new Error('Third argument `role` must be one of `owner`, `admin`, or `developer`, with `developer` being the default value if omitted.') + if (!['owner', 'admin', 'developer'].find(x => x === role)) { + throw new Error( + /* eslint-disable-next-line max-len */ + 'Third argument `role` must be one of `owner`, `admin`, or `developer`, with `developer` being the default value if omitted.' + ) + } return liborg.set(org, user, role, opts).then(memDeets => { - if (opts.json) + if (opts.json) { this.npm.output(JSON.stringify(memDeets, null, 2)) - else if (opts.parseable) { + } else if (opts.parseable) { this.npm.output(['org', 'orgsize', 'user', 'role'].join('\t')) - this.npm.output([ - memDeets.org.name, - memDeets.org.size, - memDeets.user, - memDeets.role, - ].join('\t')) - } else if (!opts.silent && opts.loglevel !== 'silent') - this.npm.output(`Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${memDeets.org.size} member${memDeets.org.size === 1 ? '' : 's'} in this org.`) + this.npm.output( + [memDeets.org.name, memDeets.org.size, memDeets.user, memDeets.role].join('\t') + ) + } else if (!opts.silent && opts.loglevel !== 'silent') { + this.npm.output( + `Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${ + memDeets.org.size + } member${memDeets.org.size === 1 ? 
'' : 's'} in this org.` + ) + } return memDeets }) } rm (org, user, opts) { - if (!org) + if (!org) { throw new Error('First argument `orgname` is required.') + } - if (!user) + if (!user) { throw new Error('Second argument `username` is required.') + } - return liborg.rm(org, user, opts).then(() => { - return liborg.ls(org, opts) - }).then(roster => { - user = user.replace(/^[~@]?/, '') - org = org.replace(/^[~@]?/, '') - const userCount = Object.keys(roster).length - if (opts.json) { - this.npm.output(JSON.stringify({ - user, - org, - userCount, - deleted: true, - })) - } else if (opts.parseable) { - this.npm.output(['user', 'org', 'userCount', 'deleted'].join('\t')) - this.npm.output([user, org, userCount, true].join('\t')) - } else if (!opts.silent && opts.loglevel !== 'silent') - this.npm.output(`Successfully removed ${user} from ${org}. You now have ${userCount} member${userCount === 1 ? '' : 's'} in this org.`) - }) + return liborg + .rm(org, user, opts) + .then(() => { + return liborg.ls(org, opts) + }) + .then(roster => { + user = user.replace(/^[~@]?/, '') + org = org.replace(/^[~@]?/, '') + const userCount = Object.keys(roster).length + if (opts.json) { + this.npm.output( + JSON.stringify({ + user, + org, + userCount, + deleted: true, + }) + ) + } else if (opts.parseable) { + this.npm.output(['user', 'org', 'userCount', 'deleted'].join('\t')) + this.npm.output([user, org, userCount, true].join('\t')) + } else if (!opts.silent && opts.loglevel !== 'silent') { + this.npm.output( + `Successfully removed ${user} from ${org}. You now have ${userCount} member${ + userCount === 1 ? '' : 's' + } in this org.` + ) + } + }) } ls (org, user, opts) { - if (!org) + if (!org) { throw new Error('First argument `orgname` is required.') + } return liborg.ls(org, opts).then(roster => { if (user) { const newRoster = {} - if (roster[user]) + if (roster[user]) { newRoster[user] = roster[user] + } roster = newRoster } - if (opts.json) + if (opts.json) { this.npm.output(JSON.stringify(roster, null, 2)) - else if (opts.parseable) { + } else if (opts.parseable) { this.npm.output(['user', 'role'].join('\t')) Object.keys(roster).forEach(user => { this.npm.output([user, roster[user]].join('\t')) }) } else if (!opts.silent && opts.loglevel !== 'silent') { const table = new Table({ head: ['user', 'role'] }) - Object.keys(roster).sort().forEach(user => { - table.push([user, roster[user]]) - }) + Object.keys(roster) + .sort() + .forEach(user => { + table.push([user, roster[user]]) + }) this.npm.output(table.toString()) } }) diff --git a/lib/commands/outdated.js b/lib/commands/outdated.js index 119316d3b4890..01047b4d37018 100644 --- a/lib/commands/outdated.js +++ b/lib/commands/outdated.js @@ -91,19 +91,21 @@ class Outdated extends ArboristWorkspaceCmd { // sorts list alphabetically const outdated = this.list.sort((a, b) => localeCompare(a.name, b.name)) - if (outdated.length > 0) + if (outdated.length > 0) { process.exitCode = 1 + } // return if no outdated packages - if (outdated.length === 0 && !this.npm.config.get('json')) + if (outdated.length === 0 && !this.npm.config.get('json')) { return + } // display results - if (this.npm.config.get('json')) + if (this.npm.config.get('json')) { this.npm.output(this.makeJSON(outdated)) - else if (this.npm.config.get('parseable')) + } else if (this.npm.config.get('parseable')) { this.npm.output(this.makeParseable(outdated)) - else { + } else { const outList = outdated.map(x => this.makePretty(x)) const outHead = ['Package', 'Current', @@ -113,12 +115,14 @@ class 
Outdated extends ArboristWorkspaceCmd { 'Depended by', ] - if (this.npm.config.get('long')) + if (this.npm.config.get('long')) { outHead.push('Package Type', 'Homepage') + } const outTable = [outHead].concat(outList) - if (this.npm.color) + if (this.npm.color) { outTable[0] = outTable[0].map(heading => styles.underline(heading)) + } const tableOpts = { align: ['l', 'r', 'r', 'r', 'l'], @@ -145,18 +149,21 @@ class Outdated extends ArboristWorkspaceCmd { } getEdgesIn (node) { - for (const edge of node.edgesIn) + for (const edge of node.edgesIn) { this.trackEdge(edge) + } } getEdgesOut (node) { // TODO: normalize usage of edges and avoid looping through nodes here if (this.npm.config.get('global')) { - for (const child of node.children.values()) + for (const child of node.children.values()) { this.trackEdge(child) + } } else { - for (const edge of node.edgesOut.values()) + for (const edge of node.edgesOut.values()) { this.trackEdge(edge) + } } } @@ -167,15 +174,17 @@ class Outdated extends ArboristWorkspaceCmd { && this.filterSet.size > 0 && !this.filterSet.has(edge.from.target) - if (filteredOut) + if (filteredOut) { return + } this.edges.add(edge) } getWorkspacesEdges (node) { - if (this.npm.config.get('global')) + if (this.npm.config.get('global')) { return + } for (const edge of this.tree.edgesOut.values()) { const workspace = edge @@ -183,8 +192,9 @@ class Outdated extends ArboristWorkspaceCmd { && edge.to.target && edge.to.target.isWorkspace - if (workspace) + if (workspace) { this.getEdgesOut(edge.to.target) + } } } @@ -209,22 +219,25 @@ class Outdated extends ArboristWorkspaceCmd { : 'dependencies' for (const omitType of this.npm.config.get('omit')) { - if (node[omitType]) + if (node[omitType]) { return + } } // deps different from prod not currently // on disk are not included in the output - if (edge.error === 'MISSING' && type !== 'dependencies') + if (edge.error === 'MISSING' && type !== 'dependencies') { return + } try { const packument = await this.getPackument(spec) const expected = edge.spec // if it's not a range, version, or tag, skip it try { - if (!npa(`${edge.name}@${edge.spec}`).registry) + if (!npa(`${edge.name}@${edge.spec}`).registry) { return null + } } catch (err) { return null } @@ -259,14 +272,16 @@ class Outdated extends ArboristWorkspaceCmd { err.code === 'ETARGET' || err.code === 'E403' || err.code === 'E404') - ) + ) { throw err + } } } maybeWorkspaceName (node) { - if (!node.isWorkspace) + if (!node.isWorkspace) { return node.name + } const humanOutput = !this.npm.config.get('json') && !this.npm.config.get('parseable') @@ -331,8 +346,9 @@ class Outdated extends ArboristWorkspaceCmd { name + '@' + latest, dependent, ] - if (this.npm.config.get('long')) + if (this.npm.config.get('long')) { out.push(type, homepage) + } return out.join(':') }).join(os.EOL) diff --git a/lib/commands/owner.js b/lib/commands/owner.js index 5d28e2b750cf5..4d1fe9b9f8d0e 100644 --- a/lib/commands/owner.js +++ b/lib/commands/owner.js @@ -36,30 +36,36 @@ class Owner extends BaseCommand { async completion (opts) { const argv = opts.conf.argv.remain - if (argv.length > 3) + if (argv.length > 3) { return [] + } - if (argv[1] !== 'owner') + if (argv[1] !== 'owner') { argv.unshift('owner') + } - if (argv.length === 2) + if (argv.length === 2) { return ['add', 'rm', 'ls'] + } // reaches registry in order to autocomplete rm if (argv[2] === 'rm') { - if (this.npm.config.get('global')) + if (this.npm.config.get('global')) { return [] + } const pkgName = await readLocalPkgName(this.npm.prefix) - 
if (!pkgName) + if (!pkgName) { return [] + } const spec = npa(pkgName) const data = await pacote.packument(spec, { ...this.npm.flatOptions, fullMetadata: true, }) - if (data && data.maintainers && data.maintainers.length) + if (data && data.maintainers && data.maintainers.length) { return data.maintainers.map(m => m.name) + } } return [] } @@ -82,12 +88,14 @@ class Owner extends BaseCommand { async ls (pkg, opts) { if (!pkg) { - if (this.npm.config.get('global')) + if (this.npm.config.get('global')) { throw this.usageError() + } const pkgName = await readLocalPkgName(this.npm.prefix) - if (!pkgName) + if (!pkgName) { throw this.usageError() + } pkg = pkgName } @@ -97,10 +105,11 @@ class Owner extends BaseCommand { try { const packumentOpts = { ...opts, fullMetadata: true } const { maintainers } = await pacote.packument(spec, packumentOpts) - if (!maintainers || !maintainers.length) + if (!maintainers || !maintainers.length) { this.npm.output('no admin found') - else + } else { this.npm.output(maintainers.map(o => `${o.name} <${o.email}>`).join('\n')) + } return maintainers } catch (err) { @@ -110,15 +119,18 @@ class Owner extends BaseCommand { } async add (user, pkg, opts) { - if (!user) + if (!user) { throw this.usageError() + } if (!pkg) { - if (this.npm.config.get('global')) + if (this.npm.config.get('global')) { throw this.usageError() + } const pkgName = await readLocalPkgName(this.npm.prefix) - if (!pkgName) + if (!pkgName) { throw this.usageError() + } pkg = pkgName } @@ -130,15 +142,18 @@ class Owner extends BaseCommand { } async rm (user, pkg, opts) { - if (!user) + if (!user) { throw this.usageError() + } if (!pkg) { - if (this.npm.config.get('global')) + if (this.npm.config.get('global')) { throw this.usageError() + } const pkgName = await readLocalPkgName(this.npm.prefix) - if (!pkgName) + if (!pkgName) { throw this.usageError() + } pkg = pkgName } @@ -178,8 +193,9 @@ class Owner extends BaseCommand { const before = data.maintainers ? 
data.maintainers.length : 0 const m = validation(u, data.maintainers) - if (!m) - return // invalid owners + if (!m) { + return + } // invalid owners const body = { _id: data._id, @@ -197,10 +213,11 @@ class Owner extends BaseCommand { }) if (!res.error) { - if (m.length < before) + if (m.length < before) { this.npm.output(`- ${user} (${spec.name})`) - else + } else { this.npm.output(`+ ${user} (${spec.name})`) + } } else { throw Object.assign( new Error('Failed to update package: ' + JSON.stringify(res)), diff --git a/lib/commands/pack.js b/lib/commands/pack.js index 013e88b44a25d..27682878cb613 100644 --- a/lib/commands/pack.js +++ b/lib/commands/pack.js @@ -40,8 +40,9 @@ class Pack extends BaseCommand { } async exec (args) { - if (args.length === 0) + if (args.length === 0) { args = ['.'] + } const unicode = this.npm.config.get('unicode') const dryRun = this.npm.config.get('dry-run') @@ -53,8 +54,9 @@ class Pack extends BaseCommand { for (const arg of args) { const spec = npa(arg) const manifest = await pacote.manifest(spec, this.npm.flatOptions) - if (!manifest._id) + if (!manifest._id) { throw new Error('Invalid package, must have name and version') + } const filename = `${manifest.name}-${manifest.version}.tgz` .replace(/^@/, '').replace(/\//, '-') @@ -69,8 +71,9 @@ class Pack extends BaseCommand { const pkgContents = await getContents(manifest, tarballData) const tarballFilename = path.resolve(this.npm.config.get('pack-destination'), filename) - if (!dryRun) + if (!dryRun) { await writeFile(tarballFilename, tarballData) + } tarballs.push(pkgContents) } diff --git a/lib/commands/ping.js b/lib/commands/ping.js index d8ad1dc2a281d..0025573d6016f 100644 --- a/lib/commands/ping.js +++ b/lib/commands/ping.js @@ -30,8 +30,9 @@ class Ping extends BaseCommand { time, details, }, null, 2)) - } else if (Object.keys(details).length) + } else if (Object.keys(details).length) { log.notice('PONG', `${JSON.stringify(details, null, 2)}`) + } } } module.exports = Ping diff --git a/lib/commands/pkg.js b/lib/commands/pkg.js index c9b8f7d40d047..20166a8ef0777 100644 --- a/lib/commands/pkg.js +++ b/lib/commands/pkg.js @@ -32,10 +32,11 @@ class Pkg extends BaseCommand { } async exec (args, { prefix } = {}) { - if (!prefix) + if (!prefix) { this.prefix = this.npm.localPrefix - else + } else { this.prefix = prefix + } if (this.npm.config.get('global')) { throw Object.assign( @@ -81,15 +82,17 @@ class Pkg extends BaseCommand { // in case there's only a single result from the query // just prints that one element to stdout - if (Object.keys(result).length === 1) + if (Object.keys(result).length === 1) { result = result[args] + } } // only outputs if not running with workspaces config, // in case you're retrieving info for workspaces the pkgWorkspaces // will handle the output to make sure it get keyed by ws name - if (!this.workspaces) + if (!this.workspaces) { this.npm.output(JSON.stringify(result, null, 2)) + } return result } @@ -101,8 +104,9 @@ class Pkg extends BaseCommand { { code: 'EPKGSET' } ) - if (!args.length) + if (!args.length) { throw setError() + } const force = this.npm.config.get('force') const json = this.npm.config.get('json') @@ -111,8 +115,9 @@ class Pkg extends BaseCommand { for (const arg of args) { const [key, ...rest] = arg.split('=') const value = rest.join('=') - if (!key || !value) + if (!key || !value) { throw setError() + } q.set(key, json ? 
JSON.parse(value) : value, { force }) } @@ -128,14 +133,16 @@ class Pkg extends BaseCommand { { code: 'EPKGDELETE' } ) - if (!args.length) + if (!args.length) { throw setError() + } const pkgJson = await PackageJson.load(this.prefix) const q = new Queryable(pkgJson.content) for (const key of args) { - if (!key) + if (!key) { throw setError() + } q.delete(key) } diff --git a/lib/commands/profile.js b/lib/commands/profile.js index caab13d782fcc..a2eb3518a282f 100644 --- a/lib/commands/profile.js +++ b/lib/commands/profile.js @@ -70,8 +70,9 @@ class Profile extends BaseCommand { async completion (opts) { var argv = opts.conf.argv.remain - if (!argv[2]) + if (!argv[2]) { return ['enable-2fa', 'disable-2fa', 'get', 'set'] + } switch (argv[2]) { case 'enable-2fa': @@ -89,8 +90,9 @@ class Profile extends BaseCommand { } async exec (args) { - if (args.length === 0) + if (args.length === 0) { throw new Error(this.usage) + } log.gauge.show('profile') @@ -122,8 +124,9 @@ class Profile extends BaseCommand { npmProfile.get(this.npm.flatOptions) ) - if (!info.cidr_whitelist) + if (!info.cidr_whitelist) { delete info.cidr_whitelist + } if (this.npm.config.get('json')) { this.npm.output(JSON.stringify(info, null, 2)) @@ -132,21 +135,24 @@ class Profile extends BaseCommand { // clean up and format key/values for output const cleaned = {} - for (const key of knownProfileKeys) + for (const key of knownProfileKeys) { cleaned[key] = info[key] || '' + } const unknownProfileKeys = Object.keys(info).filter((k) => !(k in cleaned)) - for (const key of unknownProfileKeys) + for (const key of unknownProfileKeys) { cleaned[key] = info[key] || '' + } delete cleaned.tfa delete cleaned.email_verified cleaned.email += info.email_verified ? ' (verified)' : '(unverified)' - if (info.tfa && !info.tfa.pending) + if (info.tfa && !info.tfa.pending) { cleaned[tfa] = info.tfa.mode - else + } else { cleaned[tfa] = 'disabled' + } if (args.length) { const values = args // comma or space separated @@ -159,15 +165,17 @@ class Profile extends BaseCommand { } else { if (this.npm.config.get('parseable')) { for (const key of Object.keys(info)) { - if (key === 'tfa') + if (key === 'tfa') { this.npm.output(`${key}\t${cleaned[tfa]}`) - else + } else { this.npm.output(`${key}\t${info[key]}`) + } } } else { const table = new Table() - for (const key of Object.keys(cleaned)) + for (const key of Object.keys(cleaned)) { table.push({ [ansistyles.bright(key)]: cleaned[key] }) + } this.npm.output(table.toString()) } @@ -192,8 +200,9 @@ class Profile extends BaseCommand { return newpassword } - if (prop !== 'password' && value === null) + if (prop !== 'password' && value === null) { throw new Error('npm profile set ') + } if (prop === 'password' && value !== null) { throw new Error( @@ -217,26 +226,29 @@ class Profile extends BaseCommand { const user = await pulseTillDone.withPromise(npmProfile.get(conf)) const newUser = {} - for (const key of writableProfileKeys) + for (const key of writableProfileKeys) { newUser[key] = user[key] + } newUser[prop] = value const result = await otplease(conf, conf => npmProfile.set(newUser, conf)) - if (this.npm.config.get('json')) + if (this.npm.config.get('json')) { this.npm.output(JSON.stringify({ [prop]: result[prop] }, null, 2)) - else if (this.npm.config.get('parseable')) + } else if (this.npm.config.get('parseable')) { this.npm.output(prop + '\t' + result[prop]) - else if (result[prop] != null) + } else if (result[prop] != null) { this.npm.output('Set', prop, 'to', result[prop]) - else + } else { 
this.npm.output('Set', prop) + } } async enable2fa (args) { - if (args.length > 1) + if (args.length > 1) { throw new Error('npm profile enable-2fa [auth-and-writes|auth-only]') + } const mode = args[0] || 'auth-and-writes' if (mode !== 'auth-only' && mode !== 'auth-and-writes') { @@ -267,11 +279,11 @@ class Profile extends BaseCommand { const creds = this.npm.config.getCredentialsByURI(this.npm.config.get('registry')) const auth = {} - if (creds.token) + if (creds.token) { auth.token = creds.token - else if (creds.username) + } else if (creds.username) { auth.basic = { username: creds.username, password: creds.password } - else if (creds.auth) { + } else if (creds.auth) { const basic = Buffer.from(creds.auth, 'base64').toString().split(':', 2) auth.basic = { username: basic[0], password: basic[1] } } @@ -370,8 +382,9 @@ class Profile extends BaseCommand { 'if you lose your authentication device.' ) - for (const tfaCode of result.tfa) + for (const tfaCode of result.tfa) { this.npm.output('\t' + tfaCode) + } } async disable2fa (args) { @@ -396,12 +409,13 @@ class Profile extends BaseCommand { tfa: { password: password, mode: 'disable' }, }, conf)) - if (this.npm.config.get('json')) + if (this.npm.config.get('json')) { this.npm.output(JSON.stringify({ tfa: false }, null, 2)) - else if (this.npm.config.get('parseable')) + } else if (this.npm.config.get('parseable')) { this.npm.output('tfa\tfalse') - else + } else { this.npm.output('Two factor authentication disabled.') + } } } module.exports = Profile diff --git a/lib/commands/publish.js b/lib/commands/publish.js index 3bc309c12a15b..efa1485654b2b 100644 --- a/lib/commands/publish.js +++ b/lib/commands/publish.js @@ -56,10 +56,12 @@ class Publish extends BaseCommand { } async exec (args) { - if (args.length === 0) + if (args.length === 0) { args = ['.'] - if (args.length !== 1) + } + if (args.length !== 1) { throw this.usageError() + } log.verbose('publish', replaceInfo(args)) @@ -70,8 +72,9 @@ class Publish extends BaseCommand { const ignoreScripts = this.npm.config.get('ignore-scripts') const silent = log.level === 'silent' - if (semver.validRange(defaultTag)) + if (semver.validRange(defaultTag)) { throw new Error('Tag name must not be a valid SemVer range: ' + defaultTag.trim()) + } const opts = { ...this.npm.flatOptions } @@ -80,8 +83,9 @@ class Publish extends BaseCommand { const spec = npa(args[0]) let manifest = await this.getManifest(spec, opts) - if (manifest.publishConfig) + if (manifest.publishConfig) { flatten(manifest.publishConfig, opts) + } // only run scripts for directory type publishes if (spec.type === 'directory' && !ignoreScripts) { @@ -101,13 +105,15 @@ class Publish extends BaseCommand { // so that we send the latest and greatest thing to the registry // note that publishConfig might have changed as well! manifest = await this.getManifest(spec, opts) - if (manifest.publishConfig) + if (manifest.publishConfig) { flatten(manifest.publishConfig, opts) + } // note that logTar calls npmlog.notice(), so if we ARE in silent mode, // this will do nothing, but we still want it in the debuglog if it fails. 
- if (!json) + if (!json) { logTar(pkgContents, { log, unicode }) + } if (!dryRun) { const resolved = npa.resolve(manifest.name, manifest.version) @@ -140,10 +146,11 @@ class Publish extends BaseCommand { } if (!this.suppressOutput) { - if (!silent && json) + if (!silent && json) { this.npm.output(JSON.stringify(pkgContents, null, 2)) - else if (!silent) + } else if (!silent) { this.npm.output(`+ ${pkgContents.id}`) + } } return pkgContents @@ -180,21 +187,24 @@ class Publish extends BaseCommand { } // This needs to be in-line w/ the rest of the output that non-JSON // publish generates - if (!silent && !json) + if (!silent && !json) { this.npm.output(`+ ${pkgContents.id}`) - else + } else { results[name] = pkgContents + } } - if (!silent && json) + if (!silent && json) { this.npm.output(JSON.stringify(results, null, 2)) + } } // if it's a directory, read it from the file system // otherwise, get the full metadata from whatever it is getManifest (spec, opts) { - if (spec.type === 'directory') + if (spec.type === 'directory') { return readJson(`${spec.fetchSpec}/package.json`) + } return pacote.manifest(spec, { ...opts, fullMetadata: true }) } } diff --git a/lib/commands/rebuild.js b/lib/commands/rebuild.js index 3b9211e2e43bf..d95a865b49320 100644 --- a/lib/commands/rebuild.js +++ b/lib/commands/rebuild.js @@ -51,33 +51,39 @@ class Rebuild extends ArboristWorkspaceCmd { const tree = await arb.loadActual() const specs = args.map(arg => { const spec = npa(arg) - if (spec.type === 'tag' && spec.rawSpec === '') + if (spec.type === 'tag' && spec.rawSpec === '') { return spec + } - if (spec.type !== 'range' && spec.type !== 'version' && spec.type !== 'directory') + if (spec.type !== 'range' && spec.type !== 'version' && spec.type !== 'directory') { throw new Error('`npm rebuild` only supports SemVer version/range specifiers') + } return spec }) const nodes = tree.inventory.filter(node => this.isNode(specs, node)) await arb.rebuild({ nodes }) - } else + } else { await arb.rebuild() + } this.npm.output('rebuilt dependencies successfully') } isNode (specs, node) { return specs.some(spec => { - if (spec.type === 'directory') + if (spec.type === 'directory') { return node.path === spec.fetchSpec + } - if (spec.name !== node.name) + if (spec.name !== node.name) { return false + } - if (spec.rawSpec === '' || spec.rawSpec === '*') + if (spec.rawSpec === '' || spec.rawSpec === '*') { return true + } const { version } = node.package // TODO: add tests for a package with missing version diff --git a/lib/commands/repo.js b/lib/commands/repo.js index 372940512c6c9..f873098967f02 100644 --- a/lib/commands/repo.js +++ b/lib/commands/repo.js @@ -28,8 +28,9 @@ class Repo extends BaseCommand { } async exec (args) { - if (!args || !args.length) + if (!args || !args.length) { args = ['.'] + } await Promise.all(args.map(pkg => this.get(pkg))) } @@ -86,8 +87,9 @@ const unknownHostedUrl = url => { } = new URL(url) /* istanbul ignore next - URL ctor should prevent this */ - if (!protocol || !hostname) + if (!protocol || !hostname) { return null + } const proto = /(git\+)http:$/.test(protocol) ? 
'http:' : 'https:' const path = pathname.replace(/\.git$/, '') diff --git a/lib/commands/run-script.js b/lib/commands/run-script.js index 34e96257c365a..832e166a684b0 100644 --- a/lib/commands/run-script.js +++ b/lib/commands/run-script.js @@ -66,17 +66,19 @@ class RunScript extends BaseCommand { } async exec (args) { - if (args.length) + if (args.length) { return this.run(args) - else + } else { return this.list(args) + } } async execWorkspaces (args, filters) { - if (args.length) + if (args.length) { return this.runWorkspaces(args, filters) - else + } else { return this.listWorkspaces(args, filters) + } } async run ([event, ...args], { path = this.npm.localPrefix, pkg } = {}) { @@ -87,32 +89,38 @@ class RunScript extends BaseCommand { pkg = pkg || (await rpj(`${path}/package.json`)) const { scripts = {} } = pkg - if (event === 'restart' && !scripts.restart) + if (event === 'restart' && !scripts.restart) { scripts.restart = 'npm stop --if-present && npm start' - else if (event === 'env' && !scripts.env) + } else if (event === 'env' && !scripts.env) { scripts.env = isWindowsShell ? 'SET' : 'env' + } pkg.scripts = scripts if ( !Object.prototype.hasOwnProperty.call(scripts, event) && - !(event === 'start' && await isServerPackage(path)) + !(event === 'start' && (await isServerPackage(path))) ) { - if (this.npm.config.get('if-present')) + if (this.npm.config.get('if-present')) { return + } const suggestions = await didYouMean(this.npm, path, event) - throw new Error(`Missing script: "${event}"${suggestions}\n\nTo see a list of scripts, run:\n npm run`) + throw new Error( + `Missing script: "${event}"${suggestions}\n\nTo see a list of scripts, run:\n npm run` + ) } // positional args only added to the main event, not pre/post const events = [[event, args]] if (!this.npm.config.get('ignore-scripts')) { - if (scripts[`pre${event}`]) + if (scripts[`pre${event}`]) { events.unshift([`pre${event}`, []]) + } - if (scripts[`post${event}`]) + if (scripts[`post${event}`]) { events.push([`post${event}`, []]) + } } const opts = { @@ -140,12 +148,14 @@ class RunScript extends BaseCommand { const pkgid = _id || name const color = this.npm.color - if (!scripts) + if (!scripts) { return [] + } const allScripts = Object.keys(scripts) - if (log.level === 'silent') + if (log.level === 'silent') { return allScripts + } if (this.npm.config.get('json')) { this.npm.output(JSON.stringify(scripts, null, 2)) @@ -153,8 +163,9 @@ class RunScript extends BaseCommand { } if (this.npm.config.get('parseable')) { - for (const [script, cmd] of Object.entries(scripts)) + for (const [script, cmd] of Object.entries(scripts)) { this.npm.output(`${script}:${cmd}`) + } return allScripts } @@ -170,24 +181,30 @@ class RunScript extends BaseCommand { const colorize = color ? 
chalk : nocolor if (cmds.length) { - this.npm.output(`${ - colorize.reset(colorize.bold('Lifecycle scripts'))} included in ${ - colorize.green(pkgid)}:`) + this.npm.output( + `${colorize.reset(colorize.bold('Lifecycle scripts'))} included in ${colorize.green( + pkgid + )}:` + ) } - for (const script of cmds) + for (const script of cmds) { this.npm.output(prefix + script + indent + colorize.dim(scripts[script])) + } if (!cmds.length && runScripts.length) { - this.npm.output(`${ - colorize.bold('Scripts') - } available in ${colorize.green(pkgid)} via \`${ - colorize.blue('npm run-script')}\`:`) - } else if (runScripts.length) + this.npm.output( + `${colorize.bold('Scripts')} available in ${colorize.green(pkgid)} via \`${colorize.blue( + 'npm run-script' + )}\`:` + ) + } else if (runScripts.length) { this.npm.output(`\navailable via \`${colorize.blue('npm run-script')}\`:`) + } - for (const script of runScripts) + for (const script of runScripts) { this.npm.output(prefix + script + indent + colorize.dim(scripts[script])) + } this.npm.output('') return allScripts @@ -212,8 +229,9 @@ class RunScript extends BaseCommand { // avoids exiting with error code in case there's scripts missing // in some workspaces since other scripts might have succeeded - if (!scriptMissing) + if (!scriptMissing) { process.exitCode = 1 + } return scriptMissing }) @@ -221,15 +239,17 @@ class RunScript extends BaseCommand { } // in case **all** tests are missing, then it should exit with error code - if (res.every(Boolean)) + if (res.every(Boolean)) { throw new Error(`Missing script: ${args[0]}`) + } } async listWorkspaces (args, filters) { await this.setWorkspaces(filters) - if (log.level === 'silent') + if (log.level === 'silent') { return + } if (this.npm.config.get('json')) { const res = {} @@ -244,14 +264,16 @@ class RunScript extends BaseCommand { if (this.npm.config.get('parseable')) { for (const workspacePath of this.workspacePaths) { const { scripts, name } = await rpj(`${workspacePath}/package.json`) - for (const [script, cmd] of Object.entries(scripts || {})) + for (const [script, cmd] of Object.entries(scripts || {})) { this.npm.output(`${name}:${script}:${cmd}`) + } } return } - for (const workspacePath of this.workspacePaths) + for (const workspacePath of this.workspacePaths) { await this.list(args, workspacePath) + } } } diff --git a/lib/commands/search.js b/lib/commands/search.js index e60f41afb03fc..6c5c995c38c80 100644 --- a/lib/commands/search.js +++ b/lib/commands/search.js @@ -14,10 +14,11 @@ function prepareIncludes (args) { function prepareExcludes (searchexclude) { var exclude - if (typeof searchexclude === 'string') + if (typeof searchexclude === 'string') { exclude = searchexclude.split(/\s+/) - else + } else { exclude = [] + } return exclude .map(s => s.toLowerCase()) @@ -66,16 +67,18 @@ class Search extends BaseCommand { exclude: prepareExcludes(this.npm.flatOptions.search.exclude), } - if (opts.include.length === 0) + if (opts.include.length === 0) { throw new Error('search must be called with arguments') + } // Used later to figure out whether we had any packages go out let anyOutput = false class FilterStream extends Minipass { write (pkg) { - if (packageFilter(pkg, opts.include, opts.exclude)) + if (packageFilter(pkg, opts.include, opts.exclude)) { super.write(pkg) + } } } @@ -96,14 +99,16 @@ class Search extends BaseCommand { ) p.on('data', chunk => { - if (!anyOutput) + if (!anyOutput) { anyOutput = true + } this.npm.output(chunk.toString('utf8')) }) await p.promise() - if (!anyOutput 
&& !this.npm.config.get('json') && !this.npm.config.get('parseable')) + if (!anyOutput && !this.npm.config.get('json') && !this.npm.config.get('parseable')) { this.npm.output('No matches found for ' + (args.map(JSON.stringify).join(' '))) + } log.silly('search', 'search completed') log.clearProgress() diff --git a/lib/commands/set-script.js b/lib/commands/set-script.js index 00f9b5d5b1745..d99487e6713b4 100644 --- a/lib/commands/set-script.js +++ b/lib/commands/set-script.js @@ -36,19 +36,22 @@ class SetScript extends BaseCommand { } validate (args) { - if (process.env.npm_lifecycle_event === 'postinstall') + if (process.env.npm_lifecycle_event === 'postinstall') { throw new Error('Scripts can’t set from the postinstall script') + } // Parse arguments - if (args.length !== 2) + if (args.length !== 2) { throw new Error(`Expected 2 arguments: got ${args.length}`) + } } async exec (args) { this.validate(args) const warn = await this.doSetScript(this.npm.localPrefix, args[0], args[1]) - if (warn) + if (warn) { log.warn('set-script', `Script "${args[0]}" was overwritten`) + } } async execWorkspaces (args, filters) { @@ -86,8 +89,9 @@ class SetScript extends BaseCommand { && scripts[name] && scripts[name] !== value - if (overwriting) + if (overwriting) { warn = true + } pkgJson.update({ scripts: { diff --git a/lib/commands/set.js b/lib/commands/set.js index cdaabc04ac9ce..b38623c60c7ec 100644 --- a/lib/commands/set.js +++ b/lib/commands/set.js @@ -21,8 +21,9 @@ class Set extends BaseCommand { } async exec (args) { - if (!args.length) + if (!args.length) { throw this.usageError() + } return this.npm.exec('config', ['set'].concat(args)) } } diff --git a/lib/commands/shrinkwrap.js b/lib/commands/shrinkwrap.js index 42489a27f5bfb..2d57e7b914275 100644 --- a/lib/commands/shrinkwrap.js +++ b/lib/commands/shrinkwrap.js @@ -57,19 +57,22 @@ class Shrinkwrap extends BaseCommand { if (newFile) { let message = 'created a lockfile as npm-shrinkwrap.json' - if (updatedVersion) + if (updatedVersion) { message += ` with version ${updatedVersion}` + } log.notice('', message) } else if (notSW) { await unlink(oldFilename) let message = 'package-lock.json has been renamed to npm-shrinkwrap.json' - if (updatedVersion) + if (updatedVersion) { message += ` and updated to version ${updatedVersion}` + } log.notice('', message) - } else if (updatedVersion) + } else if (updatedVersion) { log.notice('', `npm-shrinkwrap.json updated to version ${updatedVersion}`) - else + } else { log.notice('', 'npm-shrinkwrap.json up to date') + } } } module.exports = Shrinkwrap diff --git a/lib/commands/star.js b/lib/commands/star.js index 3e5b0fc620830..4f7f2a127ab62 100644 --- a/lib/commands/star.js +++ b/lib/commands/star.js @@ -29,8 +29,9 @@ class Star extends BaseCommand { } async exec (args) { - if (!args.length) + if (!args.length) { throw new Error(this.usage) + } // if we're unstarring, then show an empty star image // otherwise, show the full star image @@ -52,8 +53,9 @@ class Star extends BaseCommand { }), ]) - if (!username) + if (!username) { throw new Error('You need to be logged in!') + } const body = { _id: fullData._id, diff --git a/lib/commands/stars.js b/lib/commands/stars.js index d430be2ced4ef..61fd45f8410e5 100644 --- a/lib/commands/stars.js +++ b/lib/commands/stars.js @@ -29,21 +29,25 @@ class Stars extends BaseCommand { async exec ([user]) { try { - if (!user) + if (!user) { user = await getIdentity(this.npm, this.npm.flatOptions) + } const { rows } = await fetch.json('/-/_view/starredByUser', { 
...this.npm.flatOptions, query: { key: `"${user}"` }, }) - if (rows.length === 0) + if (rows.length === 0) { log.warn('stars', 'user has not starred any packages') + } - for (const row of rows) + for (const row of rows) { this.npm.output(row.value) + } } catch (err) { - if (err.code === 'ENEEDAUTH') + if (err.code === 'ENEEDAUTH') { log.warn('stars', 'auth is required to look up your username') + } throw err } } diff --git a/lib/commands/team.js b/lib/commands/team.js index 11a7deb522b3a..620282ceda6fc 100644 --- a/lib/commands/team.js +++ b/lib/commands/team.js @@ -39,11 +39,13 @@ class Team extends BaseCommand { const { conf: { argv: { remain: argv } } } = opts const subcommands = ['create', 'destroy', 'add', 'rm', 'ls'] - if (argv.length === 2) + if (argv.length === 2) { return subcommands + } - if (subcommands.includes(argv[2])) + if (subcommands.includes(argv[2])) { return [] + } throw new Error(argv[2] + ' not recognized') } @@ -62,10 +64,11 @@ class Team extends BaseCommand { case 'rm': return this.rm(entity, user, opts) case 'ls': { const match = entity.match(/[^:]+:.+/) - if (match) + if (match) { return this.listUsers(entity, opts) - else + } else { return this.listTeams(entity, opts) + } } default: throw this.usage @@ -80,10 +83,11 @@ class Team extends BaseCommand { created: true, team: entity, })) - } else if (opts.parseable) + } else if (opts.parseable) { this.npm.output(`${entity}\tcreated`) - else if (!opts.silent && opts.loglevel !== 'silent') + } else if (!opts.silent && opts.loglevel !== 'silent') { this.npm.output(`+@${entity}`) + } } async destroy (entity, opts) { @@ -93,10 +97,11 @@ class Team extends BaseCommand { deleted: true, team: entity, })) - } else if (opts.parseable) + } else if (opts.parseable) { this.npm.output(`${entity}\tdeleted`) - else if (!opts.silent && opts.loglevel !== 'silent') + } else if (!opts.silent && opts.loglevel !== 'silent') { this.npm.output(`-@${entity}`) + } } async add (entity, user, opts) { @@ -107,10 +112,11 @@ class Team extends BaseCommand { team: entity, user, })) - } else if (opts.parseable) + } else if (opts.parseable) { this.npm.output(`${user}\t${entity}\tadded`) - else if (!opts.silent && opts.loglevel !== 'silent') + } else if (!opts.silent && opts.loglevel !== 'silent') { this.npm.output(`${user} added to @${entity}`) + } } async rm (entity, user, opts) { @@ -121,19 +127,20 @@ class Team extends BaseCommand { team: entity, user, })) - } else if (opts.parseable) + } else if (opts.parseable) { this.npm.output(`${user}\t${entity}\tremoved`) - else if (!opts.silent && opts.loglevel !== 'silent') + } else if (!opts.silent && opts.loglevel !== 'silent') { this.npm.output(`${user} removed from @${entity}`) + } } async listUsers (entity, opts) { const users = (await libteam.lsUsers(entity, opts)).sort() - if (opts.json) + if (opts.json) { this.npm.output(JSON.stringify(users, null, 2)) - else if (opts.parseable) + } else if (opts.parseable) { this.npm.output(users.join('\n')) - else if (!opts.silent && opts.loglevel !== 'silent') { + } else if (!opts.silent && opts.loglevel !== 'silent') { const plural = users.length === 1 ? '' : 's' const more = users.length === 0 ? 
'' : ':\n' this.npm.output(`\n@${entity} has ${users.length} user${plural}${more}`) @@ -143,11 +150,11 @@ class Team extends BaseCommand { async listTeams (entity, opts) { const teams = (await libteam.lsTeams(entity, opts)).sort() - if (opts.json) + if (opts.json) { this.npm.output(JSON.stringify(teams, null, 2)) - else if (opts.parseable) + } else if (opts.parseable) { this.npm.output(teams.join('\n')) - else if (!opts.silent && opts.loglevel !== 'silent') { + } else if (!opts.silent && opts.loglevel !== 'silent') { const plural = teams.length === 1 ? '' : 's' const more = teams.length === 0 ? '' : ':\n' this.npm.output(`\n@${entity} has ${teams.length} team${plural}${more}`) diff --git a/lib/commands/token.js b/lib/commands/token.js index f7b92ea1dde7d..499ea3d024d6d 100644 --- a/lib/commands/token.js +++ b/lib/commands/token.js @@ -21,39 +21,33 @@ class Token extends BaseCommand { /* istanbul ignore next - see test/lib/load-all-commands.js */ static get usage () { - return [ - 'list', - 'revoke <id|token>', - 'create [--read-only] [--cidr=list]', - ] + return ['list', 'revoke <id|token>', 'create [--read-only] [--cidr=list]'] } /* istanbul ignore next - see test/lib/load-all-commands.js */ static get params () { - return [ - 'read-only', - 'cidr', - 'registry', - 'otp', - ] + return ['read-only', 'cidr', 'registry', 'otp'] } async completion (opts) { const argv = opts.conf.argv.remain const subcommands = ['list', 'revoke', 'create'] - if (argv.length === 2) + if (argv.length === 2) { return subcommands + } - if (subcommands.includes(argv[2])) + if (subcommands.includes(argv[2])) { return [] + } throw new Error(argv[2] + ' not recognized') } async exec (args, cb) { log.gauge.show('token') - if (args.length === 0) + if (args.length === 0) { return this.list() + } switch (args[0]) { case 'list': case 'ls': @@ -79,25 +73,26 @@ class Token extends BaseCommand { return } else if (conf.parseable) { this.npm.output(['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t')) - tokens.forEach((token) => { - this.npm.output([ - token.key, - token.token, - token.created, - token.readonly ? 'true' : 'false', - token.cidr_whitelist ? token.cidr_whitelist.join(',') : '', - ].join('\t')) + tokens.forEach(token => { + this.npm.output( + [ + token.key, + token.token, + token.created, + token.readonly ? 'true' : 'false', + token.cidr_whitelist ?
token.cidr_whitelist.join(',') : '', + ].join('\t') + ) }) return } this.generateTokenIds(tokens, 6) - const idWidth = tokens.reduce((acc, token) => - Math.max(acc, token.id.length), 0) + const idWidth = tokens.reduce((acc, token) => Math.max(acc, token.id.length), 0) const table = new Table({ head: ['id', 'token', 'created', 'readonly', 'CIDR whitelist'], colWidths: [Math.max(idWidth, 2) + 2, 9, 12, 10], }) - tokens.forEach((token) => { + tokens.forEach(token => { table.push([ token.id, token.token + '…', @@ -110,39 +105,47 @@ class Token extends BaseCommand { } async rm (args) { - if (args.length === 0) + if (args.length === 0) { throw this.usageError('`<tokenKey>` argument is required.') + } const conf = this.config() const toRemove = [] const progress = log.newItem('removing tokens', toRemove.length) progress.info('token', 'getting existing list') const tokens = await pulseTillDone.withPromise(profile.listTokens(conf)) - args.forEach((id) => { - const matches = tokens.filter((token) => token.key.indexOf(id) === 0) - if (matches.length === 1) + args.forEach(id => { + const matches = tokens.filter(token => token.key.indexOf(id) === 0) + if (matches.length === 1) { toRemove.push(matches[0].key) - else if (matches.length > 1) - throw new Error(`Token ID "${id}" was ambiguous, a new token may have been created since you last ran \`npm token list\`.`) - else { + } else if (matches.length > 1) { + throw new Error( + /* eslint-disable-next-line max-len */ + `Token ID "${id}" was ambiguous, a new token may have been created since you last ran \`npm token list\`.` + ) + } else { const tokenMatches = tokens.some(t => id.indexOf(t.token) === 0) - if (!tokenMatches) + if (!tokenMatches) { throw new Error(`Unknown token id or value "${id}".`) + } toRemove.push(id) } }) - await Promise.all(toRemove.map(key => { - return otplease(conf, conf => { - return profile.removeToken(key, conf) + await Promise.all( + toRemove.map(key => { + return otplease(conf, conf => { + return profile.removeToken(key, conf) + }) }) - })) - if (conf.json) + ) + if (conf.json) { this.npm.output(JSON.stringify(toRemove)) - else if (conf.parseable) + } else if (conf.parseable) { this.npm.output(toRemove.join('\t')) - else + } else { this.npm.output('Removed ' + toRemove.length + ' token' + (toRemove.length !== 1 ?
's' : '')) + } } async create (args) { @@ -150,34 +153,40 @@ class Token extends BaseCommand { const cidr = conf.cidr const readonly = conf.readOnly - return readUserInfo.password().then((password) => { - const validCIDR = this.validateCIDRList(cidr) - log.info('token', 'creating') - return pulseTillDone.withPromise(otplease(conf, conf => { - return profile.createToken(password, readonly, validCIDR, conf) - })) - }).then((result) => { - delete result.key - delete result.updated - if (conf.json) - this.npm.output(JSON.stringify(result)) - else if (conf.parseable) - Object.keys(result).forEach((k) => this.npm.output(k + '\t' + result[k])) - else { - const table = new Table() - for (const k of Object.keys(result)) - table.push({ [ansistyles.bright(k)]: String(result[k]) }) - this.npm.output(table.toString()) - } - }) + return readUserInfo + .password() + .then(password => { + const validCIDR = this.validateCIDRList(cidr) + log.info('token', 'creating') + return pulseTillDone.withPromise( + otplease(conf, conf => { + return profile.createToken(password, readonly, validCIDR, conf) + }) + ) + }) + .then(result => { + delete result.key + delete result.updated + if (conf.json) { + this.npm.output(JSON.stringify(result)) + } else if (conf.parseable) { + Object.keys(result).forEach(k => this.npm.output(k + '\t' + result[k])) + } else { + const table = new Table() + for (const k of Object.keys(result)) { + table.push({ [ansistyles.bright(k)]: String(result[k]) }) + } + this.npm.output(table.toString()) + } + }) } config () { const conf = { ...this.npm.flatOptions } const creds = this.npm.config.getCredentialsByURI(conf.registry) - if (creds.token) + if (creds.token) { conf.auth = { token: creds.token } - else if (creds.username) { + } else if (creds.username) { conf.auth = { basic: { username: creds.username, @@ -192,11 +201,13 @@ class Token extends BaseCommand { password: auth[1], }, } - } else + } else { conf.auth = {} + } - if (conf.otp) + if (conf.otp) { conf.auth.otp = conf.otp + } return conf } @@ -209,9 +220,9 @@ class Token extends BaseCommand { for (const token of tokens) { token.id = token.key for (let ii = minLength; ii < token.key.length; ++ii) { - const match = tokens.some(ot => - ot !== token && - ot.key.slice(0, ii) === token.key.slice(0, ii)) + const match = tokens.some( + ot => ot !== token && ot.key.slice(0, ii) === token.key.slice(0, ii) + ) if (!match) { token.id = token.key.slice(0, ii) break @@ -226,11 +237,15 @@ class Token extends BaseCommand { const maybeList = cidrs ? (Array.isArray(cidrs) ? cidrs : [cidrs]) : [] const list = maybeList.length === 1 ? 
maybeList[0].split(/,\s*/) : maybeList for (const cidr of list) { - if (isCidrV6(cidr)) - throw this.invalidCIDRError('CIDR whitelist can only contain IPv4 addresses, ' + cidr + ' is IPv6') + if (isCidrV6(cidr)) { + throw this.invalidCIDRError( + 'CIDR whitelist can only contain IPv4 addresses, ' + cidr + ' is IPv6' + ) + } - if (!isCidrV4(cidr)) + if (!isCidrV4(cidr)) { throw this.invalidCIDRError('CIDR whitelist contains invalid CIDR entry: ' + cidr) + } } return list } diff --git a/lib/commands/uninstall.js b/lib/commands/uninstall.js index 09b6e47a78f0c..aaebd1a907f8b 100644 --- a/lib/commands/uninstall.js +++ b/lib/commands/uninstall.js @@ -39,18 +39,19 @@ class Uninstall extends ArboristWorkspaceCmd { : this.npm.localPrefix if (!args.length) { - if (!global) + if (!global) { throw new Error('Must provide a package name to remove') - else { + } else { let pkg try { pkg = await rpj(resolve(this.npm.localPrefix, 'package.json')) } catch (er) { - if (er.code !== 'ENOENT' && er.code !== 'ENOTDIR') + if (er.code !== 'ENOENT' && er.code !== 'ENOTDIR') { throw er - else + } else { throw this.usageError() + } } args.push(pkg.name) diff --git a/lib/commands/unpublish.js b/lib/commands/unpublish.js index 60ab4a5f9be8b..6ef3f282ac24f 100644 --- a/lib/commands/unpublish.js +++ b/lib/commands/unpublish.js @@ -33,38 +33,44 @@ class Unpublish extends BaseCommand { async completion (args) { const { partialWord, conf } = args - if (conf.argv.remain.length >= 3) + if (conf.argv.remain.length >= 3) { return [] + } const opts = this.npm.flatOptions const username = await getIdentity(this.npm, { ...opts }).catch(() => null) - if (!username) + if (!username) { return [] + } const access = await libaccess.lsPackages(username, opts) // do a bit of filtering at this point, so that we don't need // to fetch versions for more than one thing, but also don't // accidentally unpublish a whole project let pkgs = Object.keys(access || {}) - if (!partialWord || !pkgs.length) + if (!partialWord || !pkgs.length) { return pkgs + } const pp = npa(partialWord).name pkgs = pkgs.filter(p => !p.indexOf(pp)) - if (pkgs.length > 1) + if (pkgs.length > 1) { return pkgs + } const json = await npmFetch.json(npa(pkgs[0]).escapedName, opts) const versions = Object.keys(json.versions) - if (!versions.length) + if (!versions.length) { return pkgs - else + } else { return versions.map(v => `${pkgs[0]}@${v}`) + } } async exec (args) { - if (args.length > 1) + if (args.length > 1) { throw this.usageError() + } const spec = args.length && npa(args[0]) const force = this.npm.config.get('force') @@ -93,10 +99,11 @@ class Unpublish extends BaseCommand { try { manifest = await readJson(pkgJson) } catch (err) { - if (err && err.code !== 'ENOENT' && err.code !== 'ENOTDIR') + if (err && err.code !== 'ENOENT' && err.code !== 'ENOTDIR') { throw err - else + } else { throw this.usageError() + } } this.npm.log.verbose('unpublish', manifest) @@ -104,19 +111,22 @@ class Unpublish extends BaseCommand { const { name, version, publishConfig } = manifest const pkgJsonSpec = npa.resolve(name, version) const optsWithPub = { ...opts, publishConfig } - if (!dryRun) + if (!dryRun) { await otplease(opts, opts => libunpub(pkgJsonSpec, optsWithPub)) + } pkgName = name pkgVersion = version ? `@${version}` : '' } else { - if (!dryRun) + if (!dryRun) { await otplease(opts, opts => libunpub(spec, opts)) + } pkgName = spec.name pkgVersion = spec.type === 'version' ? 
`@${spec.rawSpec}` : '' } - if (!silent) + if (!silent) { this.npm.output(`- ${pkgName}${pkgVersion}`) + } } async execWorkspaces (args, filters) { @@ -130,8 +140,9 @@ class Unpublish extends BaseCommand { ) } - for (const name of this.workspaceNames) + for (const name of this.workspaceNames) { await this.exec([name]) + } } } module.exports = Unpublish diff --git a/lib/commands/version.js b/lib/commands/version.js index 60e1e36f58080..1572a38454e99 100644 --- a/lib/commands/version.js +++ b/lib/commands/version.js @@ -32,13 +32,21 @@ class Version extends BaseCommand { /* istanbul ignore next - see test/lib/load-all-commands.js */ static get usage () { - return ['[<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git]'] + return [ + /* eslint-disable-next-line max-len */ + '[<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git]', + ] } async completion (opts) { - const { conf: { argv: { remain } } } = opts - if (remain.length > 2) + const { + conf: { + argv: { remain }, + }, + } = opts + if (remain.length > 2) { return [] + } return [ 'major', @@ -104,17 +112,20 @@ class Version extends BaseCommand { .then(data => JSON.parse(data)) .catch(() => ({})) - if (pkg.name && pkg.version) + if (pkg.name && pkg.version) { results[pkg.name] = pkg.version + } results.npm = this.npm.version - for (const [key, version] of Object.entries(process.versions)) + for (const [key, version] of Object.entries(process.versions)) { results[key] = version + } - if (this.npm.config.get('json')) + if (this.npm.config.get('json')) { this.npm.output(JSON.stringify(results, null, 2)) - else + } else { this.npm.output(results) + } } async listWorkspaces (filters) { @@ -123,11 +134,11 @@ class Version extends BaseCommand { for (const path of this.workspacePaths) { const pj = resolve(path, 'package.json') // setWorkspaces has already parsed package.json so we know it won't error - const pkg = await readFile(pj, 'utf8') - .then(data => JSON.parse(data)) + const pkg = await readFile(pj, 'utf8').then(data => JSON.parse(data)) - if (pkg.name && pkg.version) + if (pkg.name && pkg.version) { results[pkg.name] = pkg.version + } } return this.list(results) } diff --git a/lib/commands/view.js b/lib/commands/view.js index 24d13cfcfb3df..08bed57e09a49 100644 --- a/lib/commands/view.js +++ b/lib/commands/view.js @@ -67,44 +67,51 @@ class View extends BaseCommand { function getFields (d, f, pref) { f = f || [] - if (!d) + if (!d) { return f + } pref = pref || [] Object.keys(d).forEach((k) => { - if (k.charAt(0) === '_' || k.indexOf('.') !== -1) + if (k.charAt(0) === '_' || k.indexOf('.') !== -1) { return + } const p = pref.concat(k).join('.') f.push(p) if (Array.isArray(d[k])) { d[k].forEach((val, i) => { const pi = p + '[' + i + ']' - if (val && typeof val === 'object') + if (val && typeof val === 'object') { getFields(val, f, [p]) - else + } else { f.push(pi) + } }) return } - if (typeof d[k] === 'object') + if (typeof d[k] === 'object') { getFields(d[k], f, [p]) + } }) return f } } async exec (args) { - if (!args.length) + if (!args.length) { args = ['.'] + } let pkg = args.shift() const local = /^\.@/.test(pkg) || pkg === '.'
if (local) { - if (this.npm.config.get('global')) + if (this.npm.config.get('global')) { throw new Error('Cannot use view command in global mode.') + } const dir = this.npm.prefix const manifest = await readJson(resolve(dir, 'package.json')) - if (!manifest.name) + if (!manifest.name) { throw new Error('Invalid package.json, no "name" field') + } // put the version back if it existed pkg = `${manifest.name}${pkg.slice(1)}` } @@ -131,14 +138,16 @@ class View extends BaseCommand { log.disableProgress() const msg = await this.jsonData(reducedData, pckmnt._id) - if (msg !== '') + if (msg !== '') { console.log(msg) + } } } async execWorkspaces (args, filters) { - if (!args.length) + if (!args.length) { args = ['.'] + } const pkg = args.shift() @@ -166,22 +175,25 @@ class View extends BaseCommand { } if (!this.npm.config.get('json')) { - if (wholePackument) + if (wholePackument) { data.map((v) => this.prettyView(pckmnt, v[Object.keys(v)[0]][''])) - else { + } else { console.log(`${name}:`) const msg = await this.jsonData(reducedData, pckmnt._id) - if (msg !== '') + if (msg !== '') { console.log(msg) + } } } else { const msg = await this.jsonData(reducedData, pckmnt._id) - if (msg !== '') + if (msg !== '') { results[name] = JSON.parse(msg) + } } } - if (Object.keys(results).length > 0) + if (Object.keys(results).length > 0) { console.log(JSON.stringify(results, null, 2)) + } } async getData (pkg, args) { @@ -196,13 +208,15 @@ class View extends BaseCommand { // get the data about this package let version = this.npm.config.get('tag') // rawSpec is the git url if this is from git - if (spec.type !== 'git' && spec.rawSpec) + if (spec.type !== 'git' && spec.rawSpec) { version = spec.rawSpec + } const pckmnt = await packument(spec, opts) - if (pckmnt['dist-tags'] && pckmnt['dist-tags'][version]) + if (pckmnt['dist-tags'] && pckmnt['dist-tags'][version]) { version = pckmnt['dist-tags'][version] + } if (pckmnt.time && pckmnt.time.unpublished) { const u = pckmnt.time.unpublished @@ -218,15 +232,17 @@ class View extends BaseCommand { pckmnt.versions = Object.keys(versions).sort(semver.compareLoose) // remove readme unless we asked for it - if (args.indexOf('readme') === -1) + if (args.indexOf('readme') === -1) { delete pckmnt.readme + } Object.keys(versions).forEach((v) => { if (semver.satisfies(v, version, true)) { args.forEach(arg => { // remove readme unless we asked for it - if (args.indexOf('readme') !== -1) + if (args.indexOf('readme') !== -1) { delete versions[v].readme + } data.push(showFields(pckmnt, versions[v], arg)) }) @@ -237,8 +253,9 @@ class View extends BaseCommand { !this.npm.config.get('json') && args.length === 1 && args[0] === '' - ) + ) { pckmnt.version = version + } return [pckmnt, data] } @@ -254,17 +271,19 @@ class View extends BaseCommand { versions.forEach((v) => { const fields = Object.keys(data[v]) includeFields = includeFields || (fields.length > 1) - if (json) + if (json) { msgJson.push({}) + } fields.forEach((f) => { let d = cleanup(data[v][f]) - if (fields.length === 1 && json) + if (fields.length === 1 && json) { msgJson[msgJson.length - 1][f] = d + } if (includeVersions || includeFields || typeof d !== 'string') { - if (json) + if (json) { msgJson[msgJson.length - 1][f] = d - else { + } else { d = inspect(d, { showHidden: false, depth: 5, @@ -272,12 +291,14 @@ class View extends BaseCommand { maxArrayLength: null, }) } - } else if (typeof d === 'string' && json) + } else if (typeof d === 'string' && json) { d = JSON.stringify(d) + } if (!json) { - if (f && includeFields) 
+ if (f && includeFields) { f += ' = ' + } msg += (includeVersions ? name + '@' + v + ' ' : '') + (includeFields ? f : '') + d + '\n' } @@ -289,10 +310,11 @@ class View extends BaseCommand { const k = Object.keys(msgJson[0])[0] msgJson = msgJson.map(m => m[k]) } - if (msgJson.length === 1) + if (msgJson.length === 1) { msg = JSON.stringify(msgJson[0], null, 2) + '\n' - else if (msgJson.length > 1) + } else if (msgJson.length > 1) { msg = JSON.stringify(msgJson, null, 2) + '\n' + } } return msg.trim() @@ -351,10 +373,11 @@ class View extends BaseCommand { manifest.dist.fileCount && color.yellow(manifest.dist.fileCount), unpackedSize: unpackedSize && color.yellow(unpackedSize), } - if (info.license.toLowerCase().trim() === 'proprietary') + if (info.license.toLowerCase().trim() === 'proprietary') { info.license = style.bright(color.red(info.license)) - else + } else { info.license = color.green(info.license) + } console.log('') console.log( @@ -364,8 +387,9 @@ class View extends BaseCommand { ' | versions: ' + info.versions ) info.description && console.log(info.description) - if (info.repo || info.site) + if (info.repo || info.site) { info.site && console.log(color.cyan(info.site)) + } const warningSign = unicode ? ' ⚠️ ' : '!!' info.deprecated && console.log( @@ -396,8 +420,9 @@ class View extends BaseCommand { console.log('') console.log('dependencies:') console.log(columns(info.deps.slice(0, maxDeps), { padding: 1 })) - if (info.deps.length > maxDeps) + if (info.deps.length > maxDeps) { console.log(`(...and ${info.deps.length - maxDeps} more.)`) + } } if (info.maintainers && info.maintainers.length) { @@ -412,10 +437,12 @@ class View extends BaseCommand { if (info.publisher || info.modified) { let publishInfo = 'published' - if (info.modified) + if (info.modified) { publishInfo += ` ${info.modified}` - if (info.publisher) + } + if (info.publisher) { publishInfo += ` by ${info.publisher}` + } console.log('') console.log(publishInfo) } @@ -458,24 +485,28 @@ function showFields (data, version, fields) { const s = queryable.query(fields) const res = { [version.version]: s } - if (s) + if (s) { return res + } } function cleanup (data) { - if (Array.isArray(data)) + if (Array.isArray(data)) { return data.map(cleanup) + } - if (!data || typeof data !== 'object') + if (!data || typeof data !== 'object') { return data + } const keys = Object.keys(data) if (keys.length <= 3 && data.name && (keys.length === 1 || (keys.length === 3 && data.email && data.url) || - (keys.length === 2 && (data.email || data.url)))) + (keys.length === 2 && (data.email || data.url)))) { data = unparsePerson(data) + } return data } diff --git a/lib/npm.js b/lib/npm.js index 4b7b3440ff5ca..ecc7f0a7de206 100644 --- a/lib/npm.js +++ b/lib/npm.js @@ -14,18 +14,19 @@ const timers = new Map() // Finished timers const timings = {} -const processOnTimeHandler = (name) => { +const processOnTimeHandler = name => { timers.set(name, Date.now()) } -const processOnTimeEndHandler = (name) => { +const processOnTimeEndHandler = name => { if (timers.has(name)) { const ms = Date.now() - timers.get(name) log.timing(name, `Completed in ${ms}ms`) timings[name] = ms timers.delete(name) - } else + } else { log.silly('timing', "Tried to end timer that doesn't exist:", name) + } } const { definitions, flatten, shorthands } = require('./utils/config/index.js') @@ -113,17 +114,23 @@ class Npm extends EventEmitter { // Options are prefixed by a hyphen-minus (-, \u2d). // Other dash-type chars look similar but are invalid. 
if (!warnedNonDashArg) { - args.filter(arg => /^[\u2010-\u2015\u2212\uFE58\uFE63\uFF0D]/.test(arg)) + args + .filter(arg => /^[\u2010-\u2015\u2212\uFE58\uFE63\uFF0D]/.test(arg)) .forEach(arg => { warnedNonDashArg = true - this.log.error('arg', 'Argument starts with non-ascii dash, this is probably invalid:', arg) + this.log.error( + 'arg', + 'Argument starts with non-ascii dash, this is probably invalid:', + arg + ) }) } const workspacesEnabled = this.config.get('workspaces') const workspacesFilters = this.config.get('workspace') - if (workspacesEnabled === false && workspacesFilters.length > 0) + if (workspacesEnabled === false && workspacesFilters.length > 0) { throw new Error('Can not use --no-workspaces and --workspace at the same time') + } const filterByWorkspaces = workspacesEnabled || workspacesFilters.length > 0 // normally this would go in the constructor, but our tests don't @@ -141,8 +148,9 @@ class Npm extends EventEmitter { return } if (filterByWorkspaces) { - if (this.config.get('global')) + if (this.config.get('global')) { throw new Error('Workspaces not supported for global packages') + } return command.execWorkspaces(args, this.config.get('workspace')).finally(() => { process.emit('timeEnd', `command:${cmd}`) @@ -159,16 +167,20 @@ class Npm extends EventEmitter { process.emit('time', 'npm:load') this.log.pause() this.loadPromise = new Promise((resolve, reject) => { - this[_load]().catch(er => er).then((er) => { - this.loadErr = er - if (!er && this.config.get('force')) - this.log.warn('using --force', 'Recommended protections disabled.') - - process.emit('timeEnd', 'npm:load') - if (er) - return reject(er) - resolve() - }) + this[_load]() + .catch(er => er) + .then(er => { + this.loadErr = er + if (!er && this.config.get('force')) { + this.log.warn('using --force', 'Recommended protections disabled.') + } + + process.emit('timeEnd', 'npm:load') + if (er) { + return reject(er) + } + resolve() + }) }) } return this.loadPromise @@ -215,7 +227,8 @@ class Npm extends EventEmitter { // args keeps those from being leaked. process.emit('time', 'npm:load:setTitle') const tokrev = deref(this.argv[0]) === 'token' && this.argv[1] === 'revoke' - this.title = tokrev ? 'npm token revoke' + (this.argv[2] ? ' ***' : '') + this.title = tokrev + ? 'npm token revoke' + (this.argv[2] ? ' ***' : '') : ['npm', ...this.argv].join(' ') process.emit('timeEnd', 'npm:load:setTitle') @@ -232,20 +245,21 @@ class Npm extends EventEmitter { process.emit('time', 'npm:load:configScope') const configScope = this.config.get('scope') - if (configScope && !/^@/.test(configScope)) + if (configScope && !/^@/.test(configScope)) { this.config.set('scope', `@${configScope}`, this.config.find('scope')) + } process.emit('timeEnd', 'npm:load:configScope') process.emit('time', 'npm:load:projectScope') - this.projectScope = this.config.get('scope') || - getProjectScope(this.prefix) + this.projectScope = this.config.get('scope') || getProjectScope(this.prefix) process.emit('timeEnd', 'npm:load:projectScope') } get flatOptions () { const { flat } = this.config - if (this.command) + if (this.command) { flat.npmCommand = this.command + } return flat } @@ -298,7 +312,7 @@ class Npm extends EventEmitter { } get dir () { - return (this.config.get('global')) ? this.globalDir : this.localDir + return this.config.get('global') ? 
this.globalDir : this.localDir } get globalBin () { diff --git a/lib/search/format-package-stream.js b/lib/search/format-package-stream.js index fb7d81856d63f..7ff44e9e2049d 100644 --- a/lib/search/format-package-stream.js +++ b/lib/search/format-package-stream.js @@ -31,8 +31,9 @@ class JSONOutputStream extends Minipass { if (!this._didFirst) { super.write('[\n') this._didFirst = true - } else + } else { super.write('\n,\n') + } try { return super.write(JSON.stringify(obj)) @@ -93,8 +94,9 @@ function prettify (data, num, opts) { } ) output = trimToMaxWidth(output) - if (opts.color) + if (opts.color) { output = highlightSearchTerms(output, opts.args) + } return output } diff --git a/lib/search/package-filter.js b/lib/search/package-filter.js index 2e7d8e82ae3e7..45a67835b8c4a 100644 --- a/lib/search/package-filter.js +++ b/lib/search/package-filter.js @@ -19,13 +19,15 @@ function getWords (data, opts) { function filterWords (data, include, exclude, opts) { var words = getWords(data, opts) for (var i = 0, l = include.length; i < l; i++) { - if (!match(words, include[i])) + if (!match(words, include[i])) { return false + } } for (i = 0, l = exclude.length; i < l; i++) { - if (match(words, exclude[i])) + if (match(words, exclude[i])) { return false + } } return true diff --git a/lib/utils/audit-error.js b/lib/utils/audit-error.js index c58c1d16e6885..b4ab26fd0c697 100644 --- a/lib/utils/audit-error.js +++ b/lib/utils/audit-error.js @@ -4,11 +4,13 @@ // returns 'true' if there was an error, false otherwise const auditError = (npm, report) => { - if (!report || !report.error) + if (!report || !report.error) { return false + } - if (npm.command !== 'audit') + if (npm.command !== 'audit') { return true + } const { error } = report @@ -25,8 +27,9 @@ const auditError = (npm, report) => { statusCode: error.statusCode, body, }, null, 2)) - } else + } else { npm.output(body) + } throw 'audit endpoint returned an error' } diff --git a/lib/utils/cleanup-log-files.js b/lib/utils/cleanup-log-files.js index 13d1e50da82e9..8fb0fa1550281 100644 --- a/lib/utils/cleanup-log-files.js +++ b/lib/utils/cleanup-log-files.js @@ -8,23 +8,26 @@ const { resolve } = require('path') const rimraf = require('rimraf') const glob = require('glob') module.exports = (cache, max, warn) => { - /* eslint-disable promise/param-names */ return new Promise(done => { glob(resolve(cache, '_logs', '*-debug.log'), (er, files) => { - if (er) + if (er) { return done() + } let pending = files.length - max - if (pending <= 0) + if (pending <= 0) { return done() + } for (let i = 0; i < files.length - max; i++) { - rimraf(files[i], (er) => { - if (er) + rimraf(files[i], er => { + if (er) { warn('log', 'failed to remove log file', files[i]) + } - if (--pending === 0) + if (--pending === 0) { done() + } }) } }) diff --git a/lib/utils/completion/installed-deep.js b/lib/utils/completion/installed-deep.js index 62686f9b2d3c9..7098d81fe7b49 100644 --- a/lib/utils/completion/installed-deep.js +++ b/lib/utils/completion/installed-deep.js @@ -27,14 +27,16 @@ const installedDeep = async (npm) => { }) const gTree = await gArb.loadActual({ global: true }) - for (const node of getValues(gTree)) + for (const node of getValues(gTree)) { res.add(global ? 
node.name : [node.name, '-g']) + } if (!global) { const arb = new Arborist({ global: false, path: prefix, workspacesEnabled }) const tree = await arb.loadActual() - for (const node of getValues(tree)) + for (const node of getValues(tree)) { res.add(node.name) + } } return [...res] diff --git a/lib/utils/completion/installed-shallow.js b/lib/utils/completion/installed-shallow.js index 1c9b8ef5acb0f..686c95e63245e 100644 --- a/lib/utils/completion/installed-shallow.js +++ b/lib/utils/completion/installed-shallow.js @@ -4,8 +4,9 @@ const readdir = promisify(require('readdir-scoped-modules')) const installedShallow = async (npm, opts) => { const names = global => readdir(global ? npm.globalDir : npm.localDir) const { conf: { argv: { remain } } } = opts - if (remain.length > 3) + if (remain.length > 3) { return null + } const { global } = npm.flatOptions const locals = global ? [] : await names(false) diff --git a/lib/utils/config/definition.js b/lib/utils/config/definition.js index 1354851326adf..fc46bc3d6a5ef 100644 --- a/lib/utils/config/definition.js +++ b/lib/utils/config/definition.js @@ -6,12 +6,7 @@ // say "these are for registry access", "these are for // version resolution" etc. -const required = [ - 'type', - 'description', - 'default', - 'key', -] +const required = ['type', 'description', 'default', 'key'] const allowed = [ 'default', @@ -44,42 +39,50 @@ class Definition { this.envExport = true Object.assign(this, def) this.validate() - if (!this.defaultDescription) + if (!this.defaultDescription) { this.defaultDescription = describeValue(this.default) - if (!this.typeDescription) + } + if (!this.typeDescription) { this.typeDescription = describeType(this.type) + } // hint is only used for non-boolean values if (!this.hint) { - if (this.type === Number) + if (this.type === Number) { this.hint = '' - else + } else { this.hint = `<${this.key}>` + } } - if (!this.usage) + if (!this.usage) { this.usage = describeUsage(this) + } } validate () { for (const req of required) { - if (!Object.prototype.hasOwnProperty.call(this, req)) + if (!Object.prototype.hasOwnProperty.call(this, req)) { throw new Error(`config lacks ${req}: ${this.key}`) + } } - if (!this.key) + if (!this.key) { throw new Error(`config lacks key: ${this.key}`) + } for (const field of Object.keys(this)) { - if (!allowed.includes(field)) + if (!allowed.includes(field)) { throw new Error(`config defines unknown field ${field}: ${this.key}`) + } } } // a textual description of this config, suitable for help output describe () { const description = unindent(this.description) - const noEnvExport = this.envExport ? '' : ` + const noEnvExport = this.envExport + ? '' + : ` This value is not exported to the environment for child processes. ` - const deprecated = !this.deprecated ? '' - : `* DEPRECATED: ${unindent(this.deprecated)}\n` + const deprecated = !this.deprecated ? 
'' : `* DEPRECATED: ${unindent(this.deprecated)}\n` return wrapAll(`#### \`${this.key}\` * Default: ${unindent(this.defaultDescription)} @@ -90,28 +93,32 @@ ${noEnvExport}`) } } -const describeUsage = (def) => { +const describeUsage = def => { let key = '' // Single type if (!Array.isArray(def.type)) { - if (def.short) + if (def.short) { key = `-${def.short}|` + } - if (def.type === Boolean && def.default !== false) + if (def.type === Boolean && def.default !== false) { key = `${key}--no-${def.key}` - else + } else { key = `${key}--${def.key}` + } - if (def.type !== Boolean) + if (def.type !== Boolean) { key = `${key} ${def.hint}` + } return key } key = `--${def.key}` - if (def.short) + if (def.short) { key = `-${def.short}|--${def.key}` + } // Multiple types let types = def.type @@ -122,14 +129,15 @@ const describeUsage = (def) => { // all non-optional params have defaults so we render everything as optional types = types.filter(t => t !== null && t !== Array && t !== Boolean) - if (!types.length) + if (!types.length) { return key + } let description - if (!types.some(t => typeof t !== 'string')) + if (!types.some(t => typeof t !== 'string')) { // Specific values, use specifics given description = `<${types.filter(d => d).join('|')}>` - else { + } else { // Generic values, use hint description = def.hint } @@ -142,17 +150,16 @@ const describeUsage = (def) => { } const usage = `${key} ${description}` - if (multiple) + if (multiple) { return `${usage} [${usage} ...]` - else + } else { return usage + } } const describeType = type => { if (Array.isArray(type)) { - const descriptions = type - .filter(t => t !== Array) - .map(t => describeType(t)) + const descriptions = type.filter(t => t !== Array).map(t => describeType(t)) // [a] => "a" // [a, b] => "a or b" @@ -162,8 +169,7 @@ const describeType = type => { const last = descriptions.length > 1 ? [descriptions.pop()] : [] const oxford = descriptions.length > 1 ? ', or ' : ' or ' const words = [descriptions.join(', ')].concat(last).join(oxford) - const multiple = type.includes(Array) ? ' (can be set multiple times)' - : '' + const multiple = type.includes(Array) ? ' (can be set multiple times)' : '' return `${words}${multiple}` } @@ -193,8 +199,7 @@ const describeType = type => { } // if it's a string, quote it. otherwise, just cast to string. -const describeValue = val => - typeof val === 'string' ? JSON.stringify(val) : String(val) +const describeValue = val => (typeof val === 'string' ? JSON.stringify(val) : String(val)) const unindent = s => { // get the first \n followed by a bunch of spaces, and pluck off @@ -203,30 +208,44 @@ const unindent = s => { return !match ? s.trim() : s.split(match[0]).join('\n').trim() } -const wrap = (s) => { +const wrap = s => { const cols = Math.min(Math.max(20, process.stdout.columns) || 80, 80) - 5 - return unindent(s).split(/[ \n]+/).reduce((left, right) => { - const last = left.split('\n').pop() - const join = last.length && last.length + right.length > cols ? '\n' : ' ' - return left + join + right - }) + return unindent(s) + .split(/[ \n]+/) + .reduce((left, right) => { + const last = left.split('\n').pop() + const join = last.length && last.length + right.length > cols ? 
'\n' : ' ' + return left + join + right + }) } const wrapAll = s => { let inCodeBlock = false - return s.split('\n\n').map(block => { - if (inCodeBlock || block.startsWith('```')) { - inCodeBlock = !block.endsWith('```') - return block - } - - if (block.charAt(0) === '*') { - return '* ' + block.substr(1).trim().split('\n* ').map(li => { - return wrap(li).replace(/\n/g, '\n ') - }).join('\n* ') - } else - return wrap(block) - }).join('\n\n') + return s + .split('\n\n') + .map(block => { + if (inCodeBlock || block.startsWith('```')) { + inCodeBlock = !block.endsWith('```') + return block + } + + if (block.charAt(0) === '*') { + return ( + '* ' + + block + .substr(1) + .trim() + .split('\n* ') + .map(li => { + return wrap(li).replace(/\n/g, '\n ') + }) + .join('\n* ') + ) + } else { + return wrap(block) + } + }) + .join('\n\n') } module.exports = Definition diff --git a/lib/utils/config/definitions.js b/lib/utils/config/definitions.js index a725ee0fa1d6f..b47a46de85e2e 100644 --- a/lib/utils/config/definitions.js +++ b/lib/utils/config/definitions.js @@ -16,8 +16,9 @@ const maybeReadFile = file => { try { return fs.readFileSync(file, 'utf8') } catch (er) { - if (er.code !== 'ENOENT') + if (er.code !== 'ENOENT') { throw er + } return null } } @@ -27,27 +28,32 @@ const buildOmitList = obj => { const omit = obj.omit || [] const only = obj.only - if (/^prod(uction)?$/.test(only) || obj.production) + if (/^prod(uction)?$/.test(only) || obj.production) { omit.push('dev') - else if (obj.production === false) + } else if (obj.production === false) { include.push('dev') + } - if (/^dev/.test(obj.also)) + if (/^dev/.test(obj.also)) { include.push('dev') + } - if (obj.dev) + if (obj.dev) { include.push('dev') + } - if (obj.optional === false) + if (obj.optional === false) { omit.push('optional') - else if (obj.optional === true) + } else if (obj.optional === true) { include.push('optional') + } obj.omit = [...new Set(omit)].filter(type => !include.includes(type)) obj.include = [...new Set(include)] - if (obj.omit.includes('dev')) + if (obj.omit.includes('dev')) { process.env.NODE_ENV = 'production' + } return obj.omit } @@ -98,8 +104,9 @@ const { const define = (key, def) => { /* istanbul ignore if - this should never happen, prevents mistakes below */ - if (definitions[key]) + if (definitions[key]) { throw new Error(`defining key more than once: ${key}`) + } definitions[key] = new Definition(key, def) } @@ -342,8 +349,9 @@ define('cache-max', { This option has been deprecated in favor of \`--prefer-online\` `, flatten (key, obj, flatOptions) { - if (obj[key] <= 0) + if (obj[key] <= 0) { flatOptions.preferOnline = true + } }, }) @@ -357,8 +365,9 @@ define('cache-min', { This option has been deprecated in favor of \`--prefer-offline\`. 
`, flatten (key, obj, flatOptions) { - if (obj[key] >= 9999) + if (obj[key] >= 9999) { flatOptions.preferOffline = true + } }, }) @@ -372,12 +381,14 @@ define('cafile', { `, flatten (key, obj, flatOptions) { // always set to null in defaults - if (!obj.cafile) + if (!obj.cafile) { return + } const raw = maybeReadFile(obj.cafile) - if (!raw) + if (!raw) { return + } const delim = '-----END CERTIFICATE-----' flatOptions.ca = raw.replace(/\r\n/g, '\n').split(delim) @@ -806,8 +817,9 @@ define('global', { `, flatten: (key, obj, flatOptions) => { flatten(key, obj, flatOptions) - if (flatOptions.global) + if (flatOptions.global) { flatOptions.location = 'global' + } }, }) @@ -1150,8 +1162,9 @@ define('location', { `, flatten: (key, obj, flatOptions) => { flatten(key, obj, flatOptions) - if (flatOptions.global) + if (flatOptions.global) { flatOptions.location = 'global' + } }, }) @@ -1281,10 +1294,11 @@ define('noproxy', { Also accepts a comma-delimited string. `, flatten (key, obj, flatOptions) { - if (Array.isArray(obj[key])) + if (Array.isArray(obj[key])) { flatOptions.noProxy = obj[key].join(',') - else + } else { flatOptions.noProxy = obj[key] + } }, }) @@ -1403,8 +1417,9 @@ define('package-lock', { `, flatten: (key, obj, flatOptions) => { flatten(key, obj, flatOptions) - if (flatOptions.packageLockOnly) + if (flatOptions.packageLockOnly) { flatOptions.packageLock = true + } }, }) @@ -1423,8 +1438,9 @@ define('package-lock-only', { `, flatten: (key, obj, flatOptions) => { flatten(key, obj, flatOptions) - if (flatOptions.packageLockOnly) + if (flatOptions.packageLockOnly) { flatOptions.packageLock = true + } }, }) @@ -1607,8 +1623,9 @@ define('save-dev', { `, flatten (key, obj, flatOptions) { if (!obj[key]) { - if (flatOptions.saveType === 'dev') + if (flatOptions.saveType === 'dev') { delete flatOptions.saveType + } return } @@ -1640,20 +1657,23 @@ define('save-optional', { `, flatten (key, obj, flatOptions) { if (!obj[key]) { - if (flatOptions.saveType === 'optional') + if (flatOptions.saveType === 'optional') { delete flatOptions.saveType - else if (flatOptions.saveType === 'peerOptional') + } else if (flatOptions.saveType === 'peerOptional') { flatOptions.saveType = 'peer' + } return } - if (flatOptions.saveType === 'peerOptional') + if (flatOptions.saveType === 'peerOptional') { return + } - if (flatOptions.saveType === 'peer') + if (flatOptions.saveType === 'peer') { flatOptions.saveType = 'peerOptional' - else + } else { flatOptions.saveType = 'optional' + } }, }) @@ -1665,20 +1685,23 @@ define('save-peer', { `, flatten (key, obj, flatOptions) { if (!obj[key]) { - if (flatOptions.saveType === 'peer') + if (flatOptions.saveType === 'peer') { delete flatOptions.saveType - else if (flatOptions.saveType === 'peerOptional') + } else if (flatOptions.saveType === 'peerOptional') { flatOptions.saveType = 'optional' + } return } - if (flatOptions.saveType === 'peerOptional') + if (flatOptions.saveType === 'peerOptional') { return + } - if (flatOptions.saveType === 'optional') + if (flatOptions.saveType === 'optional') { flatOptions.saveType = 'peerOptional' - else + } else { flatOptions.saveType = 'peer' + } }, }) @@ -1715,8 +1738,9 @@ define('save-prod', { `, flatten (key, obj, flatOptions) { if (!obj[key]) { - if (flatOptions.saveType === 'prod') + if (flatOptions.saveType === 'prod') { delete flatOptions.saveType + } return } @@ -2085,8 +2109,9 @@ define('user-agent', { const value = obj[key] const ciName = obj['ci-name'] let inWorkspaces = false - if (obj.workspaces || obj.workspace && 
obj.workspace.length) + if (obj.workspaces || obj.workspace && obj.workspace.length) { inWorkspaces = true + } flatOptions.userAgent = value.replace(/\{node-version\}/gi, obj['node-version']) .replace(/\{npm-version\}/gi, obj['npm-version']) diff --git a/lib/utils/config/describe-all.js b/lib/utils/config/describe-all.js index 23a10ae97783c..39f8d5fe4d453 100644 --- a/lib/utils/config/describe-all.js +++ b/lib/utils/config/describe-all.js @@ -5,7 +5,7 @@ const describeAll = () => { /* istanbul ignore next - typically already sorted in the definitions file, * but this is here so that our help doc will stay consistent if we decide * to move them around. */ - const sort = ([keya, {deprecated: depa}], [keyb, {deprecated: depb}]) => { + const sort = ([keya, { deprecated: depa }], [keyb, { deprecated: depb }]) => { return depa && !depb ? 1 : !depa && depb ? -1 : localeCompare(keya, keyb) diff --git a/lib/utils/config/flatten.js b/lib/utils/config/flatten.js index f6d6124bddf7a..588d05bf0d77d 100644 --- a/lib/utils/config/flatten.js +++ b/lib/utils/config/flatten.js @@ -11,10 +11,11 @@ const definitions = require('./definitions.js') const flatten = (obj, flat = {}) => { for (const [key, val] of Object.entries(obj)) { const def = definitions[key] - if (def && def.flatten) + if (def && def.flatten) { def.flatten(key, obj, flat) - else if (/@.*:registry$/i.test(key) || /^\/\//.test(key)) + } else if (/@.*:registry$/i.test(key) || /^\/\//.test(key)) { flat[key] = val + } } // XXX make this the bin/npm-cli.js file explicitly instead diff --git a/lib/utils/config/index.js b/lib/utils/config/index.js index a24f5865242bf..d8706d50c612d 100644 --- a/lib/utils/config/index.js +++ b/lib/utils/config/index.js @@ -27,12 +27,14 @@ const shorthands = { reg: ['--registry'], } -for (const [key, {short}] of Object.entries(definitions)) { - if (!short) +for (const [key, { short }] of Object.entries(definitions)) { + if (!short) { continue + } // can be either an array or string - for (const s of [].concat(short)) + for (const s of [].concat(short)) { shorthands[s] = [`--${key}`] + } } module.exports = { diff --git a/lib/utils/deref-command.js b/lib/utils/deref-command.js index f45c2627e49f6..dd89fb5a4f2b2 100644 --- a/lib/utils/deref-command.js +++ b/lib/utils/deref-command.js @@ -7,21 +7,25 @@ const abbrev = require('abbrev') const abbrevs = abbrev(fullList) module.exports = c => { - if (!c || typeof c !== 'string') + if (!c || typeof c !== 'string') { return '' + } - if (c.match(/[A-Z]/)) + if (c.match(/[A-Z]/)) { c = c.replace(/([A-Z])/g, m => '-' + m.toLowerCase()) + } - if (plumbing.indexOf(c) !== -1) + if (plumbing.indexOf(c) !== -1) { return c + } // first deref the abbrev, if there is one // then resolve any aliases // so `npm install-cl` will resolve to `install-clean` then to `ci` let a = abbrevs[c] - while (aliases[a]) + while (aliases[a]) { a = aliases[a] + } return a || '' } diff --git a/lib/utils/did-you-mean.js b/lib/utils/did-you-mean.js index 953048309856b..b859abaaf5d23 100644 --- a/lib/utils/did-you-mean.js +++ b/lib/utils/did-you-mean.js @@ -4,8 +4,7 @@ const { cmdList } = require('./cmd-list.js') const didYouMean = async (npm, path, scmd) => { // const cmd = await npm.cmd(str) - const close = cmdList - .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && scmd !== cmd) + const close = cmdList.filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && scmd !== cmd) let best = [] for (const str of close) { const cmd = await npm.cmd(str) @@ -17,22 +16,25 @@ const didYouMean = async (npm, path, 
scmd) => { const { bin, scripts } = await readJson(`${path}/package.json`) best = best.concat( Object.keys(scripts || {}) - .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && - !runScripts.includes(cmd)) + .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && !runScripts.includes(cmd)) .map(str => ` npm run ${str} # run the "${str}" package script`), Object.keys(bin || {}) .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4) + /* eslint-disable-next-line max-len */ .map(str => ` npm exec ${str} # run the "${str}" command from either this or a remote npm package`) ) } catch (_) { // gracefully ignore not being in a folder w/ a package.json } - if (best.length === 0) + if (best.length === 0) { return '' + } - const suggestion = best.length === 1 ? `\n\nDid you mean this?\n${best[0]}` - : `\n\nDid you mean one of these?\n${best.slice(0, 3).join('\n')}` + const suggestion = + best.length === 1 + ? `\n\nDid you mean this?\n${best[0]}` + : `\n\nDid you mean one of these?\n${best.slice(0, 3).join('\n')}` return suggestion } module.exports = didYouMean diff --git a/lib/utils/error-message.js b/lib/utils/error-message.js index 9343d37d54149..48ad4676f471e 100644 --- a/lib/utils/error-message.js +++ b/lib/utils/error-message.js @@ -9,10 +9,12 @@ module.exports = (er, npm) => { const short = [] const detail = [] - if (er.message) + if (er.message) { er.message = replaceInfo(er.message) - if (er.stack) + } + if (er.stack) { er.stack = replaceInfo(er.stack) + } switch (er.code) { case 'ERESOLVE': @@ -46,10 +48,14 @@ module.exports = (er, npm) => { case 'EACCES': case 'EPERM': { - const isCachePath = typeof er.path === 'string' && - npm.config.loaded && er.path.startsWith(npm.config.get('cache')) - const isCacheDest = typeof er.dest === 'string' && - npm.config.loaded && er.dest.startsWith(npm.config.get('cache')) + const isCachePath = + typeof er.path === 'string' && + npm.config.loaded && + er.path.startsWith(npm.config.get('cache')) + const isCacheDest = + typeof er.dest === 'string' && + npm.config.loaded && + er.dest.startsWith(npm.config.get('cache')) const isWindows = require('./is-windows.js') @@ -64,7 +70,9 @@ module.exports = (er, npm) => { 'previous versions of npm which has since been addressed.', '', 'To permanently fix this problem, please run:', - ` sudo chown -R ${process.getuid()}:${process.getgid()} ${JSON.stringify(npm.config.get('cache'))}`, + ` sudo chown -R ${process.getuid()}:${process.getgid()} ${JSON.stringify( + npm.config.get('cache') + )}`, ].join('\n'), ]) } else { @@ -73,14 +81,17 @@ module.exports = (er, npm) => { '', [ '\nThe operation was rejected by your operating system.', - (isWindows - ? 'It\'s possible that the file was already in use (by a text editor or antivirus),\n' + + isWindows + /* eslint-disable-next-line max-len */ + ? "It's possible that the file was already in use (by a text editor or antivirus),\n" + 'or that you lack permissions to access it.' 
- : 'It is likely you do not have the permissions to access this file as the current user'), + /* eslint-disable-next-line max-len */ + : 'It is likely you do not have the permissions to access this file as the current user', '\nIf you believe this might be a permissions issue, please double-check the', 'permissions of the file and its containing directories, or try running', 'the command again as root/Administrator.', - ].join('\n')]) + ].join('\n'), + ]) } break } @@ -89,11 +100,9 @@ module.exports = (er, npm) => { short.push(['', er.message]) detail.push([ '', - [ - '', - 'Failed using git.', - 'Please check if you have git installed and in your PATH.', - ].join('\n'), + ['', 'Failed using git.', 'Please check if you have git installed and in your PATH.'].join( + '\n' + ), ]) break @@ -101,8 +110,7 @@ module.exports = (er, npm) => { // Check whether we ran into a conflict in our own package.json if (er.path === resolve(npm.prefix, 'package.json')) { const { isDiff } = require('parse-conflict-json') - const txt = require('fs').readFileSync(er.path, 'utf8') - .replace(/\r\n/g, '\n') + const txt = require('fs').readFileSync(er.path, 'utf8').replace(/\r\n/g, '\n') if (isDiff(txt)) { detail.push([ '', @@ -141,17 +149,19 @@ module.exports = (er, npm) => { } else { // npm ERR! code E401 // npm ERR! Unable to authenticate, need: Basic - const auth = !er.headers || !er.headers['www-authenticate'] ? [] - : er.headers['www-authenticate'].map((au) => au.split(/[,\s]+/))[0] + const auth = + !er.headers || !er.headers['www-authenticate'] + ? [] + : er.headers['www-authenticate'].map(au => au.split(/[,\s]+/))[0] if (auth.includes('Bearer')) { - short.push(['', 'Unable to authenticate, your authentication token seems to be invalid.']) + short.push([ + '', + 'Unable to authenticate, your authentication token seems to be invalid.', + ]) detail.push([ '', - [ - 'To correct this please trying logging in again with:', - ' npm login', - ].join('\n'), + ['To correct this please trying logging in again with:', ' npm login'].join('\n'), ]) } else if (auth.includes('Basic')) { short.push(['', 'Incorrect or missing password.']) @@ -169,8 +179,9 @@ module.exports = (er, npm) => { ' npm login', ].join('\n'), ]) - } else + } else { short.push(['', er.message || er]) + } } break @@ -185,19 +196,16 @@ module.exports = (er, npm) => { const valResult = nameValidator(pkg) - if (valResult.validForNewPackages) - detail.push(['404', 'You should bug the author to publish it (or use the name yourself!)']) - else { + if (valResult.validForNewPackages) { + detail.push([ + '404', + 'You should bug the author to publish it (or use the name yourself!)', + ]) + } else { detail.push(['404', 'This package name is not valid, because', '']) - const errorsArray = [ - ...(valResult.errors || []), - ...(valResult.warnings || []), - ] - errorsArray.forEach((item, idx) => detail.push([ - '404', - ' ' + (idx + 1) + '. ' + item, - ])) + const errorsArray = [...(valResult.errors || []), ...(valResult.warnings || [])] + errorsArray.forEach((item, idx) => detail.push(['404', ' ' + (idx + 1) + '. ' + item])) } detail.push(['404', '\nNote that you can also install from a']) @@ -218,26 +226,30 @@ module.exports = (er, npm) => { short.push(['git', ' ' + er.path]) detail.push([ 'git', - [ - 'Refusing to remove it. Update manually,', - 'or move it out of the way first.', - ].join('\n'), + ['Refusing to remove it. 
Update manually,', 'or move it out of the way first.'].join('\n'), ]) break case 'EBADPLATFORM': { - const validOs = er.required && - er.required.os && - er.required.os.join ? er.required.os.join(',') : er.required.os - const validArch = er.required && - er.required.cpu && - er.required.cpu.join ? er.required.cpu.join(',') : er.required.cpu + const validOs = + er.required && er.required.os && er.required.os.join + ? er.required.os.join(',') + : er.required.os + const validArch = + er.required && er.required.cpu && er.required.cpu.join + ? er.required.cpu.join(',') + : er.required.cpu const expected = { os: validOs, arch: validArch } const actual = { os: process.platform, arch: process.arch } short.push([ 'notsup', [ - format('Unsupported platform for %s: wanted %j (current: %j)', er.pkgid, expected, actual), + format( + 'Unsupported platform for %s: wanted %j (current: %j)', + er.pkgid, + expected, + actual + ), ].join('\n'), ]) detail.push([ @@ -283,19 +295,25 @@ module.exports = (er, npm) => { case 'ETARGET': short.push(['notarget', er.message]) - detail.push(['notarget', [ - 'In most cases you or one of your dependencies are requesting', - "a package version that doesn't exist.", - ].join('\n')]) + detail.push([ + 'notarget', + [ + 'In most cases you or one of your dependencies are requesting', + "a package version that doesn't exist.", + ].join('\n'), + ]) break case 'E403': short.push(['403', er.message]) - detail.push(['403', [ - 'In most cases, you or one of your dependencies are requesting', - 'a package version that is forbidden by your security policy, or', - 'on a server you do not have access to.', - ].join('\n')]) + detail.push([ + '403', + [ + 'In most cases, you or one of your dependencies are requesting', + 'a package version that is forbidden by your security policy, or', + 'on a server you do not have access to.', + ].join('\n'), + ]) break case 'EBADENGINE': @@ -306,10 +324,11 @@ module.exports = (er, npm) => { [ 'Not compatible with your version of node/npm: ' + er.pkgid, 'Required: ' + JSON.stringify(er.required), - 'Actual: ' + JSON.stringify({ - npm: npm.version, - node: npm.config.loaded ? npm.config.get('node-version') : process.version, - }), + 'Actual: ' + + JSON.stringify({ + npm: npm.version, + node: npm.config.loaded ? 
npm.config.get('node-version') : process.version, + }), ].join('\n'), ]) break @@ -363,17 +382,21 @@ module.exports = (er, npm) => { default: short.push(['', er.message || er]) - if (er.signal) + if (er.signal) { detail.push(['signal', er.signal]) + } - if (er.cmd && Array.isArray(er.args)) + if (er.cmd && Array.isArray(er.args)) { detail.push(['command', ...[er.cmd, ...er.args.map(replaceInfo)]]) + } - if (er.stdout) + if (er.stdout) { detail.push(['', er.stdout.trim()]) + } - if (er.stderr) + if (er.stderr) { detail.push(['', er.stderr.trim()]) + } break } diff --git a/lib/utils/exit-handler.js b/lib/utils/exit-handler.js index 7be138d2c361f..5b2811468eca3 100644 --- a/lib/utils/exit-handler.js +++ b/lib/utils/exit-handler.js @@ -15,8 +15,13 @@ let wroteLogFile = false const getLogFile = () => { // we call this multiple times, so we need to treat it as a singleton because // the date is part of the name - if (!logFileName) - logFileName = path.resolve(npm.config.get('cache'), '_logs', (new Date()).toISOString().replace(/[.:]/g, '_') + '-debug.log') + if (!logFileName) { + logFileName = path.resolve( + npm.config.get('cache'), + '_logs', + new Date().toISOString().replace(/[.:]/g, '_') + '-debug.log' + ) + } return logFileName } @@ -26,8 +31,9 @@ process.on('exit', code => { // unfinished timer check below process.emit('timeEnd', 'npm') npm.log.disableProgress() - for (const [name, timers] of npm.timers) + for (const [name, timers] of npm.timers) { npm.log.verbose('unfinished npm timer', name, timers) + } if (npm.config.loaded && npm.config.get('timing')) { try { @@ -35,12 +41,15 @@ process.on('exit', code => { const dir = path.dirname(npm.config.get('cache')) mkdirp.sync(dir) - fs.appendFileSync(file, JSON.stringify({ - command: process.argv.slice(2), - logfile: getLogFile(), - version: npm.version, - ...npm.timings, - }) + '\n') + fs.appendFileSync( + file, + JSON.stringify({ + command: process.argv.slice(2), + logfile: getLogFile(), + version: npm.version, + ...npm.timings, + }) + '\n' + ) const st = fs.lstatSync(path.dirname(npm.config.get('cache'))) fs.chownSync(dir, st.uid, st.gid) @@ -50,10 +59,11 @@ process.on('exit', code => { } } - if (!code) + if (!code) { npm.log.info('ok') - else + } else { npm.log.verbose('code', code) + } if (!exitHandlerCalled) { process.exitCode = code || 1 @@ -65,19 +75,18 @@ process.on('exit', code => { writeLogFile() } // In timing mode we always write the log file - if (npm.config.loaded && npm.config.get('timing') && !wroteLogFile) + if (npm.config.loaded && npm.config.get('timing') && !wroteLogFile) { writeLogFile() + } if (wroteLogFile) { // just a line break - if (npm.log.levels[npm.log.level] <= npm.log.levels.error) + if (npm.log.levels[npm.log.level] <= npm.log.levels.error) { console.error('') + } npm.log.error( '', - [ - 'A complete log of this run can be found in:', - ' ' + getLogFile(), - ].join('\n') + ['A complete log of this run can be found in:', ' ' + getLogFile()].join('\n') ) } @@ -86,7 +95,7 @@ process.on('exit', code => { wroteLogFile = false }) -const exitHandler = (err) => { +const exitHandler = err => { npm.log.disableProgress() if (!npm.config.loaded) { err = err || new Error('Exit prior to config file resolving.') @@ -131,8 +140,9 @@ const exitHandler = (err) => { for (const k of ['type', 'stack', 'statusCode', 'pkgid']) { const v = err[k] - if (v) + if (v) { npm.log.verbose(k, replaceInfo(v)) + } } npm.log.verbose('cwd', process.cwd()) @@ -145,13 +155,15 @@ const exitHandler = (err) => { for (const k of ['code', 'syscall', 
'file', 'path', 'dest', 'errno']) { const v = err[k] - if (v) + if (v) { npm.log.error(k, v) + } } const msg = errorMessage(err, npm) - for (const errline of [...msg.summary, ...msg.detail]) + for (const errline of [...msg.summary, ...msg.detail]) { npm.log.error(...errline) + } if (npm.config.loaded && npm.config.get('json')) { const error = { @@ -164,21 +176,24 @@ const exitHandler = (err) => { console.error(JSON.stringify(error, null, 2)) } - if (typeof err.errno === 'number') + if (typeof err.errno === 'number') { exitCode = err.errno - else if (typeof err.code === 'number') + } else if (typeof err.code === 'number') { exitCode = err.code + } } } npm.log.verbose('exit', exitCode || 0) - if (npm.log.level === 'silent') + if (npm.log.level === 'silent') { noLog = true + } // noLog is true if there was an error, including if config wasn't loaded, so // this doesn't need a config.loaded guard - if (exitCode && !noLog) + if (exitCode && !noLog) { writeLogFile() + } // explicitly call process.exit now so we don't hang on things like the // update notifier, also flush stdout beforehand because process.exit doesn't @@ -193,11 +208,14 @@ const writeLogFile = () => { let logOutput = '' npm.log.record.forEach(m => { const p = [m.id, m.level] - if (m.prefix) + if (m.prefix) { p.push(m.prefix) + } const pref = p.join(' ') - m.message.trim().split(/\r?\n/) + m.message + .trim() + .split(/\r?\n/) .map(line => (pref + ' ' + line).trim()) .forEach(line => { logOutput += line + os.EOL @@ -216,12 +234,10 @@ const writeLogFile = () => { // truncate once it's been written. npm.log.record.length = 0 wroteLogFile = true - } catch (ex) { - - } + } catch (ex) {} } module.exports = exitHandler -module.exports.setNpm = (n) => { +module.exports.setNpm = n => { npm = n } diff --git a/lib/utils/explain-dep.js b/lib/utils/explain-dep.js index 944b4be62bacf..107f68549ef1d 100644 --- a/lib/utils/explain-dep.js +++ b/lib/utils/explain-dep.js @@ -43,20 +43,25 @@ const printNode = (node, color) => { } = node const { bold, dim, green } = color ? chalk : nocolor const extra = [] - if (extraneous) + if (extraneous) { extra.push(' ' + bold(colorType('extraneous', color))) + } - if (dev) + if (dev) { extra.push(' ' + bold(colorType('dev', color))) + } - if (optional) + if (optional) { extra.push(' ' + bold(colorType('optional', color))) + } - if (peer) + if (peer) { extra.push(' ' + bold(colorType('peer', color))) + } - if (bundled) + if (bundled) { extra.push(' ' + bold(colorType('bundled', color))) + } const pkgid = isWorkspace ? 
green(`${name}@${version}`) @@ -67,8 +72,9 @@ const printNode = (node, color) => { } const explainLinksIn = ({ linksIn }, depth, color) => { - if (!linksIn || !linksIn.length || depth <= 0) + if (!linksIn || !linksIn.length || depth <= 0) { return '' + } const messages = linksIn.map(link => explainNode(link, depth - 1, color)) const str = '\n' + messages.join('\n') @@ -76,8 +82,9 @@ const explainLinksIn = ({ linksIn }, depth, color) => { } const explainDependents = ({ name, dependents }, depth, color) => { - if (!dependents || !dependents.length || depth <= 0) + if (!dependents || !dependents.length || depth <= 0) { return '' + } const max = Math.ceil(depth / 2) const messages = dependents.slice(0, max) @@ -118,8 +125,9 @@ const explainEdge = ({ name, type, bundled, from, spec }, depth, color) => { } const explainFrom = (from, depth, color) => { - if (!from.name && !from.version) + if (!from.name && !from.version) { return 'the root project' + } return printNode(from, color) + explainDependents(from, depth - 1, color) + diff --git a/lib/utils/explain-eresolve.js b/lib/utils/explain-eresolve.js index b25e3e4a9ccd0..7f6a10869c73c 100644 --- a/lib/utils/explain-eresolve.js +++ b/lib/utils/explain-eresolve.js @@ -15,20 +15,22 @@ const explain = (expl, color, depth) => { const whileInstalling = dep && dep.whileInstalling || current && current.whileInstalling || edge && edge.from && edge.from.whileInstalling - if (whileInstalling) + if (whileInstalling) { out.push('While resolving: ' + printNode(whileInstalling, color)) + } // it "should" be impossible for an ERESOLVE explanation to lack both // current and currentEdge, but better to have a less helpful error // than a crashing failure. - if (current) + if (current) { out.push('Found: ' + explainNode(current, depth, color)) - else if (peerConflict && peerConflict.current) + } else if (peerConflict && peerConflict.current) { out.push('Found: ' + explainNode(peerConflict.current, depth, color)) - else if (currentEdge) + } else if (currentEdge) { out.push('Found: ' + explainEdge(currentEdge, depth, color)) - else /* istanbul ignore else - should always have one */ if (edge) + } else /* istanbul ignore else - should always have one */ if (edge) { out.push('Found: ' + explainEdge(edge, depth, color)) + } out.push('\nCould not resolve dependency:\n' + explainEdge(edge, depth, color)) diff --git a/lib/utils/format-bytes.js b/lib/utils/format-bytes.js index 87fb561aabef1..d7cf6d144e339 100644 --- a/lib/utils/format-bytes.js +++ b/lib/utils/format-bytes.js @@ -4,17 +4,24 @@ const formatBytes = (bytes, space = true) => { let spacer = '' - if (space) + if (space) { spacer = ' ' + } - if (bytes < 1000) // B + if (bytes < 1000) { + // B return `${bytes}${spacer}B` + } - if (bytes < 1000000) // kB + if (bytes < 1000000) { + // kB return `${(bytes / 1000).toFixed(1)}${spacer}kB` + } - if (bytes < 1000000000) // MB + if (bytes < 1000000000) { + // MB return `${(bytes / 1000000).toFixed(1)}${spacer}MB` + } return `${(bytes / 1000000000).toFixed(1)}${spacer}GB` } diff --git a/lib/utils/get-identity.js b/lib/utils/get-identity.js index e92a2c524ec45..e77c2eea4c5ab 100644 --- a/lib/utils/get-identity.js +++ b/lib/utils/get-identity.js @@ -5,8 +5,9 @@ const needsAuthError = (msg) => module.exports = async (npm, opts = {}) => { const { registry } = opts - if (!registry) + if (!registry) { throw Object.assign(new Error('No registry specified.'), { code: 'ENOREGISTRY' }) + } // First, check if we have a user/pass-based auth const creds = 
npm.config.getCredentialsByURI(registry) @@ -22,9 +23,9 @@ module.exports = async (npm, opts = {}) => { }) const { username: usernameFromRegistry } = registryData // Retrieved username from registry; return it - if (usernameFromRegistry) + if (usernameFromRegistry) { return usernameFromRegistry - else { + } else { // Didn't get username from registry; bad token throw needsAuthError( 'Your auth token is no longer valid. Please login again.' diff --git a/lib/utils/get-project-scope.js b/lib/utils/get-project-scope.js index 3ce84d5bb0a59..dc1b4deba3dc2 100644 --- a/lib/utils/get-project-scope.js +++ b/lib/utils/get-project-scope.js @@ -2,12 +2,14 @@ const { resolve } = require('path') module.exports = prefix => { try { const { name } = require(resolve(prefix, 'package.json')) - if (!name || typeof name !== 'string') + if (!name || typeof name !== 'string') { return '' + } const split = name.split('/') - if (split.length < 2) + if (split.length < 2) { return '' + } const scope = split[0] return /^@/.test(scope) ? scope : '' diff --git a/lib/utils/npm-usage.js b/lib/utils/npm-usage.js index d54c8ac5479dc..b0c98b2ae6c0b 100644 --- a/lib/utils/npm-usage.js +++ b/lib/utils/npm-usage.js @@ -32,8 +32,9 @@ npm@${npm.version} ${dirname(dirname(__dirname))}` } const allCommands = async (npm) => { - if (npm.config.get('long')) + if (npm.config.get('long')) { return usages(npm) + } return ('\n ' + wrap(cmdList)) } @@ -45,9 +46,9 @@ const wrap = (arr) => { let l = 0 for (const c of arr.sort((a, b) => a < b ? -1 : 1)) { - if (out[l].length + c.length + 2 < line) + if (out[l].length + c.length + 2 < line) { out[l] += ', ' + c - else { + } else { out[l++] += ',' out[l] = c } diff --git a/lib/utils/open-url.js b/lib/utils/open-url.js index 331ca96fa96d0..21368efe7bc0d 100644 --- a/lib/utils/open-url.js +++ b/lib/utils/open-url.js @@ -25,8 +25,9 @@ const open = async (npm, url, errMsg) => { } try { - if (!/^(https?|file):$/.test(new URL(url).protocol)) + if (!/^(https?|file):$/.test(new URL(url).protocol)) { throw new Error() + } } catch (_) { throw new Error('Invalid URL: ' + url) } @@ -35,10 +36,11 @@ const open = async (npm, url, errMsg) => { await new Promise((resolve, reject) => { opener(url, { command }, (err) => { if (err) { - if (err.code === 'ENOENT') + if (err.code === 'ENOENT') { printAlternateMsg() - else + } else { return reject(err) + } } return resolve() }) diff --git a/lib/utils/otplease.js b/lib/utils/otplease.js index ca271526ccb5c..0e32493f9eb3e 100644 --- a/lib/utils/otplease.js +++ b/lib/utils/otplease.js @@ -8,11 +8,11 @@ module.exports = otplease function otplease (opts, fn) { opts = { prompt, ...opts } return Promise.resolve().then(() => fn(opts)).catch(err => { - if (!isOtpError(err)) + if (!isOtpError(err)) { throw err - else if (!process.stdin.isTTY || !process.stdout.isTTY) + } else if (!process.stdin.isTTY || !process.stdout.isTTY) { throw err - else { + } else { return readUserInfo.otp(opts.prompt) .then(otp => fn({ ...opts, otp })) } diff --git a/lib/utils/queryable.js b/lib/utils/queryable.js index e10eba3b5f092..ceb06bdccd103 100644 --- a/lib/utils/queryable.js +++ b/lib/utils/queryable.js @@ -19,10 +19,10 @@ const replaceAppendSymbols = str => { return [str] } -const parseKeys = (key) => { +const parseKeys = key => { const sqBracketItems = new Set() sqBracketItems.add(_append) - const parseSqBrackets = (str) => { + const parseSqBrackets = str => { const index = sqBracketsMatcher(str) // once we find square brackets, we recursively parse all these @@ -48,11 +48,7 @@ const 
parseKeys = (key) => { return [ ...parseSqBrackets(preSqBracketPortion), foundKey, - ...( - postSqBracketPortion - ? parseSqBrackets(postSqBracketPortion) - : [] - ), + ...(postSqBracketPortion ? parseSqBrackets(postSqBracketPortion) : []), ] } @@ -72,13 +68,14 @@ const parseKeys = (key) => { for (const k of sqBracketKeys) { // keys parsed from square brackets should just be added to list of // resulting keys as they might have dots as part of the key - if (sqBracketItems.has(k)) + if (sqBracketItems.has(k)) { res.push(k) - else { + } else { // splits the dot-sep property names and add them to the list of keys - for (const splitKey of k.split('.')) - /* eslint-disable-next-line no-new-wrappers */ - res.push(new String(splitKey)) + /* eslint-disable-next-line no-new-wrappers */ + for (const splitKey of k.split('.')) { + res.push(String(splitKey)) + } } } @@ -98,10 +95,9 @@ const getter = ({ data, key }) => { for (const k of keys) { // empty-bracket-shortcut-syntax is not supported on getter if (k === _append) { - throw Object.assign( - new Error('Empty brackets are not valid syntax for retrieving values.'), - { code: 'EINVALIDSYNTAX' } - ) + throw Object.assign(new Error('Empty brackets are not valid syntax for retrieving values.'), { + code: 'EINVALIDSYNTAX', + }) } // extra logic to take into account printing array, along with its @@ -118,8 +114,9 @@ const getter = ({ data, key }) => { } else { // if can't find any more values, it means it's just over // and there's nothing to return - if (!_data[k]) + if (!_data[k]) { return undefined + } // otherwise sets the next value _data = _data[k] @@ -130,8 +127,9 @@ const getter = ({ data, key }) => { // these are some legacy expectations from // the old API consumed by lib/view.js - if (Array.isArray(_data) && _data.length <= 1) + if (Array.isArray(_data) && _data.length <= 1) { _data = _data[0] + } return { [key]: _data, @@ -151,30 +149,32 @@ const setter = ({ data, key, value, force }) => { try { maybeIndex = Number(_key) } catch (err) {} - if (!Number.isNaN(maybeIndex)) + if (!Number.isNaN(maybeIndex)) { _key = maybeIndex + } // creates new array in case key is an index // and the array obj is not yet defined const keyIsAnArrayIndex = _key === maybeIndex || _key === _append const dataHasNoItems = !Object.keys(_data).length - if (keyIsAnArrayIndex && dataHasNoItems && !Array.isArray(_data)) + if (keyIsAnArrayIndex && dataHasNoItems && !Array.isArray(_data)) { _data = [] + } // converting from array to an object is also possible, in case the // user is using force mode, we should also convert existing arrays // to an empty object if the current _data is an array - if (force && Array.isArray(_data) && !keyIsAnArrayIndex) + if (force && Array.isArray(_data) && !keyIsAnArrayIndex) { _data = { ..._data } + } // the _append key is a special key that is used to represent // the empty-bracket notation, e.g: arr[] -> arr[arr.length] if (_key === _append) { if (!Array.isArray(_data)) { - throw Object.assign( - new Error(`Can't use append syntax in non-Array element`), - { code: 'ENOAPPEND' } - ) + throw Object.assign(new Error(`Can't use append syntax in non-Array element`), { + code: 'ENOAPPEND', + }) } _key = _data.length } @@ -182,23 +182,15 @@ const setter = ({ data, key, value, force }) => { // retrieves the next data object to recursively iterate on, // throws if trying to override a literal value or add props to an array const next = () => { - const haveContents = - !force && - _data[_key] != null && - value !== _delete - const 
shouldNotOverrideLiteralValue = - !(typeof _data[_key] === 'object') + const haveContents = !force && _data[_key] != null && value !== _delete + const shouldNotOverrideLiteralValue = !(typeof _data[_key] === 'object') // if the next obj to recurse is an array and the next key to be // appended to the resulting obj is not an array index, then it // should throw since we can't append arbitrary props to arrays const shouldNotAddPropsToArrays = - typeof keys[0] !== 'symbol' && - Array.isArray(_data[_key]) && - Number.isNaN(Number(keys[0])) + typeof keys[0] !== 'symbol' && Array.isArray(_data[_key]) && Number.isNaN(Number(keys[0])) - const overrideError = - haveContents && - shouldNotOverrideLiteralValue + const overrideError = haveContents && shouldNotOverrideLiteralValue if (overrideError) { throw Object.assign( new Error(`Property ${_key} already exists and is not an Array or Object.`), @@ -206,14 +198,11 @@ const setter = ({ data, key, value, force }) => { ) } - const addPropsToArrayError = - haveContents && - shouldNotAddPropsToArrays + const addPropsToArrayError = haveContents && shouldNotAddPropsToArrays if (addPropsToArrayError) { - throw Object.assign( - new Error(`Can't add property ${key} to an Array.`), - { code: 'ENOADDPROP' } - ) + throw Object.assign(new Error(`Can't add property ${key} to an Array.`), { + code: 'ENOADDPROP', + }) } return typeof _data[_key] === 'object' ? _data[_key] || {} : {} @@ -222,18 +211,20 @@ const setter = ({ data, key, value, force }) => { // sets items from the parsed array of keys as objects, recurses to // setKeys in case there are still items to be handled, otherwise it // just sets the original value set by the user - if (keys.length) + if (keys.length) { _data[_key] = setKeys(next(), keys.shift()) - else { + } else { // handles special deletion cases for obj props / array items if (value === _delete) { - if (Array.isArray(_data)) + if (Array.isArray(_data)) { _data.splice(_key, 1) - else + } else { delete _data[_key] - } else + } + } else { // finally, sets the value in its right place _data[_key] = value + } } return _data @@ -245,10 +236,9 @@ const setter = ({ data, key, value, force }) => { class Queryable { constructor (obj) { if (!obj || typeof obj !== 'object') { - throw Object.assign( - new Error('Queryable needs an object to query properties from.'), - { code: 'ENOQUERYABLEOBJ' } - ) + throw Object.assign(new Error('Queryable needs an object to query properties from.'), { + code: 'ENOQUERYABLEOBJ', + }) } this[_data] = obj @@ -258,28 +248,33 @@ class Queryable { // this ugly interface here is meant to be a compatibility layer // with the legacy API lib/view.js is consuming, if at some point // we refactor that command then we can revisit making this nicer - if (queries === '') + if (queries === '') { return { '': this[_data] } + } - const q = query => getter({ - data: this[_data], - key: query, - }) + const q = query => + getter({ + data: this[_data], + key: query, + }) if (Array.isArray(queries)) { let res = {} - for (const query of queries) + for (const query of queries) { res = { ...res, ...q(query) } + } return res - } else + } else { return q(queries) + } } // return the value for a single query if found, otherwise returns undefined get (query) { const obj = this.query(query) - if (obj) + if (obj) { return obj[query] + } } // creates objects along the way for the provided `query` parameter diff --git a/lib/utils/read-user-info.js b/lib/utils/read-user-info.js index e3c4a9fbe51ca..993aa886f6b4c 100644 --- a/lib/utils/read-user-info.js 
+++ b/lib/utils/read-user-info.js @@ -23,16 +23,18 @@ function read (opts) { } function readOTP (msg = otpPrompt, otp, isRetry) { - if (isRetry && otp && /^[\d ]+$|^[A-Fa-f0-9]{64,64}$/.test(otp)) + if (isRetry && otp && /^[\d ]+$|^[A-Fa-f0-9]{64,64}$/.test(otp)) { return otp.replace(/\s+/g, '') + } return read({ prompt: msg, default: otp || '' }) .then((otp) => readOTP(msg, otp, true)) } function readPassword (msg = passwordPrompt, password, isRetry) { - if (isRetry && password) + if (isRetry && password) { return password + } return read({ prompt: msg, silent: true, default: password || '' }) .then((password) => readPassword(msg, password, true)) @@ -41,10 +43,11 @@ function readPassword (msg = passwordPrompt, password, isRetry) { function readUsername (msg = usernamePrompt, username, opts = {}, isRetry) { if (isRetry && username) { const error = userValidate.username(username) - if (error) + if (error) { opts.log && opts.log.warn(error.message) - else + } else { return Promise.resolve(username.trim()) + } } return read({ prompt: msg, default: username || '' }) @@ -54,10 +57,11 @@ function readUsername (msg = usernamePrompt, username, opts = {}, isRetry) { function readEmail (msg = emailPrompt, email, opts = {}, isRetry) { if (isRetry && email) { const error = userValidate.email(email) - if (error) + if (error) { opts.log && opts.log.warn(error.message) - else + } else { return email.trim() + } } return read({ prompt: msg, default: email || '' }) diff --git a/lib/utils/reify-finish.js b/lib/utils/reify-finish.js index a9ac4c61f5b8e..9b43abcb7610a 100644 --- a/lib/utils/reify-finish.js +++ b/lib/utils/reify-finish.js @@ -1,7 +1,7 @@ const reifyOutput = require('./reify-output.js') const ini = require('ini') const { writeFile } = require('fs').promises -const {resolve} = require('path') +const { resolve } = require('path') const reifyFinish = async (npm, arb) => { await saveBuiltinConfig(npm, arb) @@ -10,18 +10,21 @@ const reifyFinish = async (npm, arb) => { const saveBuiltinConfig = async (npm, arb) => { const { options: { global }, actualTree } = arb - if (!global) + if (!global) { return + } // if we are using a builtin config, and just installed npm as // a top-level global package, we have to preserve that config. const npmNode = actualTree.inventory.get('node_modules/npm') - if (!npmNode) + if (!npmNode) { return + } const builtinConf = npm.config.data.get('builtin') - if (builtinConf.loadError) + if (builtinConf.loadError) { return + } const content = ini.stringify(builtinConf.raw).trim() + '\n' await writeFile(resolve(npmNode.path, 'npmrc'), content) diff --git a/lib/utils/reify-output.js b/lib/utils/reify-output.js index bf3fa7fb2e13d..7741b72200dd8 100644 --- a/lib/utils/reify-output.js +++ b/lib/utils/reify-output.js @@ -92,14 +92,16 @@ const reifyOutput = (npm, arb) => { // to get the exitCode set appropriately. const printAuditReport = (npm, report) => { const res = getAuditReport(npm, report) - if (!res || !res.report) + if (!res || !res.report) { return + } npm.output(`\n${res.report}`) } const getAuditReport = (npm, report) => { - if (!report) + if (!report) { return + } // when in silent mode, we print nothing. the JSON output is // going to just JSON.stringify() the report object. 
@@ -115,8 +117,9 @@ const getAuditReport = (npm, report) => { ...npm.flatOptions, auditLevel, }) - if (npm.command === 'audit') + if (npm.command === 'audit') { process.exitCode = process.exitCode || res.exitCode + } return res } @@ -124,43 +127,52 @@ const packagesChangedMessage = (npm, { added, removed, changed, audited }) => { const msg = ['\n'] if (added === 0 && removed === 0 && changed === 0) { msg.push('up to date') - if (audited) + if (audited) { msg.push(', ') + } } else { - if (added) + if (added) { msg.push(`added ${added} package${added === 1 ? '' : 's'}`) + } if (removed) { - if (added) + if (added) { msg.push(', ') + } - if (added && !audited && !changed) + if (added && !audited && !changed) { msg.push('and ') + } msg.push(`removed ${removed} package${removed === 1 ? '' : 's'}`) } if (changed) { - if (added || removed) + if (added || removed) { msg.push(', ') + } - if (!audited && (added || removed)) + if (!audited && (added || removed)) { msg.push('and ') + } msg.push(`changed ${changed} package${changed === 1 ? '' : 's'}`) } - if (audited) + if (audited) { msg.push(', and ') + } } - if (audited) + if (audited) { msg.push(`audited ${audited} package${audited === 1 ? '' : 's'}`) + } msg.push(` in ${ms(Date.now() - npm.started)}`) npm.output(msg.join('')) } const packagesFundingMessage = (npm, { funding }) => { - if (!funding) + if (!funding) { return + } npm.output('') const pkg = funding === 1 ? 'package' : 'packages' diff --git a/lib/utils/replace-info.js b/lib/utils/replace-info.js index 7c7489bc17b16..e9d19ef5fb2ba 100644 --- a/lib/utils/replace-info.js +++ b/lib/utils/replace-info.js @@ -5,8 +5,9 @@ function replaceInfo (arg) { const isArray = Array.isArray(arg) const isString = str => typeof str === 'string' - if (!isArray && !isString(arg)) + if (!isArray && !isString(arg)) { return arg + } const testUrlAndReplace = str => { try { @@ -19,8 +20,9 @@ function replaceInfo (arg) { const args = isString(arg) ? 
arg.split(' ') : arg const info = args.map(a => { - if (isString(a) && a.indexOf(' ') > -1) + if (isString(a) && a.indexOf(' ') > -1) { return a.split(' ').map(testUrlAndReplace).join(' ') + } return testUrlAndReplace(a) }) diff --git a/lib/utils/setup-log.js b/lib/utils/setup-log.js index aaf7fa47e266d..05ca38c828240 100644 --- a/lib/utils/setup-log.js +++ b/lib/utils/setup-log.js @@ -35,28 +35,32 @@ module.exports = (config) => { return warn(heading, ...args) } - if (config.get('timing') && config.get('loglevel') === 'notice') + if (config.get('timing') && config.get('loglevel') === 'notice') { log.level = 'timing' - else + } else { log.level = config.get('loglevel') + } log.heading = config.get('heading') || 'npm' - if (enableColorStderr) + if (enableColorStderr) { log.enableColor() - else + } else { log.disableColor() + } - if (config.get('unicode')) + if (config.get('unicode')) { log.enableUnicode() - else + } else { log.disableUnicode() + } // if it's more than error, don't show progress const quiet = log.levels[log.level] > log.levels.error - if (config.get('progress') && stderrNotDumb && !quiet) + if (config.get('progress') && stderrNotDumb && !quiet) { log.enableProgress() - else + } else { log.disableProgress() + } } diff --git a/lib/utils/split-package-names.js b/lib/utils/split-package-names.js index bb6e449bac243..395c2517d5934 100644 --- a/lib/utils/split-package-names.js +++ b/lib/utils/split-package-names.js @@ -4,15 +4,17 @@ const splitPackageNames = (path) => { return path.split('/') // combine scoped parts .reduce((parts, part) => { - if (parts.length === 0) + if (parts.length === 0) { return [part] + } const lastPart = parts[parts.length - 1] // check if previous part is the first part of a scoped package - if (lastPart[0] === '@' && !lastPart.includes('/')) + if (lastPart[0] === '@' && !lastPart.includes('/')) { parts[parts.length - 1] += '/' + part - else + } else { parts.push(part) + } return parts }, []) diff --git a/lib/utils/tar.js b/lib/utils/tar.js index 0ff822370d4da..26e7a98df6b49 100644 --- a/lib/utils/tar.js +++ b/lib/utils/tar.js @@ -14,39 +14,58 @@ const logTar = (tarball, opts = {}) => { log.notice('', `${unicode ? '📦 ' : 'package:'} ${tarball.name}@${tarball.version}`) log.notice('=== Tarball Contents ===') if (tarball.files.length) { - log.notice('', columnify(tarball.files.map((f) => { - const bytes = formatBytes(f.size, false) - return (/^node_modules\//.test(f.path)) ? null - : { path: f.path, size: `${bytes}` } - }).filter(f => f), { - include: ['size', 'path'], - showHeaders: false, - })) + log.notice( + '', + columnify( + tarball.files + .map(f => { + const bytes = formatBytes(f.size, false) + return /^node_modules\//.test(f.path) ? 
null : { path: f.path, size: `${bytes}` } + }) + .filter(f => f), + { + include: ['size', 'path'], + showHeaders: false, + } + ) + ) } if (tarball.bundled.length) { log.notice('=== Bundled Dependencies ===') - tarball.bundled.forEach((name) => log.notice('', name)) + tarball.bundled.forEach(name => log.notice('', name)) } log.notice('=== Tarball Details ===') - log.notice('', columnify([ - { name: 'name:', value: tarball.name }, - { name: 'version:', value: tarball.version }, - tarball.filename && { name: 'filename:', value: tarball.filename }, - { name: 'package size:', value: formatBytes(tarball.size) }, - { name: 'unpacked size:', value: formatBytes(tarball.unpackedSize) }, - { name: 'shasum:', value: tarball.shasum }, - { - name: 'integrity:', - value: tarball.integrity.toString().substr(0, 20) + '[...]' + tarball.integrity.toString().substr(80), - }, - tarball.bundled.length && { name: 'bundled deps:', value: tarball.bundled.length }, - tarball.bundled.length && { name: 'bundled files:', value: tarball.entryCount - tarball.files.length }, - tarball.bundled.length && { name: 'own files:', value: tarball.files.length }, - { name: 'total files:', value: tarball.entryCount }, - ].filter((x) => x), { - include: ['name', 'value'], - showHeaders: false, - })) + log.notice( + '', + columnify( + [ + { name: 'name:', value: tarball.name }, + { name: 'version:', value: tarball.version }, + tarball.filename && { name: 'filename:', value: tarball.filename }, + { name: 'package size:', value: formatBytes(tarball.size) }, + { name: 'unpacked size:', value: formatBytes(tarball.unpackedSize) }, + { name: 'shasum:', value: tarball.shasum }, + { + name: 'integrity:', + value: + tarball.integrity.toString().substr(0, 20) + + '[...]' + + tarball.integrity.toString().substr(80), + }, + tarball.bundled.length && { name: 'bundled deps:', value: tarball.bundled.length }, + tarball.bundled.length && { + name: 'bundled files:', + value: tarball.entryCount - tarball.files.length, + }, + tarball.bundled.length && { name: 'own files:', value: tarball.files.length }, + { name: 'total files:', value: tarball.entryCount }, + ].filter(x => x), + { + include: ['name', 'value'], + showHeaders: false, + } + ) + ) log.notice('', '') } @@ -81,7 +100,7 @@ const getContents = async (manifest, tarball) => { const comparator = ({ path: a }, { path: b }) => localeCompare(a, b) - const isUpper = (str) => { + const isUpper = str => { const ch = str.charAt(0) return ch === ch.toUpperCase() } diff --git a/lib/utils/update-notifier.js b/lib/utils/update-notifier.js index 14c4fac0d58b3..2b45d54c815e0 100644 --- a/lib/utils/update-notifier.js +++ b/lib/utils/update-notifier.js @@ -37,8 +37,9 @@ const updateNotifier = async (npm, spec = 'latest') => { // never check for updates in CI, when updating npm already, or opted out if (!npm.config.get('update-notifier') || isGlobalNpmUpdate(npm) || - ciDetect()) + ciDetect()) { return null + } // if we're on a prerelease train, then updates are coming fast // check for a new one daily. otherwise, weekly. @@ -46,15 +47,17 @@ const updateNotifier = async (npm, spec = 'latest') => { const current = semver.parse(version) // if we're on a beta train, always get the next beta - if (current.prerelease.length) + if (current.prerelease.length) { spec = `^${version}` + } // while on a beta train, get updates daily const duration = spec !== 'latest' ? 
DAILY : WEEKLY // if we've already checked within the specified duration, don't check again - if (!(await checkTimeout(npm, duration))) + if (!(await checkTimeout(npm, duration))) { return null + } // if they're currently using a prerelease, nudge to the next prerelease // otherwise, nudge to latest. @@ -67,8 +70,9 @@ const updateNotifier = async (npm, spec = 'latest') => { }).catch(() => null) // if pacote failed, give up - if (!mani) + if (!mani) { return null + } const latest = mani.version @@ -76,12 +80,14 @@ const updateNotifier = async (npm, spec = 'latest') => { // and should get the updates from that release train. // Note that this isn't another http request over the network, because // the packument will be cached by pacote from previous request. - if (semver.gt(version, latest) && spec === 'latest') + if (semver.gt(version, latest) && spec === 'latest') { return updateNotifier(npm, `^${version}`) + } // if we already have something >= the desired spec, then we're done - if (semver.gte(version, latest)) + if (semver.gte(version, latest)) { return null + } // ok! notify the user about this update they should get. // The message is saved for printing at process exit so it will not get diff --git a/lib/utils/usage.js b/lib/utils/usage.js index 5f4eca73ea5d3..e23e50c51c42a 100644 --- a/lib/utils/usage.js +++ b/lib/utils/usage.js @@ -3,13 +3,15 @@ const aliases = require('../utils/cmd-list').aliases module.exports = function usage (cmd, txt, opt) { const post = Object.keys(aliases).reduce(function (p, c) { var val = aliases[c] - if (val !== cmd) + if (val !== cmd) { return p + } return p.concat(c) }, []) - if (opt || post.length > 0) + if (opt || post.length > 0) { txt += '\n\n' + } if (post.length === 1) { txt += 'alias: ' @@ -20,8 +22,9 @@ module.exports = function usage (cmd, txt, opt) { } if (opt) { - if (post.length > 0) + if (post.length > 0) { txt += '\n' + } txt += 'common options: ' + opt } diff --git a/lib/workspaces/get-workspaces.js b/lib/workspaces/get-workspaces.js index 3eb8e4865b706..a59b5a6c54b70 100644 --- a/lib/workspaces/get-workspaces.js +++ b/lib/workspaces/get-workspaces.js @@ -10,18 +10,21 @@ const getWorkspaces = async (filters, { path, includeWorkspaceRoot }) => { const pkg = await rpj(resolve(path, 'package.json')) const workspaces = await mapWorkspaces({ cwd: path, pkg }) let res = new Map() - if (includeWorkspaceRoot) + if (includeWorkspaceRoot) { res.set(pkg.name, path) + } - if (!filters.length) + if (!filters.length) { res = new Map([...res, ...workspaces]) + } for (const filterArg of filters) { for (const [workspaceName, workspacePath] of workspaces.entries()) { if (filterArg === workspaceName || resolve(path, filterArg) === workspacePath - || minimatch(workspacePath, `${resolve(path, filterArg)}/*`)) + || minimatch(workspacePath, `${resolve(path, filterArg)}/*`)) { res.set(workspaceName, workspacePath) + } } } diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 69731618ef376..6b301e324febf 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -56,8 +56,8 @@ readme* /@istanbuljs/schema /@mdx-js/mdx /@mdx-js/util +/@npmcli/eslint-config /@types/hast -/@types/json5 /@types/mdast /@types/parse5 /@types/unist @@ -72,8 +72,6 @@ readme* /append-transform /argparse /array-find-index -/array-includes -/array.prototype.flat /asn1 /assert-plus /astral-regex @@ -136,20 +134,14 @@ readme* /ecc-jsbn /end-of-stream /enquirer -/error-ex /es-abstract /es-to-primitive /es6-error /escape-string-regexp /escodegen /eslint 
-/eslint-import-resolver-node -/eslint-module-utils /eslint-plugin-es -/eslint-plugin-import /eslint-plugin-node -/eslint-plugin-promise -/eslint-plugin-standard /eslint-scope /eslint-utils /eslint-visitor-keys @@ -170,7 +162,6 @@ readme* /file-uri-to-path /fill-range /find-cache-dir -/find-up /findit /flat-cache /flatted @@ -213,7 +204,6 @@ readme* /inline-style-parser /is-alphabetical /is-alphanumerical -/is-arrayish /is-bigint /is-binary-path /is-boolean-object @@ -250,7 +240,6 @@ readme* /jsbn /jsdom /jsesc -/json-parse-better-errors /json-parse-errback /json-schema /json-schema-traverse @@ -262,8 +251,6 @@ readme* /levn /libtap /licensee -/load-json-file -/locate-path /lodash /lodash.clonedeep /lodash.flattendeep @@ -301,27 +288,17 @@ readme* /object-keys /object.assign /object.getownpropertydescriptors -/object.values /optionator /own-or /own-or-env -/p-limit -/p-locate -/p-try /package-hash /parent-module /parse-entities -/parse-json /parse5 -/path-exists /path-key /path-parse -/path-type /performance-now /picomatch -/pify -/pkg-dir -/pkg-up /prebuild-install /prelude-ls /process-on-spawn @@ -337,8 +314,6 @@ readme* /react /react-is /read-package-tree -/read-pkg -/read-pkg-up /readdirp /regexpp /release-zalgo @@ -376,7 +351,6 @@ readme* /state-toggle /string.prototype.trimend /string.prototype.trimstart -/strip-bom /strip-json-comments /style-to-object /symbol-tree @@ -397,7 +371,6 @@ readme* /trim-trailing-lines /trivial-deferred /trough -/tsconfig-paths /tunnel-agent /tweetnacl /type-check diff --git a/package-lock.json b/package-lock.json index 84a12e2926664..043815d3cd817 100644 --- a/package-lock.json +++ b/package-lock.json @@ -161,11 +161,9 @@ "npx": "bin/npx-cli.js" }, "devDependencies": { + "@npmcli/eslint-config": "^1.0.2", "eslint": "^7.31.0", - "eslint-plugin-import": "^2.23.4", "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.1.0", - "eslint-plugin-standard": "^5.0.0", "licensee": "^8.2.0", "spawk": "^1.7.1", "tap": "^15.0.9" @@ -850,6 +848,16 @@ "node": ">=10" } }, + "node_modules/@npmcli/eslint-config": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@npmcli/eslint-config/-/eslint-config-1.0.2.tgz", + "integrity": "sha512-7fycAADbqtTTcDbYvUeWy56p+F+gju6JZrUSfx5ARle6LjhnoLyniCiuJueuxJkxYFcTUvcbaVGU4CzaD21PxA==", + "dev": true, + "peerDependencies": { + "eslint": ">= 7", + "eslint-plugin-node": "^11.1.0" + } + }, "node_modules/@npmcli/fs": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.0.0.tgz", @@ -995,12 +1003,6 @@ "@types/unist": "*" } }, - "node_modules/@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=", - "dev": true - }, "node_modules/@types/mdast": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz", @@ -1242,42 +1244,6 @@ "node": ">=0.10.0" } }, - "node_modules/array-includes": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz", - "integrity": "sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.2", - "get-intrinsic": "^1.1.1", - "is-string": "^1.0.5" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flat": { - "version": 
"1.2.4", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz", - "integrity": "sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.0", - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", @@ -2444,15 +2410,6 @@ "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", "inBundle": true }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, "node_modules/es-abstract": { "version": "1.18.3", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.3.tgz", @@ -2667,47 +2624,6 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint-import-resolver-node": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", - "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", - "dev": true, - "dependencies": { - "debug": "^3.2.7", - "resolve": "^1.20.0" - } - }, - "node_modules/eslint-import-resolver-node/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-module-utils": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.2.tgz", - "integrity": "sha512-QG8pcgThYOuqxupd06oYTZoNOGaUdTY1PqK+oS6ElF6vs4pBdk/aYxFVQQXzcrAqp9m7cl7lb2ubazX+g16k2Q==", - "dev": true, - "dependencies": { - "debug": "^3.2.7", - "pkg-dir": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint-module-utils/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, "node_modules/eslint-plugin-es": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz", @@ -2727,62 +2643,6 @@ "eslint": ">=4.19.1" } }, - "node_modules/eslint-plugin-import": { - "version": "2.24.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.24.2.tgz", - "integrity": "sha512-hNVtyhiEtZmpsabL4neEj+6M5DCLgpYyG9nzJY8lZQeQXEn5UPW1DpUdsMHMXsq98dbNm7nt1w9ZMSVpfJdi8Q==", - "dev": true, - "dependencies": { - "array-includes": "^3.1.3", - "array.prototype.flat": "^1.2.4", - "debug": "^2.6.9", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-module-utils": "^2.6.2", - "find-up": "^2.0.0", - "has": "^1.0.3", - "is-core-module": "^2.6.0", - "minimatch": "^3.0.4", - "object.values": "^1.1.4", - "pkg-up": "^2.0.0", - "read-pkg-up": "^3.0.0", - "resolve": 
"^1.20.0", - "tsconfig-paths": "^3.11.0" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0" - } - }, - "node_modules/eslint-plugin-import/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/eslint-plugin-import/node_modules/doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eslint-plugin-import/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, "node_modules/eslint-plugin-node": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", @@ -2821,42 +2681,6 @@ "semver": "bin/semver.js" } }, - "node_modules/eslint-plugin-promise": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-5.1.0.tgz", - "integrity": "sha512-NGmI6BH5L12pl7ScQHbg7tvtk4wPxxj8yPHH47NvSmMtFneC077PSeY3huFj06ZWZvtbfxSPt3RuOQD5XcR4ng==", - "dev": true, - "engines": { - "node": "^10.12.0 || >=12.0.0" - }, - "peerDependencies": { - "eslint": "^7.0.0" - } - }, - "node_modules/eslint-plugin-standard": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-5.0.0.tgz", - "integrity": "sha512-eSIXPc9wBM4BrniMzJRBm2uoVuXz2EPa+NXPk2+itrVt+r5SbKFERx/IgrK/HmfjddyKVz2f+j+7gBRvu19xLg==", - "deprecated": "standard 16.0.0 and eslint-config-standard 16.0.0 no longer require the eslint-plugin-standard package. You can remove it from your dependencies with 'npm rm eslint-plugin-standard'. 
More info here: https://github.com/standard/standard/issues/1316", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "peerDependencies": { - "eslint": ">=5.0.0" - } - }, "node_modules/eslint-scope": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", @@ -3239,18 +3063,6 @@ "node": ">=8" } }, - "node_modules/find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "dev": true, - "dependencies": { - "locate-path": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/findit": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz", @@ -3999,12 +3811,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", - "dev": true - }, "node_modules/is-bigint": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz", @@ -4141,9 +3947,9 @@ } }, "node_modules/is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "dependencies": { "is-extglob": "^2.1.1" @@ -4547,12 +4353,6 @@ "node": ">=4" } }, - "node_modules/json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", - "dev": true - }, "node_modules/json-parse-errback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/json-parse-errback/-/json-parse-errback-2.0.1.tgz", @@ -4893,34 +4693,6 @@ "semver": "bin/semver.js" } }, - "node_modules/load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "dev": true, - "dependencies": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", @@ -5885,23 +5657,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.values": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz", - "integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==", - "dev": 
true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.18.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -5952,30 +5707,6 @@ "own-or": "^1.0.0" } }, - "node_modules/p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "dev": true, - "dependencies": { - "p-try": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "dev": true, - "dependencies": { - "p-limit": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/p-map": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", @@ -5991,15 +5722,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/package-hash": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", @@ -6089,34 +5811,12 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "dependencies": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/parse5": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", "dev": true }, - "node_modules/path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", @@ -6141,18 +5841,6 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "node_modules/path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "dependencies": { - "pify": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -6171,39 +5859,6 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/pkg-dir": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", - "dev": true, - "dependencies": { - "find-up": "^2.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", - "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", - "dev": true, - "dependencies": { - "find-up": "^2.1.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/prebuild-install": { "version": "6.1.2", "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.2.tgz", @@ -6607,102 +6262,48 @@ "semver": "bin/semver" } }, - "node_modules/read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "inBundle": true, "dependencies": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" }, "engines": { - "node": ">=4" + "node": ">= 6" } }, - "node_modules/read-pkg-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", - "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", + "node_modules/readdir-scoped-modules": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", + "integrity": "sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==", + "inBundle": true, + "dependencies": { + "debuglog": "^1.0.1", + "dezalgo": "^1.0.0", + "graceful-fs": "^4.1.2", + "once": "^1.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", + "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", "dev": true, "dependencies": { - "find-up": "^2.0.0", - "read-pkg": "^3.0.0" + "picomatch": "^2.2.1" }, "engines": { - "node": ">=4" - } - }, - "node_modules/read-pkg/node_modules/hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", - "dev": true - }, - "node_modules/read-pkg/node_modules/normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "dependencies": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "node_modules/read-pkg/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/readable-stream": { - "version": "3.6.0", - "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "inBundle": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/readdir-scoped-modules": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz", - "integrity": "sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw==", - "inBundle": true, - "dependencies": { - "debuglog": "^1.0.1", - "dezalgo": "^1.0.0", - "graceful-fs": "^4.1.2", - "once": "^1.3.0" - } - }, - "node_modules/readdirp": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", - "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", - "dev": true, - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" + "node": ">=8.10.0" } }, "node_modules/regexpp": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz", - "integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", "dev": true, "engines": { "node": ">=8" @@ -7437,15 +7038,6 @@ "node": ">=0.10.0" } }, - "node_modules/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", @@ -9722,30 +9314,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/tsconfig-paths": { - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.11.0.tgz", - "integrity": "sha512-7ecdYDnIdmv639mmDwslG6KQg1Z9STTz1j7Gcz0xa+nshh/gKDAHcPxRbWOsA3SPp0tXP2leTcY9Kw+NAkfZzA==", - "dev": true, - "dependencies": { - "@types/json5": "^0.0.29", - "json5": "^1.0.1", - "minimist": "^1.2.0", - "strip-bom": "^3.0.0" - } - }, - "node_modules/tsconfig-paths/node_modules/json5": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", - "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", - "dev": true, - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "json5": "lib/cli.js" - } - }, "node_modules/tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", @@ -10574,16 +10142,263 @@ "tar": "^6.1.0" }, "devDependencies": { - "eslint": "^7.28.0", - "eslint-plugin-import": "^2.23.4", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.1.0", - "eslint-plugin-standard": "^5.0.0", + "eslint": "^8.1.0", "tap": "^15.0.9" }, "engines": { "node": ">=10" } + }, + "packages/libnpmdiff/node_modules/@eslint/eslintrc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.0.3.tgz", + "integrity": 
"sha512-DHI1wDPoKCBPoLZA3qDR91+3te/wDSc1YhKg3jR8NxKKRJq2hwHwcWv31cSwSYvIBrmbENoYMWcenW8uproQqg==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.0.0", + "globals": "^13.9.0", + "ignore": "^4.0.6", + "import-fresh": "^3.2.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "packages/libnpmdiff/node_modules/@humanwhocodes/config-array": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.6.0.tgz", + "integrity": "sha512-JQlEKbcgEUjBFhLIF4iqM7u/9lwgHRBcpHrmUNCALK0Q3amXN6lxdoXLnF0sm11E9VqTmBALR87IlUg1bZ8A9A==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "packages/libnpmdiff/node_modules/acorn": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", + "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "packages/libnpmdiff/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "packages/libnpmdiff/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "packages/libnpmdiff/node_modules/eslint": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.1.0.tgz", + "integrity": "sha512-JZvNneArGSUsluHWJ8g8MMs3CfIEzwaLx9KyH4tZ2i+R2/rPWzL8c0zg3rHdwYVpN/1sB9gqnjHwz9HoeJpGHw==", + "dev": true, + "dependencies": { + "@eslint/eslintrc": "^1.0.3", + "@humanwhocodes/config-array": "^0.6.0", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^6.0.0", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.0.0", + "espree": "^9.0.0", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^6.0.1", + "globals": "^13.6.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.0.4", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "progress": "^2.0.0", + "regexpp": "^3.2.0", + "semver": "^7.2.1", + "strip-ansi": "^6.0.0", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "packages/libnpmdiff/node_modules/eslint-scope": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-6.0.0.tgz", + "integrity": 
"sha512-uRDL9MWmQCkaFus8RF5K9/L/2fn+80yoW3jkD53l4shjCh26fCtvJGasxjUqP5OT87SYTxCVA3BwTUzuELx9kA==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "packages/libnpmdiff/node_modules/eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^2.0.0" + }, + "engines": { + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" + } + }, + "packages/libnpmdiff/node_modules/eslint/node_modules/eslint-visitor-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.0.0.tgz", + "integrity": "sha512-mJOZa35trBTb3IyRmo8xmKBZlxf+N7OnUl4+ZhJHs/r+0770Wh/LEACE2pqMGMe27G/4y8P2bYGk4J70IC5k1Q==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "packages/libnpmdiff/node_modules/eslint/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "packages/libnpmdiff/node_modules/espree": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.0.0.tgz", + "integrity": "sha512-r5EQJcYZ2oaGbeR0jR0fFVijGOcwai07/690YRXLINuhmVeRY4UKSAsQPe/0BNuDgwP7Ophoc1PRsr2E3tkbdQ==", + "dev": true, + "dependencies": { + "acorn": "^8.5.0", + "acorn-jsx": "^5.3.1", + "eslint-visitor-keys": "^3.0.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "packages/libnpmdiff/node_modules/espree/node_modules/eslint-visitor-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.0.0.tgz", + "integrity": "sha512-mJOZa35trBTb3IyRmo8xmKBZlxf+N7OnUl4+ZhJHs/r+0770Wh/LEACE2pqMGMe27G/4y8P2bYGk4J70IC5k1Q==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "packages/libnpmdiff/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "packages/libnpmdiff/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "packages/libnpmdiff/node_modules/globals": { + "version": "13.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", + "integrity": "sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"packages/libnpmdiff/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "packages/libnpmdiff/node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } } }, "dependencies": { @@ -11145,6 +10960,13 @@ "ansi-styles": "^4.3.0" } }, + "@npmcli/eslint-config": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@npmcli/eslint-config/-/eslint-config-1.0.2.tgz", + "integrity": "sha512-7fycAADbqtTTcDbYvUeWy56p+F+gju6JZrUSfx5ARle6LjhnoLyniCiuJueuxJkxYFcTUvcbaVGU4CzaD21PxA==", + "dev": true, + "requires": {} + }, "@npmcli/fs": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.0.0.tgz", @@ -11260,12 +11082,6 @@ "@types/unist": "*" } }, - "@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=", - "dev": true - }, "@types/mdast": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz", @@ -11448,30 +11264,6 @@ "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=", "dev": true }, - "array-includes": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.3.tgz", - "integrity": "sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.2", - "get-intrinsic": "^1.1.1", - "is-string": "^1.0.5" - } - }, - "array.prototype.flat": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz", - "integrity": "sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg==", - "dev": true, - "requires": { - "call-bind": "^1.0.0", - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" - } - }, "asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", @@ -12346,15 +12138,6 @@ "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==" }, - "error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "dev": true, - "requires": { - "is-arrayish": "^0.2.1" - } - }, "es-abstract": { "version": "1.18.3", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.3.tgz", @@ -12537,66 +12320,24 @@ "resolved": "https://registry.npmjs.org/globals/-/globals-13.9.0.tgz", "integrity": "sha512-74/FduwI/JaIrr1H8e71UbDE+5x7pIPs1C2rrwC52SszOo043CsWOZEMW7o2Y58xwm9b+0RBKDxY5n2sUpEFxA==", "dev": true, - "requires": { - "type-fest": "^0.20.2" - } - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - }, - "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true - } - } - }, - "eslint-import-resolver-node": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", - "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", - "dev": true, - "requires": { - "debug": "^3.2.7", - "resolve": "^1.20.0" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } - } - }, - "eslint-module-utils": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.2.tgz", - "integrity": "sha512-QG8pcgThYOuqxupd06oYTZoNOGaUdTY1PqK+oS6ElF6vs4pBdk/aYxFVQQXzcrAqp9m7cl7lb2ubazX+g16k2Q==", - "dev": true, - "requires": { - "debug": "^3.2.7", - "pkg-dir": "^2.0.0" - }, - "dependencies": { - "debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "type-fest": "^0.20.2" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "dev": true, "requires": { - "ms": "^2.1.1" + "ansi-regex": "^5.0.0" } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true } } }, @@ -12610,55 +12351,6 @@ "regexpp": "^3.0.0" } }, - "eslint-plugin-import": { - "version": "2.24.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.24.2.tgz", - "integrity": "sha512-hNVtyhiEtZmpsabL4neEj+6M5DCLgpYyG9nzJY8lZQeQXEn5UPW1DpUdsMHMXsq98dbNm7nt1w9ZMSVpfJdi8Q==", - "dev": true, - "requires": { - "array-includes": "^3.1.3", - "array.prototype.flat": "^1.2.4", - "debug": "^2.6.9", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-module-utils": "^2.6.2", - "find-up": "^2.0.0", - "has": "^1.0.3", - "is-core-module": "^2.6.0", - "minimatch": "^3.0.4", - "object.values": "^1.1.4", - "pkg-up": "^2.0.0", - "read-pkg-up": "^3.0.0", - "resolve": "^1.20.0", - "tsconfig-paths": "^3.11.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": 
"sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - } - } - }, "eslint-plugin-node": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", @@ -12687,20 +12379,6 @@ } } }, - "eslint-plugin-promise": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-5.1.0.tgz", - "integrity": "sha512-NGmI6BH5L12pl7ScQHbg7tvtk4wPxxj8yPHH47NvSmMtFneC077PSeY3huFj06ZWZvtbfxSPt3RuOQD5XcR4ng==", - "dev": true, - "requires": {} - }, - "eslint-plugin-standard": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-5.0.0.tgz", - "integrity": "sha512-eSIXPc9wBM4BrniMzJRBm2uoVuXz2EPa+NXPk2+itrVt+r5SbKFERx/IgrK/HmfjddyKVz2f+j+7gBRvu19xLg==", - "dev": true, - "requires": {} - }, "eslint-scope": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", @@ -12947,15 +12625,6 @@ } } }, - "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", - "dev": true, - "requires": { - "locate-path": "^2.0.0" - } - }, "findit": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz", @@ -13488,12 +13157,6 @@ "is-decimal": "^1.0.0" } }, - "is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", - "dev": true - }, "is-bigint": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.2.tgz", @@ -13570,9 +13233,9 @@ "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" }, "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "requires": { "is-extglob": "^2.1.1" @@ -13865,12 +13528,6 @@ "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "dev": true }, - "json-parse-better-errors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", - "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", - "dev": true - }, "json-parse-errback": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/json-parse-errback/-/json-parse-errback-2.0.1.tgz", @@ -13981,16 +13638,200 @@ "@npmcli/installed-package-contents": "^1.0.7", "binary-extensions": "^2.2.0", "diff": "^5.0.0", - "eslint": "^7.28.0", - "eslint-plugin-import": "^2.23.4", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.1.0", - "eslint-plugin-standard": "^5.0.0", + "eslint": "^8.1.0", "minimatch": "^3.0.4", "npm-package-arg": "^8.1.4", "pacote": "^12.0.0", "tap": "^15.0.9", "tar": "^6.1.0" + }, + "dependencies": { + "@eslint/eslintrc": { + 
"version": "1.0.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.0.3.tgz", + "integrity": "sha512-DHI1wDPoKCBPoLZA3qDR91+3te/wDSc1YhKg3jR8NxKKRJq2hwHwcWv31cSwSYvIBrmbENoYMWcenW8uproQqg==", + "dev": true, + "requires": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.0.0", + "globals": "^13.9.0", + "ignore": "^4.0.6", + "import-fresh": "^3.2.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "strip-json-comments": "^3.1.1" + } + }, + "@humanwhocodes/config-array": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.6.0.tgz", + "integrity": "sha512-JQlEKbcgEUjBFhLIF4iqM7u/9lwgHRBcpHrmUNCALK0Q3amXN6lxdoXLnF0sm11E9VqTmBALR87IlUg1bZ8A9A==", + "dev": true, + "requires": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + } + }, + "acorn": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", + "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "eslint": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.1.0.tgz", + "integrity": "sha512-JZvNneArGSUsluHWJ8g8MMs3CfIEzwaLx9KyH4tZ2i+R2/rPWzL8c0zg3rHdwYVpN/1sB9gqnjHwz9HoeJpGHw==", + "dev": true, + "requires": { + "@eslint/eslintrc": "^1.0.3", + "@humanwhocodes/config-array": "^0.6.0", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^6.0.0", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.0.0", + "espree": "^9.0.0", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^6.0.1", + "globals": "^13.6.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.0.4", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "progress": "^2.0.0", + "regexpp": "^3.2.0", + "semver": "^7.2.1", + "strip-ansi": "^6.0.0", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.0.0.tgz", + "integrity": "sha512-mJOZa35trBTb3IyRmo8xmKBZlxf+N7OnUl4+ZhJHs/r+0770Wh/LEACE2pqMGMe27G/4y8P2bYGk4J70IC5k1Q==", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + } + } + }, + "eslint-scope": { + "version": "6.0.0", + 
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-6.0.0.tgz", + "integrity": "sha512-uRDL9MWmQCkaFus8RF5K9/L/2fn+80yoW3jkD53l4shjCh26fCtvJGasxjUqP5OT87SYTxCVA3BwTUzuELx9kA==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + } + }, + "eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^2.0.0" + } + }, + "espree": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.0.0.tgz", + "integrity": "sha512-r5EQJcYZ2oaGbeR0jR0fFVijGOcwai07/690YRXLINuhmVeRY4UKSAsQPe/0BNuDgwP7Ophoc1PRsr2E3tkbdQ==", + "dev": true, + "requires": { + "acorn": "^8.5.0", + "acorn-jsx": "^5.3.1", + "eslint-visitor-keys": "^3.0.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.0.0.tgz", + "integrity": "sha512-mJOZa35trBTb3IyRmo8xmKBZlxf+N7OnUl4+ZhJHs/r+0770Wh/LEACE2pqMGMe27G/4y8P2bYGk4J70IC5k1Q==", + "dev": true + } + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + }, + "glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "requires": { + "is-glob": "^4.0.3" + } + }, + "globals": { + "version": "13.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", + "integrity": "sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true + } } }, "libnpmexec": { @@ -14149,28 +13990,6 @@ } } }, - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - } - }, - "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", - "dev": true, - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - } - }, "lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", @@ -14892,17 +14711,6 @@ "es-abstract": "^1.18.0-next.2" } }, - "object.values": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.4.tgz", - 
"integrity": "sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==", - "dev": true, - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.18.2" - } - }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -14945,24 +14753,6 @@ "own-or": "^1.0.0" } }, - "p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "dev": true, - "requires": { - "p-try": "^1.0.0" - } - }, - "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", - "dev": true, - "requires": { - "p-limit": "^1.1.0" - } - }, "p-map": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", @@ -14971,12 +14761,6 @@ "aggregate-error": "^3.0.0" } }, - "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", - "dev": true - }, "package-hash": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", @@ -15048,28 +14832,12 @@ "is-hexadecimal": "^1.0.0" } }, - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - }, "parse5": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", "dev": true }, - "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true - }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", @@ -15087,15 +14855,6 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, - "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "requires": { - "pify": "^3.0.0" - } - }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -15108,30 +14867,6 @@ "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", "dev": true }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - }, - "pkg-dir": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", - "dev": true, - "requires": { - "find-up": "^2.1.0" - } - }, - "pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", - "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", - "dev": true, - "requires": { - "find-up": "^2.1.0" - } - }, 
"prebuild-install": { "version": "6.1.2", "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.2.tgz", @@ -15452,53 +15187,6 @@ } } }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - }, - "dependencies": { - "hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", - "dev": true - }, - "normalize-package-data": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", - "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", - "dev": true, - "requires": { - "hosted-git-info": "^2.1.4", - "resolve": "^1.10.0", - "semver": "2 || 3 || 4 || 5", - "validate-npm-package-license": "^3.0.1" - } - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - } - } - }, - "read-pkg-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", - "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", - "dev": true, - "requires": { - "find-up": "^2.0.0", - "read-pkg": "^3.0.0" - } - }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -15530,9 +15218,9 @@ } }, "regexpp": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz", - "integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", "dev": true }, "release-zalgo": { @@ -16072,12 +15760,6 @@ "ansi-regex": "^2.0.0" } }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", - "dev": true - }, "strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", @@ -17691,29 +17373,6 @@ "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", "dev": true }, - "tsconfig-paths": { - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.11.0.tgz", - "integrity": "sha512-7ecdYDnIdmv639mmDwslG6KQg1Z9STTz1j7Gcz0xa+nshh/gKDAHcPxRbWOsA3SPp0tXP2leTcY9Kw+NAkfZzA==", - "dev": true, - "requires": { - "@types/json5": "^0.0.29", - "json5": "^1.0.1", - "minimist": "^1.2.0", - "strip-bom": "^3.0.0" - }, - "dependencies": { - "json5": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", - "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", - "dev": true, - "requires": { - "minimist": "^1.2.0" - } - } - } - }, "tunnel-agent": { "version": 
"0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", diff --git a/package.json b/package.json index 7735df88a7955..0bc69f6643cc9 100644 --- a/package.json +++ b/package.json @@ -198,11 +198,9 @@ "write-file-atomic" ], "devDependencies": { + "@npmcli/eslint-config": "^1.0.2", "eslint": "^7.31.0", - "eslint-plugin-import": "^2.23.4", "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.1.0", - "eslint-plugin-standard": "^5.0.0", "licensee": "^8.2.0", "spawk": "^1.7.1", "tap": "^15.0.9" diff --git a/packages/libnpmdiff/.eslintrc.json b/packages/libnpmdiff/.eslintrc.json index 6232a8f82187f..b39431d2cb074 100644 --- a/packages/libnpmdiff/.eslintrc.json +++ b/packages/libnpmdiff/.eslintrc.json @@ -1,207 +1,3 @@ { - "parserOptions": { - "ecmaVersion": 2018, - "ecmaFeatures": {}, - "sourceType": "script" - }, - - "env": { - "es6": true, - "node": true - }, - - "plugins": [ - "import", - "node", - "promise", - "standard" - ], - - "globals": { - "document": "readonly", - "navigator": "readonly", - "window": "readonly" - }, - - "rules": { - "accessor-pairs": "error", - "array-bracket-spacing": ["error", "never"], - "arrow-spacing": ["error", { "before": true, "after": true }], - "block-spacing": ["error", "always"], - "brace-style": ["error", "1tbs", { "allowSingleLine": false }], - "camelcase": ["error", { "properties": "never" }], - "comma-dangle": ["error", { - "arrays": "always-multiline", - "objects": "always-multiline", - "imports": "always-multiline", - "exports": "always-multiline", - "functions": "never" - }], - "comma-spacing": ["error", { "before": false, "after": true }], - "comma-style": ["error", "last"], - "computed-property-spacing": ["error", "never"], - "constructor-super": "error", - "curly": ["error", "multi-or-nest"], - "dot-location": ["error", "property"], - "dot-notation": ["error", { "allowKeywords": true }], - "eol-last": "error", - "eqeqeq": ["error", "always", { "null": "ignore" }], - "func-call-spacing": ["error", "never"], - "generator-star-spacing": ["error", { "before": true, "after": true }], - "handle-callback-err": ["error", "^(err|error)$" ], - "indent": ["error", 2, { - "SwitchCase": 1, - "VariableDeclarator": 1, - "outerIIFEBody": 1, - "MemberExpression": 1, - "FunctionDeclaration": { "parameters": 1, "body": 1 }, - "FunctionExpression": { "parameters": 1, "body": 1 }, - "CallExpression": { "arguments": 1 }, - "ArrayExpression": 1, - "ObjectExpression": 1, - "ImportDeclaration": 1, - "flatTernaryExpressions": true, - "ignoreComments": false, - "ignoredNodes": ["TemplateLiteral *"] - }], - "key-spacing": ["error", { "beforeColon": false, "afterColon": true }], - "keyword-spacing": ["error", { "before": true, "after": true }], - "lines-between-class-members": ["error", "always", { "exceptAfterSingleLine": true }], - "new-cap": ["error", { "newIsCap": true, "capIsNew": false, "properties": true }], - "new-parens": "error", - "no-array-constructor": "error", - "no-async-promise-executor": "error", - "no-caller": "error", - "no-case-declarations": "error", - "no-class-assign": "error", - "no-compare-neg-zero": "error", - "no-cond-assign": "off", - "no-const-assign": "error", - "no-constant-condition": ["error", { "checkLoops": false }], - "no-control-regex": "error", - "no-debugger": "error", - "no-delete-var": "error", - "no-dupe-args": "error", - "no-dupe-class-members": "error", - "no-dupe-keys": "error", - "no-duplicate-case": "error", - "no-empty-character-class": "error", - "no-empty-pattern": "error", - "no-eval": 
"error", - "no-ex-assign": "error", - "no-extend-native": "error", - "no-extra-bind": "error", - "no-extra-boolean-cast": "error", - "no-extra-parens": ["error", "functions"], - "no-fallthrough": "error", - "no-floating-decimal": "error", - "no-func-assign": "error", - "no-global-assign": "error", - "no-implied-eval": "error", - "no-inner-declarations": ["error", "functions"], - "no-invalid-regexp": "error", - "no-irregular-whitespace": "error", - "no-iterator": "error", - "no-labels": ["error", { "allowLoop": true, "allowSwitch": false }], - "no-lone-blocks": "error", - "no-misleading-character-class": "error", - "no-prototype-builtins": "error", - "no-useless-catch": "error", - "no-mixed-operators": "off", - "no-mixed-spaces-and-tabs": "error", - "no-multi-spaces": "error", - "no-multi-str": "error", - "no-multiple-empty-lines": ["error", { "max": 1, "maxEOF": 0 }], - "no-negated-in-lhs": "error", - "no-new": "off", - "no-new-func": "error", - "no-new-object": "error", - "no-new-require": "error", - "no-new-symbol": "error", - "no-new-wrappers": "error", - "no-obj-calls": "error", - "no-octal": "error", - "no-octal-escape": "error", - "no-path-concat": "error", - "no-proto": "error", - "no-redeclare": ["error", { "builtinGlobals": false }], - "no-regex-spaces": "error", - "no-return-assign": "off", - "no-self-assign": "off", - "no-self-compare": "error", - "no-sequences": "error", - "no-shadow-restricted-names": "error", - "no-sparse-arrays": "error", - "no-tabs": "error", - "no-template-curly-in-string": "error", - "no-this-before-super": "error", - "no-throw-literal": "off", - "no-trailing-spaces": "error", - "no-undef": "error", - "no-undef-init": "error", - "no-unexpected-multiline": "error", - "no-unmodified-loop-condition": "error", - "no-unneeded-ternary": ["error", { "defaultAssignment": false }], - "no-unreachable": "error", - "no-unsafe-finally": 0, - "no-unsafe-negation": "error", - "no-unused-expressions": ["error", { "allowShortCircuit": true, "allowTernary": true, "allowTaggedTemplates": true }], - "no-unused-vars": ["error", { "vars": "all", "args": "none", "ignoreRestSiblings": true }], - "no-use-before-define": ["error", { "functions": false, "classes": false, "variables": false }], - "no-useless-call": "error", - "no-useless-computed-key": "error", - "no-useless-constructor": "error", - "no-useless-escape": "error", - "no-useless-rename": "error", - "no-useless-return": "error", - "no-void": "error", - "no-whitespace-before-property": "error", - "no-with": "error", - "nonblock-statement-body-position": [2, "below"], - "object-curly-newline": "off", - "object-curly-spacing": "off", - "object-property-newline": ["error", { "allowMultiplePropertiesPerLine": true }], - "one-var": ["error", { "initialized": "never" }], - "operator-linebreak": "off", - "padded-blocks": ["error", { "blocks": "never", "switches": "never", "classes": "never" }], - "prefer-const": ["error", {"destructuring": "all"}], - "prefer-promise-reject-errors": "error", - "quote-props": ["error", "as-needed"], - "quotes": ["error", "single", { "avoidEscape": true, "allowTemplateLiterals": true }], - "rest-spread-spacing": ["error", "never"], - "semi": ["error", "never"], - "semi-spacing": ["error", { "before": false, "after": true }], - "space-before-blocks": ["error", "always"], - "space-before-function-paren": ["error", "always"], - "space-in-parens": ["error", "never"], - "space-infix-ops": "error", - "space-unary-ops": ["error", { "words": true, "nonwords": false }], - "spaced-comment": ["error", 
"always", { - "line": { "markers": ["*package", "!", "/", ",", "="] }, - "block": { "balanced": true, "markers": ["*package", "!", ",", ":", "::", "flow-include"], "exceptions": ["*"] } - }], - "symbol-description": "error", - "template-curly-spacing": ["error", "never"], - "template-tag-spacing": ["error", "never"], - "unicode-bom": ["error", "never"], - "use-isnan": "error", - "valid-typeof": ["error", { "requireStringLiterals": true }], - "wrap-iife": ["error", "any", { "functionPrototypeMethods": true }], - "yield-star-spacing": ["error", "both"], - "yoda": ["error", "never"], - - "import/export": "error", - "import/first": "error", - "import/no-absolute-path": ["error", { "esmodule": true, "commonjs": true, "amd": false }], - "import/no-duplicates": "error", - "import/no-named-default": "error", - "import/no-webpack-loader-syntax": "error", - - "node/no-deprecated-api": "error", - "node/process-exit-as-throw": "error", - - "promise/param-names": "off", - - "standard/no-callback-literal": "error" - } + "extends": ["@npmcli"] } diff --git a/packages/libnpmdiff/index.js b/packages/libnpmdiff/index.js index 73dc3ee64e3ce..4189365602141 100644 --- a/packages/libnpmdiff/index.js +++ b/packages/libnpmdiff/index.js @@ -10,8 +10,9 @@ const argsError = () => { code: 'EDIFFARGS' } ) const diff = async (specs, opts = {}) => { - if (specs.length !== 2) + if (specs.length !== 2) { throw argsError() + } const [ aManifest, diff --git a/packages/libnpmdiff/lib/format-diff.js b/packages/libnpmdiff/lib/format-diff.js index 979ce8873c54f..211386cb5390e 100644 --- a/packages/libnpmdiff/lib/format-diff.js +++ b/packages/libnpmdiff/lib/format-diff.js @@ -30,8 +30,9 @@ const formatDiff = ({ files, opts = {}, refs, versions }) => { b: filenames.b && filenames.b.mode, } - if (contents.a === contents.b && modes.a === modes.b) + if (contents.a === contents.b && modes.a === modes.b) { continue + } if (opts.diffNameOnly) { res += `${filename}${EOL}` @@ -47,18 +48,19 @@ const formatDiff = ({ files, opts = {}, refs, versions }) => { // manually build a git diff-compatible header header(`diff --git ${names.a} ${names.b}`) - if (modes.a === modes.b) + if (modes.a === modes.b) { fileMode = filenames.a.mode - else { - if (modes.a && !modes.b) + } else { + if (modes.a && !modes.b) { header(`deleted file mode ${modes.a}`) - else if (!modes.a && modes.b) + } else if (!modes.a && modes.b) { header(`new file mode ${modes.b}`) - else { + } else { header(`old mode ${modes.a}`) header(`new mode ${modes.b}`) } } + /* eslint-disable-next-line max-len */ header(`index ${opts.tagVersionPrefix || 'v'}${versions.a}..${opts.tagVersionPrefix || 'v'}${versions.b} ${fileMode}`) if (shouldPrintPatch(filename)) { diff --git a/packages/libnpmdiff/lib/should-print-patch.js b/packages/libnpmdiff/lib/should-print-patch.js index aeb015c1a04b3..a954811407d4f 100644 --- a/packages/libnpmdiff/lib/should-print-patch.js +++ b/packages/libnpmdiff/lib/should-print-patch.js @@ -5,8 +5,9 @@ const binaryExtensions = require('binary-extensions') // we should try to print patches as long as the // extension is not identified as binary files const shouldPrintPatch = (path, opts = {}) => { - if (opts.diffText) + if (opts.diffText) { return true + } const filename = basename(path) const extension = ( diff --git a/packages/libnpmdiff/lib/tarball.js b/packages/libnpmdiff/lib/tarball.js index 0c8fb177a3885..4d01d69c9c413 100644 --- a/packages/libnpmdiff/lib/tarball.js +++ b/packages/libnpmdiff/lib/tarball.js @@ -24,8 +24,9 @@ const tarball = (manifest, opts) => 
{ const fromNodeModules = npa(resolved).type === 'directory' && /node_modules[\\/](@[^\\/]+\/)?[^\\/]+[\\/]?$/.test(relative(where, resolved)) - if (fromNodeModules) + if (fromNodeModules) { return nodeModulesTarball(manifest, opts) + } return pacote.tarball(manifest._resolved, opts) } diff --git a/packages/libnpmdiff/package.json b/packages/libnpmdiff/package.json index 529fc79539bac..129d9b90cd7fe 100644 --- a/packages/libnpmdiff/package.json +++ b/packages/libnpmdiff/package.json @@ -46,11 +46,7 @@ ] }, "devDependencies": { - "eslint": "^7.28.0", - "eslint-plugin-import": "^2.23.4", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.1.0", - "eslint-plugin-standard": "^5.0.0", + "eslint": "^8.1.0", "tap": "^15.0.9" }, "dependencies": { diff --git a/packages/libnpmdiff/test/index.js b/packages/libnpmdiff/test/index.js index 88b474c111f15..80b3daaa60161 100644 --- a/packages/libnpmdiff/test/index.js +++ b/packages/libnpmdiff/test/index.js @@ -130,7 +130,7 @@ t.test('folder in node_modules', async t => { t.resolveMatchSnapshot(diff([ `file:${resolve(path, 'node_modules/a/node_modules/b')}`, `file:${resolve(path, 'packages/b')}`, - ], { where: path}), 'should output expected diff') + ], { where: path }), 'should output expected diff') }) t.test('nested, relative path', async t => { const _cwd = process.cwd() diff --git a/scripts/changelog.js b/scripts/changelog.js index 0951bd0275cfc..ef5497d7d07c9 100644 --- a/scripts/changelog.js +++ b/scripts/changelog.js @@ -13,21 +13,26 @@ the result to the changelog. */ const execSync = require('child_process').execSync const branch = process.argv[2] || 'origin/latest' -const log = execSync(`git log --reverse --pretty='format:%h %H%d %s (%aN)%n%b%n---%n' ${branch}...`).toString().split(/\n/) +const log = execSync(`git log --reverse --pretty='format:%h %H%d %s (%aN)%n%b%n---%n' ${branch}...`) + .toString() + .split(/\n/) main() function shortname (url) { - const matched = url.match(/https:\/\/github\.com\/([^/]+\/[^/]+)\/(?:pull|issues)\/(\d+)/) || - url.match(/https:\/\/(npm\.community)\/t\/(?:[^/]+\/)(\d+)/) - if (!matched) + const matched = + url.match(/https:\/\/github\.com\/([^/]+\/[^/]+)\/(?:pull|issues)\/(\d+)/) || + url.match(/https:\/\/(npm\.community)\/t\/(?:[^/]+\/)(\d+)/) + if (!matched) { return false + } const repo = matched[1] const id = matched[2] - if (repo !== 'npm/cli') + if (repo !== 'npm/cli') { return `${repo}#${id}` - else + } else { return `#${id}` + } } function printCommit (c) { @@ -35,21 +40,23 @@ function printCommit (c) { if (c.fixes.length) { for (const fix of c.fixes) { const label = shortname(fix) - if (label) + if (label) { console.log(` [${label}](${fix})`) + } } } else if (c.prurl) { const label = shortname(c.prurl) - if (label) + if (label) { console.log(` [${label}](${c.prurl})`) - else + } else { console.log(` [#](${c.prurl})`) + } } const msg = c.message - .replace(/^\s+/mg, '') + .replace(/^\s+/gm, '') .replace(/^[-a-z]+: /, '') - .replace(/^/mg, ' ') - .replace(/^ {2}Reviewed-by: @.*/mg, '') + .replace(/^/gm, ' ') + .replace(/^ {2}Reviewed-by: @.*/gm, '') .replace(/\n$/, '') // backtickify package@version .replace(/^(\s*@?[^@\s]+@\d+[.]\d+[.]\d+)\b(\s*\S)/g, '$1:$2') @@ -63,8 +70,9 @@ function printCommit (c) { c.credit.forEach(function (credit) { console.log(` ([@${credit}](https://github.com/${credit}))`) }) - } else + } else { console.log(` ([@${c.author}](https://github.com/${c.author}))`) + } } } @@ -74,9 +82,11 @@ function main () { line = line.replace(/\r/g, '') let m /* eslint 
no-cond-assign:0 */ - if (/^---$/.test(line)) + if (/^---$/.test(line)) { printCommit(commit) - else if (m = line.match(/^([a-f0-9]{7,10}) ([a-f0-9]+) (?:[(]([^)]+)[)] )?(.*?) [(](.*?)[)]/)) { + } else if ( + (m = line.match(/^([a-f0-9]{7,10}) ([a-f0-9]+) (?:[(]([^)]+)[)] )?(.*?) [(](.*?)[)]/)) + ) { commit = { shortid: m[1], fullid: m[2], @@ -87,21 +97,23 @@ function main () { fixes: [], credit: null, } - } else if (m = line.match(/^PR-URL: (.*)/)) + } else if ((m = line.match(/^PR-URL: (.*)/))) { commit.prurl = m[1] - else if (m = line.match(/^Credit: @(.*)/)) { - if (!commit.credit) + } else if ((m = line.match(/^Credit: @(.*)/))) { + if (!commit.credit) { commit.credit = [] + } commit.credit.push(m[1]) - } else if (m = line.match(/^(?:Fix(?:es)|Closes?): #?([0-9]+)/)) + } else if ((m = line.match(/^(?:Fix(?:es)|Closes?): #?([0-9]+)/))) { commit.fixes.push(`https://github.com/npm/cli/issues/${m[1]}`) - else if (m = line.match(/^(?:Fix(?:es)|Closes?): ([^#]+)#([0-9]*)/)) + } else if ((m = line.match(/^(?:Fix(?:es)|Closes?): ([^#]+)#([0-9]*)/))) { commit.fixes.push(`https://github.com/${m[1]}/issues/${m[2]}`) - else if (m = line.match(/^(?:Fix(?:es)|Closes?): (https?:\/\/.*)/)) + } else if ((m = line.match(/^(?:Fix(?:es)|Closes?): (https?:\/\/.*)/))) { commit.fixes.push(m[1]) - else if (m = line.match(/^Reviewed-By: @(.*)/)) + } else if ((m = line.match(/^Reviewed-By: @(.*)/))) { commit.reviewed = m[1] - else if (/\S/.test(line)) + } else if (/\S/.test(line)) { commit.message += `\n${line}` + } }) } diff --git a/scripts/config-doc-command.js b/scripts/config-doc-command.js index 085ac958d21fa..9db026f304281 100644 --- a/scripts/config-doc-command.js +++ b/scripts/config-doc-command.js @@ -16,12 +16,14 @@ const describeAll = () => const addBetweenTags = (doc, startTag, endTag, body) => { const startSplit = doc.split(startTag) - if (startSplit.length !== 2) + if (startSplit.length !== 2) { throw new Error('Did not find exactly one start tag') + } const endSplit = startSplit[1].split(endTag) - if (endSplit.length !== 2) + if (endSplit.length !== 2) { throw new Error('Did not find exactly one end tag') + } return [ startSplit[0], @@ -47,6 +49,7 @@ const addDescriptions = doc => { const doc = readFileSync(configDoc, 'utf8') const hasTag = doc.includes('') const newDoc = params && hasTag ? 
addDescriptions(doc) : doc -if (params && !hasTag) +if (params && !hasTag) { console.error('WARNING: did not find config description section', configDoc) +} writeFileSync(configDoc, newDoc) diff --git a/scripts/config-doc.js b/scripts/config-doc.js index 03e8fbc2d07ca..b14baa381f411 100644 --- a/scripts/config-doc.js +++ b/scripts/config-doc.js @@ -5,12 +5,14 @@ const configDoc = resolve(__dirname, '../docs/content/using-npm/config.md') const addBetweenTags = (doc, startTag, endTag, body) => { const startSplit = doc.split(startTag) - if (startSplit.length !== 2) + if (startSplit.length !== 2) { throw new Error('Did not find exactly one start tag') + } const endSplit = startSplit[1].split(endTag) - if (endSplit.length !== 2) + if (endSplit.length !== 2) { throw new Error('Did not find exactly one end tag') + } return [ startSplit[0], diff --git a/scripts/docs-build.js b/scripts/docs-build.js index 8e217d2259a54..63658c79b1631 100644 --- a/scripts/docs-build.js +++ b/scripts/docs-build.js @@ -8,16 +8,18 @@ var src = args[0] var dest = args[1] || src fs.readFile(src, 'utf8', function (err, data) { - if (err) + if (err) { return console.log(err) + } function frontmatter (match, p1) { const fm = { } p1.split(/\r?\n/).forEach((kv) => { const result = kv.match(/^([^\s:]+):\s*(.*)/) - if (result) + if (result) { fm[result[1]] = result[2] + } }) return `# ${fm.title}(${fm.section}) - ${fm.description}` @@ -35,7 +37,8 @@ fs.readFile(src, 'utf8', function (err, data) { .trim() fs.writeFile(dest, marked(result), 'utf8', function (err) { - if (err) + if (err) { return console.log(err) + } }) }) diff --git a/scripts/git-dirty.js b/scripts/git-dirty.js index 4199768deb000..484a4d23e7be2 100644 --- a/scripts/git-dirty.js +++ b/scripts/git-dirty.js @@ -10,7 +10,8 @@ if (status || signal) { console.error({ status, signal }) process.exitCode = status || 1 } -if (stdout.trim() !== '') +if (stdout.trim() !== '') { throw new Error('git dirty') -else +} else { console.log('git clean') +} diff --git a/scripts/update-dist-tags.js b/scripts/update-dist-tags.js index 371d0c03a47d6..825823ce512ea 100644 --- a/scripts/update-dist-tags.js +++ b/scripts/update-dist-tags.js @@ -20,8 +20,8 @@ const { execSync } = require('child_process') const semver = require('semver') const path = require('path') -const getMajorVersion = (input) => semver.parse(input).major -const getMinorVersion = (input) => semver.parse(input).minor +const getMajorVersion = input => semver.parse(input).major +const getMinorVersion = input => semver.parse(input).minor // INFO: String templates to generate the tags to update const LATEST_TAG = (strings, major) => `latest-${major}` @@ -51,7 +51,7 @@ function main () { const removeTag = REMOVE_TAG`${major}${minor}` const updateList = [].concat(TAG_LIST, latestTag, nextTag) - updateList.forEach((tag) => { + updateList.forEach(tag => { setDistTag(tag, version, otp) }) removeDistTag(removeTag, version, otp) @@ -79,8 +79,9 @@ function parseOTP (args) { } case 1: { // --otp=123456 or --otp123456 - if (otp) + if (otp) { return otp + } console.error('Invalid otp value supplied. [CASE 1]') process.exit(1) @@ -89,8 +90,9 @@ function parseOTP (args) { // --otp 123456 // INFO: validating the second argument is an otp code const isValidOtp = PARSE_OTP_VALUE.test(args[1]) - if (isValidOtp) + if (isValidOtp) { return args[1] + } console.error('Invalid otp value supplied. 
[CASE 2]') process.exit(1) @@ -104,7 +106,9 @@ function parseOTP (args) { function setDistTag (tag, version, otp) { try { - const result = execSync(`npm dist-tag set npm@${version} ${tag} --otp=${otp}`, { encoding: 'utf-8' }) + const result = execSync(`npm dist-tag set npm@${version} ${tag} --otp=${otp}`, { + encoding: 'utf-8', + }) console.log('Result:', result) } catch (err) { console.error('Bad dist-tag command.') diff --git a/smoke-tests/index.js b/smoke-tests/index.js index bf99b47307fd8..06ca3dee6422e 100644 --- a/smoke-tests/index.js +++ b/smoke-tests/index.js @@ -7,14 +7,20 @@ const rimraf = promisify(require('rimraf')) const normalizePath = path => path.replace(/[A-Z]:/, '').replace(/\\/g, '/') const cwd = normalizePath(process.cwd()) -t.cleanSnapshot = s => s.split(cwd).join('{CWD}') - .split(registry).join('https://registry.npmjs.org/') - .split(normalizePath(process.execPath)).join('node') - .split(process.cwd()).join('{CWD}') - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') - .replace(/ \(in a browser\)/g, '') - .replace(/^npm@.* /mg, 'npm ') +t.cleanSnapshot = s => + s + .split(cwd) + .join('{CWD}') + .split(registry) + .join('https://registry.npmjs.org/') + .split(normalizePath(process.execPath)) + .join('node') + .split(process.cwd()) + .join('{CWD}') + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + .replace(/ \(in a browser\)/g, '') + .replace(/^npm@.* /gm, 'npm ') // setup server const { start, stop, registry } = require('./server.js') @@ -37,16 +43,23 @@ const env = { HOME: path, PATH: `${process.env.PATH}:${binLocation}`, } -const npmOpts = `--registry=${registry} --cache="${cacheLocation}" --userconfig="${userconfigLocation}" --no-audit --no-update-notifier --loglevel=silly` +const npmOpts = [ + `--registry=${registry}`, + `--cache="${cacheLocation}"`, + `--userconfig="${userconfigLocation}"`, + '--no-audit', + '--no-update-notifier', + '--loglevel=silly', +].join(' ') const npmBin = `"${process.execPath}" "${npmLocation}" ${npmOpts}` const exec = async cmd => { const res = await execAsync(cmd, { cwd: localPrefix, env }) - if (res.stderr) + if (res.stderr) { console.error(res.stderr) + } return String(res.stdout) } -const readFile = filename => - String(fs.readFileSync(resolve(localPrefix, filename))) +const readFile = filename => String(fs.readFileSync(resolve(localPrefix, filename))) // this test must come first, its package.json will be destroyed and the one // created in the next test (npm init) will create a new one that must be @@ -75,14 +88,22 @@ t.test('npm install sends correct user-agent', async t => { }) const cmd = `${npmBin} install fail_reflect_user_agent` - await t.rejects(exec(cmd), { - stderr: /workspaces\/false/, - }, 'workspaces/false is present in output') + await t.rejects( + exec(cmd), + { + stderr: /workspaces\/false/, + }, + 'workspaces/false is present in output' + ) const wsCmd = `${npmBin} install fail_reflect_user_agent --workspaces` - await t.rejects(exec(wsCmd), { - stderr: /workspaces\/true/, - }, 'workspaces/true is present in output') + await t.rejects( + exec(wsCmd), + { + stderr: /workspaces\/true/, + }, + 'workspaces/true is present in output' + ) }) t.test('npm init', async t => { @@ -97,39 +118,29 @@ t.test('npm init', async t => { t.test('npm (no args)', async t => { const cmd = `"${process.execPath}" "${npmLocation}" --no-audit --no-update-notifier` - const cmdRes = await execAsync(cmd, { cwd: localPrefix, env }) - .catch(err => { - t.equal(err.code, 1, 'should exit with error code') - return err - }) + const cmdRes = await 
execAsync(cmd, { cwd: localPrefix, env }).catch(err => { + t.equal(err.code, 1, 'should exit with error code') + return err + }) t.equal(cmdRes.stderr, '', 'should have no stderr output') - t.matchSnapshot(String(cmdRes.stdout), - 'should have expected no args output') + t.matchSnapshot(String(cmdRes.stdout), 'should have expected no args output') }) t.test('npm install prodDep@version', async t => { const cmd = `${npmBin} install abbrev@1.0.4` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes.replace(/in.*s/, ''), - 'should have expected install reify output') - t.matchSnapshot( - readFile('package.json'), - 'should have expected package.json result' - ) - t.matchSnapshot( - readFile('package-lock.json'), - 'should have expected lockfile result' - ) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), 'should have expected install reify output') + t.matchSnapshot(readFile('package.json'), 'should have expected package.json result') + t.matchSnapshot(readFile('package-lock.json'), 'should have expected lockfile result') }) t.test('npm install dev dep', async t => { const cmd = `${npmBin} install -D promise-all-reject-late` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes.replace(/in.*s/, ''), - 'should have expected dev dep added reify output') + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), 'should have expected dev dep added reify output') t.matchSnapshot( readFile('package.json'), 'should have expected dev dep added package.json result' @@ -144,32 +155,28 @@ t.test('npm ls', async t => { const cmd = `${npmBin} ls` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes, - 'should have expected ls output') + t.matchSnapshot(cmdRes, 'should have expected ls output') }) t.test('npm fund', async t => { const cmd = `${npmBin} fund` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes, - 'should have expected fund output') + t.matchSnapshot(cmdRes, 'should have expected fund output') }) t.test('npm explain', async t => { const cmd = `${npmBin} explain abbrev` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes, - 'should have expected explain output') + t.matchSnapshot(cmdRes, 'should have expected explain output') }) t.test('npm diff', async t => { const cmd = `${npmBin} diff --diff=abbrev@1.0.4 --diff=abbrev@1.1.1` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes, - 'should have expected diff output') + t.matchSnapshot(cmdRes, 'should have expected diff output') }) t.test('npm outdated', async t => { @@ -180,16 +187,14 @@ t.test('npm outdated', async t => { }) t.not(cmdRes.stderr, '', 'should have stderr output') - t.matchSnapshot(String(cmdRes.stdout), - 'should have expected outdated output') + t.matchSnapshot(String(cmdRes.stdout), 'should have expected outdated output') }) t.test('npm set-script', async t => { const cmd = `${npmBin} set-script "hello" "echo Hello"` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes, - 'should have expected set-script output') + t.matchSnapshot(cmdRes, 'should have expected set-script output') t.matchSnapshot( readFile('package.json'), 'should have expected script added package.json result' @@ -200,68 +205,49 @@ t.test('npm run-script', async t => { const cmd = `${npmBin} run hello` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes, - 'should have expected run-script output') + t.matchSnapshot(cmdRes, 'should have expected run-script output') }) t.test('npm prefix', async t => { const cmd = `${npmBin} prefix` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes, - 'should have expected prefix output') + 
t.matchSnapshot(cmdRes, 'should have expected prefix output') }) t.test('npm view', async t => { const cmd = `${npmBin} view abbrev@1.0.4` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes, - 'should have expected view output') + t.matchSnapshot(cmdRes, 'should have expected view output') }) t.test('npm update dep', async t => { const cmd = `${npmBin} update abbrev` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes.replace(/in.*s/, ''), - 'should have expected update reify output') - t.matchSnapshot( - readFile('package.json'), - 'should have expected update package.json result' - ) - t.matchSnapshot( - readFile('package-lock.json'), - 'should have expected update lockfile result' - ) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), 'should have expected update reify output') + t.matchSnapshot(readFile('package.json'), 'should have expected update package.json result') + t.matchSnapshot(readFile('package-lock.json'), 'should have expected update lockfile result') }) t.test('npm uninstall', async t => { const cmd = `${npmBin} uninstall promise-all-reject-late` const cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes.replace(/in.*s/, ''), - 'should have expected uninstall reify output') - t.matchSnapshot( - readFile('package.json'), - 'should have expected uninstall package.json result' - ) - t.matchSnapshot( - readFile('package-lock.json'), - 'should have expected uninstall lockfile result' - ) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), 'should have expected uninstall reify output') + t.matchSnapshot(readFile('package.json'), 'should have expected uninstall package.json result') + t.matchSnapshot(readFile('package-lock.json'), 'should have expected uninstall lockfile result') }) t.test('npm pkg', async t => { let cmd = `${npmBin} pkg get license` let cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes.replace(/in.*s/, ''), - 'should have expected pkg get output') + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), 'should have expected pkg get output') cmd = `${npmBin} pkg set tap[test-env][0]=LC_ALL=sk` cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes.replace(/in.*s/, ''), - 'should have expected pkg set output') + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), 'should have expected pkg set output') t.matchSnapshot( readFile('package.json'), @@ -270,13 +256,11 @@ t.test('npm pkg', async t => { cmd = `${npmBin} pkg get` cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes.replace(/in.*s/, ''), - 'should print package.json contents') + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), 'should print package.json contents') cmd = `${npmBin} pkg delete tap` cmdRes = await exec(cmd) - t.matchSnapshot(cmdRes.replace(/in.*s/, ''), - 'should have expected pkg delete output') + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), 'should have expected pkg delete output') t.matchSnapshot( readFile('package.json'), diff --git a/smoke-tests/server.js b/smoke-tests/server.js index e0ac0c94eb5ab..31ffebb2ad4e7 100644 --- a/smoke-tests/server.js +++ b/smoke-tests/server.js @@ -1,6 +1,6 @@ /* istanbul ignore file */ -const {join, dirname, basename} = require('path') -const {existsSync, readFileSync, writeFileSync} = require('fs') +const { join, dirname, basename } = require('path') +const { existsSync, readFileSync, writeFileSync } = require('fs') const PORT = 12345 + (+process.env.TAP_CHILD_ID || 0) const http = require('http') const https = require('https') @@ -86,8 +86,9 @@ const startServer = () => new Promise((res, rej) => { https.request(opts) .on('response', upstream => handleUpstream(upstream)) 
.end(Buffer.concat(body)) - } else + } else { handleUpstream(upstream) + } }).end(Buffer.concat(body)) } else { res.setHeader('content-encoding', 'gzip') @@ -195,8 +196,9 @@ const startServer = () => new Promise((res, rej) => { const errorStatus = upstream.statusCode >= 300 || upstream.statusCode < 200 - if (errorStatus) + if (errorStatus) { console.error('UPSTREAM ERROR', upstream.statusCode) + } const ct = upstream.headers['content-type'] const isJson = ct.includes('application/json') @@ -217,8 +219,9 @@ const startServer = () => new Promise((res, rej) => { const minFile = file.replace(/\.json$/, '.min.json') writeFileSync(minFile, JSON.stringify(mrm(obj), 0, 2) + '\n') console.error('WROTE JSONS', [file, minFile]) - } else + } else { writeFileSync(file, out) + } } res.end(out) }) @@ -227,8 +230,9 @@ const startServer = () => new Promise((res, rej) => { } res.statusCode = er.code === 'ENOENT' ? 404 : 500 - if (res.method === 'GET') + if (res.method === 'GET') { console.error(er) + } res.setHeader('content-type', 'text/plain') res.end(er.stack) } diff --git a/test/bin/npx-cli.js b/test/bin/npx-cli.js index 5eeee30184363..b526f2dfbe32e 100644 --- a/test/bin/npx-cli.js +++ b/test/bin/npx-cli.js @@ -6,7 +6,7 @@ const npm = require.resolve('../../bin/npm-cli.js') const logs = [] console.error = (...msg) => logs.push(msg) -t.afterEach(() => logs.length = 0) +t.afterEach(() => (logs.length = 0)) t.test('npx foo -> npm exec -- foo', t => { process.argv = ['node', npx, 'foo'] @@ -39,7 +39,17 @@ t.test('npx --x=y --no-install foo -z -> npm exec --x=y -- foo -z', t => { t.test('transform renamed options into proper values', t => { process.argv = ['node', npx, '-y', '--shell=bash', '-p', 'foo', '-c', 'asdf'] t.mock(npx, { [cli]: () => {} }) - t.strictSame(process.argv, ['node', npm, 'exec', '--yes', '--script-shell=bash', '--package', 'foo', '--call', 'asdf']) + t.strictSame(process.argv, [ + 'node', + npm, + 'exec', + '--yes', + '--script-shell=bash', + '--package', + 'foo', + '--call', + 'asdf', + ]) t.end() }) diff --git a/test/bin/windows-shims.js b/test/bin/windows-shims.js index 8d73e39f2c4d1..8d08e603b03e3 100644 --- a/test/bin/windows-shims.js +++ b/test/bin/windows-shims.js @@ -10,9 +10,9 @@ const has = path => { // If WSL is installed, it *has* a bash.exe, but it fails if // there is no distro installed, so we need to detect that. 
const result = spawnSync(path, ['-l', '-c', 'exit 0']) - if (result.status === 0) + if (result.status === 0) { return true - else { + } else { // print whatever error we got throw result.error || Object.assign(new Error(String(result.stderr)), { code: result.status, diff --git a/test/coverage-map.js b/test/coverage-map.js index aff7a6527715c..b29fcd8618557 100644 --- a/test/coverage-map.js +++ b/test/coverage-map.js @@ -11,8 +11,9 @@ const coverageMap = (filename) => { // this one doesn't provide any coverage nyc can track return [] } - if (/^test\/(lib\/|bin\/|index\.js$)/.test(filename)) + if (/^test\/(lib\/|bin\/|index\.js$)/.test(filename)) { return filename.replace(/^test\//, '') + } return [] } diff --git a/test/fixtures/mock-npm.js b/test/fixtures/mock-npm.js index 0a7bc0e8ad8f0..abb45613c85a9 100644 --- a/test/fixtures/mock-npm.js +++ b/test/fixtures/mock-npm.js @@ -9,8 +9,9 @@ procLog.reset() require('events').defaultMaxListeners = Infinity const realLog = {} -for (const level in npmlog.levels) +for (const level in npmlog.levels) { realLog[level] = npmlog[level] +} const { title, execPath } = process @@ -53,8 +54,9 @@ const RealMockNpm = (t, otherMocks = {}) => { process.removeAllListeners('time') process.removeAllListeners('timeEnd') npmlog.record.length = 0 - for (const level in npmlog.levels) + for (const level in npmlog.levels) { npmlog[level] = realLog[level] + } procLog.reset() process.title = title process.execPath = execPath @@ -79,18 +81,19 @@ class MockNpm { const config = base.config || {} for (const attr in base) { - if (attr !== 'config') + if (attr !== 'config') { this[attr] = base[attr] + } } this.flatOptions = base.flatOptions || {} this.config = { // for now just set `find` to what config.find should return // this works cause `find` is not an existing config entry - find: (k) => ({...realConfig.defaults, ...config})[k], - get: (k) => ({...realConfig.defaults, ...config})[k], + find: (k) => ({ ...realConfig.defaults, ...config })[k], + get: (k) => ({ ...realConfig.defaults, ...config })[k], set: (k, v) => config[k] = v, - list: [{ ...realConfig.defaults, ...config}], + list: [{ ...realConfig.defaults, ...config }], } if (!this.log) { this.log = { @@ -110,8 +113,9 @@ class MockNpm { } output (...msg) { - if (this.base.output) + if (this.base.output) { return this.base.output(msg) + } this._mockOutputs.push(msg) } } diff --git a/test/fixtures/sandbox.js b/test/fixtures/sandbox.js index 626e2ab5c509c..b012790fb535d 100644 --- a/test/fixtures/sandbox.js +++ b/test/fixtures/sandbox.js @@ -24,8 +24,9 @@ createHook({ before: (asyncId) => { // find the nearest parent id that has a sandbox let parent = asyncId - while (chain.has(parent) && !sandboxes.has(parent)) + while (chain.has(parent) && !sandboxes.has(parent)) { parent = chain.get(parent) + } process = sandboxes.has(parent) ? 
sandboxes.get(parent) @@ -201,19 +202,22 @@ class Sandbox extends EventEmitter { // test.teardown hook teardown () { - if (this[_parent]) + if (this[_parent]) { sandboxes.delete(this[_parent]) + } return rimraf(this[_dirs].temp).catch(() => null) } // proxy get handler [_get] (target, prop, receiver) { - if (this[_data].has(prop)) + if (this[_data].has(prop)) { return this[_data].get(prop) + } - if (this[prop] !== undefined) + if (this[prop] !== undefined) { return Reflect.get(this, prop, this) + } const actual = Reflect.get(target, prop, receiver) if (typeof actual === 'function') { diff --git a/test/lib/auth/legacy.js b/test/lib/auth/legacy.js index d06f9535742fb..7b61e9f6e946e 100644 --- a/test/lib/auth/legacy.js +++ b/test/lib/auth/legacy.js @@ -13,8 +13,9 @@ const legacy = t.mock('../../../lib/auth/legacy.js', { }, 'npm-profile': profile, '../../../lib/utils/open-url.js': (npm, url, msg) => { - if (!url) + if (!url) { throw Object.assign(new Error('failed open url'), { code: 'ERROR' }) + } }, '../../../lib/utils/read-user-info.js': read, }) diff --git a/test/lib/auth/sso.js b/test/lib/auth/sso.js index 11be002aee345..d5922055931e1 100644 --- a/test/lib/auth/sso.js +++ b/test/lib/auth/sso.js @@ -30,9 +30,9 @@ const sso = t.mock('../../../lib/auth/sso.js', { } }, '../../../lib/utils/otplease.js': (opts, fn) => { - if (opts) + if (opts) { return fn({ ...opts, otp: '1234' }) - else { + } else { throw Object.assign( new Error('failed retrieving otp'), { code: 'ERROR' } diff --git a/test/lib/cli.js b/test/lib/cli.js index 4e24dcd78b73f..d762943b47008 100644 --- a/test/lib/cli.js +++ b/test/lib/cli.js @@ -12,8 +12,9 @@ let exitHandlerNpm = null let exitHandlerCb const exitHandlerMock = (...args) => { exitHandlerCalled = args - if (exitHandlerCb) + if (exitHandlerCb) { exitHandlerCb() + } } exitHandlerMock.setNpm = npm => { exitHandlerNpm = npm @@ -26,15 +27,16 @@ const npmlogMock = { info: (...msg) => logs.push(['info', ...msg]), } -const cliMock = (Npm) => t.mock('../../lib/cli.js', { - '../../lib/npm.js': Npm, - '../../lib/utils/update-notifier.js': async () => null, - '../../lib/utils/unsupported.js': unsupportedMock, - '../../lib/utils/exit-handler.js': exitHandlerMock, - npmlog: npmlogMock, -}) +const cliMock = Npm => + t.mock('../../lib/cli.js', { + '../../lib/npm.js': Npm, + '../../lib/utils/update-notifier.js': async () => null, + '../../lib/utils/unsupported.js': unsupportedMock, + '../../lib/utils/exit-handler.js': exitHandlerMock, + npmlog: npmlogMock, + }) -const processMock = (proc) => { +const processMock = proc => { const mocked = { ...process, on: () => {}, @@ -104,13 +106,16 @@ t.test('calling with --versions calls npm version with no args', async t => { t.test('logged argv is sanitized', async t => { const proc = processMock({ - argv: ['node', 'npm', 'version', 'https://username:password@npmjs.org/test_url_with_a_password'], + argv: [ + 'node', + 'npm', + 'version', + 'https://username:password@npmjs.org/test_url_with_a_password', + ], }) const { Npm } = mockNpm(t, { '../../lib/commands/version.js': class Version { - async exec (args) { - - } + async exec (args) {} }, }) @@ -120,12 +125,11 @@ t.test('logged argv is sanitized', async t => { t.equal(proc.title, 'npm') t.strictSame(logs, [ 'pause', - ['verbose', 'cli', [ - 'node', - 'npm', - 'version', - 'https://username:***@npmjs.org/test_url_with_a_password', - ]], + [ + 'verbose', + 'cli', + ['node', 'npm', 'version', 'https://username:***@npmjs.org/test_url_with_a_password'], + ], ['info', 'using', 'npm@%s', Npm.version], 
['info', 'using', 'node@%s', process.version], ]) diff --git a/test/lib/commands/access.js b/test/lib/commands/access.js index 6ddd21428a781..fdf132aff97f3 100644 --- a/test/lib/commands/access.js +++ b/test/lib/commands/access.js @@ -36,7 +36,7 @@ t.test('completion', async t => { testComp(['npm', 'access', 'revoke'], []) await t.rejects( - access.completion({conf: {argv: {remain: ['npm', 'access', 'foobar']}}}), + access.completion({ conf: { argv: { remain: ['npm', 'access', 'foobar'] } } }), { message: 'foobar not recognized' } ) }) diff --git a/test/lib/commands/adduser.js b/test/lib/commands/adduser.js index 368d5d68a7227..71d79ea9351b1 100644 --- a/test/lib/commands/adduser.js +++ b/test/lib/commands/adduser.js @@ -16,8 +16,9 @@ let deletedConfig = {} let registryOutput = '' let setConfig = {} const authDummy = (npm, options) => { - if (!options.fromFlatOptions) + if (!options.fromFlatOptions) { throw new Error('did not pass full flatOptions to auth function') + } return Promise.resolve({ message: 'success', @@ -40,13 +41,15 @@ const npm = { config: { delete: deleteMock, get (key, where) { - if (!where || where === 'user') + if (!where || where === 'user') { return _flatOptions[key] + } }, getCredentialsByURI, async save () { - if (failSave) + if (failSave) { throw new Error('error saving user config') + } }, set (key, value, where) { setConfig = { diff --git a/test/lib/commands/audit.js b/test/lib/commands/audit.js index cf6d36d4710b7..59a33e8652d6f 100644 --- a/test/lib/commands/audit.js +++ b/test/lib/commands/audit.js @@ -35,8 +35,9 @@ t.test('should audit using Arborist', t => { } }, '../../../lib/utils/reify-finish.js': (npm, arb) => { - if (arb !== ARB_OBJ) + if (arb !== ARB_OBJ) { throw new Error('got wrong object passed to reify-output') + } REIFY_FINISH_CALLED = true }, @@ -122,8 +123,7 @@ t.test('report endpoint error', t => { head: ['ers'], }, statusCode: 420, - body: json ? { nope: 'lol' } - : Buffer.from('i had a vuln but i eated it lol'), + body: json ? { nope: 'lol' } : Buffer.from('i had a vuln but i eated it lol'), }, } } @@ -132,27 +132,25 @@ t.test('report endpoint error', t => { }) const audit = new Audit(npm) - await t.rejects( - audit.exec([]), - 'audit endpoint returned an error' - ) + await t.rejects(audit.exec([]), 'audit endpoint returned an error') t.strictSame(OUTPUT, [ [ - json ? '{\n' + - ' "message": "hello, this didnt work",\n' + - ' "method": "POST",\n' + - ' "uri": "https://example.com/",\n' + - ' "headers": {\n' + - ' "head": [\n' + - ' "ers"\n' + - ' ]\n' + - ' },\n' + - ' "statusCode": 420,\n' + - ' "body": {\n' + - ' "nope": "lol"\n' + - ' }\n' + - '}' - : 'i had a vuln but i eated it lol', + json + ? 
'{\n' + + ' "message": "hello, this didnt work",\n' + + ' "method": "POST",\n' + + ' "uri": "https://example.com/",\n' + + ' "headers": {\n' + + ' "head": [\n' + + ' "ers"\n' + + ' ]\n' + + ' },\n' + + ' "statusCode": 420,\n' + + ' "body": {\n' + + ' "nope": "lol"\n' + + ' }\n' + + '}' + : 'i had a vuln but i eated it lol', ], ]) t.strictSame(LOGS, [['audit', 'hello, this didnt work']]) @@ -165,20 +163,27 @@ t.test('completion', t => { const Audit = require('../../../lib/commands/audit.js') const audit = new Audit({}) t.test('fix', async t => { - t.resolveMatch(audit.completion({ conf: { argv: { remain: ['npm', 'audit'] } } }), ['fix'], 'completes to fix') + t.resolveMatch( + audit.completion({ conf: { argv: { remain: ['npm', 'audit'] } } }), + ['fix'], + 'completes to fix' + ) t.end() }) t.test('subcommand fix', t => { - t.resolveMatch(audit.completion({ conf: { argv: { remain: ['npm', 'audit', 'fix'] } } }), [], 'resolves to ?') + t.resolveMatch( + audit.completion({ conf: { argv: { remain: ['npm', 'audit', 'fix'] } } }), + [], + 'resolves to ?' + ) t.end() }) t.test('subcommand not recognized', t => { - t.rejects( - audit.completion({ conf: { argv: { remain: ['npm', 'audit', 'repare'] } } }), - { message: 'repare not recognized' } - ) + t.rejects(audit.completion({ conf: { argv: { remain: ['npm', 'audit', 'repare'] } } }), { + message: 'repare not recognized', + }) t.end() }) diff --git a/test/lib/commands/bugs.js b/test/lib/commands/bugs.js index dcb36af393a61..dbd618b0848a4 100644 --- a/test/lib/commands/bugs.js +++ b/test/lib/commands/bugs.js @@ -89,12 +89,12 @@ t.test('open bugs urls & emails', t => { keys.forEach(pkg => { t.test(pkg, async t => { await bugs.exec([pkg]) - t.equal(opened[expect[pkg]], 1, 'opened expected url', {opened}) + t.equal(opened[expect[pkg]], 1, 'opened expected url', { opened }) }) }) }) t.test('open default package if none specified', async t => { await bugs.exec([]) - t.equal(opened['https://example.com'], 1, 'opened expected url', {opened}) + t.equal(opened['https://example.com'], 1, 'opened expected url', { opened }) }) diff --git a/test/lib/commands/cache.js b/test/lib/commands/cache.js index c12318f4e579b..0ebfb8fade86d 100644 --- a/test/lib/commands/cache.js +++ b/test/lib/commands/cache.js @@ -58,15 +58,24 @@ const setupCacacheFixture = () => { pkgs.forEach(pkg => addCacachePkg(...pkg)) // corrupt the packument cacacheContent[ - [cacacheEntries['make-fetch-happen:request-cache:https://registry.npmjs.org/corrupted'].integrity] + [ + cacacheEntries['make-fetch-happen:request-cache:https://registry.npmjs.org/corrupted'] + .integrity, + ] ].data = Buffer.from('<>>>}"') // nuke the version dist cacacheContent[ - [cacacheEntries['make-fetch-happen:request-cache:https://registry.npmjs.org/missing-dist'].integrity] + [ + cacacheEntries['make-fetch-happen:request-cache:https://registry.npmjs.org/missing-dist'] + .integrity, + ] ].data = Buffer.from(JSON.stringify({ versions: { '23.0.0': {} } })) // make the version a non-object cacacheContent[ - [cacacheEntries['make-fetch-happen:request-cache:https://registry.npmjs.org/missing-version'].integrity] + [ + cacacheEntries['make-fetch-happen:request-cache:https://registry.npmjs.org/missing-version'] + .integrity, + ] ].data = Buffer.from(JSON.stringify({ versions: 'hello' })) } @@ -89,8 +98,9 @@ const addCacachePkg = (spec, registry, publicURL) => { const parts = npa(spec) const ver = parts.rawSpec || '1.0.0' let url = `${registry}/${parts.name}/-/${parts.name}-${ver}.tgz` - if (!publicURL) + if (!publicURL) { 
url = `${registry}/aabbcc/${contentId}` + } const key = `make-fetch-happen:request-cache:${url}` const pkey = `make-fetch-happen:request-cache:${registry}/${parts.escapedName}` if (!packuments[parts.escapedName]) { @@ -111,26 +121,30 @@ const addCacachePkg = (spec, registry, publicURL) => { } const cacache = { - verify: (path) => { + verify: path => { return cacacheVerifyStats }, get: (path, key) => { - if (cacacheEntries[key] === undefined - || cacacheContent[cacacheEntries[key].integrity] === undefined) + if ( + cacacheEntries[key] === undefined || + cacacheContent[cacacheEntries[key].integrity] === undefined + ) { throw new Error() + } return cacacheContent[cacacheEntries[key].integrity] }, rm: { entry: (path, key) => { - if (cacacheEntries[key] === undefined) + if (cacacheEntries[key] === undefined) { throw new Error() + } delete cacacheEntries[key] }, content: (path, sha) => { delete cacacheContent[sha] }, }, - ls: (path) => { + ls: path => { return cacacheEntries }, } @@ -147,7 +161,7 @@ const npm = mockNpm({ cache: '/fake/path', flatOptions: { force: false }, config: { force: false }, - output: (msg) => { + output: msg => { outputOutput.push(msg) }, log: { @@ -159,19 +173,11 @@ const npm = mockNpm({ const cache = new Cache(npm) t.test('cache no args', async t => { - await t.rejects( - cache.exec([]), - 'usage instructions', - 'should throw usage instructions' - ) + await t.rejects(cache.exec([]), 'usage instructions', 'should throw usage instructions') }) t.test('cache clean', async t => { - await t.rejects( - cache.exec(['clean']), - 'the npm cache self-heals', - 'should throw warning' - ) + await t.rejects(cache.exec(['clean']), 'the npm cache self-heals', 'should throw warning') }) t.test('cache clean (force)', async t => { @@ -192,14 +198,8 @@ t.test('cache add no arg', async t => { logOutput = [] }) - await t.rejects( - cache.exec(['add']), - { code: 'EUSAGE' }, - 'throws usage error' - ) - t.strictSame(logOutput, [ - ['silly', 'cache add', 'args', []], - ], 'logs correctly') + await t.rejects(cache.exec(['add']), { code: 'EUSAGE' }, 'throws usage error') + t.strictSame(logOutput, [['silly', 'cache add', 'args', []]], 'logs correctly') }) t.test('cache add pkg only', async t => { @@ -210,10 +210,14 @@ t.test('cache add pkg only', async t => { }) await cache.exec(['add', 'mypkg']) - t.strictSame(logOutput, [ - ['silly', 'cache add', 'args', ['mypkg']], - ['silly', 'cache add', 'spec', 'mypkg'], - ], 'logs correctly') + t.strictSame( + logOutput, + [ + ['silly', 'cache add', 'args', ['mypkg']], + ['silly', 'cache add', 'spec', 'mypkg'], + ], + 'logs correctly' + ) t.equal(tarballStreamSpec, 'mypkg', 'passes the correct spec to pacote') t.same(tarballStreamOpts, npm.flatOptions, 'passes the correct options to pacote') }) @@ -226,11 +230,15 @@ t.test('cache add multiple pkgs', async t => { }) await cache.exec(['add', 'mypkg', 'anotherpkg']) - t.strictSame(logOutput, [ - ['silly', 'cache add', 'args', ['mypkg', 'anotherpkg']], - ['silly', 'cache add', 'spec', 'mypkg'], - ['silly', 'cache add', 'spec', 'anotherpkg'], - ], 'logs correctly') + t.strictSame( + logOutput, + [ + ['silly', 'cache add', 'args', ['mypkg', 'anotherpkg']], + ['silly', 'cache add', 'spec', 'mypkg'], + ['silly', 'cache add', 'spec', 'anotherpkg'], + ], + 'logs correctly' + ) t.equal(tarballStreamSpec, 'anotherpkg', 'passes the correct spec to pacote') t.same(tarballStreamOpts, npm.flatOptions, 'passes the correct options to pacote') }) @@ -243,9 +251,12 @@ t.test('cache ls', async t => { setupCacacheFixture() 
await cache.exec(['ls']) t.strictSame(outputOutput, [ + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/@fritzy/staydown/-/@fritzy/staydown-3.1.1.tgz', 'make-fetch-happen:request-cache:https://registry.npmjs.org/@fritzy%2fstaydown', + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/@gar/npm-expansion/-/@gar/npm-expansion-2.1.0.tgz', + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/@gar/npm-expansion/-/@gar/npm-expansion-3.0.0-beta.tgz', 'make-fetch-happen:request-cache:https://registry.npmjs.org/@gar%2fnpm-expansion', 'make-fetch-happen:request-cache:https://registry.npmjs.org/ape-ecs', @@ -255,8 +266,10 @@ t.test('cache ls', async t => { 'make-fetch-happen:request-cache:https://registry.npmjs.org/foo', 'make-fetch-happen:request-cache:https://registry.npmjs.org/foo/-/foo-1.2.3-beta.tgz', 'make-fetch-happen:request-cache:https://registry.npmjs.org/missing-dist', + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/missing-dist/-/missing-dist-23.0.0.tgz', 'make-fetch-happen:request-cache:https://registry.npmjs.org/missing-version', + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/missing-version/-/missing-version-16.2.0.tgz', 'make-fetch-happen:request-cache:https://registry.npmjs.org/npm', 'make-fetch-happen:request-cache:https://registry.npmjs.org/npm/-/npm-1.2.0.tgz', @@ -307,11 +320,7 @@ t.test('cache ls tagged', async t => { t.teardown(() => { outputOutput = [] }) - await t.rejects( - cache.exec(['ls', 'webpack@latest']), - 'tagged package', - 'should throw warning' - ) + await t.rejects(cache.exec(['ls', 'webpack@latest']), 'tagged package', 'should throw warning') }) t.test('cache ls scoped and scoped slash', async t => { @@ -320,8 +329,10 @@ t.test('cache ls scoped and scoped slash', async t => { }) await cache.exec(['ls', '@fritzy/staydown', '@gar/npm-expansion']) t.strictSame(outputOutput, [ + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/@fritzy/staydown/-/@fritzy/staydown-3.1.1.tgz', 'make-fetch-happen:request-cache:https://registry.npmjs.org/@fritzy%2fstaydown', + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/@gar/npm-expansion/-/@gar/npm-expansion-2.1.0.tgz', 'make-fetch-happen:request-cache:https://registry.npmjs.org/@gar%2fnpm-expansion', ]) @@ -345,6 +356,7 @@ t.test('cache ls missing packument dist', async t => { await cache.exec(['ls', 'missing-dist']) t.strictSame(outputOutput, [ 'make-fetch-happen:request-cache:https://registry.npmjs.org/missing-dist', + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/missing-dist/-/missing-dist-23.0.0.tgz', ]) }) @@ -356,6 +368,7 @@ t.test('cache ls missing packument version not an object', async t => { await cache.exec(['ls', 'missing-version']) t.strictSame(outputOutput, [ 'make-fetch-happen:request-cache:https://registry.npmjs.org/missing-version', + /* eslint-disable-next-line max-len */ 'make-fetch-happen:request-cache:https://registry.npmjs.org/missing-version/-/missing-version-16.2.0.tgz', ]) }) @@ -364,9 +377,12 @@ t.test('cache rm', async t => { t.teardown(() => { outputOutput = [] }) - await cache.exec(['rm', - 'make-fetch-happen:request-cache:https://registry.npmjs.org/webpack/-/webpack-4.44.1.tgz']) + await cache.exec([ 
+ 'rm', + 'make-fetch-happen:request-cache:https://registry.npmjs.org/webpack/-/webpack-4.44.1.tgz', + ]) t.strictSame(outputOutput, [ + /* eslint-disable-next-line max-len */ 'Deleted: make-fetch-happen:request-cache:https://registry.npmjs.org/webpack/-/webpack-4.44.1.tgz', ]) }) @@ -377,9 +393,7 @@ t.test('cache rm unfound', async t => { logOutput = [] }) await cache.exec(['rm', 'made-up-key']) - t.strictSame(logOutput, [ - ['warn', 'Not Found: made-up-key'], - ], 'logs correctly') + t.strictSame(logOutput, [['warn', 'Not Found: made-up-key']], 'logs correctly') }) t.test('cache verify', async t => { @@ -388,12 +402,16 @@ t.test('cache verify', async t => { }) await cache.exec(['verify']) - t.match(outputOutput, [ - `Cache verified and compressed (${path.join(npm.cache, '_cacache')})`, - 'Content verified: 1 (100 bytes)', - 'Index entries: 1', - 'Finished in 2s', - ], 'prints correct output') + t.match( + outputOutput, + [ + `Cache verified and compressed (${path.join(npm.cache, '_cacache')})`, + 'Content verified: 1 (100 bytes)', + 'Index entries: 1', + 'Finished in 2s', + ], + 'prints correct output' + ) }) t.test('cache verify w/ extra output', async t => { @@ -412,15 +430,19 @@ t.test('cache verify w/ extra output', async t => { }) await cache.exec(['check']) - t.match(outputOutput, [ - `Cache verified and compressed (~${path.join('/fake/path', '_cacache')})`, - 'Content verified: 1 (100 bytes)', - 'Corrupted content removed: 1', - 'Content garbage-collected: 2 (200 bytes)', - 'Missing content: 3', - 'Index entries: 1', - 'Finished in 2s', - ], 'prints correct output') + t.match( + outputOutput, + [ + `Cache verified and compressed (~${path.join('/fake/path', '_cacache')})`, + 'Content verified: 1 (100 bytes)', + 'Corrupted content removed: 1', + 'Content garbage-collected: 2 (200 bytes)', + 'Missing content: 3', + 'Index entries: 1', + 'Finished in 2s', + ], + 'prints correct output' + ) }) t.test('cache completion', async t => { diff --git a/test/lib/commands/ci.js b/test/lib/commands/ci.js index 8573b585a5f47..1091f9125b041 100644 --- a/test/lib/commands/ci.js +++ b/test/lib/commands/ci.js @@ -52,13 +52,15 @@ t.test('should use Arborist and run-script', async t => { // when the test is done, we assert that all timers ended const timers = {} const onTime = msg => { - if (timers[msg]) + if (timers[msg]) { throw new Error(`saw duplicate timer: ${msg}`) + } timers[msg] = true } const onTimeEnd = msg => { - if (!timers[msg]) + if (!timers[msg]) { throw new Error(`ended timer that was not started: ${msg}`) + } timers[msg] = false } process.on('time', onTime) @@ -118,8 +120,9 @@ t.test('should use Arborist and run-script', async t => { const ci = new CI(npm) await ci.exec(null) - for (const [msg, result] of Object.entries(timers)) + for (const [msg, result] of Object.entries(timers)) { t.notOk(result, `properly resolved ${msg} timer`) + } t.match(timers, { 'npm-ci:rm': false }, 'saw the rimraf timer') t.equal(actualRimrafs, expectRimrafs, 'removed the right number of things') t.strictSame(scripts, [], 'called all scripts') diff --git a/test/lib/commands/completion.js b/test/lib/commands/completion.js index 7fdee0627273e..7a7e0a759fbfe 100644 --- a/test/lib/commands/completion.js +++ b/test/lib/commands/completion.js @@ -2,7 +2,9 @@ const t = require('tap') const fs = require('fs') const path = require('path') -const completionScript = fs.readFileSync(path.resolve(__dirname, '../../../lib/utils/completion.sh'), { encoding: 'utf8' }).replace(/^#!.*?\n/, '') +const completionScript = fs + 
.readFileSync(path.resolve(__dirname, '../../../lib/utils/completion.sh'), { encoding: 'utf8' }) + .replace(/^#!.*?\n/, '') const output = [] const npmConfig = {} @@ -14,20 +16,22 @@ const npm = { npmConfig[key] = value }, clear: () => { - for (const key in npmConfig) + for (const key in npmConfig) { delete npmConfig[key] + } }, }, - cmd: (cmd) => { - return ({ + cmd: cmd => { + return { completion: { completion: () => [['>>', '~/.bashrc']], }, adduser: {}, access: { completion: () => { - if (accessCompletionError) + if (accessCompletionError) { throw new Error('access completion failed') + } return ['public', 'restricted'] }, @@ -45,9 +49,9 @@ const npm = { return ' fast' }, }, - }[cmd]) + }[cmd] }, - output: (line) => { + output: line => { output.push(line) }, } @@ -56,11 +60,7 @@ const cmdList = { aliases: { login: 'adduser', }, - cmdList: [ - 'access', - 'adduser', - 'completion', - ], + cmdList: ['access', 'adduser', 'completion'], plumbing: [], } @@ -78,7 +78,7 @@ const config = { }, } -const deref = (cmd) => { +const deref = cmd => { return cmd } @@ -102,10 +102,14 @@ t.test('completion completion', async t => { }) const res = await completion.completion({ w: 2 }) - t.strictSame(res, [ - ['>>', '~/.zshrc'], - ['>>', '~/.bashrc'], - ], 'identifies both shells') + t.strictSame( + res, + [ + ['>>', '~/.zshrc'], + ['>>', '~/.bashrc'], + ], + 'identifies both shells' + ) t.end() }) @@ -137,10 +141,9 @@ t.test('completion errors in windows without bash', async t => { await t.rejects( compl.exec({}), - { code: 'ENOTSUP', - message: /completion supported only in MINGW/, - - }, 'returns the correct error') + { code: 'ENOTSUP', message: /completion supported only in MINGW/ }, + 'returns the correct error' + ) }) t.test('dump script when completion is not being attempted', async t => { @@ -302,13 +305,17 @@ t.test('handles async completion function', async t => { await completion.exec(['npm', 'promise', '']) - t.strictSame(npmConfig, { - argv: { - remain: ['npm', 'promise'], - cooked: ['npm', 'promise'], - original: ['npm', 'promise'], + t.strictSame( + npmConfig, + { + argv: { + remain: ['npm', 'promise'], + cooked: ['npm', 'promise'], + original: ['npm', 'promise'], + }, }, - }, 'applies command config appropriately') + 'applies command config appropriately' + ) t.strictSame(output, ['resolved_completion_promise'], 'resolves async completion results') }) @@ -327,14 +334,22 @@ t.test('completion triggers command completions', async t => { await completion.exec(['npm', 'access', '']) - t.strictSame(npmConfig, { - argv: { - remain: ['npm', 'access'], - cooked: ['npm', 'access'], - original: ['npm', 'access'], + t.strictSame( + npmConfig, + { + argv: { + remain: ['npm', 'access'], + cooked: ['npm', 'access'], + original: ['npm', 'access'], + }, }, - }, 'applies command config appropriately') - t.strictSame(output, [['public', 'restricted'].join('\n')], 'correctly completed a subcommand name') + 'applies command config appropriately' + ) + t.strictSame( + output, + [['public', 'restricted'].join('\n')], + 'correctly completed a subcommand name' + ) }) t.test('completion triggers filtered command completions', async t => { @@ -352,13 +367,17 @@ t.test('completion triggers filtered command completions', async t => { await completion.exec(['npm', 'access', 'p']) - t.strictSame(npmConfig, { - argv: { - remain: ['npm', 'access'], - cooked: ['npm', 'access'], - original: ['npm', 'access'], + t.strictSame( + npmConfig, + { + argv: { + remain: ['npm', 'access'], + cooked: ['npm', 'access'], + original: 
['npm', 'access'], + }, }, - }, 'applies command config appropriately') + 'applies command config appropriately' + ) t.strictSame(output, ['public'], 'correctly completed a subcommand name') }) @@ -377,13 +396,17 @@ t.test('completions for commands that return nested arrays are joined', async t await completion.exec(['npm', 'completion', '']) - t.strictSame(npmConfig, { - argv: { - remain: ['npm', 'completion'], - cooked: ['npm', 'completion'], - original: ['npm', 'completion'], + t.strictSame( + npmConfig, + { + argv: { + remain: ['npm', 'completion'], + cooked: ['npm', 'completion'], + original: ['npm', 'completion'], + }, }, - }, 'applies command config appropriately') + 'applies command config appropriately' + ) t.strictSame(output, ['>> ~/.bashrc'], 'joins nested arrays') }) @@ -402,13 +425,17 @@ t.test('completions for commands that return nothing work correctly', async t => await completion.exec(['npm', 'donothing', '']) - t.strictSame(npmConfig, { - argv: { - remain: ['npm', 'donothing'], - cooked: ['npm', 'donothing'], - original: ['npm', 'donothing'], + t.strictSame( + npmConfig, + { + argv: { + remain: ['npm', 'donothing'], + cooked: ['npm', 'donothing'], + original: ['npm', 'donothing'], + }, }, - }, 'applies command config appropriately') + 'applies command config appropriately' + ) t.strictSame(output, [], 'returns nothing') }) @@ -426,14 +453,18 @@ t.test('completions for commands that return a single item work correctly', asyn }) await completion.exec(['npm', 'driveaboat', '']) - t.strictSame(npmConfig, { - argv: { - remain: ['npm', 'driveaboat'], - cooked: ['npm', 'driveaboat'], - original: ['npm', 'driveaboat'], + t.strictSame( + npmConfig, + { + argv: { + remain: ['npm', 'driveaboat'], + cooked: ['npm', 'driveaboat'], + original: ['npm', 'driveaboat'], + }, }, - }, 'applies command config appropriately') - t.strictSame(output, ['\' fast\''], 'returns the correctly escaped string') + 'applies command config appropriately' + ) + t.strictSame(output, ["' fast'"], 'returns the correctly escaped string') }) t.test('command completion for commands with no completion return no results', async t => { @@ -450,14 +481,18 @@ t.test('command completion for commands with no completion return no results', a }) // quotes around adduser are to ensure coverage when unescaping commands - await completion.exec(['npm', '\'adduser\'', '']) - t.strictSame(npmConfig, { - argv: { - remain: ['npm', 'adduser'], - cooked: ['npm', 'adduser'], - original: ['npm', 'adduser'], + await completion.exec(['npm', "'adduser'", '']) + t.strictSame( + npmConfig, + { + argv: { + remain: ['npm', 'adduser'], + cooked: ['npm', 'adduser'], + original: ['npm', 'adduser'], + }, }, - }, 'applies command config appropriately') + 'applies command config appropriately' + ) t.strictSame(output, [], 'correctly completed a subcommand name') }) @@ -481,13 +516,17 @@ t.test('command completion errors propagate', async t => { /access completion failed/, 'catches the appropriate error' ) - t.strictSame(npmConfig, { - argv: { - remain: ['npm', 'access'], - cooked: ['npm', 'access'], - original: ['npm', 'access'], + t.strictSame( + npmConfig, + { + argv: { + remain: ['npm', 'access'], + cooked: ['npm', 'access'], + original: ['npm', 'access'], + }, }, - }, 'applies command config appropriately') + 'applies command config appropriately' + ) t.strictSame(output, [], 'returns no results') }) @@ -507,7 +546,11 @@ t.test('completion can complete flags', async t => { await completion.exec(['npm', 'install', '--']) 
t.strictSame(npmConfig, {}, 'does not apply command config') - t.strictSame(output, [['--global', '--browser', '--registry', '--reg', '--no-global', '--no-browser'].join('\n')], 'correctly completes flag names') + t.strictSame( + output, + [['--global', '--browser', '--registry', '--reg', '--no-global', '--no-browser'].join('\n')], + 'correctly completes flag names' + ) }) t.test('double dashes escape from flag completion', async t => { @@ -526,7 +569,11 @@ t.test('double dashes escape from flag completion', async t => { await completion.exec(['npm', '--', 'install', '--']) t.strictSame(npmConfig, {}, 'does not apply command config') - t.strictSame(output, [['access', 'adduser', 'completion', 'login'].join('\n')], 'correctly completes flag names') + t.strictSame( + output, + [['access', 'adduser', 'completion', 'login'].join('\n')], + 'correctly completes flag names' + ) }) t.test('completion cannot complete options that take a value in mid-command', async t => { diff --git a/test/lib/commands/config.js b/test/lib/commands/config.js index 56ec7fd91630e..b37088c06b9cd 100644 --- a/test/lib/commands/config.js +++ b/test/lib/commands/config.js @@ -10,25 +10,37 @@ const readFile = promisify(fs.readFile) const Sandbox = require('../../fixtures/sandbox.js') -t.test('config no args', async (t) => { +t.test('config no args', async t => { const sandbox = new Sandbox(t) - await t.rejects(sandbox.run('config', []), { - code: 'EUSAGE', - }, 'rejects with usage') + await t.rejects( + sandbox.run('config', []), + { + code: 'EUSAGE', + }, + 'rejects with usage' + ) }) -t.test('config ignores workspaces', async (t) => { +t.test('config ignores workspaces', async t => { const sandbox = new Sandbox(t) - await t.rejects(sandbox.run('config', ['--workspaces']), { - code: 'EUSAGE', - }, 'rejects with usage') - - t.match(sandbox.logs.warn, [['config', 'This command does not support workspaces.']], 'logged the warning') + await t.rejects( + sandbox.run('config', ['--workspaces']), + { + code: 'EUSAGE', + }, + 'rejects with usage' + ) + + t.match( + sandbox.logs.warn, + [['config', 'This command does not support workspaces.']], + 'logged the warning' + ) }) -t.test('config list', async (t) => { +t.test('config list', async t => { const sandbox = new Sandbox(t) const temp = t.testdir({ @@ -51,7 +63,7 @@ t.test('config list', async (t) => { t.matchSnapshot(sandbox.output, 'output matches snapshot') }) -t.test('config list --long', async (t) => { +t.test('config list --long', async t => { const temp = t.testdir({ global: { npmrc: 'globalloaded=yes', @@ -73,7 +85,7 @@ t.test('config list --long', async (t) => { t.matchSnapshot(sandbox.output, 'output matches snapshot') }) -t.test('config list --json', async (t) => { +t.test('config list --json', async t => { const temp = t.testdir({ global: { npmrc: 'globalloaded=yes', @@ -95,15 +107,19 @@ t.test('config list --json', async (t) => { t.matchSnapshot(sandbox.output, 'output matches snapshot') }) -t.test('config delete no args', async (t) => { +t.test('config delete no args', async t => { const sandbox = new Sandbox(t) - await t.rejects(sandbox.run('config', ['delete']), { - code: 'EUSAGE', - }, 'rejects with usage') + await t.rejects( + sandbox.run('config', ['delete']), + { + code: 'EUSAGE', + }, + 'rejects with usage' + ) }) -t.test('config delete single key', async (t) => { +t.test('config delete single key', async t => { // location defaults to user, so we work with a userconfig const home = t.testdir({ '.npmrc': 'foo=bar\nbar=baz', @@ -118,7 +134,7 @@ 
t.test('config delete single key', async (t) => { t.not(contents.includes('foo='), 'foo was removed on disk') }) -t.test('config delete multiple keys', async (t) => { +t.test('config delete multiple keys', async t => { const home = t.testdir({ '.npmrc': 'foo=bar\nbar=baz\nbaz=buz', }) @@ -134,7 +150,7 @@ t.test('config delete multiple keys', async (t) => { t.not(contents.includes('bar='), 'bar was removed on disk') }) -t.test('config delete key --location=global', async (t) => { +t.test('config delete key --location=global', async t => { const global = t.testdir({ npmrc: 'foo=bar\nbar=baz', }) @@ -148,7 +164,7 @@ t.test('config delete key --location=global', async (t) => { t.not(contents.includes('foo='), 'foo was removed on disk') }) -t.test('config delete key --global', async (t) => { +t.test('config delete key --global', async t => { const global = t.testdir({ npmrc: 'foo=bar\nbar=baz', }) @@ -162,15 +178,19 @@ t.test('config delete key --global', async (t) => { t.not(contents.includes('foo='), 'foo was removed on disk') }) -t.test('config set no args', async (t) => { +t.test('config set no args', async t => { const sandbox = new Sandbox(t) - await t.rejects(sandbox.run('config', ['set']), { - code: 'EUSAGE', - }, 'rejects with usage') + await t.rejects( + sandbox.run('config', ['set']), + { + code: 'EUSAGE', + }, + 'rejects with usage' + ) }) -t.test('config set key', async (t) => { +t.test('config set key', async t => { const home = t.testdir({ '.npmrc': 'foo=bar', }) @@ -185,7 +205,7 @@ t.test('config set key', async (t) => { t.ok(contents.includes('foo='), 'wrote foo to disk') }) -t.test('config set key value', async (t) => { +t.test('config set key value', async t => { const home = t.testdir({ '.npmrc': 'foo=bar', }) @@ -200,7 +220,7 @@ t.test('config set key value', async (t) => { t.ok(contents.includes('foo=baz'), 'wrote foo to disk') }) -t.test('config set key=value', async (t) => { +t.test('config set key=value', async t => { const home = t.testdir({ '.npmrc': 'foo=bar', }) @@ -215,7 +235,7 @@ t.test('config set key=value', async (t) => { t.ok(contents.includes('foo=baz'), 'wrote foo to disk') }) -t.test('config set key1 value1 key2=value2 key3', async (t) => { +t.test('config set key1 value1 key2=value2 key3', async t => { const home = t.testdir({ '.npmrc': 'foo=bar\nbar=baz\nbaz=foo', }) @@ -233,17 +253,19 @@ t.test('config set key1 value1 key2=value2 key3', async (t) => { t.ok(contents.includes('baz='), 'baz was written to disk') }) -t.test('config set invalid key logs warning', async (t) => { +t.test('config set invalid key logs warning', async t => { const sandbox = new Sandbox(t) // this doesn't reject, it only logs a warning await sandbox.run('config', ['set', 'access=foo']) - t.match(sandbox.logs.warn, [ - ['invalid config', 'access="foo"', `set in ${join(sandbox.home, '.npmrc')}`], - ], 'logged warning') + t.match( + sandbox.logs.warn, + [['invalid config', 'access="foo"', `set in ${join(sandbox.home, '.npmrc')}`]], + 'logged warning' + ) }) -t.test('config set key=value --location=global', async (t) => { +t.test('config set key=value --location=global', async t => { const global = t.testdir({ npmrc: 'foo=bar\nbar=baz', }) @@ -257,7 +279,7 @@ t.test('config set key=value --location=global', async (t) => { t.not(contents.includes('foo=buzz'), 'foo was saved on disk') }) -t.test('config set key=value --global', async (t) => { +t.test('config set key=value --global', async t => { const global = t.testdir({ npmrc: 'foo=bar\nbar=baz', }) @@ -271,7 +293,7 @@ t.test('config 
set key=value --global', async (t) => { t.not(contents.includes('foo=buzz'), 'foo was saved on disk') }) -t.test('config get no args', async (t) => { +t.test('config get no args', async t => { const sandbox = new Sandbox(t) await sandbox.run('config', ['get']) @@ -285,28 +307,38 @@ t.test('config get no args', async (t) => { t.equal(listOutput, getOutput, 'get with no args outputs list') }) -t.test('config get single key', async (t) => { +t.test('config get single key', async t => { const sandbox = new Sandbox(t) await sandbox.run('config', ['get', 'node-version']) t.equal(sandbox.output, sandbox.config.get('node-version'), 'should get the value') }) -t.test('config get multiple keys', async (t) => { +t.test('config get multiple keys', async t => { const sandbox = new Sandbox(t) await sandbox.run('config', ['get', 'node-version', 'npm-version']) - t.ok(sandbox.output.includes(`node-version=${sandbox.config.get('node-version')}`), 'outputs node-version') - t.ok(sandbox.output.includes(`npm-version=${sandbox.config.get('npm-version')}`), 'outputs npm-version') + t.ok( + sandbox.output.includes(`node-version=${sandbox.config.get('node-version')}`), + 'outputs node-version' + ) + t.ok( + sandbox.output.includes(`npm-version=${sandbox.config.get('npm-version')}`), + 'outputs npm-version' + ) }) -t.test('config get private key', async (t) => { +t.test('config get private key', async t => { const sandbox = new Sandbox(t) - await t.rejects(sandbox.run('config', ['get', '_authToken']), '_authToken is protected', 'rejects with protected string') + await t.rejects( + sandbox.run('config', ['get', '_authToken']), + '_authToken is protected', + 'rejects with protected string' + ) }) -t.test('config edit', async (t) => { +t.test('config edit', async t => { const home = t.testdir({ '.npmrc': 'foo=bar\nbar=baz', }) @@ -323,7 +355,11 @@ t.test('config edit', async (t) => { await sandbox.run('config', ['edit']) t.ok(editor.called, 'editor was spawned') - t.same(editor.calledWith.args, [join(sandbox.home, '.npmrc')], 'editor opened the user config file') + t.same( + editor.calledWith.args, + [join(sandbox.home, '.npmrc')], + 'editor opened the user config file' + ) const contents = await readFile(join(home, '.npmrc'), { encoding: 'utf8' }) t.ok(contents.includes('foo=bar'), 'kept foo') @@ -331,7 +367,7 @@ t.test('config edit', async (t) => { t.ok(contents.includes('shown below with default values'), 'appends defaults to file') }) -t.test('config edit - editor exits non-0', async (t) => { +t.test('config edit - editor exits non-0', async t => { t.teardown(() => { spawk.clean() }) @@ -341,22 +377,31 @@ t.test('config edit - editor exits non-0', async (t) => { const sandbox = new Sandbox(t) sandbox.process.env.EDITOR = EDITOR - await t.rejects(sandbox.run('config', ['edit']), { - message: 'editor process exited with code: 1', - }, 'rejects with error about editor code') + await t.rejects( + sandbox.run('config', ['edit']), + { + message: 'editor process exited with code: 1', + }, + 'rejects with error about editor code' + ) t.ok(editor.called, 'editor was spawned') - t.same(editor.calledWith.args, [join(sandbox.home, '.npmrc')], 'editor opened the user config file') + t.same( + editor.calledWith.args, + [join(sandbox.home, '.npmrc')], + 'editor opened the user config file' + ) }) -t.test('completion', async (t) => { +t.test('completion', async t => { const sandbox = new Sandbox(t) let allKeys const testComp = async (argv, expect) => { t.match(await sandbox.complete('config', argv), expect, argv.join(' ')) - if 
(!allKeys) + if (!allKeys) { allKeys = Object.keys(sandbox.config.definitions) + } sandbox.reset() } diff --git a/test/lib/commands/diff.js b/test/lib/commands/diff.js index 9b3e2aca51329..811936fe6d24c 100644 --- a/test/lib/commands/diff.js +++ b/test/lib/commands/diff.js @@ -169,13 +169,14 @@ t.test('single arg', t => { libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@2.1.4', 'should use expected spec') t.equal(b, `file:${fooPath}`, 'should compare to cwd') - t.match(opts, { - ...npm.flatOptions, - diffFiles: [ - './foo.js', - './bar.js', - ], - }, 'should forward flatOptions and diffFiles') + t.match( + opts, + { + ...npm.flatOptions, + diffFiles: ['./foo.js', './bar.js'], + }, + 'should forward flatOptions and diffFiles' + ) } config.diff = ['2.1.4'] @@ -264,7 +265,7 @@ t.test('single arg', t => { const Diff = t.mock('../../../lib/commands/diff.js', { ...mocks, pacote: { - packument: (spec) => { + packument: spec => { t.equal(spec.name, 'bar', 'should have expected spec name') }, }, @@ -273,7 +274,11 @@ t.test('single arg', t => { return { version: '1.8.10' } }, libnpmdiff: async ([a, b], opts) => { - t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') + t.equal( + a, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should target local node_modules pkg' + ) t.equal(b, 'bar@1.8.10', 'should have possible semver range spec') }, }) @@ -325,7 +330,7 @@ t.test('single arg', t => { const Diff = t.mock('../../../lib/commands/diff.js', { ...mocks, pacote: { - packument: (spec) => { + packument: spec => { t.equal(spec.name, 'lorem', 'should have expected spec name') }, }, @@ -334,7 +339,11 @@ t.test('single arg', t => { return { version: '2.1.0' } }, libnpmdiff: async ([a, b], opts) => { - t.equal(a, `lorem@file:${resolve(path, 'globalDir/lib/node_modules/lorem')}`, 'should target local node_modules pkg') + t.equal( + a, + `lorem@file:${resolve(path, 'globalDir/lib/node_modules/lorem')}`, + 'should target local node_modules pkg' + ) t.equal(b, 'lorem@2.1.0', 'should have possible semver range spec') }, }) @@ -364,7 +373,11 @@ t.test('single arg', t => { }) libnpmdiff = async ([a, b], opts) => { - t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') + t.equal( + a, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should target local node_modules pkg' + ) t.equal(b, 'bar@2.0.0', 'should have expected comparison spec') } @@ -407,7 +420,7 @@ t.test('single arg', t => { ...mocks, '../../../lib/utils/read-package-name.js': async () => 'my-project', pacote: { - packument: (spec) => { + packument: spec => { t.equal(spec.name, 'lorem', 'should have expected spec name') }, }, @@ -416,7 +429,11 @@ t.test('single arg', t => { return { version: '2.2.2' } }, libnpmdiff: async ([a, b], opts) => { - t.equal(a, `lorem@file:${resolve(path, 'node_modules/lorem')}`, 'should target local node_modules pkg') + t.equal( + a, + `lorem@file:${resolve(path, 'node_modules/lorem')}`, + 'should target local node_modules pkg' + ) t.equal(b, 'lorem@2.2.2', 'should have expected target spec') }, }) @@ -548,7 +565,11 @@ t.test('first arg is a qualified spec', t => { libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@2.0.0', 'should set expected first spec') - t.equal(b, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') + t.equal( + b, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should target local node_modules pkg' + ) } npm.prefix = path @@ -606,15 +627,20 @@ 
t.test('first arg is a known dependency name', async t => { }) libnpmdiff = async ([a, b], opts) => { - t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') + t.equal( + a, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should target local node_modules pkg' + ) t.equal(b, 'bar@2.0.0', 'should set expected second spec') } npm.prefix = path config.diff = ['bar', 'bar@2.0.0'] diff.exec([], err => { - if (err) + if (err) { throw err + } }) }) @@ -645,15 +671,24 @@ t.test('first arg is a known dependency name', async t => { }) libnpmdiff = async ([a, b], opts) => { - t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') - t.equal(b, `bar-fork@file:${resolve(path, 'node_modules/bar-fork')}`, 'should target fork local node_modules pkg') + t.equal( + a, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should target local node_modules pkg' + ) + t.equal( + b, + `bar-fork@file:${resolve(path, 'node_modules/bar-fork')}`, + 'should target fork local node_modules pkg' + ) } npm.prefix = path config.diff = ['bar', 'bar-fork'] diff.exec([], err => { - if (err) + if (err) { throw err + } }) }) @@ -678,15 +713,20 @@ t.test('first arg is a known dependency name', async t => { }) libnpmdiff = async ([a, b], opts) => { - t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') + t.equal( + a, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should target local node_modules pkg' + ) t.equal(b, 'bar@2.0.0', 'should use package name from first arg') } npm.prefix = path config.diff = ['bar', '2.0.0'] diff.exec([], err => { - if (err) + if (err) { throw err + } }) }) @@ -711,7 +751,11 @@ t.test('first arg is a known dependency name', async t => { }) libnpmdiff = async ([a, b], opts) => { - t.equal(a, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') + t.equal( + a, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should target local node_modules pkg' + ) t.equal(b, 'bar-fork@latest', 'should set expected second spec') } @@ -759,7 +803,11 @@ t.test('first arg is a valid semver range', t => { libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar@1.0.0', 'should use name from second arg') - t.equal(b, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should set expected second spec from nm') + t.equal( + b, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should set expected second spec from nm' + ) } npm.prefix = path @@ -847,8 +895,9 @@ t.test('first arg is an unknown dependency name', t => { config.diff = ['bar', 'bar@2.0.0'] diff.exec([], err => { - if (err) + if (err) { throw err + } }) }) @@ -874,14 +923,19 @@ t.test('first arg is an unknown dependency name', t => { libnpmdiff = async ([a, b], opts) => { t.equal(a, 'bar-fork@latest', 'should use latest tag') - t.equal(b, `bar@file:${resolve(path, 'node_modules/bar')}`, 'should target local node_modules pkg') + t.equal( + b, + `bar@file:${resolve(path, 'node_modules/bar')}`, + 'should target local node_modules pkg' + ) } npm.prefix = path config.diff = ['bar-fork', 'bar'] diff.exec([], err => { - if (err) + if (err) { throw err + } }) }) @@ -895,8 +949,9 @@ t.test('first arg is an unknown dependency name', t => { config.diff = ['bar', '^1.0.0'] diff.exec([], err => { - if (err) + if (err) { throw err + } }) }) @@ -910,8 +965,9 @@ t.test('first arg is an unknown dependency name', t => { config.diff = ['bar', 'bar-fork'] diff.exec([], err => { - if (err) + if (err) 
{ throw err + } }) }) @@ -928,8 +984,9 @@ t.test('first arg is an unknown dependency name', t => { npm.prefix = path diff.exec([], err => { - if (err) + if (err) { throw err + } }) }) @@ -943,10 +1000,14 @@ t.test('various options', t => { flatOptions.diffNameOnly = true libnpmdiff = async ([a, b], opts) => { - t.match(opts, { - ...npm.flatOptions, - diffNameOnly: true, - }, 'should forward nameOnly=true option') + t.match( + opts, + { + ...npm.flatOptions, + diffNameOnly: true, + }, + 'should forward nameOnly=true option' + ) } await diff.exec([]) @@ -960,13 +1021,14 @@ t.test('various options', t => { libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@2.1.4', 'should use expected spec') t.equal(b, 'foo@3.0.0', 'should use expected spec') - t.match(opts, { - ...npm.flatOptions, - diffFiles: [ - './foo.js', - './bar.js', - ], - }, 'should forward diffFiles values') + t.match( + opts, + { + ...npm.flatOptions, + diffFiles: ['./foo.js', './bar.js'], + }, + 'should forward diffFiles values' + ) } await diff.exec(['./foo.js', './bar.js']) @@ -978,13 +1040,14 @@ t.test('various options', t => { libnpmdiff = async ([a, b], opts) => { t.equal(a, 'foo@latest', 'should have default spec') t.equal(b, `file:${fooPath}`, 'should compare to cwd') - t.match(opts, { - ...npm.flatOptions, - diffFiles: [ - './foo.js', - './bar.js', - ], - }, 'should forward all remaining items as filenames') + t.match( + opts, + { + ...npm.flatOptions, + diffFiles: ['./foo.js', './bar.js'], + }, + 'should forward all remaining items as filenames' + ) } await diff.exec(['./foo.js', './bar.js']) @@ -1001,15 +1064,19 @@ t.test('various options', t => { flatOptions.diffText = true libnpmdiff = async ([a, b], opts) => { - t.match(opts, { - ...npm.flatOptions, - diffContext: 5, - diffIgnoreWhitespace: true, - diffNoPrefix: false, - diffSrcPrefix: 'foo/', - diffDstPrefix: 'bar/', - diffText: true, - }, 'should forward diff options') + t.match( + opts, + { + ...npm.flatOptions, + diffContext: 5, + diffIgnoreWhitespace: true, + diffNoPrefix: false, + diffSrcPrefix: 'foo/', + diffDstPrefix: 'bar/', + diffText: true, + }, + 'should forward diff options' + ) } await diff.exec([]) @@ -1062,10 +1129,14 @@ t.test('workspaces', t => { npm.prefix = path npm.localPrefix = path await diff.execWorkspaces([], []) - t.same(diffCalls, [ - ['workspace-a@latest', join(`file:${path}`, 'workspace-a')], - ['workspace-b@latest', join(`file:${path}`, 'workspace-b')], - ], 'should call libnpmdiff with workspaces params') + t.same( + diffCalls, + [ + ['workspace-a@latest', join(`file:${path}`, 'workspace-a')], + ['workspace-b@latest', join(`file:${path}`, 'workspace-b')], + ], + 'should call libnpmdiff with workspaces params' + ) }) t.test('one workspace', async t => { @@ -1076,9 +1147,11 @@ t.test('workspaces', t => { npm.prefix = path npm.localPrefix = path await diff.execWorkspaces([], ['workspace-a']) - t.same(diffCalls, [ - ['workspace-a@latest', join(`file:${path}`, 'workspace-a')], - ], 'should call libnpmdiff with workspaces params') + t.same( + diffCalls, + [['workspace-a@latest', join(`file:${path}`, 'workspace-a')]], + 'should call libnpmdiff with workspaces params' + ) }) t.test('invalid workspace', async t => { @@ -1087,14 +1160,8 @@ t.test('workspaces', t => { } npm.prefix = path npm.localPrefix = path - await t.rejects( - diff.execWorkspaces([], ['workspace-x']), - /No workspaces found/ - ) - await t.rejects( - diff.execWorkspaces([], ['workspace-x']), - /workspace-x/ - ) + await t.rejects(diff.execWorkspaces([], ['workspace-x']), /No 
workspaces found/) + await t.rejects(diff.execWorkspaces([], ['workspace-x']), /workspace-x/) }) t.end() }) diff --git a/test/lib/commands/dist-tag.js b/test/lib/commands/dist-tag.js index be66366f84337..6b45dc1167557 100644 --- a/test/lib/commands/dist-tag.js +++ b/test/lib/commands/dist-tag.js @@ -43,8 +43,9 @@ const routeMap = { // XXX overriding this does not appear to do anything, adding t.plan to things // that use it fails the test let npmRegistryFetchMock = (url, opts) => { - if (url === '/-/package/foo/dist-tags') + if (url === '/-/package/foo/dist-tags') { throw new Error('no package found') + } return routeMap[url] } @@ -52,8 +53,9 @@ let npmRegistryFetchMock = (url, opts) => { npmRegistryFetchMock.json = async (url, opts) => routeMap[url] const logger = (...msgs) => { - for (const msg of [...msgs]) + for (const msg of [...msgs]) { log += msg + ' ' + } log += '\n' } diff --git a/test/lib/commands/docs.js b/test/lib/commands/docs.js index 4853a7960c5e5..a3b31bd70656d 100644 --- a/test/lib/commands/docs.js +++ b/test/lib/commands/docs.js @@ -99,14 +99,14 @@ t.test('open docs urls', t => { const url = expect[pkg] t.match({ [url]: 1, - }, opened, `opened ${url}`, {opened}) + }, opened, `opened ${url}`, { opened }) }) }) }) t.test('open default package if none specified', async t => { await docs.exec([]) - t.equal(opened['https://example.com'], 1, 'opened expected url', {opened}) + t.equal(opened['https://example.com'], 1, 'opened expected url', { opened }) }) t.test('workspaces', (t) => { diff --git a/test/lib/commands/doctor.js b/test/lib/commands/doctor.js index 9db622878c620..e3ad5cc72692f 100644 --- a/test/lib/commands/doctor.js +++ b/test/lib/commands/doctor.js @@ -17,14 +17,16 @@ const output = [] let pingError const ping = async () => { - if (pingError) + if (pingError) { throw pingError + } } let whichError = null const which = async () => { - if (whichError) + if (whichError) { throw whichError + } return '/path/to/git' } @@ -51,10 +53,11 @@ const logs = { const clearLogs = (obj = logs) => { output.length = 0 for (const key in obj) { - if (Array.isArray(obj[key])) + if (Array.isArray(obj[key])) { obj[key].length = 0 - else + } else { delete obj[key] + } } } @@ -63,31 +66,35 @@ const npm = { registry: 'https://registry.npmjs.org/', }, log: { - info: (msg) => { + info: msg => { logs.info.push(msg) }, - newItem: (name) => { + newItem: name => { logs[name] = {} return { info: (_, msg) => { - if (!logs[name].info) + if (!logs[name].info) { logs[name].info = [] + } logs[name].info.push(msg) }, warn: (_, msg) => { - if (!logs[name].warn) + if (!logs[name].warn) { logs[name].warn = [] + } logs[name].warn.push(msg) }, error: (_, msg) => { - if (!logs[name].error) + if (!logs[name].error) { logs[name].error = [] + } logs[name].error.push(msg) }, silly: (_, msg) => { - if (!logs[name].silly) + if (!logs[name].silly) { logs[name].silly = [] + } logs[name].silly.push(msg) }, completeWork: () => {}, @@ -103,7 +110,7 @@ const npm = { }, }, version: '7.1.0', - output: (data) => { + output: data => { output.push(data) }, } @@ -162,18 +169,26 @@ t.test('node versions', t => { }) await doctor.exec([]) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + 
getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' + ) st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -205,14 +220,18 @@ t.test('node versions', t => { await doctor.exec([]) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' + ) st.strictSame(output, [], 'did not print output') }) @@ -241,23 +260,27 @@ t.test('node versions', t => { clearLogs() }) - await st.rejects( - doctor.exec([]), - /Some problems found/, - 'detected the ping error' + await st.rejects(doctor.exec([]), /Some problems found/, 'detected the ping error') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' ) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') st.match(output, /npm ping.*not ok/, 'ping output is ok') st.match(output, /npm -v.*ok/, 'npm -v output is ok') st.match(output, /node -v.*ok/, 'node -v output is ok') - st.match(output, /npm config get registry.*ok.*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry.*ok.*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git.*ok/, 'which git output is ok') st.match(output, /cached files.*ok/, 'cached files are ok') st.match(output, /local node_modules.*ok/, 'local node_modules are ok') @@ -297,18 +320,26 @@ t.test('node versions', t => { }) await winDoctor.exec([]) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: undefined, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: undefined, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' + ) st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*ok/, 
'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cache contents\s*ok/, 'cache contents is ok') }) @@ -337,23 +368,31 @@ t.test('node versions', t => { clearLogs() }) - await st.rejects( - doctor.exec([]), - /Some problems found/, - 'detected the ping error' + await st.rejects(doctor.exec([]), /Some problems found/, 'detected the ping error') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' + ) + st.match( + output, + /npm ping\s*not ok\s*111 this error is 111/, + 'ping output contains trimmed error' ) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') - st.match(output, /npm ping\s*not ok\s*111 this error is 111/, 'ping output contains trimmed error') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -387,23 +426,27 @@ t.test('node versions', t => { clearLogs() }) - await st.rejects( - doctor.exec([]), - /Some problems found/, - 'detected the ping error' + await st.rejects(doctor.exec([]), /Some problems found/, 'detected the ping error') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' ) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') st.match(output, /npm ping\s*not ok\s*generic error/, 'ping output contains trimmed error') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -437,23 +480,27 @@ t.test('node versions', t => { clearLogs() }) - await st.rejects( - doctor.exec([]), - /Some problems found/, - 'detected 
the out of date npm' + await st.rejects(doctor.exec([]), /Some problems found/, 'detected the out of date npm') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' ) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*not ok/, 'npm -v output is not ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -524,8 +571,9 @@ t.test('node versions', t => { return cb(err) } - if (p === join(dir, 'cache', 'baddir')) + if (p === join(dir, 'cache', 'baddir')) { err = new Error('broken') + } return cb(err, result) } @@ -577,27 +625,31 @@ t.test('node versions', t => { clearLogs() }) - await st.rejects( - doctor.exec([]), - /Some problems found/, - 'identified problems' + await st.rejects(doctor.exec([]), /Some problems found/, 'identified problems') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [join(dir, 'cache')]: { finished: true }, + [join(dir, 'local')]: { finished: true }, + [join(dir, 'global')]: { finished: true }, + [join(dir, 'localBin')]: { finished: true }, + [join(dir, 'globalBin')]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' ) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [join(dir, 'cache')]: { finished: true }, - [join(dir, 'local')]: { finished: true }, - [join(dir, 'global')]: { finished: true }, - [join(dir, 'localBin')]: { finished: true }, - [join(dir, 'globalBin')]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*not ok/, 'cached files are not ok') st.match(output, /local node_modules\s*not ok/, 'local node_modules are not ok') @@ -631,23 +683,27 @@ t.test('node versions', t => { clearLogs() }) - await st.rejects( - doctor.exec([]), - /Some problems found/, - 'detected the missing git' + await st.rejects(doctor.exec([]), /Some problems found/, 'detected the missing git') + st.match( + logs, + { + checkPing: 
{ finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' ) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*not ok/, 'which git output is not ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -687,18 +743,26 @@ t.test('node versions', t => { // cache verification problems get fixed and so do not throw an error await doctor.exec([]) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' + ) st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -740,18 +804,26 @@ t.test('node versions', t => { // cache verification problems get fixed and so do not throw an error await doctor.exec([]) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' + ) st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output 
is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -791,18 +863,26 @@ t.test('node versions', t => { // cache verification problems get fixed and so do not throw an error await doctor.exec([]) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' + ) st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -842,18 +922,26 @@ t.test('node versions', t => { /Some problems found/, 'detected the non-default registry' ) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' + ) st.match(output, /npm ping\s*ok/, 'ping output is ok') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*ok/, 'node -v output is ok') - st.match(output, /npm config get registry\s*not ok/, 'npm config get registry output is not ok') + st.match( + output, + /npm config get registry\s*not ok/, + 'npm config get registry output is not ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') @@ -901,23 +989,27 @@ t.test('outdated node version', vt => { clearLogs() }) - await st.rejects( - doctor.exec([]), - /Some problems found/, - 'detected the out of date nodejs' + await st.rejects(doctor.exec([]), /Some problems found/, 'detected the out of date nodejs') + st.match( + logs, + { + checkPing: { finished: true }, + getLatestNpmVersion: { finished: true }, + getLatestNodejsVersion: { finished: true }, + getGitPath: { finished: true }, + [dir]: { finished: true }, + verifyCachedFiles: { finished: true }, + }, + 'trackers all finished' ) - st.match(logs, { - checkPing: { finished: true }, - getLatestNpmVersion: { finished: true }, - getLatestNodejsVersion: { finished: true }, - getGitPath: { finished: true }, - [dir]: { finished: true }, - verifyCachedFiles: { finished: true }, - }, 'trackers all finished') st.match(output, /npm ping\s*ok/, 'ping output 
is ok') st.match(output, /npm -v\s*ok/, 'npm -v output is ok') st.match(output, /node -v\s*not ok/, 'node -v output is not ok') - st.match(output, /npm config get registry\s*ok\s*using default/, 'npm config get registry output is ok') + st.match( + output, + /npm config get registry\s*ok\s*using default/, + 'npm config get registry output is ok' + ) st.match(output, /which git\s*ok/, 'which git output is ok') st.match(output, /cached files\s*ok/, 'cached files are ok') st.match(output, /local node_modules\s*ok/, 'local node_modules are ok') diff --git a/test/lib/commands/edit.js b/test/lib/commands/edit.js index 39e1697b71c1e..92754f2823256 100644 --- a/test/lib/commands/edit.js +++ b/test/lib/commands/edit.js @@ -31,8 +31,9 @@ const npm = { dir: resolve(__dirname, '../../../node_modules'), exec: async (cmd, args) => { rebuildArgs = args - if (rebuildFail) + if (rebuildFail) { throw rebuildFail + } }, } diff --git a/test/lib/commands/exec.js b/test/lib/commands/exec.js index 25c1f789a97ad..4ab26568f1091 100644 --- a/test/lib/commands/exec.js +++ b/test/lib/commands/exec.js @@ -60,8 +60,9 @@ const npm = mockNpm({ const RUN_SCRIPTS = [] const runScript = async opt => { RUN_SCRIPTS.push(opt) - if (!PROGRESS_IGNORED && PROGRESS_ENABLED) + if (!PROGRESS_IGNORED && PROGRESS_ENABLED) { throw new Error('progress not disabled during run script!') + } } const MANIFESTS = {} @@ -134,20 +135,22 @@ t.test('npx foo, bin already exists locally', async t => { npm.localBin = resolve(path, 'node_modules', '.bin') await exec.exec(['foo', 'one arg', 'two arg']) - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foo' }}, - args: ['one arg', 'two arg'], - cache: flatOptions.cache, - npxCache: flatOptions.npxCache, - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { - PATH: [npm.localBin, ...PATH].join(delimiter), + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + cache: flatOptions.cache, + npxCache: flatOptions.npxCache, + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { + PATH: [npm.localBin, ...PATH].join(delimiter), + }, + stdio: 'inherit', }, - stdio: 'inherit', - }]) + ]) }) t.test('npx foo, bin already exists globally', async t => { @@ -163,18 +166,20 @@ t.test('npx foo, bin already exists globally', async t => { npm.globalBin = resolve(path, 'node_modules', '.bin') await exec.exec(['foo', 'one arg', 'two arg']) - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foo' }}, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { - PATH: [npm.globalBin, ...PATH].join(delimiter), + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { + PATH: [npm.globalBin, ...PATH].join(delimiter), + }, + stdio: 'inherit', }, - stdio: 'inherit', - }]) + ]) }) t.test('npm exec foo, already present locally', async t => { @@ -196,23 +201,25 @@ t.test('npm exec foo, already present locally', async t => { t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foo' } }, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH: process.env.PATH }, - stdio: 'inherit', - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 
'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH: process.env.PATH }, + stdio: 'inherit', + }, + ]) }) t.test('npm exec , run interactive shell', t => { CI_NAME = null const { isTTY } = process.stdin process.stdin.isTTY = true - t.teardown(() => process.stdin.isTTY = isTTY) + t.teardown(() => (process.stdin.isTTY = isTTY)) const run = async (t, doRun) => { LOG_WARN.length = 0 @@ -226,18 +233,21 @@ t.test('npm exec , run interactive shell', t => { t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') if (doRun) { - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'shell-cmd' } }, - args: [], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH: process.env.PATH }, - stdio: 'inherit', - }]) - } else + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'shell-cmd' } }, + args: [], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH: process.env.PATH }, + stdio: 'inherit', + }, + ]) + } else { t.strictSame(RUN_SCRIPTS, []) + } RUN_SCRIPTS.length = 0 } @@ -246,9 +256,16 @@ t.test('npm exec , run interactive shell', t => { process.stdin.isTTY = true await run(t, true) t.strictSame(LOG_WARN, []) - t.strictSame(npm._mockOutputs, [ - [`\nEntering npm script environment at location:\n${process.cwd()}\nType 'exit' or ^D when finished\n`], - ], 'printed message about interactive shell') + t.strictSame( + npm._mockOutputs, + [ + [ + /* eslint-disable-next-line max-len */ + `\nEntering npm script environment at location:\n${process.cwd()}\nType 'exit' or ^D when finished\n`, + ], + ], + 'printed message about interactive shell' + ) }) t.test('print message with color when tty and not in CI', async t => { @@ -259,9 +276,16 @@ t.test('npm exec , run interactive shell', t => { await run(t, true) t.strictSame(LOG_WARN, []) - t.strictSame(npm._mockOutputs, [ - [`\u001b[0m\u001b[0m\n\u001b[0mEntering npm script environment\u001b[0m\u001b[0m at location:\u001b[0m\n\u001b[0m\u001b[2m${process.cwd()}\u001b[22m\u001b[0m\u001b[1m\u001b[22m\n\u001b[1mType 'exit' or ^D when finished\u001b[22m\n\u001b[1m\u001b[22m`], - ], 'printed message about interactive shell') + t.strictSame( + npm._mockOutputs, + [ + [ + /* eslint-disable-next-line max-len */ + `\u001b[0m\u001b[0m\n\u001b[0mEntering npm script environment\u001b[0m\u001b[0m at location:\u001b[0m\n\u001b[0m\u001b[2m${process.cwd()}\u001b[22m\u001b[0m\u001b[1m\u001b[22m\n\u001b[1mType 'exit' or ^D when finished\u001b[22m\n\u001b[1m\u001b[22m`, + ], + ], + 'printed message about interactive shell' + ) }) t.test('no message when not TTY', async t => { @@ -276,9 +300,7 @@ t.test('npm exec , run interactive shell', t => { CI_NAME = 'travis-ci' process.stdin.isTTY = true await run(t, false) - t.strictSame(LOG_WARN, [ - ['exec', 'Interactive mode disabled in CI environment'], - ]) + t.strictSame(LOG_WARN, [['exec', 'Interactive mode disabled in CI environment']]) t.strictSame(npm._mockOutputs, [], 'no message about interactive shell') }) @@ -289,9 +311,11 @@ t.test('npm exec , run interactive shell', t => { await exec.exec([]) - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: /sh|cmd/ } }, - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: /sh|cmd/ } }, + }, + ]) LOG_WARN.length = 0 ARB_CTOR.length = 0 @@ -325,19 +349,21 @@ t.test('npm exec foo, not present locally or in central loc', async t => { await exec.exec(['foo', 'one arg', 'two arg']) 
t.strictSame(MKDIRPS, [installDir], 'need to make install dir') t.match(ARB_CTOR, [{ path }]) - t.match(ARB_REIFY, [{add: ['foo@'], legacyPeerDeps: false}], 'need to install foo@') + t.match(ARB_REIFY, [{ add: ['foo@'], legacyPeerDeps: false }], 'need to install foo@') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foo' } }, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH }, - stdio: 'inherit', - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH }, + stdio: 'inherit', + }, + ]) }) t.test('npm exec foo, not present locally but in central loc', async t => { @@ -364,16 +390,18 @@ t.test('npm exec foo, not present locally but in central loc', async t => { t.match(ARB_REIFY, [], 'no need to install again, already there') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foo' } }, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH }, - stdio: 'inherit', - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH }, + stdio: 'inherit', + }, + ]) }) t.test('npm exec foo, present locally but wrong version', async t => { @@ -400,16 +428,18 @@ t.test('npm exec foo, present locally but wrong version', async t => { t.match(ARB_REIFY, [{ add: ['foo@2.x'], legacyPeerDeps: false }], 'need to add foo@2.x') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foo' } }, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH }, - stdio: 'inherit', - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH }, + stdio: 'inherit', + }, + ]) }) t.test('npm exec --package=foo bar', async t => { @@ -433,16 +463,18 @@ t.test('npm exec --package=foo bar', async t => { t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'bar' } }, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH: process.env.PATH }, - stdio: 'inherit', - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'bar' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH: process.env.PATH }, + stdio: 'inherit', + }, + ]) }) t.test('npm exec @foo/bar -- --some=arg, locally installed', async t => { @@ -471,87 +503,97 @@ t.test('npm exec @foo/bar -- --some=arg, locally installed', async t => { t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 
'progress re-enabled') - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'bar' } }, - args: ['--some=arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH: process.env.PATH }, - stdio: 'inherit', - }]) -}) - -t.test('npm exec @foo/bar, with same bin alias and no unscoped named bin, locally installed', async t => { - const foobarManifest = { - name: '@foo/bar', - version: '1.2.3', - bin: { - baz: 'corge', // pick the first one - qux: 'corge', - quux: 'corge', - }, - } - const path = t.testdir({ - node_modules: { - '@foo/bar': { - 'package.json': JSON.stringify(foobarManifest), - }, + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'bar' } }, + args: ['--some=arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH: process.env.PATH }, + stdio: 'inherit', }, - }) - npm.localPrefix = path - ARB_ACTUAL_TREE[path] = { - children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), - } - MANIFESTS['@foo/bar'] = foobarManifest - await exec.exec(['@foo/bar', 'one arg', 'two arg']) - t.strictSame(MKDIRPS, [], 'no need to make any dirs') - t.match(ARB_CTOR, [{ path }]) - t.strictSame(ARB_REIFY, [], 'no need to reify anything') - t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'baz' } }, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH: process.env.PATH }, - stdio: 'inherit', - }]) + ]) }) -t.test('npm exec @foo/bar, with different bin alias and no unscoped named bin, locally installed', async t => { - const path = t.testdir() - npm.localPrefix = path - ARB_ACTUAL_TREE[path] = { - children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), - } - MANIFESTS['@foo/bar'] = { - name: '@foo/bar', - version: '1.2.3', - bin: { - foo: 'qux', - corge: 'qux', - baz: 'quux', - }, - _from: 'foo@', - _id: '@foo/bar@1.2.3', +t.test( + 'npm exec @foo/bar, with same bin alias and no unscoped named bin, locally installed', + async t => { + const foobarManifest = { + name: '@foo/bar', + version: '1.2.3', + bin: { + baz: 'corge', // pick the first one + qux: 'corge', + quux: 'corge', + }, + } + const path = t.testdir({ + node_modules: { + '@foo/bar': { + 'package.json': JSON.stringify(foobarManifest), + }, + }, + }) + npm.localPrefix = path + ARB_ACTUAL_TREE[path] = { + children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), + } + MANIFESTS['@foo/bar'] = foobarManifest + await exec.exec(['@foo/bar', 'one arg', 'two arg']) + t.strictSame(MKDIRPS, [], 'no need to make any dirs') + t.match(ARB_CTOR, [{ path }]) + t.strictSame(ARB_REIFY, [], 'no need to reify anything') + t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'baz' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH: process.env.PATH }, + stdio: 'inherit', + }, + ]) } - await t.rejects( - exec.exec(['@foo/bar']), - { +) + +t.test( + 'npm exec @foo/bar, with different bin alias and no unscoped named bin, locally installed', + async t => { + const path = t.testdir() + npm.localPrefix = path + ARB_ACTUAL_TREE[path] = { + children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), + } + MANIFESTS['@foo/bar'] = { + name: '@foo/bar', + version: '1.2.3', + bin: { + foo: 'qux', + corge: 'qux', + baz: 'quux', + }, + _from: 'foo@', + _id: '@foo/bar@1.2.3', + } 
+ await t.rejects(exec.exec(['@foo/bar']), { message: 'could not determine executable to run', pkgid: '@foo/bar@1.2.3', - } - ) -}) + }) + } +) t.test('run command with 2 packages, need install, verify sort', async t => { // test both directions, should use same install dir both times // also test the read() call here, verify that the prompts match - const cases = [['foo', 'bar'], ['bar', 'foo']] + const cases = [ + ['foo', 'bar'], + ['bar', 'foo'], + ] t.plan(cases.length) for (const packages of cases) { t.test(packages.join(', '), async t => { @@ -585,19 +627,21 @@ t.test('run command with 2 packages, need install, verify sort', async t => { await exec.exec(['foobar', 'one arg', 'two arg']) t.strictSame(MKDIRPS, [installDir], 'need to make install dir') t.match(ARB_CTOR, [{ path }]) - t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') + t.match(ARB_REIFY, [{ add, legacyPeerDeps: false }], 'need to install both packages') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foobar' } }, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH }, - stdio: 'inherit', - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foobar' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH }, + stdio: 'inherit', + }, + ]) }) } }) @@ -614,13 +658,10 @@ t.test('npm exec foo, no bin in package', async t => { _from: 'foo@', _id: 'foo@1.2.3', } - await t.rejects( - exec.exec(['foo']), - { - message: 'could not determine executable to run', - pkgid: 'foo@1.2.3', - } - ) + await t.rejects(exec.exec(['foo']), { + message: 'could not determine executable to run', + pkgid: 'foo@1.2.3', + }) }) t.test('npm exec foo, many bins in package, none named foo', async t => { @@ -639,13 +680,10 @@ t.test('npm exec foo, many bins in package, none named foo', async t => { _from: 'foo@', _id: 'foo@1.2.3', } - await t.rejects( - exec.exec(['foo']), - { - message: 'could not determine executable to run', - pkgid: 'foo@1.2.3', - } - ) + await t.rejects(exec.exec(['foo']), { + message: 'could not determine executable to run', + pkgid: 'foo@1.2.3', + }) }) t.test('npm exec -p foo -c "ls -laF"', async t => { @@ -666,23 +704,22 @@ t.test('npm exec -p foo -c "ls -laF"', async t => { t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no need to reify anything') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'ls -laF' } }, - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH: process.env.PATH }, - stdio: 'inherit', - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'ls -laF' } }, + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH: process.env.PATH }, + stdio: 'inherit', + }, + ]) }) t.test('positional args and --call together is an error', async t => { config.call = 'true' - await t.rejects( - exec.exec(['foo']), - exec.usage - ) + await t.rejects(exec.exec(['foo']), exec.usage) }) t.test('prompt when installs are needed if not already present and shell is a TTY', async t => { @@ -732,141 +769,165 @@ t.test('prompt when installs are needed if not already present and shell is a TT await exec.exec(['foobar']) t.strictSame(MKDIRPS, [installDir], 'need to make 
install dir') t.match(ARB_CTOR, [{ path }]) - t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') + t.match(ARB_REIFY, [{ add, legacyPeerDeps: false }], 'need to install both packages') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foobar' } }, - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH }, - stdio: 'inherit', - }]) - t.strictSame(READ, [{ - prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? ', - default: 'y', - }]) + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foobar' } }, + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH }, + stdio: 'inherit', + }, + ]) + t.strictSame(READ, [ + { + prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? ', + default: 'y', + }, + ]) }) -t.test('skip prompt when installs are needed if not already present and shell is not a tty (multiple packages)', async t => { - const stdoutTTY = process.stdout.isTTY - const stdinTTY = process.stdin.isTTY - t.teardown(() => { - process.stdout.isTTY = stdoutTTY - process.stdin.isTTY = stdinTTY - CI_NAME = 'travis-ci' - }) - process.stdout.isTTY = false - process.stdin.isTTY = false - CI_NAME = false +t.test( + /* eslint-disable-next-line max-len */ + 'skip prompt when installs are needed if not already present and shell is not a tty (multiple packages)', + async t => { + const stdoutTTY = process.stdout.isTTY + const stdinTTY = process.stdin.isTTY + t.teardown(() => { + process.stdout.isTTY = stdoutTTY + process.stdin.isTTY = stdinTTY + CI_NAME = 'travis-ci' + }) + process.stdout.isTTY = false + process.stdin.isTTY = false + CI_NAME = false - const packages = ['foo', 'bar'] - READ_RESULT = 'yolo' + const packages = ['foo', 'bar'] + READ_RESULT = 'yolo' - config.package = packages - config.yes = undefined + config.package = packages + config.yes = undefined - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b, 'en')) - const path = t.testdir() - const installDir = resolve('npx-cache-dir/07de77790e5f40f2') - npm.localPrefix = path - ARB_ACTUAL_TREE[path] = { - children: new Map(), - } - ARB_ACTUAL_TREE[installDir] = { - children: new Map(), - } - MANIFESTS.foo = { - name: 'foo', - version: '1.2.3', - bin: { - foo: 'foo', - }, - _from: 'foo@', - } - MANIFESTS.bar = { - name: 'bar', - version: '1.2.3', - bin: { - bar: 'bar', - }, - _from: 'bar@', + const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b, 'en')) + const path = t.testdir() + const installDir = resolve('npx-cache-dir/07de77790e5f40f2') + npm.localPrefix = path + ARB_ACTUAL_TREE[path] = { + children: new Map(), + } + ARB_ACTUAL_TREE[installDir] = { + children: new Map(), + } + MANIFESTS.foo = { + name: 'foo', + version: '1.2.3', + bin: { + foo: 'foo', + }, + _from: 'foo@', + } + MANIFESTS.bar = { + name: 'bar', + version: '1.2.3', + bin: { + bar: 'bar', + }, + _from: 'bar@', + } + await exec.exec(['foobar']) + t.strictSame(MKDIRPS, [installDir], 'need to make install dir') + t.match(ARB_CTOR, [{ path }]) + t.match(ARB_REIFY, [{ add, legacyPeerDeps: false }], 'need to install both packages') + t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') + const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foobar' } 
}, + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH }, + stdio: 'inherit', + }, + ]) + t.strictSame(READ, [], 'should not have prompted') + t.strictSame( + LOG_WARN, + [['exec', 'The following packages were not found and will be installed: bar, foo']], + 'should have printed a warning' + ) } - await exec.exec(['foobar']) - t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ path }]) - t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages') - t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') - const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foobar' } }, - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH }, - stdio: 'inherit', - }]) - t.strictSame(READ, [], 'should not have prompted') - t.strictSame(LOG_WARN, [['exec', 'The following packages were not found and will be installed: bar, foo']], 'should have printed a warning') -}) - -t.test('skip prompt when installs are needed if not already present and shell is not a tty (single package)', async t => { - const stdoutTTY = process.stdout.isTTY - const stdinTTY = process.stdin.isTTY - t.teardown(() => { - process.stdout.isTTY = stdoutTTY - process.stdin.isTTY = stdinTTY - CI_NAME = 'travis-ci' - }) - process.stdout.isTTY = false - process.stdin.isTTY = false - CI_NAME = false +) + +t.test( + /* eslint-disable-next-line max-len */ + 'skip prompt when installs are needed if not already present and shell is not a tty (single package)', + async t => { + const stdoutTTY = process.stdout.isTTY + const stdinTTY = process.stdin.isTTY + t.teardown(() => { + process.stdout.isTTY = stdoutTTY + process.stdin.isTTY = stdinTTY + CI_NAME = 'travis-ci' + }) + process.stdout.isTTY = false + process.stdin.isTTY = false + CI_NAME = false - const packages = ['foo'] - READ_RESULT = 'yolo' + const packages = ['foo'] + READ_RESULT = 'yolo' - config.package = packages - config.yes = undefined + config.package = packages + config.yes = undefined - const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b, 'en')) - const path = t.testdir() - const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') - npm.localPrefix = path - ARB_ACTUAL_TREE[path] = { - children: new Map(), - } - ARB_ACTUAL_TREE[installDir] = { - children: new Map(), - } - MANIFESTS.foo = { - name: 'foo', - version: '1.2.3', - bin: { - foo: 'foo', - }, - _from: 'foo@', + const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b, 'en')) + const path = t.testdir() + const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') + npm.localPrefix = path + ARB_ACTUAL_TREE[path] = { + children: new Map(), + } + ARB_ACTUAL_TREE[installDir] = { + children: new Map(), + } + MANIFESTS.foo = { + name: 'foo', + version: '1.2.3', + bin: { + foo: 'foo', + }, + _from: 'foo@', + } + await exec.exec(['foobar']) + t.strictSame(MKDIRPS, [installDir], 'need to make install dir') + t.match(ARB_CTOR, [{ path }]) + t.match(ARB_REIFY, [{ add, legacyPeerDeps: false }], 'need to install the package') + t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') + const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foobar' } }, + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH }, + stdio: 'inherit', + }, + ]) + 
t.strictSame(READ, [], 'should not have prompted') + t.strictSame( + LOG_WARN, + [['exec', 'The following package was not found and will be installed: foo']], + 'should have printed a warning' + ) } - await exec.exec(['foobar']) - t.strictSame(MKDIRPS, [installDir], 'need to make install dir') - t.match(ARB_CTOR, [{ path }]) - t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install the package') - t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') - const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foobar' } }, - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { PATH }, - stdio: 'inherit', - }]) - t.strictSame(READ, [], 'should not have prompted') - t.strictSame(LOG_WARN, [['exec', 'The following package was not found and will be installed: foo']], 'should have printed a warning') -}) +) t.test('abort if prompt rejected', async t => { const stdoutTTY = process.stdout.isTTY @@ -911,20 +972,18 @@ t.test('abort if prompt rejected', async t => { }, _from: 'bar@', } - await t.rejects( - exec.exec(['foobar']), - /canceled/, - 'should be canceled' - ) + await t.rejects(exec.exec(['foobar']), /canceled/, 'should be canceled') t.strictSame(MKDIRPS, [installDir], 'need to make install dir') t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no install performed') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.strictSame(RUN_SCRIPTS, []) - t.strictSame(READ, [{ - prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? ', - default: 'y', - }]) + t.strictSame(READ, [ + { + prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? ', + default: 'y', + }, + ]) }) t.test('abort if prompt false', async t => { @@ -970,20 +1029,18 @@ t.test('abort if prompt false', async t => { }, _from: 'bar@', } - await t.rejects( - exec.exec(['foobar']), - 'canceled', - 'should be canceled' - ) + await t.rejects(exec.exec(['foobar']), 'canceled', 'should be canceled') t.strictSame(MKDIRPS, [installDir], 'need to make install dir') t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no install performed') t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') t.strictSame(RUN_SCRIPTS, []) - t.strictSame(READ, [{ - prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? ', - default: 'y', - }]) + t.strictSame(READ, [ + { + prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? 
', + default: 'y', + }, + ]) }) t.test('abort if -n provided', async t => { @@ -1028,11 +1085,7 @@ t.test('abort if -n provided', async t => { }, _from: 'bar@', } - await t.rejects( - exec.exec(['foobar']), - /canceled/, - 'should be canceled' - ) + await t.rejects(exec.exec(['foobar']), /canceled/, 'should be canceled') t.strictSame(MKDIRPS, [installDir], 'need to make install dir') t.match(ARB_CTOR, [{ path }]) t.strictSame(ARB_REIFY, [], 'no install performed') @@ -1062,7 +1115,11 @@ t.test('forward legacyPeerDeps opt', async t => { config.yes = true flatOptions.legacyPeerDeps = true await exec.exec(['foo']) - t.match(ARB_REIFY, [{add: ['foo@'], legacyPeerDeps: true}], 'need to install foo@ using legacyPeerDeps opt') + t.match( + ARB_REIFY, + [{ add: ['foo@'], legacyPeerDeps: true }], + 'need to install foo@ using legacyPeerDeps opt' + ) }) t.test('workspaces', t => { @@ -1101,18 +1158,20 @@ t.test('workspaces', t => { t.test('with args, run scripts in the context of a workspace', async t => { await exec.execWorkspaces(['foo', 'one arg', 'two arg'], ['a', 'b']) - t.match(RUN_SCRIPTS, [{ - pkg: { scripts: { npx: 'foo' }}, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { - PATH: [npm.localBin, ...PATH].join(delimiter), + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { + PATH: [npm.localBin, ...PATH].join(delimiter), + }, + stdio: 'inherit', }, - stdio: 'inherit', - }]) + ]) }) t.test('no args, spawn interactive shell', async t => { @@ -1122,9 +1181,18 @@ t.test('workspaces', t => { await exec.execWorkspaces([], ['a']) t.strictSame(LOG_WARN, []) - t.strictSame(npm._mockOutputs, [ - [`\nEntering npm script environment in workspace a@1.0.0 at location:\n${resolve(npm.localPrefix, 'packages/a')}\nType 'exit' or ^D when finished\n`], - ], 'printed message about interactive shell') + t.strictSame( + npm._mockOutputs, + [ + [ + `\nEntering npm script environment in workspace a@1.0.0 at location:\n${resolve( + npm.localPrefix, + 'packages/a' + )}\nType 'exit' or ^D when finished\n`, + ], + ], + 'printed message about interactive shell' + ) npm.color = true flatOptions.color = true @@ -1132,9 +1200,20 @@ t.test('workspaces', t => { await exec.execWorkspaces([], ['a']) t.strictSame(LOG_WARN, []) - t.strictSame(npm._mockOutputs, [ - [`\u001b[0m\u001b[0m\n\u001b[0mEntering npm script environment\u001b[0m\u001b[0m in workspace \u001b[32ma@1.0.0\u001b[39m at location:\u001b[0m\n\u001b[0m\u001b[2m${resolve(npm.localPrefix, 'packages/a')}\u001b[22m\u001b[0m\u001b[1m\u001b[22m\n\u001b[1mType 'exit' or ^D when finished\u001b[22m\n\u001b[1m\u001b[22m`], - ], 'printed message about interactive shell') + t.strictSame( + npm._mockOutputs, + [ + [ + /* eslint-disable-next-line max-len */ + `\u001b[0m\u001b[0m\n\u001b[0mEntering npm script environment\u001b[0m\u001b[0m in workspace \u001b[32ma@1.0.0\u001b[39m at location:\u001b[0m\n\u001b[0m\u001b[2m${resolve( + npm.localPrefix, + 'packages/a' + /* eslint-disable-next-line max-len */ + )}\u001b[22m\u001b[0m\u001b[1m\u001b[22m\n\u001b[1mType 'exit' or ^D when finished\u001b[22m\n\u001b[1m\u001b[22m`, + ], + ], + 'printed message about interactive shell' + ) }) t.end() diff --git a/test/lib/commands/explore.js b/test/lib/commands/explore.js index 4ae10afc69e77..b2e7be2136b76 100644 --- a/test/lib/commands/explore.js +++ b/test/lib/commands/explore.js @@ -11,7 +11,7 
@@ const mockRPJ = async path => { } } RPJ_CALLED = path - return {some: 'package'} + return { some: 'package' } } let RUN_SCRIPT_ERROR = null @@ -19,8 +19,9 @@ let RUN_SCRIPT_EXIT_CODE = 0 let RUN_SCRIPT_SIGNAL = null let RUN_SCRIPT_EXEC = null const mockRunScript = ({ pkg, banner, path, event, stdio }) => { - if (event !== '_explore') + if (event !== '_explore') { throw new Error('got wrong event name') + } RUN_SCRIPT_EXEC = pkg.scripts._explore diff --git a/test/lib/commands/fund.js b/test/lib/commands/fund.js index 7a86389f084df..b82ed93fe5c7e 100644 --- a/test/lib/commands/fund.js +++ b/test/lib/commands/fund.js @@ -102,10 +102,7 @@ const nestedMultipleFundingPackages = { 'package.json': JSON.stringify({ name: 'nested-multiple-funding-packages', version, - funding: [ - 'https://one.example.com', - 'https://two.example.com', - ], + funding: ['https://one.example.com', 'https://two.example.com'], dependencies: { foo: '*', }, @@ -129,10 +126,7 @@ const nestedMultipleFundingPackages = { 'package.json': JSON.stringify({ name: 'bar', version, - funding: [ - 'http://collective.example.com', - { url: 'http://sponsors.example.com/you' }, - ], + funding: ['http://collective.example.com', { url: 'http://sponsors.example.com/you' }], }), }, }, @@ -188,25 +182,28 @@ const config = { which: null, } const openUrl = async (npm, url, msg) => { - if (url === 'http://npmjs.org') + if (url === 'http://npmjs.org') { throw new Error('ERROR') + } if (config.json) { printUrl = JSON.stringify({ title: msg, url: url, }) - } else + } else { printUrl = `${msg}:\n ${url}` + } } const Fund = t.mock('../../../lib/commands/fund.js', { '../../../lib/utils/open-url.js': openUrl, pacote: { - manifest: (arg) => arg.name === 'ntl' - ? Promise.resolve({ - funding: 'http://example.com/pacote', - }) - : Promise.reject(new Error('ERROR')), + manifest: arg => + arg.name === 'ntl' + ? 
Promise.resolve({ + funding: 'http://example.com/pacote', + }) + : Promise.reject(new Error('ERROR')), }, }) const npm = mockNpm({ @@ -278,10 +275,7 @@ t.test('fund containing multi-level nested deps with no funding', async t => { npm.prefix = t.testdir(nestedNoFundingPackages) await fund.exec([]) - t.matchSnapshot( - result, - 'should omit dependencies with no funding declared' - ) + t.matchSnapshot(result, 'should omit dependencies with no funding declared') t.end() }) @@ -368,11 +362,7 @@ t.test('fund does not support global', async t => { npm.prefix = t.testdir({}) config.global = true - await t.rejects( - fund.exec([]), - { code: 'EFUNDGLOBAL' }, - 'should throw EFUNDGLOBAL error' - ) + await t.rejects(fund.exec([]), { code: 'EFUNDGLOBAL' }, 'should throw EFUNDGLOBAL error') config.global = false }) @@ -437,11 +427,7 @@ t.test('fund using symlink ref', async t => { // using symlinked ref await fund.exec(['./node_modules/a']) - t.match( - printUrl, - 'http://example.com/a', - 'should retrieve funding url from symlink' - ) + t.match(printUrl, 'http://example.com/a', 'should retrieve funding url from symlink') printUrl = '' result = '' @@ -449,11 +435,7 @@ t.test('fund using symlink ref', async t => { // using target ref await fund.exec(['./a']) - t.match( - printUrl, - 'http://example.com/a', - 'should retrieve funding url from symlink target' - ) + t.match(printUrl, 'http://example.com/a', 'should retrieve funding url from symlink target') }) t.test('fund using data from actual tree', async t => { @@ -568,7 +550,11 @@ t.test('fund using bad which value', async t => { await t.rejects( fund.exec(['bar']), - { code: 'EFUNDNUMBER', message: '`npm fund [<@scope>/] [--which=fundingSourceNumber]` must be given a positive integer' }, + { + code: 'EFUNDNUMBER', + /* eslint-disable-next-line max-len */ + message: '`npm fund [<@scope>/] [--which=fundingSourceNumber]` must be given a positive integer', + }, 'should have bad which option error message' ) config.which = null @@ -595,11 +581,7 @@ t.test('fund a package throws on openUrl', async t => { }), }) - await t.rejects( - fund.exec(['.']), - { message: 'ERROR' }, - 'should throw unknown error' - ) + await t.rejects(fund.exec(['.']), { message: 'ERROR' }, 'should throw unknown error') }) t.test('fund a package with type and multiple sources', async t => { @@ -714,10 +696,7 @@ t.test('sub dep with fund info and a parent with no funding info', async t => { 'package.json': JSON.stringify({ name: 'c', version: '1.0.0', - funding: [ - 'http://example.com/c', - 'http://example.com/c-other', - ], + funding: ['http://example.com/c', 'http://example.com/c-other'], }), }, }, @@ -745,10 +724,7 @@ t.test('workspaces', async t => { 'package.json': JSON.stringify({ name: 'c', version: '1.0.0', - funding: [ - 'http://example.com/c', - 'http://example.com/c-other', - ], + funding: ['http://example.com/c', 'http://example.com/c-other'], }), }, d: { @@ -785,14 +761,12 @@ t.test('workspaces', async t => { await fund.execWorkspaces([], ['a']) - t.matchSnapshot(result, - 'should display only filtered workspace name and its deps') + t.matchSnapshot(result, 'should display only filtered workspace name and its deps') result = '' await fund.execWorkspaces([], ['./packages/a']) - t.matchSnapshot(result, - 'should display only filtered workspace path and its deps') + t.matchSnapshot(result, 'should display only filtered workspace path and its deps') }) }) diff --git a/test/lib/commands/help-search.js b/test/lib/commands/help-search.js index 9faa38a32fd02..e13647a6582b6 
100644 --- a/test/lib/commands/help-search.js +++ b/test/lib/commands/help-search.js @@ -4,7 +4,7 @@ const { fake: mockNpm } = require('../../fixtures/mock-npm') const ansicolors = require('ansicolors') const OUTPUT = [] -const output = (msg) => { +const output = msg => { OUTPUT.push(msg) } @@ -20,17 +20,22 @@ const npm = mockNpm({ }, usage: 'npm test usage', exec: async () => { - if (npmHelpErr) + if (npmHelpErr) { throw npmHelpErr + } }, output, }) let globRoot = null const globDir = { - 'npm-exec.md': 'the exec command\nhelp has multiple lines of exec help\none of them references exec', + 'npm-exec.md': + 'the exec command\nhelp has multiple lines of exec help\none of them references exec', + /* eslint-disable-next-line max-len */ 'npm-something.md': 'another\ncommand you run\nthat\nreferences exec\nand has multiple lines\nwith no matches\nthat will be ignored\nand another line\nthat does have exec as well', + /* eslint-disable-next-line max-len */ 'npm-run-script.md': 'the scripted run-script command runs scripts\nand has lines\nsome of which dont match the string run\nor script\nscript', + /* eslint-disable-next-line max-len */ 'npm-install.md': 'does a thing in a script\nif a thing does not exist in a thing you run\nto install it and run it maybe in a script', 'npm-help.md': 'will run the `help-search` command if you need to run it to help you search', 'npm-help-search.md': 'is the help search command\nthat you get if you run help-search', @@ -39,7 +44,10 @@ const globDir = { 'npm-extra-useless.md': 'exec\nexec\nexec', } const glob = (p, cb) => - cb(null, Object.keys(globDir).map((file) => join(globRoot, file))) + cb( + null, + Object.keys(globDir).map(file => join(globRoot, file)) + ) const HelpSearch = t.mock('../../../lib/commands/help-search.js', { glob, @@ -99,7 +107,11 @@ t.test('npm help-search long output with color', async t => { await helpSearch.exec(['help-search']) const highlightedText = ansicolors.bgBlack(ansicolors.red('help-search')) - t.equal(OUTPUT.some((line) => line.includes(highlightedText)), true, 'returned highlighted search terms') + t.equal( + OUTPUT.some(line => line.includes(highlightedText)), + true, + 'returned highlighted search terms' + ) }) t.test('npm help-search no args', async t => { diff --git a/test/lib/commands/help.js b/test/lib/commands/help.js index 9ea2b9d92ae7e..b76234d996627 100644 --- a/test/lib/commands/help.js +++ b/test/lib/commands/help.js @@ -12,7 +12,7 @@ const OUTPUT = [] const npm = { usage: 'test npm usage', config: { - get: (key) => npmConfig[key], + get: key => npmConfig[key], set: (key, value) => { npmConfig[key] = value }, @@ -21,12 +21,13 @@ const npm = { }, }, exec: async (cmd, args) => { - if (cmd === 'help-search') + if (cmd === 'help-search') { helpSearchArgs = args - else if (cmd === 'help') + } else if (cmd === 'help') { return { usage: 'npm help ' } + } }, - deref: (cmd) => {}, + deref: cmd => {}, output: msg => { OUTPUT.push(msg) }, @@ -89,7 +90,11 @@ t.test('npm help completion', async t => { const threeArgs = await help.completion({ conf: { argv: { remain: ['one', 'two', 'three'] } } }) t.strictSame(threeArgs, [], 'outputs no results when more than 2 args are provided') globErr = new Error('glob failed') - t.rejects(help.completion({ conf: { argv: { remain: [] } } }), /glob failed/, 'glob errors propagate') + t.rejects( + help.completion({ conf: { argv: { remain: [] } } }), + /glob failed/, + 'glob errors propagate' + ) }) t.test('npm help multiple args calls search', async t => { @@ -121,11 +126,7 @@ t.test('npm 
help glob errors propagate', async t => { spawnArgs = null }) - await t.rejects( - help.exec(['whoami']), - /glob failed/, - 'glob error propagates' - ) + await t.rejects(help.exec(['whoami']), /glob failed/, 'glob error propagates') }) t.test('npm help whoami', async t => { @@ -144,10 +145,7 @@ t.test('npm help whoami', async t => { t.test('npm help 1 install', async t => { npmConfig.viewer = 'browser' - globResult = [ - '/root/man/man5/install.5', - '/root/man/man1/npm-install.1', - ] + globResult = ['/root/man/man5/install.5', '/root/man/man1/npm-install.1'] t.teardown(() => { npmConfig.viewer = undefined @@ -164,9 +162,7 @@ t.test('npm help 1 install', async t => { t.test('npm help 5 install', async t => { npmConfig.viewer = 'browser' - globResult = [ - '/root/man/man5/install.5', - ] + globResult = ['/root/man/man5/install.5'] t.teardown(() => { npmConfig.viewer = undefined @@ -184,9 +180,7 @@ t.test('npm help 5 install', async t => { t.test('npm help 7 config', async t => { npmConfig.viewer = 'browser' - globResult = [ - '/root/man/man7/config.7', - ] + globResult = ['/root/man/man7/config.7'] t.teardown(() => { npmConfig.viewer = undefined globParam = null @@ -218,10 +212,7 @@ t.test('npm help package.json redirects to package-json', async t => { t.test('npm help ?(un)star', async t => { npmConfig.viewer = 'woman' - globResult = [ - '/root/man/man1/npm-star.1', - '/root/man/man1/npm-unstar.1', - ] + globResult = ['/root/man/man1/npm-star.1', '/root/man/man1/npm-unstar.1'] t.teardown(() => { npmConfig.viewer = undefined globResult = globDefaults @@ -232,16 +223,17 @@ t.test('npm help ?(un)star', async t => { await help.exec(['?(un)star']) t.equal(spawnBin, 'emacsclient', 'maps woman to emacs correctly') - t.strictSame(spawnArgs, ['-e', `(woman-find-file '/root/man/man1/npm-star.1')`], 'passes the correct arguments') + t.strictSame( + spawnArgs, + ['-e', `(woman-find-file '/root/man/man1/npm-star.1')`], + 'passes the correct arguments' + ) }) t.test('npm help - woman viewer propagates errors', async t => { npmConfig.viewer = 'woman' spawnCode = 1 - globResult = [ - '/root/man/man1/npm-star.1', - '/root/man/man1/npm-unstar.1', - ] + globResult = ['/root/man/man1/npm-star.1', '/root/man/man1/npm-unstar.1'] t.teardown(() => { npmConfig.viewer = undefined spawnCode = 0 @@ -256,7 +248,11 @@ t.test('npm help - woman viewer propagates errors', async t => { 'received the correct error' ) t.equal(spawnBin, 'emacsclient', 'maps woman to emacs correctly') - t.strictSame(spawnArgs, ['-e', `(woman-find-file '/root/man/man1/npm-star.1')`], 'passes the correct arguments') + t.strictSame( + spawnArgs, + ['-e', `(woman-find-file '/root/man/man1/npm-star.1')`], + 'passes the correct arguments' + ) }) t.test('npm help un*', async t => { @@ -291,11 +287,7 @@ t.test('npm help - man viewer propagates errors', async t => { spawnArgs = null }) - await t.rejects( - help.exec(['un*']), - /help process exited with code: 1/, - 'received correct error' - ) + await t.rejects(help.exec(['un*']), /help process exited with code: 1/, 'received correct error') t.equal(spawnBin, 'man', 'calls man by default') t.strictSame(spawnArgs, ['/root/man/man1/npm-uninstall.1'], 'passes the correct arguments') }) diff --git a/test/lib/commands/hook.js b/test/lib/commands/hook.js index af162e4fce37c..cd4b38787280f 100644 --- a/test/lib/commands/hook.js +++ b/test/lib/commands/hook.js @@ -9,7 +9,7 @@ const npm = { loglevel: 'info', unicode: false, }, - output: (msg) => { + output: msg => { output.push(msg) }, } @@ -28,13 +28,14 @@ 
const libnpmhook = { hookArgs = { pkg, uri, secret, opts } return { id: 1, name: pkg.replace(/^@/, ''), type: pkgTypes[pkg], endpoint: uri } }, - ls: async (opts) => { + ls: async opts => { hookArgs = opts let id = 0 - if (hookResponse) + if (hookResponse) { return hookResponse + } - return Object.keys(pkgTypes).map((name) => ({ + return Object.keys(pkgTypes).map(name => ({ id: ++id, name: name.replace(/^@/, ''), type: pkgTypes[name], @@ -45,7 +46,12 @@ const libnpmhook = { rm: async (id, opts) => { hookArgs = { id, opts } const pkg = Object.keys(pkgTypes)[0] - return { id: 1, name: pkg.replace(/^@/, ''), type: pkgTypes[pkg], endpoint: 'https://google.com' } + return { + id: 1, + name: pkg.replace(/^@/, ''), + type: pkgTypes[pkg], + endpoint: 'https://google.com', + } }, update: async (id, uri, secret, opts) => { hookArgs = { id, uri, secret, opts } @@ -61,10 +67,7 @@ const Hook = t.mock('../../../lib/commands/hook.js', { const hook = new Hook(npm) t.test('npm hook no args', async t => { - await t.rejects( - hook.exec([]), - hook.usage, 'throws usage with no arguments' - ) + await t.rejects(hook.exec([]), hook.usage, 'throws usage with no arguments') }) t.test('npm hook add', async t => { @@ -75,12 +78,16 @@ t.test('npm hook add', async t => { await hook.exec(['add', 'semver', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - pkg: 'semver', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'provided the correct arguments to libnpmhook') + t.strictSame( + hookArgs, + { + pkg: 'semver', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'provided the correct arguments to libnpmhook' + ) t.strictSame(output, ['+ semver -> https://google.com'], 'prints the correct output') }) @@ -94,12 +101,16 @@ t.test('npm hook add - unicode output', async t => { await hook.exec(['add', 'semver', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - pkg: 'semver', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'provided the correct arguments to libnpmhook') + t.strictSame( + hookArgs, + { + pkg: 'semver', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'provided the correct arguments to libnpmhook' + ) t.strictSame(output, ['+ semver ➜ https://google.com'], 'prints the correct output') }) @@ -113,18 +124,26 @@ t.test('npm hook add - json output', async t => { await hook.exec(['add', '@npmcli', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - pkg: '@npmcli', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'provided the correct arguments to libnpmhook') - t.strictSame(JSON.parse(output[0]), { - id: 1, - name: 'npmcli', - endpoint: 'https://google.com', - type: 'scope', - }, 'prints the correct json output') + t.strictSame( + hookArgs, + { + pkg: '@npmcli', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'provided the correct arguments to libnpmhook' + ) + t.strictSame( + JSON.parse(output[0]), + { + id: 1, + name: 'npmcli', + endpoint: 'https://google.com', + type: 'scope', + }, + 'prints the correct json output' + ) }) t.test('npm hook add - parseable output', async t => { @@ -137,18 +156,26 @@ t.test('npm hook add - parseable output', async t => { await hook.exec(['add', '@npmcli', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - pkg: '@npmcli', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 
'provided the correct arguments to libnpmhook') - t.strictSame(output[0].split(/\t/), [ - 'id', 'name', 'type', 'endpoint', - ], 'prints the correct parseable output headers') - t.strictSame(output[1].split(/\t/), [ - '1', 'npmcli', 'scope', 'https://google.com', - ], 'prints the correct parseable values') + t.strictSame( + hookArgs, + { + pkg: '@npmcli', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'provided the correct arguments to libnpmhook' + ) + t.strictSame( + output[0].split(/\t/), + ['id', 'name', 'type', 'endpoint'], + 'prints the correct parseable output headers' + ) + t.strictSame( + output[1].split(/\t/), + ['1', 'npmcli', 'scope', 'https://google.com'], + 'prints the correct parseable values' + ) }) t.test('npm hook add - silent output', async t => { @@ -161,12 +188,16 @@ t.test('npm hook add - silent output', async t => { await hook.exec(['add', '@npmcli', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - pkg: '@npmcli', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'provided the correct arguments to libnpmhook') + t.strictSame( + hookArgs, + { + pkg: '@npmcli', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'provided the correct arguments to libnpmhook' + ) t.strictSame(output, [], 'printed no output') }) @@ -178,10 +209,14 @@ t.test('npm hook ls', async t => { await hook.exec(['ls']) - t.strictSame(hookArgs, { - ...npm.flatOptions, - package: undefined, - }, 'received the correct arguments') + t.strictSame( + hookArgs, + { + ...npm.flatOptions, + package: undefined, + }, + 'received the correct arguments' + ) t.equal(output[0], 'You have 3 hooks configured.', 'prints the correct header') const out = require('../../../lib/utils/ansi-trim')(output[1]) t.match(out, /semver.*https:\/\/google.com.*\n.*\n.*never triggered/, 'prints package hook') @@ -199,20 +234,26 @@ t.test('npm hook ls, no results', async t => { await hook.exec(['ls']) - t.strictSame(hookArgs, { - ...npm.flatOptions, - package: undefined, - }, 'received the correct arguments') - t.equal(output[0], 'You don\'t have any hooks configured yet.', 'prints the correct result') + t.strictSame( + hookArgs, + { + ...npm.flatOptions, + package: undefined, + }, + 'received the correct arguments' + ) + t.equal(output[0], "You don't have any hooks configured yet.", 'prints the correct result') }) t.test('npm hook ls, single result', async t => { - hookResponse = [{ - id: 1, - name: 'semver', - type: 'package', - endpoint: 'https://google.com', - }] + hookResponse = [ + { + id: 1, + name: 'semver', + type: 'package', + endpoint: 'https://google.com', + }, + ] t.teardown(() => { hookResponse = null @@ -222,10 +263,14 @@ t.test('npm hook ls, single result', async t => { await hook.exec(['ls']) - t.strictSame(hookArgs, { - ...npm.flatOptions, - package: undefined, - }, 'received the correct arguments') + t.strictSame( + hookArgs, + { + ...npm.flatOptions, + package: undefined, + }, + 'received the correct arguments' + ) t.equal(output[0], 'You have one hook configured.', 'prints the correct header') const out = require('../../../lib/utils/ansi-trim')(output[1]) t.match(out, /semver.*https:\/\/google.com.*\n.*\n.*never triggered/, 'prints package hook') @@ -241,27 +286,39 @@ t.test('npm hook ls - json output', async t => { await hook.exec(['ls']) - t.strictSame(hookArgs, { - ...npm.flatOptions, - package: undefined, - }, 'received the correct arguments') + t.strictSame( + hookArgs, + { + 
...npm.flatOptions, + package: undefined, + }, + 'received the correct arguments' + ) const out = JSON.parse(output[0]) - t.match(out, [{ - id: 1, - name: 'semver', - type: 'package', - endpoint: 'https://google.com', - }, { - id: 2, - name: 'npmcli', - type: 'scope', - endpoint: 'https://google.com', - }, { - id: 3, - name: 'npm', - type: 'owner', - endpoint: 'https://google.com', - }], 'prints the correct output') + t.match( + out, + [ + { + id: 1, + name: 'semver', + type: 'package', + endpoint: 'https://google.com', + }, + { + id: 2, + name: 'npmcli', + type: 'scope', + endpoint: 'https://google.com', + }, + { + id: 3, + name: 'npm', + type: 'owner', + endpoint: 'https://google.com', + }, + ], + 'prints the correct output' + ) }) t.test('npm hook ls - parseable output', async t => { @@ -274,16 +331,24 @@ t.test('npm hook ls - parseable output', async t => { await hook.exec(['ls']) - t.strictSame(hookArgs, { - ...npm.flatOptions, - package: undefined, - }, 'received the correct arguments') - t.strictSame(output.map(line => line.split(/\t/)), [ - ['id', 'name', 'type', 'endpoint', 'last_delivery'], - ['1', 'semver', 'package', 'https://google.com', ''], - ['2', 'npmcli', 'scope', 'https://google.com', `${now}`], - ['3', 'npm', 'owner', 'https://google.com', ''], - ], 'prints the correct result') + t.strictSame( + hookArgs, + { + ...npm.flatOptions, + package: undefined, + }, + 'received the correct arguments' + ) + t.strictSame( + output.map(line => line.split(/\t/)), + [ + ['id', 'name', 'type', 'endpoint', 'last_delivery'], + ['1', 'semver', 'package', 'https://google.com', ''], + ['2', 'npmcli', 'scope', 'https://google.com', `${now}`], + ['3', 'npm', 'owner', 'https://google.com', ''], + ], + 'prints the correct result' + ) }) t.test('npm hook ls - silent output', async t => { @@ -296,10 +361,14 @@ t.test('npm hook ls - silent output', async t => { await hook.exec(['ls']) - t.strictSame(hookArgs, { - ...npm.flatOptions, - package: undefined, - }, 'received the correct arguments') + t.strictSame( + hookArgs, + { + ...npm.flatOptions, + package: undefined, + }, + 'received the correct arguments' + ) t.strictSame(output, [], 'printed no output') }) @@ -311,13 +380,15 @@ t.test('npm hook rm', async t => { await hook.exec(['rm', '1']) - t.strictSame(hookArgs, { - id: '1', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(output, [ - '- semver X https://google.com', - ], 'printed the correct output') + t.strictSame( + hookArgs, + { + id: '1', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame(output, ['- semver X https://google.com'], 'printed the correct output') }) t.test('npm hook rm - unicode output', async t => { @@ -330,13 +401,15 @@ t.test('npm hook rm - unicode output', async t => { await hook.exec(['rm', '1']) - t.strictSame(hookArgs, { - id: '1', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(output, [ - '- semver ✘ https://google.com', - ], 'printed the correct output') + t.strictSame( + hookArgs, + { + id: '1', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame(output, ['- semver ✘ https://google.com'], 'printed the correct output') }) t.test('npm hook rm - silent output', async t => { @@ -349,10 +422,14 @@ t.test('npm hook rm - silent output', async t => { await hook.exec(['rm', '1']) - t.strictSame(hookArgs, { - id: '1', - opts: npm.flatOptions, - }, 'received the correct arguments') + t.strictSame( + hookArgs, + { + id: '1', + opts: npm.flatOptions, 
+ }, + 'received the correct arguments' + ) t.strictSame(output, [], 'printed no output') }) @@ -366,16 +443,24 @@ t.test('npm hook rm - json output', async t => { await hook.exec(['rm', '1']) - t.strictSame(hookArgs, { - id: '1', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(JSON.parse(output[0]), { - id: 1, - name: 'semver', - type: 'package', - endpoint: 'https://google.com', - }, 'printed correct output') + t.strictSame( + hookArgs, + { + id: '1', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame( + JSON.parse(output[0]), + { + id: 1, + name: 'semver', + type: 'package', + endpoint: 'https://google.com', + }, + 'printed correct output' + ) }) t.test('npm hook rm - parseable output', async t => { @@ -388,14 +473,22 @@ t.test('npm hook rm - parseable output', async t => { await hook.exec(['rm', '1']) - t.strictSame(hookArgs, { - id: '1', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(output.map(line => line.split(/\t/)), [ - ['id', 'name', 'type', 'endpoint'], - ['1', 'semver', 'package', 'https://google.com'], - ], 'printed correct output') + t.strictSame( + hookArgs, + { + id: '1', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame( + output.map(line => line.split(/\t/)), + [ + ['id', 'name', 'type', 'endpoint'], + ['1', 'semver', 'package', 'https://google.com'], + ], + 'printed correct output' + ) }) t.test('npm hook update', async t => { @@ -406,15 +499,17 @@ t.test('npm hook update', async t => { await hook.exec(['update', '1', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - id: '1', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(output, [ - '+ semver -> https://google.com', - ], 'printed the correct output') + t.strictSame( + hookArgs, + { + id: '1', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame(output, ['+ semver -> https://google.com'], 'printed the correct output') }) t.test('npm hook update - unicode', async t => { @@ -427,15 +522,17 @@ t.test('npm hook update - unicode', async t => { await hook.exec(['update', '1', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - id: '1', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(output, [ - '+ semver ➜ https://google.com', - ], 'printed the correct output') + t.strictSame( + hookArgs, + { + id: '1', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame(output, ['+ semver ➜ https://google.com'], 'printed the correct output') }) t.test('npm hook update - json output', async t => { @@ -448,18 +545,26 @@ t.test('npm hook update - json output', async t => { await hook.exec(['update', '1', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - id: '1', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(JSON.parse(output[0]), { - id: '1', - name: 'semver', - type: 'package', - endpoint: 'https://google.com', - }, 'printed the correct output') + t.strictSame( + hookArgs, + { + id: '1', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame( + 
JSON.parse(output[0]), + { + id: '1', + name: 'semver', + type: 'package', + endpoint: 'https://google.com', + }, + 'printed the correct output' + ) }) t.test('npm hook update - parseable output', async t => { @@ -472,16 +577,24 @@ t.test('npm hook update - parseable output', async t => { await hook.exec(['update', '1', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - id: '1', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(output.map(line => line.split(/\t/)), [ - ['id', 'name', 'type', 'endpoint'], - ['1', 'semver', 'package', 'https://google.com'], - ], 'printed the correct output') + t.strictSame( + hookArgs, + { + id: '1', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame( + output.map(line => line.split(/\t/)), + [ + ['id', 'name', 'type', 'endpoint'], + ['1', 'semver', 'package', 'https://google.com'], + ], + 'printed the correct output' + ) }) t.test('npm hook update - silent output', async t => { @@ -494,11 +607,15 @@ t.test('npm hook update - silent output', async t => { await hook.exec(['update', '1', 'https://google.com', 'some-secret']) - t.strictSame(hookArgs, { - id: '1', - uri: 'https://google.com', - secret: 'some-secret', - opts: npm.flatOptions, - }, 'received the correct arguments') + t.strictSame( + hookArgs, + { + id: '1', + uri: 'https://google.com', + secret: 'some-secret', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) t.strictSame(output, [], 'printed no output') }) diff --git a/test/lib/commands/install-ci-test.js b/test/lib/commands/install-ci-test.js index 2baec1e0120ef..0828d2b24ed97 100644 --- a/test/lib/commands/install-ci-test.js +++ b/test/lib/commands/install-ci-test.js @@ -14,8 +14,9 @@ const installCITest = new InstallCITest({ ciArgs = args ciCalled = true } - if (ciError) + if (ciError) { throw ciError + } if (cmd === 'test') { testArgs = args diff --git a/test/lib/commands/install-test.js b/test/lib/commands/install-test.js index 291755bf8288b..223bbe106aec7 100644 --- a/test/lib/commands/install-test.js +++ b/test/lib/commands/install-test.js @@ -14,8 +14,9 @@ const installTest = new InstallTest({ installArgs = args installCalled = true } - if (installError) + if (installError) { throw installError + } if (cmd === 'test') { testArgs = args diff --git a/test/lib/commands/install.js b/test/lib/commands/install.js index 3f9c5f264a3ba..9de2ae2781c12 100644 --- a/test/lib/commands/install.js +++ b/test/lib/commands/install.js @@ -24,8 +24,9 @@ t.test('should install using Arborist', (t) => { } }, '../../../lib/utils/reify-finish.js': (npm, arb) => { - if (arb !== ARB_OBJ) + if (arb !== ARB_OBJ) { throw new Error('got wrong object passed to reify-finish') + } }, }) @@ -250,10 +251,11 @@ t.test('completion to folder', async t => { }, fs: { readdir: (path) => { - if (path === '/') + if (path === '/') { return ['arborist'] - else + } else { return ['package.json'] + } }, }, }) @@ -305,10 +307,11 @@ t.test('completion to folder - match is not a package', async t => { }, fs: { readdir: (path) => { - if (path === '/') + if (path === '/') { return ['arborist'] - else + } else { throw new Error('EONT') + } }, }, }) diff --git a/test/lib/commands/link.js b/test/lib/commands/link.js index 60215a0dcc064..a01de0b247990 100644 --- a/test/lib/commands/link.js +++ b/test/lib/commands/link.js @@ -29,8 +29,9 @@ const printLinks = async (opts) => { const 
linkedItems = [...tree.inventory.values()] .sort((a, b) => a.pkgid.localeCompare(b.pkgid, 'en')) for (const item of linkedItems) { - if (item.isLink) + if (item.isLink) { res += `${item.path} -> ${item.target.path}\n` + } } return res } diff --git a/test/lib/commands/ll.js b/test/lib/commands/ll.js index 9846348584293..c39d4338120d4 100644 --- a/test/lib/commands/ll.js +++ b/test/lib/commands/ll.js @@ -26,7 +26,8 @@ t.test('ll', t => { }) ll.exec(['pkg'], err => { - if (err) + if (err) { throw err + } }) }) diff --git a/test/lib/commands/logout.js b/test/lib/commands/logout.js index 09dc805c99632..39ef86c843e2b 100644 --- a/test/lib/commands/logout.js +++ b/test/lib/commands/logout.js @@ -26,7 +26,7 @@ const mocks = { const Logout = t.mock('../../../lib/commands/logout.js', mocks) const logout = new Logout(npm) -t.test('token logout', async (t) => { +t.test('token logout', async t => { t.teardown(() => { delete flatOptions.token result = null @@ -49,7 +49,7 @@ t.test('token logout', async (t) => { ) } - npm.config.clearCredentialsByURI = (registry) => { + npm.config.clearCredentialsByURI = registry => { t.equal( registry, 'https://registry.npmjs.org/', @@ -57,7 +57,7 @@ t.test('token logout', async (t) => { ) } - npm.config.save = (type) => { + npm.config.save = type => { t.equal(type, 'user', 'should save to user config') } @@ -79,7 +79,7 @@ t.test('token logout', async (t) => { ) }) -t.test('token scoped logout', async (t) => { +t.test('token scoped logout', async t => { t.teardown(() => { config.scope = '' delete flatOptions['//diff-registry.npmjs.com/:_authToken'] @@ -111,7 +111,7 @@ t.test('token scoped logout', async (t) => { ) } - npm.config.clearCredentialsByURI = (registry) => { + npm.config.clearCredentialsByURI = registry => { t.equal( registry, 'https://diff-registry.npmjs.com/', @@ -120,15 +120,11 @@ t.test('token scoped logout', async (t) => { } npm.config.delete = (ref, type) => { - t.equal( - ref, - '@myscope:registry', - 'should delete scoped registyr from config' - ) + t.equal(ref, '@myscope:registry', 'should delete scoped registyr from config') t.equal(type, 'user', 'should delete from user config') } - npm.config.save = (type) => { + npm.config.save = type => { t.equal(type, 'user', 'should save to user config') } @@ -152,7 +148,7 @@ t.test('token scoped logout', async (t) => { ) }) -t.test('user/pass logout', async (t) => { +t.test('user/pass logout', async t => { t.teardown(() => { delete flatOptions['//registry.npmjs.org/:username'] delete flatOptions['//registry.npmjs.org/:_password'] @@ -183,12 +179,15 @@ t.test('user/pass logout', async (t) => { t.test('missing credentials', async t => { await t.rejects( logout.exec([]), - { code: 'ENEEDAUTH', message: /not logged in to https:\/\/registry.npmjs.org\/, so can't log out!/ }, + { + code: 'ENEEDAUTH', + message: /not logged in to https:\/\/registry.npmjs.org\/, so can't log out!/, + }, 'should throw with expected error code' ) }) -t.test('ignore invalid scoped registry config', async (t) => { +t.test('ignore invalid scoped registry config', async t => { t.teardown(() => { delete flatOptions.token result = null @@ -213,7 +212,7 @@ t.test('ignore invalid scoped registry config', async (t) => { ) } - npm.config.clearCredentialsByURI = (registry) => { + npm.config.clearCredentialsByURI = registry => { t.equal( registry, 'https://registry.npmjs.org/', diff --git a/test/lib/commands/ls.js b/test/lib/commands/ls.js index 97224a74c8011..1cbcb593ff0b5 100644 --- a/test/lib/commands/ls.js +++ b/test/lib/commands/ls.js @@ 
-122,29 +122,31 @@ const npm = mockNpm({ const ls = new LS(npm) const redactCwd = res => - res && res.replace(/\\+/g, '/').replace(new RegExp(__dirname.replace(/\\+/g, '/'), 'gi'), '{CWD}') + res && + res.replace(/\\+/g, '/').replace(new RegExp(__dirname.replace(/\\+/g, '/'), 'gi'), '{CWD}') const redactCwdObj = obj => { - if (Array.isArray(obj)) + if (Array.isArray(obj)) { return obj.map(o => redactCwdObj(o)) - else if (typeof obj === 'string') + } else if (typeof obj === 'string') { return redactCwd(obj) - else if (!obj) + } else if (!obj) { return obj - else if (typeof obj === 'object') { + } else if (typeof obj === 'object') { return Object.keys(obj).reduce((o, k) => { o[k] = redactCwdObj(obj[k]) return o }, {}) - } else + } else { return obj + } } const jsonParse = res => redactCwdObj(JSON.parse(res)) -const cleanUpResult = () => result = '' +const cleanUpResult = () => (result = '') -t.test('ls', (t) => { +t.test('ls', t => { t.beforeEach(cleanUpResult) config.json = false config.unicode = false @@ -161,7 +163,10 @@ t.test('ls', (t) => { ...simpleNmFixture, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree representation of dependencies structure') + t.matchSnapshot( + redactCwd(result), + 'should output tree representation of dependencies structure' + ) }) t.test('missing package.json', async t => { @@ -169,7 +174,10 @@ t.test('ls', (t) => { ...simpleNmFixture, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree missing name/version of top-level package') + t.matchSnapshot( + redactCwd(result), + 'should output tree missing name/version of top-level package' + ) }) t.test('extraneous deps', async t => { @@ -201,7 +209,10 @@ t.test('ls', (t) => { ...simpleNmFixture, }) await ls.exec(['chai']) - t.matchSnapshot(redactCwd(result), 'should output tree contaning only occurrences of filtered by package and colored output') + t.matchSnapshot( + redactCwd(result), + 'should output tree contaning only occurrences of filtered by package and colored output' + ) npm.color = false }) @@ -220,7 +231,10 @@ t.test('ls', (t) => { ...simpleNmFixture, }) await ls.exec(['.']) - t.matchSnapshot(redactCwd(result), 'should output tree contaning only occurrences of filtered by package and colored output') + t.matchSnapshot( + redactCwd(result), + 'should output tree contaning only occurrences of filtered by package and colored output' + ) config.all = true config.depth = Infinity process.exitCode = 0 @@ -239,7 +253,10 @@ t.test('ls', (t) => { ...simpleNmFixture, }) await ls.exec(['dog']) - t.matchSnapshot(redactCwd(result), 'should output tree contaning only occurrences of filtered package and its ancestors') + t.matchSnapshot( + redactCwd(result), + 'should output tree contaning only occurrences of filtered package and its ancestors' + ) }) t.test('with multiple filter args', async t => { @@ -264,7 +281,11 @@ t.test('ls', (t) => { }, }) await ls.exec(['dog@*', 'chai@1.0.0']) - t.matchSnapshot(redactCwd(result), 'should output tree contaning only occurrences of multiple filtered packages and their ancestors') + t.matchSnapshot( + redactCwd(result), + /* eslint-disable-next-line max-len */ + 'should output tree contaning only occurrences of multiple filtered packages and their ancestors' + ) }) t.test('with missing filter arg', async t => { @@ -281,11 +302,7 @@ t.test('ls', (t) => { }) await ls.exec(['notadep']) t.matchSnapshot(redactCwd(result), 'should output tree containing no dependencies info') - t.equal( - process.exitCode, - 1, - 'should exit with 
error code 1' - ) + t.equal(process.exitCode, 1, 'should exit with error code 1') process.exitCode = 0 }) @@ -382,7 +399,10 @@ t.test('ls', (t) => { }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps and their deps only') + t.matchSnapshot( + redactCwd(result), + 'should output tree containing top-level deps and their deps only' + ) config.all = true config.depth = Infinity }) @@ -404,13 +424,17 @@ t.test('ls', (t) => { t.equal(err.code, 'ELSPROBLEMS', 'should have error code') t.equal( redactCwd(err.message).replace(/\r\n/g, '\n'), + /* eslint-disable-next-line max-len */ 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous/node_modules/chai\n' + 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous/node_modules/foo\n' + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', 'should log missing/invalid/extraneous errors' ) }) - t.matchSnapshot(redactCwd(result), 'should output tree containing missing, invalid, extraneous labels') + t.matchSnapshot( + redactCwd(result), + 'should output tree containing missing, invalid, extraneous labels' + ) }) t.test('colored output', async t => { @@ -426,11 +450,7 @@ t.test('ls', (t) => { }), ...simpleNmFixture, }) - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' }, - 'should have error code' - ) + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should have error code') t.matchSnapshot(redactCwd(result), 'should output tree containing color info') npm.color = false }) @@ -666,7 +686,10 @@ t.test('ls', (t) => { ...diffDepTypesNmFixture, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps with descriptions') + t.matchSnapshot( + redactCwd(result), + 'should output tree containing top-level deps with descriptions' + ) config.all = true config.depth = Infinity config.long = false @@ -676,11 +699,7 @@ t.test('ls', (t) => { npm.prefix = t.testdir({ 'package.json': '{broken json', }) - await t.rejects( - ls.exec([]), - { code: 'EJSONPARSE' }, - 'should throw EJSONPARSE error' - ) + await t.rejects(ls.exec([]), { code: 'EJSONPARSE' }, 'should throw EJSONPARSE error') t.matchSnapshot(redactCwd(result), 'should print empty result') }) @@ -712,7 +731,10 @@ t.test('ls', (t) => { ...diffDepTypesNmFixture, }) await t.rejects(ls.exec([])) - t.matchSnapshot(redactCwd(result), 'should output tree signaling mismatching peer dep in problems') + t.matchSnapshot( + redactCwd(result), + 'should output tree signaling mismatching peer dep in problems' + ) }) t.test('invalid deduped dep', async t => { @@ -745,7 +767,10 @@ t.test('ls', (t) => { }, }) await t.rejects(ls.exec([])) - t.matchSnapshot(redactCwd(result), 'should output tree signaling mismatching peer dep in problems') + t.matchSnapshot( + redactCwd(result), + 'should output tree signaling mismatching peer dep in problems' + ) npm.color = false }) @@ -776,7 +801,10 @@ t.test('ls', (t) => { { code: 'ELSPROBLEMS', message: /missing: b@\^1.0.0/ }, 'should list missing dep problem' ) - t.matchSnapshot(redactCwd(result), 'should output parseable signaling missing peer dep in problems') + t.matchSnapshot( + redactCwd(result), + 'should output parseable signaling missing peer dep in problems' + ) }) t.test('unmet peer dep', async t => { @@ -825,7 +853,10 @@ t.test('ls', (t) => { { code: 'ELSPROBLEMS', message: /invalid: optional-dep@1.0.0/ }, 'should have invalid dep error msg' ) - t.matchSnapshot(redactCwd(result), 'should output tree with empty entry for missing 
optional deps') + t.matchSnapshot( + redactCwd(result), + 'should output tree with empty entry for missing optional deps' + ) npm.color = false }) @@ -1138,6 +1169,7 @@ t.test('ls', (t) => { name: 'abbrev', version: '1.1.1', from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', }, }, @@ -1148,6 +1180,7 @@ t.test('ls', (t) => { version: '1.1.1', _id: 'abbrev@1.1.1', _from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', _requested: { type: 'git', @@ -1191,6 +1224,7 @@ t.test('ls', (t) => { a: { version: '1.0.1', resolved: 'foo@dog://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + /* eslint-disable-next-line max-len */ integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', }, }, @@ -1244,10 +1278,7 @@ t.test('ls', (t) => { saveSpec: null, fetchSpec: 'latest', }, - _requiredBy: [ - '#USER', - '/', - ], + _requiredBy: ['#USER', '/'], _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', _spec: 'simple-output', }), @@ -1341,20 +1372,18 @@ t.test('ls', (t) => { }) await ls.exec(['c']) - t.matchSnapshot(redactCwd(result), 'should print tree and not duplicate child of missing items') + t.matchSnapshot( + redactCwd(result), + 'should print tree and not duplicate child of missing items' + ) }) - t.test('loading a tree containing workspaces', async (t) => { + t.test('loading a tree containing workspaces', async t => { npm.localPrefix = npm.prefix = t.testdir({ 'package.json': JSON.stringify({ name: 'workspaces-tree', version: '1.0.0', - workspaces: [ - './a', - './b', - './d', - './group/*', - ], + workspaces: ['./a', './b', './d', './group/*'], }), node_modules: { a: t.fixture('symlink', '../a'), @@ -1432,16 +1461,14 @@ t.test('ls', (t) => { config.depth = 0 npm.color = true await ls.exec([]) - t.matchSnapshot(redactCwd(result), - 'should list workspaces properly with default configs') + t.matchSnapshot(redactCwd(result), 'should list workspaces properly with default configs') config.all = false config.depth = 0 npm.color = true npm.flatOptions.workspacesEnabled = false await ls.exec([]) - t.matchSnapshot(redactCwd(result), - 'should not list workspaces with --no-workspaces') + t.matchSnapshot(redactCwd(result), 'should not list workspaces with --no-workspaces') config.all = true config.depth = Infinity npm.color = false @@ -1449,15 +1476,13 @@ t.test('ls', (t) => { // --all await ls.exec([]) - t.matchSnapshot(redactCwd(result), - 'should list --all workspaces properly') + t.matchSnapshot(redactCwd(result), 'should list --all workspaces properly') // --production config.production = true await ls.exec([]) - t.matchSnapshot(redactCwd(result), - 'should list only prod deps of workspaces') + t.matchSnapshot(redactCwd(result), 'should list only prod deps of workspaces') config.production = false @@ -1468,20 +1493,20 @@ t.test('ls', (t) => { // filter out a single workspace and its deps using workspaces filters await ls.execWorkspaces([], ['a']) - t.matchSnapshot(redactCwd(result), - 'should filter using workspace config') + t.matchSnapshot(redactCwd(result), 'should filter using workspace config') // filter out a workspace by parent path await ls.execWorkspaces([], ['./group']) - t.matchSnapshot(redactCwd(result), - 'should filter by parent folder workspace config') + 
t.matchSnapshot(redactCwd(result), 'should filter by parent folder workspace config') // filter by a dep within a workspaces sub tree await ls.execWorkspaces(['bar'], ['d']) - t.matchSnapshot(redactCwd(result), - 'should print all tree and filter by dep within only the ws subtree') + t.matchSnapshot( + redactCwd(result), + 'should print all tree and filter by dep within only the ws subtree' + ) }) t.test('filter pkg arg using depth option', async t => { @@ -1553,7 +1578,7 @@ t.test('ls', (t) => { t.end() }) -t.test('ls --parseable', (t) => { +t.test('ls --parseable', t => { t.beforeEach(cleanUpResult) config.json = false config.unicode = false @@ -1571,7 +1596,10 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output parseable representation of dependencies structure') + t.matchSnapshot( + redactCwd(result), + 'should output parseable representation of dependencies structure' + ) }) t.test('missing package.json', async t => { @@ -1579,7 +1607,10 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output parseable missing name/version of top-level package') + t.matchSnapshot( + redactCwd(result), + 'should output parseable missing name/version of top-level package' + ) }) t.test('extraneous deps', async t => { @@ -1610,7 +1641,10 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) await ls.exec(['chai']) - t.matchSnapshot(redactCwd(result), 'should output parseable contaning only occurrences of filtered by package') + t.matchSnapshot( + redactCwd(result), + 'should output parseable contaning only occurrences of filtered by package' + ) }) t.test('with filter arg nested dep', async t => { @@ -1626,7 +1660,10 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) await ls.exec(['dog']) - t.matchSnapshot(redactCwd(result), 'should output parseable contaning only occurrences of filtered package') + t.matchSnapshot( + redactCwd(result), + 'should output parseable contaning only occurrences of filtered package' + ) }) t.test('with multiple filter args', async t => { @@ -1651,7 +1688,11 @@ t.test('ls --parseable', (t) => { }, }) await ls.exec(['dog@*', 'chai@1.0.0']) - t.matchSnapshot(redactCwd(result), 'should output parseable contaning only occurrences of multiple filtered packages and their ancestors') + t.matchSnapshot( + redactCwd(result), + /* eslint-disable-next-line max-len */ + 'should output parseable contaning only occurrences of multiple filtered packages and their ancestors' + ) }) t.test('with missing filter arg', async t => { @@ -1667,7 +1708,10 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) await ls.exec(['notadep']) - t.matchSnapshot(redactCwd(result), 'should output parseable output containing no dependencies info') + t.matchSnapshot( + redactCwd(result), + 'should output parseable output containing no dependencies info' + ) }) t.test('default --depth value should be 0', async t => { @@ -1685,7 +1729,10 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output parseable output containing only top-level dependencies') + t.matchSnapshot( + redactCwd(result), + 'should output parseable output containing only top-level dependencies' + ) config.all = true config.depth = Infinity }) @@ -1725,7 +1772,10 @@ t.test('ls --parseable', (t) => { ...simpleNmFixture, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output parseable 
containing top-level deps and their deps only') + t.matchSnapshot( + redactCwd(result), + 'should output parseable containing top-level deps and their deps only' + ) config.all = true config.depth = Infinity }) @@ -1742,12 +1792,11 @@ t.test('ls --parseable', (t) => { }), ...simpleNmFixture, }) - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' }, - 'should list dep problems' + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') + t.matchSnapshot( + redactCwd(result), + 'should output parseable containing top-level deps and their deps only' ) - t.matchSnapshot(redactCwd(result), 'should output parseable containing top-level deps and their deps only') }) t.test('--dev', async t => { @@ -1950,12 +1999,11 @@ t.test('ls --parseable', (t) => { }), ...simpleNmFixture, }) - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' }, - 'should list dep problems' + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') + t.matchSnapshot( + redactCwd(result), + 'should output parseable result containing EXTRANEOUS/INVALID labels' ) - t.matchSnapshot(redactCwd(result), 'should output parseable result containing EXTRANEOUS/INVALID labels') config.long = false }) @@ -2021,7 +2069,10 @@ t.test('ls --parseable', (t) => { ...diffDepTypesNmFixture, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps with descriptions') + t.matchSnapshot( + redactCwd(result), + 'should output tree containing top-level deps with descriptions' + ) config.all = true config.depth = Infinity config.long = false @@ -2031,10 +2082,7 @@ t.test('ls --parseable', (t) => { npm.prefix = t.testdir({ 'package.json': '{broken json', }) - await t.rejects( - ls.exec([]), - { code: 'EJSONPARSE' }, - 'should throw EJSONPARSE error') + await t.rejects(ls.exec([]), { code: 'EJSONPARSE' }, 'should throw EJSONPARSE error') t.matchSnapshot(redactCwd(result), 'should print empty result') }) @@ -2066,7 +2114,10 @@ t.test('ls --parseable', (t) => { ...diffDepTypesNmFixture, }) await t.rejects(ls.exec([])) - t.matchSnapshot(redactCwd(result), 'should output parseable signaling missing peer dep in problems') + t.matchSnapshot( + redactCwd(result), + 'should output parseable signaling missing peer dep in problems' + ) }) t.test('unmet optional dep', async t => { @@ -2096,7 +2147,10 @@ t.test('ls --parseable', (t) => { { code: 'ELSPROBLEMS', message: /invalid: optional-dep@1.0.0/ }, 'should have invalid dep error msg' ) - t.matchSnapshot(redactCwd(result), 'should output parseable with empty entry for missing optional deps') + t.matchSnapshot( + redactCwd(result), + 'should output parseable with empty entry for missing optional deps' + ) }) t.test('cycle deps', async t => { @@ -2185,6 +2239,7 @@ t.test('ls --parseable', (t) => { 'node_modules/abbrev': { name: 'abbrev', version: '1.1.1', + /* eslint-disable-next-line max-len */ resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', }, }, @@ -2195,6 +2250,7 @@ t.test('ls --parseable', (t) => { version: '1.1.1', _id: 'abbrev@1.1.1', _from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', _requested: { type: 'git', @@ -2250,10 +2306,7 @@ t.test('ls --parseable', (t) => { saveSpec: null, fetchSpec: 'latest', }, - _requiredBy: [ - '#USER', - '/', - ], + _requiredBy: ['#USER', '/'], _shasum: 
'3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', _spec: 'simple-output', }), @@ -2342,44 +2395,40 @@ t.test('ignore missing optional deps', async t => { }), node_modules: { 'prod-ok': { - 'package.json': JSON.stringify({name: 'prod-ok', version: '1.2.3' }), + 'package.json': JSON.stringify({ name: 'prod-ok', version: '1.2.3' }), }, 'prod-wrong': { - 'package.json': JSON.stringify({name: 'prod-wrong', version: '3.2.1' }), + 'package.json': JSON.stringify({ name: 'prod-wrong', version: '3.2.1' }), }, 'optional-ok': { - 'package.json': JSON.stringify({name: 'optional-ok', version: '1.2.3' }), + 'package.json': JSON.stringify({ name: 'optional-ok', version: '1.2.3' }), }, 'optional-wrong': { - 'package.json': JSON.stringify({name: 'optional-wrong', version: '3.2.1' }), + 'package.json': JSON.stringify({ name: 'optional-wrong', version: '3.2.1' }), }, 'peer-optional-ok': { - 'package.json': JSON.stringify({name: 'peer-optional-ok', version: '1.2.3' }), + 'package.json': JSON.stringify({ name: 'peer-optional-ok', version: '1.2.3' }), }, 'peer-optional-wrong': { - 'package.json': JSON.stringify({name: 'peer-optional-wrong', version: '3.2.1' }), + 'package.json': JSON.stringify({ name: 'peer-optional-wrong', version: '3.2.1' }), }, 'peer-ok': { - 'package.json': JSON.stringify({name: 'peer-ok', version: '1.2.3' }), + 'package.json': JSON.stringify({ name: 'peer-ok', version: '1.2.3' }), }, 'peer-wrong': { - 'package.json': JSON.stringify({name: 'peer-wrong', version: '3.2.1' }), + 'package.json': JSON.stringify({ name: 'peer-wrong', version: '3.2.1' }), }, }, }) config.all = true const prefix = npm.prefix.toLowerCase().replace(/\\/g, '/') - const cleanupPaths = str => - str.toLowerCase().replace(/\\/g, '/').split(prefix).join('{project}') + const cleanupPaths = str => str.toLowerCase().replace(/\\/g, '/').split(prefix).join('{project}') t.test('--json', async t => { config.json = true config.parseable = false - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' } - ) + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }) result = JSON.parse(result) const problems = result.problems.map(cleanupPaths) t.matchSnapshot(problems, 'ls --json problems') @@ -2388,25 +2437,19 @@ t.test('ignore missing optional deps', async t => { t.test('--parseable', async t => { config.json = false config.parseable = true - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' } - ) + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }) t.matchSnapshot(cleanupPaths(result), 'ls --parseable result') }) t.test('human output', async t => { config.json = false config.parseable = false - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' } - ) + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }) t.matchSnapshot(cleanupPaths(result), 'ls result') }) }) -t.test('ls --json', (t) => { +t.test('ls --json', t => { t.beforeEach(cleanUpResult) config.json = true config.parseable = false @@ -2455,8 +2498,11 @@ t.test('ls --json', (t) => { jsonParse(result), { problems: [ + /* eslint-disable-next-line max-len */ 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/chai', + /* eslint-disable-next-line max-len */ 'extraneous: dog@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/dog', + /* eslint-disable-next-line max-len */ 'extraneous: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/foo', ], dependencies: { @@ -2464,6 +2510,7 @@ t.test('ls --json', (t) => { version: '1.0.0', extraneous: true, problems: [ + /* eslint-disable-next-line max-len 
*/ 'extraneous: dog@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/dog', ], }, @@ -2471,6 +2518,7 @@ t.test('ls --json', (t) => { version: '1.0.0', extraneous: true, problems: [ + /* eslint-disable-next-line max-len */ 'extraneous: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/foo', ], dependencies: { @@ -2483,6 +2531,7 @@ t.test('ls --json', (t) => { version: '1.0.0', extraneous: true, problems: [ + /* eslint-disable-next-line max-len */ 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/chai', ], }, @@ -2525,6 +2574,7 @@ t.test('ls --json', (t) => { version: '1.0.0', extraneous: true, problems: [ + /* eslint-disable-next-line max-len */ 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-extraneous-deps/node_modules/chai', ], }, @@ -2557,20 +2607,14 @@ t.test('ls --json', (t) => { 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', 'should log missing dep as error' ) - t.equal( - err.code, - 'ELSPROBLEMS', - 'should have ELSPROBLEMS error code' - ) + t.equal(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') }) t.match( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', - problems: [ - 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', - ], + problems: ['missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0'], }, 'should output json containing problems info' ) @@ -2603,11 +2647,7 @@ t.test('ls --json', (t) => { }, 'should output json contaning only occurrences of filtered by package' ) - t.not( - process.exitCode, - 1, - 'should not exit with error code 1' - ) + t.not(process.exitCode, 1, 'should not exit with error code 1') }) t.test('with filter arg nested dep', async t => { @@ -2685,6 +2725,7 @@ t.test('ls --json', (t) => { }, }, }, + /* eslint-disable-next-line max-len */ 'should output json contaning only occurrences of multiple filtered packages and their ancestors' ) }) @@ -2710,11 +2751,7 @@ t.test('ls --json', (t) => { }, 'should output json containing no dependencies info' ) - t.equal( - process.exitCode, - 1, - 'should exit with error code 1' - ) + t.equal(process.exitCode, 1, 'should exit with error code 1') process.exitCode = 0 }) @@ -2840,18 +2877,16 @@ t.test('ls --json', (t) => { }), ...simpleNmFixture, }) - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' }, - 'should list dep problems' - ) + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', problems: [ + /* eslint-disable-next-line max-len */ 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/chai', + /* eslint-disable-next-line max-len */ 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/foo', 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', ], @@ -2860,6 +2895,7 @@ t.test('ls --json', (t) => { version: '1.0.0', invalid: '"^2.0.0" from the root project', problems: [ + /* eslint-disable-next-line max-len */ 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/foo', ], dependencies: { @@ -2872,15 +2908,14 @@ t.test('ls --json', (t) => { version: '1.0.0', extraneous: true, problems: [ + /* eslint-disable-next-line max-len */ 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/chai', ], }, ipsum: { required: '^1.0.0', missing: true, - problems: [ - 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', - ], + problems: 
['missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0'], }, }, }, @@ -3149,7 +3184,9 @@ t.test('ls --json', (t) => { 'node_modules/@isaacs/dedupe-tests-a': { name: '@isaacs/dedupe-tests-a', version: '1.0.1', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', dependencies: { '@isaacs/dedupe-tests-b': '1', @@ -3158,20 +3195,26 @@ t.test('ls --json', (t) => { 'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': { name: '@isaacs/dedupe-tests-b', version: '1.0.0', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', }, 'node_modules/@isaacs/dedupe-tests-b': { name: '@isaacs/dedupe-tests-b', version: '2.0.0', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', }, }, dependencies: { '@isaacs/dedupe-tests-a': { version: '1.0.1', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', requires: { '@isaacs/dedupe-tests-b': '1', @@ -3179,14 +3222,18 @@ t.test('ls --json', (t) => { dependencies: { '@isaacs/dedupe-tests-b': { version: '1.0.0', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', }, }, }, '@isaacs/dedupe-tests-b': { version: '2.0.0', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', }, }, @@ -3209,12 +3256,15 @@ t.test('ls --json', (t) => { dependencies: { '@isaacs/dedupe-tests-a': { version: '1.0.1', - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + resolved: + 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', dependencies: { '@isaacs/dedupe-tests-b': { - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + resolved: + 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', extraneous: true, problems: [ + /* eslint-disable-next-line max-len */ 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/tap-testdir-ls-ls---json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', ], }, @@ -3222,10 +3272,12 @@ t.test('ls --json', (t) => { }, '@isaacs/dedupe-tests-b': { version: '2.0.0', - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + resolved: + 
'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', }, }, problems: [ + /* eslint-disable-next-line max-len */ 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/tap-testdir-ls-ls---json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', ], }, @@ -3342,6 +3394,7 @@ t.test('ls --json', (t) => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, + /* eslint-disable-next-line max-len */ path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/prod-dep/node_modules/dog', extraneous: false, }, @@ -3482,6 +3535,7 @@ t.test('ls --json', (t) => { { invalid: true, problems: [ + /* eslint-disable-next-line max-len */ 'error in {CWD}/tap-testdir-ls-ls---json-json-read-problems: Failed to parse root package.json', ], }, @@ -3489,14 +3543,10 @@ t.test('ls --json', (t) => { ) }) - t.test('empty location', async (t) => { + t.test('empty location', async t => { npm.prefix = t.testdir({}) await ls.exec([]) - t.same( - jsonParse(result), - {}, - 'should print empty json result' - ) + t.same(jsonParse(result), {}, 'should print empty json result') }) t.test('unmet peer dep', async t => { @@ -3520,17 +3570,14 @@ t.test('ls --json', (t) => { }), ...diffDepTypesNmFixture, }) - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' }, - 'Should have ELSPROBLEMS error code' - ) + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'Should have ELSPROBLEMS error code') t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', problems: [ + /* eslint-disable-next-line max-len */ 'invalid: peer-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-peer-dep/node_modules/peer-dep', ], dependencies: { @@ -3538,6 +3585,7 @@ t.test('ls --json', (t) => { version: '1.0.0', invalid: '"^2.0.0" from the root project', problems: [ + /* eslint-disable-next-line max-len */ 'invalid: peer-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-peer-dep/node_modules/peer-dep', ], }, @@ -3592,6 +3640,7 @@ t.test('ls --json', (t) => { name: 'test-npm-ls', version: '1.0.0', problems: [ + /* eslint-disable-next-line max-len */ 'invalid: optional-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-optional-dep/node_modules/optional-dep', // mismatching optional deps get flagged in problems ], dependencies: { @@ -3599,6 +3648,7 @@ t.test('ls --json', (t) => { version: '1.0.0', invalid: '"^2.0.0" from the root project', problems: [ + /* eslint-disable-next-line max-len */ 'invalid: optional-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-optional-dep/node_modules/optional-dep', ], }, @@ -3743,6 +3793,7 @@ t.test('ls --json', (t) => { version: '1.1.1', id: 'abbrev@1.1.1', from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', }, }, @@ -3753,6 +3804,7 @@ t.test('ls --json', (t) => { version: '1.1.1', _id: 'abbrev@1.1.1', _from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', _requested: { type: 'git', @@ -3776,6 +3828,7 @@ t.test('ls --json', (t) => { dependencies: { abbrev: { version: '1.1.1', + /* eslint-disable-next-line max-len */ resolved: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', }, }, @@ -3812,10 +3865,7 @@ t.test('ls --json', (t) => { saveSpec: null, fetchSpec: 'latest', }, - _requiredBy: [ - '#USER', - '/', - ], + _requiredBy: ['#USER', '/'], _shasum: 
'3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', _spec: 'simple-output', }, @@ -3838,10 +3888,7 @@ t.test('ls --json', (t) => { saveSpec: null, fetchSpec: 'latest', }, - _requiredBy: [ - '#USER', - '/', - ], + _requiredBy: ['#USER', '/'], _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', _spec: 'simple-output', }), @@ -3985,19 +4032,14 @@ t.test('show multiple invalid reasons', async t => { }, }) - const cleanupPaths = str => - redactCwd(str).toLowerCase().replace(/\\/g, '/') - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' }, - 'should list dep problems' - ) + const cleanupPaths = str => redactCwd(str).toLowerCase().replace(/\\/g, '/') + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') t.matchSnapshot(cleanupPaths(result), 'ls result') }) -t.test('ls --package-lock-only', (t) => { +t.test('ls --package-lock-only', t => { config['package-lock-only'] = true - t.test('ls --package-lock-only --json', (t) => { + t.test('ls --package-lock-only --json', t => { t.beforeEach(cleanUpResult) config.json = true config.parseable = false @@ -4188,11 +4230,7 @@ t.test('ls --package-lock-only', (t) => { }, 'should output json contaning only occurrences of filtered by package' ) - t.equal( - process.exitCode, - 0, - 'should exit with error code 0' - ) + t.equal(process.exitCode, 0, 'should exit with error code 0') }) t.test('with filter arg nested dep', async t => { @@ -4297,6 +4335,7 @@ t.test('ls --package-lock-only', (t) => { }, }, }, + /* eslint-disable-next-line max-len */ 'should output json contaning only occurrences of multiple filtered packages and their ancestors' ) }) @@ -4337,11 +4376,7 @@ t.test('ls --package-lock-only', (t) => { }, 'should output json containing no dependencies info' ) - t.equal( - process.exitCode, - 1, - 'should exit with error code 1' - ) + t.equal(process.exitCode, 1, 'should exit with error code 1') process.exitCode = 0 }) @@ -4527,17 +4562,14 @@ t.test('ls --package-lock-only', (t) => { }, }), }) - await t.rejects( - ls.exec([]), - { code: 'ELSPROBLEMS' }, - 'should list dep problems' - ) + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') t.same( jsonParse(result), { name: 'test-npm-ls', version: '1.0.0', problems: [ + /* eslint-disable-next-line max-len */ 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---package-lock-only-ls---package-lock-only---json-missing-invalid-extraneous/node_modules/foo', 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', ], @@ -4546,6 +4578,7 @@ t.test('ls --package-lock-only', (t) => { version: '1.0.0', invalid: '"^2.0.0" from the root project', problems: [ + /* eslint-disable-next-line max-len */ 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---package-lock-only-ls---package-lock-only---json-missing-invalid-extraneous/node_modules/foo', ], dependencies: { @@ -4557,9 +4590,7 @@ t.test('ls --package-lock-only', (t) => { ipsum: { required: '^1.0.0', missing: true, - problems: [ - 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', - ], + problems: ['missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0'], }, }, }, @@ -4586,7 +4617,9 @@ t.test('ls --package-lock-only', (t) => { 'node_modules/@isaacs/dedupe-tests-a': { name: '@isaacs/dedupe-tests-a', version: '1.0.1', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', dependencies: { 
'@isaacs/dedupe-tests-b': '1', @@ -4595,20 +4628,26 @@ t.test('ls --package-lock-only', (t) => { 'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': { name: '@isaacs/dedupe-tests-b', version: '1.0.0', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', }, 'node_modules/@isaacs/dedupe-tests-b': { name: '@isaacs/dedupe-tests-b', version: '2.0.0', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', }, }, dependencies: { '@isaacs/dedupe-tests-a': { version: '1.0.1', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', requires: { '@isaacs/dedupe-tests-b': '1', @@ -4616,14 +4655,18 @@ t.test('ls --package-lock-only', (t) => { dependencies: { '@isaacs/dedupe-tests-b': { version: '1.0.0', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', }, }, }, '@isaacs/dedupe-tests-b': { version: '2.0.0', + /* eslint-disable-next-line max-len */ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + /* eslint-disable-next-line max-len */ integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', }, }, @@ -4646,17 +4689,20 @@ t.test('ls --package-lock-only', (t) => { dependencies: { '@isaacs/dedupe-tests-a': { version: '1.0.1', - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + resolved: + 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', dependencies: { '@isaacs/dedupe-tests-b': { version: '1.0.0', - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + resolved: + 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', }, }, }, '@isaacs/dedupe-tests-b': { version: '2.0.0', - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + resolved: + 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', }, }, }, @@ -4716,12 +4762,12 @@ t.test('ls --package-lock-only', (t) => { requires: true, dependencies: { abbrev: { + /* eslint-disable-next-line max-len */ version: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', from: 'abbrev@git+https://github.com/isaacs/abbrev-js.git', }, }, - } - ), + }), }) await ls.exec([]) t.same( @@ -4731,6 +4777,7 @@ t.test('ls --package-lock-only', (t) => { version: '1.0.0', dependencies: { abbrev: { + /* eslint-disable-next-line max-len */ resolved: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', }, }, diff --git a/test/lib/commands/org.js b/test/lib/commands/org.js index 16a432c27b516..903b9de4f6084 
100644 --- a/test/lib/commands/org.js +++ b/test/lib/commands/org.js @@ -9,7 +9,7 @@ const npm = { silent: false, loglevel: 'info', }, - output: (msg) => { + output: msg => { output.push(msg) }, } @@ -47,28 +47,32 @@ const Org = t.mock('../../../lib/commands/org.js', { const org = new Org(npm) t.test('completion', async t => { - const completion = (argv) => - org.completion({ conf: { argv: { remain: argv } } }) + const completion = argv => org.completion({ conf: { argv: { remain: argv } } }) const assertions = [ - [['npm', 'org'], ['set', 'rm', 'ls']], + [ + ['npm', 'org'], + ['set', 'rm', 'ls'], + ], [['npm', 'org', 'ls'], []], [['npm', 'org', 'add'], []], [['npm', 'org', 'rm'], []], [['npm', 'org', 'set'], []], ] - for (const [argv, expected] of assertions) + for (const [argv, expected] of assertions) { t.resolveMatch(completion(argv), expected, `completion for: ${argv.join(', ')}`) + } - t.rejects(completion(['npm', 'org', 'flurb']), /flurb not recognized/, 'errors for unknown subcommand') + t.rejects( + completion(['npm', 'org', 'flurb']), + /flurb not recognized/, + 'errors for unknown subcommand' + ) }) t.test('npm org - invalid subcommand', async t => { - await t.rejects( - org.exec(['foo']), - org.usage - ) + await t.rejects(org.exec(['foo']), org.usage) }) t.test('npm org add', async t => { @@ -79,13 +83,21 @@ t.test('npm org add', async t => { await org.exec(['add', 'orgname', 'username']) - t.strictSame(orgSetArgs, { - org: 'orgname', - user: 'username', - role: 'developer', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.equal(output[0], 'Added username as developer to orgname. You now have 1 member in this org.', 'printed the correct output') + t.strictSame( + orgSetArgs, + { + org: 'orgname', + user: 'username', + role: 'developer', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.equal( + output[0], + 'Added username as developer to orgname. You now have 1 member in this org.', + 'printed the correct output' + ) }) t.test('npm org add - no org', async t => { @@ -136,13 +148,21 @@ t.test('npm org add - more users', async t => { }) await org.exec(['add', 'orgname', 'username']) - t.strictSame(orgSetArgs, { - org: 'orgname', - user: 'username', - role: 'developer', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.equal(output[0], 'Added username as developer to orgname. You now have 5 members in this org.', 'printed the correct output') + t.strictSame( + orgSetArgs, + { + org: 'orgname', + user: 'username', + role: 'developer', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.equal( + output[0], + 'Added username as developer to orgname. 
You now have 5 members in this org.', + 'printed the correct output' + ) }) t.test('npm org add - json output', async t => { @@ -155,20 +175,28 @@ t.test('npm org add - json output', async t => { await org.exec(['add', 'orgname', 'username']) - t.strictSame(orgSetArgs, { - org: 'orgname', - user: 'username', - role: 'developer', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(JSON.parse(output[0]), { - org: { - name: 'orgname', - size: 1, + t.strictSame( + orgSetArgs, + { + org: 'orgname', + user: 'username', + role: 'developer', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame( + JSON.parse(output[0]), + { + org: { + name: 'orgname', + size: 1, + }, + user: 'username', + role: 'developer', }, - user: 'username', - role: 'developer', - }, 'printed the correct output') + 'printed the correct output' + ) }) t.test('npm org add - parseable output', async t => { @@ -181,16 +209,24 @@ t.test('npm org add - parseable output', async t => { await org.exec(['add', 'orgname', 'username']) - t.strictSame(orgSetArgs, { - org: 'orgname', - user: 'username', - role: 'developer', - opts: npm.flatOptions, - }, 'received the correct arguments') - t.strictSame(output.map(line => line.split(/\t/)), [ - ['org', 'orgsize', 'user', 'role'], - ['orgname', '1', 'username', 'developer'], - ], 'printed the correct output') + t.strictSame( + orgSetArgs, + { + org: 'orgname', + user: 'username', + role: 'developer', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) + t.strictSame( + output.map(line => line.split(/\t/)), + [ + ['org', 'orgsize', 'user', 'role'], + ['orgname', '1', 'username', 'developer'], + ], + 'printed the correct output' + ) }) t.test('npm org add - silent output', async t => { @@ -203,12 +239,16 @@ t.test('npm org add - silent output', async t => { await org.exec(['add', 'orgname', 'username']) - t.strictSame(orgSetArgs, { - org: 'orgname', - user: 'username', - role: 'developer', - opts: npm.flatOptions, - }, 'received the correct arguments') + t.strictSame( + orgSetArgs, + { + org: 'orgname', + user: 'username', + role: 'developer', + opts: npm.flatOptions, + }, + 'received the correct arguments' + ) t.strictSame(output, [], 'prints no output') }) @@ -221,16 +261,28 @@ t.test('npm org rm', async t => { await org.exec(['rm', 'orgname', 'username']) - t.strictSame(orgRmArgs, { - org: 'orgname', - user: 'username', - opts: npm.flatOptions, - }, 'libnpmorg.rm received the correct args') - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'libnpmorg.ls received the correct args') - t.equal(output[0], 'Successfully removed username from orgname. You now have 0 members in this org.', 'printed the correct output') + t.strictSame( + orgRmArgs, + { + org: 'orgname', + user: 'username', + opts: npm.flatOptions, + }, + 'libnpmorg.rm received the correct args' + ) + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'libnpmorg.ls received the correct args' + ) + t.equal( + output[0], + 'Successfully removed username from orgname. 
You now have 0 members in this org.', + 'printed the correct output' + ) }) t.test('npm org rm - no org', async t => { @@ -254,11 +306,7 @@ t.test('npm org rm - no user', async t => { output.length = 0 }) - await t.rejects( - org.exec(['rm', 'orgname']), - /`username` is required/, - 'threw the correct error' - ) + await t.rejects(org.exec(['rm', 'orgname']), /`username` is required/, 'threw the correct error') }) t.test('npm org rm - one user left', async t => { @@ -275,16 +323,28 @@ t.test('npm org rm - one user left', async t => { await org.exec(['rm', 'orgname', 'username']) - t.strictSame(orgRmArgs, { - org: 'orgname', - user: 'username', - opts: npm.flatOptions, - }, 'libnpmorg.rm received the correct args') - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'libnpmorg.ls received the correct args') - t.equal(output[0], 'Successfully removed username from orgname. You now have 1 member in this org.', 'printed the correct output') + t.strictSame( + orgRmArgs, + { + org: 'orgname', + user: 'username', + opts: npm.flatOptions, + }, + 'libnpmorg.rm received the correct args' + ) + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'libnpmorg.ls received the correct args' + ) + t.equal( + output[0], + 'Successfully removed username from orgname. You now have 1 member in this org.', + 'printed the correct output' + ) }) t.test('npm org rm - json output', async t => { @@ -298,21 +358,33 @@ t.test('npm org rm - json output', async t => { await org.exec(['rm', 'orgname', 'username']) - t.strictSame(orgRmArgs, { - org: 'orgname', - user: 'username', - opts: npm.flatOptions, - }, 'libnpmorg.rm received the correct args') - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'libnpmorg.ls received the correct args') - t.strictSame(JSON.parse(output[0]), { - user: 'username', - org: 'orgname', - userCount: 0, - deleted: true, - }, 'printed the correct output') + t.strictSame( + orgRmArgs, + { + org: 'orgname', + user: 'username', + opts: npm.flatOptions, + }, + 'libnpmorg.rm received the correct args' + ) + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'libnpmorg.ls received the correct args' + ) + t.strictSame( + JSON.parse(output[0]), + { + user: 'username', + org: 'orgname', + userCount: 0, + deleted: true, + }, + 'printed the correct output' + ) }) t.test('npm org rm - parseable output', async t => { @@ -326,19 +398,31 @@ t.test('npm org rm - parseable output', async t => { await org.exec(['rm', 'orgname', 'username']) - t.strictSame(orgRmArgs, { - org: 'orgname', - user: 'username', - opts: npm.flatOptions, - }, 'libnpmorg.rm received the correct args') - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'libnpmorg.ls received the correct args') - t.strictSame(output.map(line => line.split(/\t/)), [ - ['user', 'org', 'userCount', 'deleted'], - ['username', 'orgname', '0', 'true'], - ], 'printed the correct output') + t.strictSame( + orgRmArgs, + { + org: 'orgname', + user: 'username', + opts: npm.flatOptions, + }, + 'libnpmorg.rm received the correct args' + ) + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'libnpmorg.ls received the correct args' + ) + t.strictSame( + output.map(line => line.split(/\t/)), + [ + ['user', 'org', 'userCount', 'deleted'], + ['username', 'orgname', '0', 'true'], + ], + 'printed the correct output' + ) }) t.test('npm org rm - silent output', async t => { @@ -352,15 +436,23 @@ t.test('npm org rm 
- silent output', async t => { await org.exec(['rm', 'orgname', 'username']) - t.strictSame(orgRmArgs, { - org: 'orgname', - user: 'username', - opts: npm.flatOptions, - }, 'libnpmorg.rm received the correct args') - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'libnpmorg.ls received the correct args') + t.strictSame( + orgRmArgs, + { + org: 'orgname', + user: 'username', + opts: npm.flatOptions, + }, + 'libnpmorg.rm received the correct args' + ) + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'libnpmorg.ls received the correct args' + ) t.strictSame(output, [], 'printed no output') }) @@ -378,10 +470,14 @@ t.test('npm org ls', async t => { await org.exec(['ls', 'orgname']) - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'receieved the correct args') + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'receieved the correct args' + ) const out = ansiTrim(output[0]) t.match(out, /one.*developer/, 'contains the developer member') t.match(out, /two.*admin/, 'contains the admin member') @@ -401,10 +497,14 @@ t.test('npm org ls - user filter', async t => { await org.exec(['ls', 'orgname', 'username']) - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'receieved the correct args') + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'receieved the correct args' + ) const out = ansiTrim(output[0]) t.match(out, /username.*admin/, 'contains the filtered member') t.notMatch(out, /missing.*admin/, 'does not contain other members') @@ -422,10 +522,14 @@ t.test('npm org ls - user filter, missing user', async t => { await org.exec(['ls', 'orgname', 'username']) - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'receieved the correct args') + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'receieved the correct args' + ) const out = ansiTrim(output[0]) t.notMatch(out, /username/, 'does not contain the requested member') t.notMatch(out, /missing.*admin/, 'does not contain other members') @@ -437,11 +541,7 @@ t.test('npm org ls - no org', async t => { output.length = 0 }) - await t.rejects( - org.exec(['ls']), - /`orgname` is required/, - 'throws the correct error' - ) + await t.rejects(org.exec(['ls']), /`orgname` is required/, 'throws the correct error') }) t.test('npm org ls - json output', async t => { @@ -460,10 +560,14 @@ t.test('npm org ls - json output', async t => { await org.exec(['ls', 'orgname']) - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'receieved the correct args') + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'receieved the correct args' + ) t.strictSame(JSON.parse(output[0]), orgList, 'prints the correct output') }) @@ -483,16 +587,24 @@ t.test('npm org ls - parseable output', async t => { await org.exec(['ls', 'orgname']) - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'receieved the correct args') - t.strictSame(output.map(line => line.split(/\t/)), [ - ['user', 'role'], - ['one', 'developer'], - ['two', 'admin'], - ['three', 'owner'], - ], 'printed the correct output') + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'receieved the correct args' + ) + t.strictSame( + output.map(line => line.split(/\t/)), + [ + ['user', 'role'], + ['one', 'developer'], + ['two', 'admin'], + ['three', 'owner'], + ], + 'printed the correct 
output' + ) }) t.test('npm org ls - silent output', async t => { @@ -511,9 +623,13 @@ t.test('npm org ls - silent output', async t => { await org.exec(['ls', 'orgname']) - t.strictSame(orgLsArgs, { - org: 'orgname', - opts: npm.flatOptions, - }, 'receieved the correct args') + t.strictSame( + orgLsArgs, + { + org: 'orgname', + opts: npm.flatOptions, + }, + 'receieved the correct args' + ) t.strictSame(output, [], 'printed no output') }) diff --git a/test/lib/commands/outdated.js b/test/lib/commands/outdated.js index 1841ea9b17c85..245e93039c3f3 100644 --- a/test/lib/commands/outdated.js +++ b/test/lib/commands/outdated.js @@ -55,8 +55,9 @@ const packument = spec => { }, } - if (spec.name === 'eta') + if (spec.name === 'eta') { throw new Error('There is an error with this package.') + } if (!mocks[spec.name]) { const err = new Error() diff --git a/test/lib/commands/owner.js b/test/lib/commands/owner.js index c9d936d47bece..8645b349f82fe 100644 --- a/test/lib/commands/owner.js +++ b/test/lib/commands/owner.js @@ -211,8 +211,9 @@ t.test('owner add ', async t => { 'should contain expected new owners, adding requested user' ) return {} - } else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => { t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') @@ -250,10 +251,11 @@ t.test('owner add cwd package', async t => { email: 'foo@github.com', name: 'foo', } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') + } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { return {} - else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', @@ -290,8 +292,9 @@ t.test('owner add already an owner', async t => { email: 'ruyadorno@hotmail.com', name: 'ruyadorno', } - } else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => { return { @@ -314,12 +317,13 @@ t.test('owner add fails to retrieve user', async t => { readPackageNameResponse = npmFetch.json = async (uri, opts) => { // retrieve borked user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') + if (uri === '/-/user/org.couchdb.user:foo') { return { ok: false } - else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') + } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { return {} - else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', @@ -356,8 +360,9 @@ t.test('owner add fails to PUT updates', async t => { message: "I'm a teapot", }, } - } else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', @@ -391,8 +396,9 @@ t.test('owner add fails to retrieve user info', async t => { new Error("I'm a teapot"), { status: 418 } ) - } else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', @@ -422,10 +428,11 @@ t.test('owner add no previous maintainers property from server', as email: 'foo@github.com', name: 'foo', } - } else if (uri === '/@npmcli%2fno-owners-pkg/-rev/1-foobaaa1') + } else if (uri === '/@npmcli%2fno-owners-pkg/-rev/1-foobaaa1') { return {} - else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => { return { @@ -512,8 +519,9 @@ t.test('owner rm ', async t => { 'should 
contain expected new owners, removing requested user' ) return {} - } else + } else { t.fail(`unexpected fetch json call to: ${uri}`) + } } pacote.packument = async (spec, opts) => { t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') @@ -556,10 +564,11 @@ t.test('owner rm not a current owner', async t => { email: 'foo@github.com', name: 'foo', } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') + } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { return {} - else + } else { t.fail(`unexpected fetch json call to: ${uri}`) + } } pacote.packument = async (spec, opts) => { return { @@ -588,10 +597,11 @@ t.test('owner rm cwd package', async t => { email: 'ruyadorno@hotmail.com', name: 'ruyadorno', } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') + } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { return {} - else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', @@ -619,8 +629,9 @@ t.test('owner rm only user', async t => { email: 'ruyadorno@hotmail.com', name: 'ruyadorno', } - } else + } else { t.fail(`unexpected fetch json call to uri: ${uri}`) + } } pacote.packument = async (spec, opts) => ({ _rev: '1-foobaaa1', diff --git a/test/lib/commands/pack.js b/test/lib/commands/pack.js index 6a5749623c437..f0349823a677b 100644 --- a/test/lib/commands/pack.js +++ b/test/lib/commands/pack.js @@ -7,15 +7,17 @@ const OUTPUT = [] const output = (...msg) => OUTPUT.push(msg) const libnpmpack = async (spec, opts) => { - if (!opts) + if (!opts) { throw new Error('expected options object') + } return '' } const mockPacote = { - manifest: (spec) => { - if (spec.type === 'directory') + manifest: spec => { + if (spec.type === 'directory') { return pacote.manifest(spec) + } const m = { name: spec.name || 'test-package', version: spec.version || '1.0.0-test', @@ -25,7 +27,7 @@ const mockPacote = { }, } -t.afterEach(() => OUTPUT.length = 0) +t.afterEach(() => (OUTPUT.length = 0)) t.test('should pack current directory with no arguments', async t => { let tarballFileName @@ -87,10 +89,14 @@ t.test('follows pack-destination config', async t => { t.test('should pack given directory', async t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + }, + null, + 2 + ), }) const Pack = t.mock('../../../lib/commands/pack.js', { @@ -122,10 +128,14 @@ t.test('should pack given directory', async t => { t.test('should pack given directory for scoped package', async t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: '@cool/my-pkg', - version: '1.0.0', - }, null, 2), + 'package.json': JSON.stringify( + { + name: '@cool/my-pkg', + version: '1.0.0', + }, + null, + 2 + ), }) const Pack = t.mock('../../../lib/commands/pack.js', { @@ -191,11 +201,15 @@ t.test('should log pack contents', async t => { t.test('should log output as valid json', async t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - main: './index.js', - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + main: './index.js', + }, + null, + 2 + ), 'README.md': 'text', 'index.js': 'void', }) @@ -214,13 +228,16 @@ t.test('should log output as valid json', async t => { integrity: { sha512: [ { + /* eslint-disable-next-line 
max-len */ source: 'sha512-JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==', + /* eslint-disable-next-line max-len */ digest: 'JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==', algorithm: 'sha512', options: [], }, ], toJSON () { + /* eslint-disable-next-line max-len */ return 'sha512-JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==' }, }, @@ -256,23 +273,30 @@ t.test('should log output as valid json', async t => { await pack.exec([testDir]) - t.match(JSON.parse(OUTPUT), [{ - id: '@ruyadorno/redact@1.0.0', - name: '@ruyadorno/redact', - version: '1.0.0', - size: 2450, - unpackedSize: 4911, - shasum: '044c7574639b923076069d6e801e2d1866430f17', - integrity: 'sha512-JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==', - filename: '@ruyadorno/redact-1.0.0.tgz', - files: [ - { path: 'LICENSE' }, - { path: 'README.md' }, - { path: 'index.js' }, - { path: 'package.json' }, + t.match( + JSON.parse(OUTPUT), + [ + { + id: '@ruyadorno/redact@1.0.0', + name: '@ruyadorno/redact', + version: '1.0.0', + size: 2450, + unpackedSize: 4911, + shasum: '044c7574639b923076069d6e801e2d1866430f17', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-JSdyskeR2qonBUaQ4vdlU/vQGSfgCxSq5O+vH+d2yVWRqzso4O3gUzd6QX/V7OWV//zU7kA5o63Zf433jUnOtQ==', + filename: '@ruyadorno/redact-1.0.0.tgz', + files: [ + { path: 'LICENSE' }, + { path: 'README.md' }, + { path: 'index.js' }, + { path: 'package.json' }, + ], + entryCount: 4, + }, ], - entryCount: 4, - }], 'pack details output as valid json') + 'pack details output as valid json' + ) }) t.test('invalid packument', async t => { @@ -302,20 +326,21 @@ t.test('invalid packument', async t => { output, }) const pack = new Pack(npm) - await t.rejects( - pack.exec([]), - 'Invalid package, must have name and version' - ) + await t.rejects(pack.exec([]), 'Invalid package, must have name and version') t.strictSame(OUTPUT, []) }) -t.test('workspaces', (t) => { +t.test('workspaces', t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, + null, + 2 + ), 'workspace-a': { 'package.json': JSON.stringify({ name: 'workspace-a', @@ -355,35 +380,25 @@ t.test('workspaces', (t) => { t.test('all workspaces', async t => { await pack.execWorkspaces([], []) - t.strictSame(OUTPUT, [ - ['workspace-a-1.0.0.tgz'], - ['workspace-b-1.0.0.tgz'], - ]) + t.strictSame(OUTPUT, [['workspace-a-1.0.0.tgz'], ['workspace-b-1.0.0.tgz']]) }) t.test('all workspaces, `.` first arg', async t => { await pack.execWorkspaces(['.'], []) - t.strictSame(OUTPUT, [ - ['workspace-a-1.0.0.tgz'], - ['workspace-b-1.0.0.tgz'], - ]) + t.strictSame(OUTPUT, [['workspace-a-1.0.0.tgz'], ['workspace-b-1.0.0.tgz']]) }) t.test('one workspace', async t => { await pack.execWorkspaces([], ['workspace-a']) - t.strictSame(OUTPUT, [ - ['workspace-a-1.0.0.tgz'], - ]) + t.strictSame(OUTPUT, [['workspace-a-1.0.0.tgz']]) }) t.test('specific package', async t => { await pack.execWorkspaces(['abbrev'], []) - t.strictSame(OUTPUT, [ - ['abbrev-1.0.0-test.tgz'], - ]) + t.strictSame(OUTPUT, [['abbrev-1.0.0-test.tgz']]) }) t.end() }) diff --git a/test/lib/commands/profile.js b/test/lib/commands/profile.js index 0a3680cf155f8..6554ca89e40f8 
100644 --- a/test/lib/commands/profile.js +++ b/test/lib/commands/profile.js @@ -34,10 +34,8 @@ const mocks = { 'qrcode-terminal': { generate: (url, cb) => cb() }, 'cli-table3': class extends Array { toString () { - return this - .filter(Boolean) - .map(i => [...Object.entries(i)] - .map(i => i.join(': '))) + return this.filter(Boolean) + .map(i => [...Object.entries(i)].map(i => i.join(': '))) .join('\n') } }, @@ -78,10 +76,7 @@ const Profile = t.mock('../../../lib/commands/profile.js', mocks) const profile = new Profile(npm) t.test('no args', async t => { - await t.rejects( - profile.exec([]), - profile.usage - ) + await t.rejects(profile.exec([]), profile.usage) }) t.test('profile get no args', t => { @@ -100,10 +95,7 @@ t.test('profile get no args', t => { t.test('default output', async t => { await profile.exec(['get']) - t.matchSnapshot( - result, - 'should output table with contents' - ) + t.matchSnapshot(result, 'should output table with contents') }) t.test('--json', async t => { @@ -111,21 +103,14 @@ t.test('profile get no args', t => { await profile.exec(['get']) - t.same( - JSON.parse(result), - userProfile, - 'should output json profile result' - ) + t.same(JSON.parse(result), userProfile, 'should output json profile result') }) t.test('--parseable', async t => { config.parseable = true await profile.exec(['get']) - t.matchSnapshot( - result, - 'should output all profile info as parseable result' - ) + t.matchSnapshot(result, 'should output all profile info as parseable result') }) t.test('no tfa enabled', async t => { @@ -145,10 +130,7 @@ t.test('profile get no args', t => { const profile = new Profile(npm) await profile.exec(['get']) - t.matchSnapshot( - result, - 'should output expected profile values' - ) + t.matchSnapshot(result, 'should output expected profile values') }) t.test('unverified email', async t => { @@ -169,10 +151,7 @@ t.test('profile get no args', t => { await profile.exec(['get']) - t.matchSnapshot( - result, - 'should output table with contents' - ) + t.matchSnapshot(result, 'should output table with contents') }) t.test('profile has cidr_whitelist item', async t => { @@ -193,10 +172,7 @@ t.test('profile get no args', t => { await profile.exec(['get']) - t.matchSnapshot( - result, - 'should output table with contents' - ) + t.matchSnapshot(result, 'should output table with contents') }) t.end() @@ -218,11 +194,7 @@ t.test('profile get ', t => { t.test('default output', async t => { await profile.exec(['get', 'name']) - t.equal( - result, - 'foo', - 'should output value result' - ) + t.equal(result, 'foo', 'should output value result') }) t.test('--json', async t => { @@ -242,10 +214,7 @@ t.test('profile get ', t => { await profile.exec(['get', 'name']) - t.matchSnapshot( - result, - 'should output parseable result value' - ) + t.matchSnapshot(result, 'should output parseable result value') }) t.end() @@ -267,10 +236,7 @@ t.test('profile get multiple args', t => { t.test('default output', async t => { await profile.exec(['get', 'name', 'email', 'github']) - t.matchSnapshot( - result, - 'should output all keys' - ) + t.matchSnapshot(result, 'should output all keys') }) t.test('--json', async t => { @@ -278,11 +244,7 @@ t.test('profile get multiple args', t => { await profile.exec(['get', 'name', 'email', 'github']) - t.same( - JSON.parse(result), - userProfile, - 'should output json profile result and ignore args' - ) + t.same(JSON.parse(result), userProfile, 'should output json profile result and ignore args') }) t.test('--parseable', async t => { @@ 
-290,19 +252,13 @@ t.test('profile get multiple args', t => { await profile.exec(['get', 'name', 'email', 'github']) - t.matchSnapshot( - result, - 'should output parseable profile value results' - ) + t.matchSnapshot(result, 'should output parseable profile value results') }) t.test('comma separated', async t => { await profile.exec(['get', 'name,email,github']) - t.matchSnapshot( - result, - 'should output all keys' - ) + t.matchSnapshot(result, 'should output all keys') }) t.end() @@ -371,11 +327,7 @@ t.test('profile set ', t => { const profile = new Profile(npm) await profile.exec(['set', 'fullname', 'Lorem Ipsum']) - t.equal( - result, - 'Set\nfullname\nto\nLorem Ipsum', - 'should output set key success msg' - ) + t.equal(result, 'Set\nfullname\nto\nLorem Ipsum', 'should output set key success msg') }) t.test('--json', async t => { @@ -413,10 +365,7 @@ t.test('profile set ', t => { await profile.exec(['set', 'fullname', 'Lorem Ipsum']) - t.matchSnapshot( - result, - 'should output parseable set key success msg' - ) + t.matchSnapshot(result, 'should output parseable set key success msg') }) t.end() @@ -437,11 +386,7 @@ t.test('profile set ', t => { }, 'should set new value to email' ) - t.match( - conf, - npm.flatOptions, - 'should forward flatOptions config' - ) + t.match(conf, npm.flatOptions, 'should forward flatOptions config') return { ...userProfile, ...newUser, @@ -456,11 +401,7 @@ t.test('profile set ', t => { const profile = new Profile(npm) await profile.exec(['set', 'email', 'foo@npmjs.com']) - t.equal( - result, - 'Set\nemail\nto\nfoo@npmjs.com', - 'should output set key success msg' - ) + t.equal(result, 'Set\nemail\nto\nfoo@npmjs.com', 'should output set key success msg') }) t.test('change password', async t => { @@ -481,11 +422,7 @@ t.test('profile set ', t => { }, 'should set new password' ) - t.match( - conf, - npm.flatOptions, - 'should forward flatOptions config' - ) + t.match(conf, npm.flatOptions, 'should forward flatOptions config') return { ...userProfile, } @@ -494,18 +431,17 @@ t.test('profile set ', t => { const readUserInfo = { async password (label) { - if (label === 'Current password: ') + if (label === 'Current password: ') { t.ok('should interactively ask for password confirmation') - else if (label === 'New password: ') + } else if (label === 'New password: ') { t.ok('should interactively ask for new password') - else if (label === ' Again: ') + } else if (label === ' Again: ') { t.ok('should interactively ask for new password confirmation') - else + } else { throw new Error('Unexpected label: ' + label) + } - return label === 'Current password: ' - ? 'currentpassword1234' - : 'newpassword1234' + return label === 'Current password: ' ? 'currentpassword1234' : 'newpassword1234' }, } @@ -518,11 +454,7 @@ t.test('profile set ', t => { await profile.exec(['set', 'password']) - t.equal( - result, - 'Set\npassword', - 'should output set password success msg' - ) + t.equal(result, 'Set\npassword', 'should output set password success msg') }) t.test('password confirmation mismatch', async t => { @@ -548,9 +480,7 @@ t.test('profile set ', t => { case 'Current password: ': return 'currentpassword1234' case 'New password: ': - return passwordPromptCount < 3 - ? 'password-that-will-not-be-confirmed' - : 'newpassword' + return passwordPromptCount < 3 ? 
'password-that-will-not-be-confirmed' : 'newpassword' case ' Again: ': return 'newpassword' default: @@ -583,11 +513,7 @@ t.test('profile set ', t => { await profile.exec(['set', 'password']) - t.equal( - result, - 'Set\npassword', - 'should output set password success msg' - ) + t.equal(result, 'Set\npassword', 'should output set password success msg') }) t.end() @@ -616,7 +542,7 @@ t.test('enable-2fa', t => { await t.rejects( profile.exec(['enable-2fa', 'auth-only']), 'Enabling two-factor authentication is an interactive ' + - 'operation and JSON output mode is not available', + 'operation and JSON output mode is not available', 'should throw no support msg' ) }) @@ -627,7 +553,7 @@ t.test('enable-2fa', t => { await t.rejects( profile.exec(['enable-2fa', 'auth-only']), 'Enabling two-factor authentication is an interactive ' + - 'operation and parseable output mode is not available', + 'operation and parseable output mode is not available', 'should throw no support msg' ) }) @@ -657,8 +583,8 @@ t.test('enable-2fa', t => { await t.rejects( profile.exec(['enable-2fa', 'auth-only']), 'Your registry https://registry.npmjs.org/ does ' + - 'not seem to support bearer tokens. Bearer tokens ' + - 'are required for two-factor authentication', + 'not seem to support bearer tokens. Bearer tokens ' + + 'are required for two-factor authentication', 'should throw no support msg' ) }) @@ -684,8 +610,8 @@ t.test('enable-2fa', t => { await t.rejects( profile.exec(['enable-2fa', 'auth-only']), 'Your registry https://registry.npmjs.org/ does ' + - 'not seem to support bearer tokens. Bearer tokens ' + - 'are required for two-factor authentication', + 'not seem to support bearer tokens. Bearer tokens ' + + 'are required for two-factor authentication', 'should throw no support msg' ) }) @@ -700,8 +626,7 @@ t.test('enable-2fa', t => { await t.rejects( profile.exec(['enable-2fa', 'auth-only']), - 'You need to be logged in to registry ' + - 'https://registry.npmjs.org/ in order to enable 2fa' + 'You need to be logged in to registry ' + 'https://registry.npmjs.org/ in order to enable 2fa' ) }) @@ -709,7 +634,7 @@ t.test('enable-2fa', t => { t.plan(10) // mock legacy basic auth style - npm.config.getCredentialsByURI = (reg) => { + npm.config.getCredentialsByURI = reg => { t.equal(reg, flatOptions.registry, 'should use expected registry') return { auth: Buffer.from('foo:bar').toString('base64') } } @@ -717,7 +642,7 @@ t.test('enable-2fa', t => { t.equal(registry, flatOptions.registry, 'should set expected registry') t.equal(token, 'token', 'should set expected token') } - npm.config.save = (type) => { + npm.config.save = type => { t.equal(type, 'user', 'should save to user config') } @@ -839,10 +764,7 @@ t.test('enable-2fa', t => { ) return { ...userProfile, - tfa: [ - '123456', - '789101', - ], + tfa: ['123456', '789101'], } } @@ -863,7 +785,7 @@ t.test('enable-2fa', t => { } const qrcode = { - // eslint-disable-next-line standard/no-callback-literal + /* eslint-disable-next-line node/no-callback-literal */ generate: (url, cb) => cb('qrcode'), } @@ -877,10 +799,7 @@ t.test('enable-2fa', t => { await profile.exec(['enable-2fa', 'auth-only']) - t.matchSnapshot( - result, - 'should output 2fa enablement success msgs' - ) + t.matchSnapshot(result, 'should output 2fa enablement success msgs') }) t.test('from token and set otp, retrieves invalid otp', async t => { @@ -934,7 +853,7 @@ t.test('enable-2fa', t => { flatOptions.otp = '123456' flatOptions.otp = '123456' - npm.config.getCredentialsByURI = (reg) => { + 
npm.config.getCredentialsByURI = reg => { return { token: 'token' } } @@ -976,7 +895,7 @@ t.test('enable-2fa', t => { }) t.test('missing tfa from user profile', async t => { - npm.config.getCredentialsByURI = (reg) => { + npm.config.getCredentialsByURI = reg => { return { token: 'token' } } @@ -1021,7 +940,7 @@ t.test('enable-2fa', t => { }) t.test('defaults to auth-and-writes permission if no mode specified', async t => { - npm.config.getCredentialsByURI = (reg) => { + npm.config.getCredentialsByURI = reg => { return { token: 'token' } } @@ -1085,11 +1004,7 @@ t.test('disable-2fa', t => { const profile = new Profile(npm) await profile.exec(['disable-2fa']) - t.equal( - result, - 'Two factor authentication not enabled.', - 'should output already disalbed msg' - ) + t.equal(result, 'Two factor authentication not enabled.', 'should output already disalbed msg') }) t.test('requests otp', t => { @@ -1143,11 +1058,7 @@ t.test('disable-2fa', t => { const profile = new Profile(npm) await profile.exec(['disable-2fa']) - t.equal( - result, - 'Two factor authentication disabled.', - 'should output already disabled msg' - ) + t.equal(result, 'Two factor authentication disabled.', 'should output already disabled msg') }) t.test('--json', async t => { @@ -1162,11 +1073,7 @@ t.test('disable-2fa', t => { await profile.exec(['disable-2fa']) - t.same( - JSON.parse(result), - { tfa: false }, - 'should output json already disabled msg' - ) + t.same(JSON.parse(result), { tfa: false }, 'should output json already disabled msg') }) t.test('--parseable', async t => { @@ -1181,11 +1088,7 @@ t.test('disable-2fa', t => { await profile.exec(['disable-2fa']) - t.equal( - result, - 'tfa\tfalse', - 'should output parseable already disabled msg' - ) + t.equal(result, 'tfa\tfalse', 'should output parseable already disabled msg') }) t.end() @@ -1240,11 +1143,7 @@ t.test('disable-2fa', t => { await profile.exec(['disable-2fa']) - t.equal( - result, - 'Two factor authentication disabled.', - 'should output already disalbed msg' - ) + t.equal(result, 'Two factor authentication disabled.', 'should output already disalbed msg') }) t.end() @@ -1260,11 +1159,7 @@ t.test('unknown subcommand', async t => { t.test('completion', t => { const testComp = async ({ t, argv, expect, title }) => { - t.resolveMatch( - profile.completion({ conf: { argv: { remain: argv } } }), - expect, - title - ) + t.resolveMatch(profile.completion({ conf: { argv: { remain: argv } } }), expect, title) } t.test('npm profile autocomplete', async t => { @@ -1306,7 +1201,8 @@ t.test('completion', t => { t.test('npm profile unknown subcommand autocomplete', async t => { t.rejects( profile.completion({ conf: { argv: { remain: ['npm', 'profile', 'asdf'] } } }), - { message: 'asdf not recognized' }, 'should throw unknown cmd error' + { message: 'asdf not recognized' }, + 'should throw unknown cmd error' ) t.end() }) diff --git a/test/lib/commands/publish.js b/test/lib/commands/publish.js index 6c444e5f7fb0c..5f4fb401064c2 100644 --- a/test/lib/commands/publish.js +++ b/test/lib/commands/publish.js @@ -9,84 +9,102 @@ const fs = require('fs') const log = require('npmlog') log.level = 'silent' -t.cleanSnapshot = (data) => { +t.cleanSnapshot = data => { return data.replace(/^ *"gitHead": .*$\n/gm, '') } -const {definitions} = require('../../../lib/utils/config') +const { definitions } = require('../../../lib/utils/config') const defaults = Object.entries(definitions).reduce((defaults, [key, def]) => { defaults[key] = def.default return defaults }, {}) -t.afterEach(() 
=> log.level = 'silent') +t.afterEach(() => (log.level = 'silent')) -t.test('should publish with libnpmpublish, passing through flatOptions and respecting publishConfig.registry', async t => { - t.plan(6) +t.test( + /* eslint-disable-next-line max-len */ + 'should publish with libnpmpublish, passing through flatOptions and respecting publishConfig.registry', + async t => { + t.plan(6) - const registry = 'https://some.registry' - const publishConfig = { registry } - const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - publishConfig, - }, null, 2), - }) + const registry = 'https://some.registry' + const publishConfig = { registry } + const testDir = t.testdir({ + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + publishConfig, + }, + null, + 2 + ), + }) - const Publish = t.mock('../../../lib/commands/publish.js', { - // verify that we do NOT remove publishConfig if it was there originally - // and then removed during the script/pack process - libnpmpack: async () => { - fs.writeFileSync(`${testDir}/package.json`, JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - })) - return Buffer.from('') - }, - libnpmpublish: { - publish: (manifest, tarData, opts) => { - t.match(manifest, { name: 'my-cool-pkg', version: '1.0.0' }, 'gets manifest') - t.type(tarData, Buffer, 'tarData is a buffer') - t.ok(opts, 'gets opts object') - t.same(opts.customValue, true, 'flatOptions values are passed through') - t.same(opts.registry, registry, 'publishConfig.registry is passed through') + const Publish = t.mock('../../../lib/commands/publish.js', { + // verify that we do NOT remove publishConfig if it was there originally + // and then removed during the script/pack process + libnpmpack: async () => { + fs.writeFileSync( + `${testDir}/package.json`, + JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + }) + ) + return Buffer.from('') }, - }, - }) - const npm = mockNpm({ - flatOptions: { - customValue: true, - workspacesEnabled: true, - }, - }) - npm.config.getCredentialsByURI = (uri) => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - const publish = new Publish(npm) + libnpmpublish: { + publish: (manifest, tarData, opts) => { + t.match(manifest, { name: 'my-cool-pkg', version: '1.0.0' }, 'gets manifest') + t.type(tarData, Buffer, 'tarData is a buffer') + t.ok(opts, 'gets opts object') + t.same(opts.customValue, true, 'flatOptions values are passed through') + t.same(opts.registry, registry, 'publishConfig.registry is passed through') + }, + }, + }) + const npm = mockNpm({ + flatOptions: { + customValue: true, + workspacesEnabled: true, + }, + }) + npm.config.getCredentialsByURI = uri => { + t.same(uri, registry, 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) - await publish.exec([testDir]) -}) + await publish.exec([testDir]) + } +) t.test('re-loads publishConfig.registry if added during script process', async t => { t.plan(5) const registry = 'https://some.registry' const publishConfig = { registry } const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + }, + null, + 2 + ), }) const Publish = t.mock('../../../lib/commands/publish.js', { libnpmpack: async () => { - fs.writeFileSync(`${testDir}/package.json`, 
JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - publishConfig, - })) + fs.writeFileSync( + `${testDir}/package.json`, + JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + publishConfig, + }) + ) return Buffer.from('') }, libnpmpublish: { @@ -99,7 +117,7 @@ t.test('re-loads publishConfig.registry if added during script process', async t }, }) const npm = mockNpm() - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, registry, 'gets credentials for expected registry') return { token: 'some.registry.token' } } @@ -112,10 +130,14 @@ t.test('if loglevel=info and json, should not output package contents', async t t.plan(3) const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + }, + null, + 2 + ), }) log.level = 'info' @@ -140,7 +162,7 @@ t.test('if loglevel=info and json, should not output package contents', async t t.pass('output is called') }, }) - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') return { token: 'some.registry.token' } } @@ -149,96 +171,109 @@ t.test('if loglevel=info and json, should not output package contents', async t await publish.exec([testDir]) }) -t.test('if loglevel=silent and dry-run, should not output package contents or publish or validate credentials, should log tarball contents', async t => { - t.plan(1) +t.test( + /* eslint-disable-next-line max-len */ + 'if loglevel=silent and dry-run, should not output package contents or publish or validate credentials, should log tarball contents', + async t => { + t.plan(1) - const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - }, null, 2), - }) + const testDir = t.testdir({ + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + }, + null, + 2 + ), + }) - log.level = 'silent' - const Publish = t.mock('../../../lib/commands/publish.js', { - '../../../lib/utils/tar.js': { - getContents: () => ({ - id: 'someid', - }), - logTar: () => { - t.pass('logTar is called') + log.level = 'silent' + const Publish = t.mock('../../../lib/commands/publish.js', { + '../../../lib/utils/tar.js': { + getContents: () => ({ + id: 'someid', + }), + logTar: () => { + t.pass('logTar is called') + }, }, - }, - libnpmpublish: { - publish: () => { - throw new Error('should not call libnpmpublish in dry run') + libnpmpublish: { + publish: () => { + throw new Error('should not call libnpmpublish in dry run') + }, }, - }, - }) - const npm = mockNpm({ - config: { 'dry-run': true }, - output: () => { - throw new Error('should not output in dry run mode') - }, - }) - npm.config.getCredentialsByURI = () => { - throw new Error('should not call getCredentialsByURI in dry run') - } - - const publish = new Publish(npm) - - await publish.exec([testDir]) -}) + }) + const npm = mockNpm({ + config: { 'dry-run': true }, + output: () => { + throw new Error('should not output in dry run mode') + }, + }) + npm.config.getCredentialsByURI = () => { + throw new Error('should not call getCredentialsByURI in dry run') + } -t.test('if loglevel=info and dry-run, should not publish, should log package contents and log tarball contents', async t => { - t.plan(2) + const publish = new Publish(npm) - const testDir = t.testdir({ - 'package.json': JSON.stringify({ 
- name: 'my-cool-pkg', - version: '1.0.0', - }, null, 2), - }) + await publish.exec([testDir]) + } +) + +t.test( + /* eslint-disable-next-line max-len */ + 'if loglevel=info and dry-run, should not publish, should log package contents and log tarball contents', + async t => { + t.plan(2) + + const testDir = t.testdir({ + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + }, + null, + 2 + ), + }) - log.level = 'info' - const Publish = t.mock('../../../lib/commands/publish.js', { - '../../../lib/utils/tar.js': { - getContents: () => ({ - id: 'someid', - }), - logTar: () => { - t.pass('logTar is called') + log.level = 'info' + const Publish = t.mock('../../../lib/commands/publish.js', { + '../../../lib/utils/tar.js': { + getContents: () => ({ + id: 'someid', + }), + logTar: () => { + t.pass('logTar is called') + }, }, - }, - libnpmpublish: { - publish: () => { - throw new Error('should not call libnpmpublish in dry run') + libnpmpublish: { + publish: () => { + throw new Error('should not call libnpmpublish in dry run') + }, }, - }, - }) - const npm = mockNpm({ - config: { 'dry-run': true }, - output: () => { - t.pass('output fn is called') - }, - }) - npm.config.getCredentialsByURI = () => { - throw new Error('should not call getCredentialsByURI in dry run') - } - const publish = new Publish(npm) + }) + const npm = mockNpm({ + config: { 'dry-run': true }, + output: () => { + t.pass('output fn is called') + }, + }) + npm.config.getCredentialsByURI = () => { + throw new Error('should not call getCredentialsByURI in dry run') + } + const publish = new Publish(npm) - await publish.exec([testDir]) -}) + await publish.exec([testDir]) + } +) t.test('shows usage with wrong set of arguments', async t => { t.plan(1) const Publish = t.mock('../../../lib/commands/publish.js') const publish = new Publish({}) - await t.rejects( - publish.exec(['a', 'b', 'c']), - publish.usage - ) + await t.rejects(publish.exec(['a', 'b', 'c']), publish.usage) }) t.test('throws when invalid tag', async t => { @@ -270,26 +305,33 @@ t.test('can publish a tarball', async t => { }, }) const tar = require('tar') - tar.c({ - cwd: testDir, - file: `${testDir}/tarball/package.tgz`, - sync: true, - }, ['package']) + tar.c( + { + cwd: testDir, + file: `${testDir}/tarball/package.tgz`, + sync: true, + }, + ['package'] + ) const tarFile = fs.readFileSync(`${testDir}/tarball/package.tgz`) const Publish = t.mock('../../../lib/commands/publish.js', { libnpmpublish: { publish: (manifest, tarData, opts) => { - t.match(manifest, { - name: 'my-cool-tarball', - version: '1.2.3', - }, 'sent manifest to lib pub') + t.match( + manifest, + { + name: 'my-cool-tarball', + version: '1.2.3', + }, + 'sent manifest to lib pub' + ) t.strictSame(tarData, tarFile, 'sent the tarball data to lib pub') }, }, }) const npm = mockNpm() - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') return { token: 'some.registry.token' } } @@ -302,7 +344,7 @@ t.test('should check auth for default registry', async t => { t.plan(2) const Publish = t.mock('../../../lib/commands/publish.js') const npm = mockNpm() - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') return {} } @@ -322,7 +364,7 @@ t.test('should check auth for configured registry', async t => { const npm = mockNpm({ flatOptions: { registry }, }) - 
npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, registry, 'gets credentials for expected registry') return {} } @@ -339,17 +381,21 @@ t.test('should check auth for scope specific registry', async t => { t.plan(2) const registry = 'https://some.registry' const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: '@npm/my-cool-pkg', - version: '1.0.0', - }, null, 2), + 'package.json': JSON.stringify( + { + name: '@npm/my-cool-pkg', + version: '1.0.0', + }, + null, + 2 + ), }) const Publish = t.mock('../../../lib/commands/publish.js') const npm = mockNpm({ flatOptions: { '@npm:registry': registry }, }) - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, registry, 'gets credentials for expected registry') return {} } @@ -366,10 +412,14 @@ t.test('should use auth for scope specific registry', async t => { t.plan(3) const registry = 'https://some.registry' const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: '@npm/my-cool-pkg', - version: '1.0.0', - }, null, 2), + 'package.json': JSON.stringify( + { + name: '@npm/my-cool-pkg', + version: '1.0.0', + }, + null, + 2 + ), }) const Publish = t.mock('../../../lib/commands/publish.js', { @@ -383,7 +433,7 @@ t.test('should use auth for scope specific registry', async t => { const npm = mockNpm({ flatOptions: { '@npm:registry': registry }, }) - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, registry, 'gets credentials for expected registry') return { token: 'some.registry.token' } } @@ -398,11 +448,15 @@ t.test('read registry only from publishConfig', async t => { const registry = 'https://some.registry' const publishConfig = { registry } const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - publishConfig, - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + publishConfig, + }, + null, + 2 + ), }) const Publish = t.mock('../../../lib/commands/publish.js', { @@ -414,7 +468,7 @@ t.test('read registry only from publishConfig', async t => { }, }) const npm = mockNpm() - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, registry, 'gets credentials for expected registry') return { token: 'some.registry.token' } } @@ -430,18 +484,24 @@ t.test('able to publish after if encountered multiple configs', async t => { const tag = 'better-tag' const publishConfig = { registry } const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - publishConfig, - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + publishConfig, + }, + null, + 2 + ), }) const configList = [defaults] - configList.unshift(Object.assign(Object.create(configList[0]), { - registry: `https://other.registry`, - tag: 'some-tag', - })) + configList.unshift( + Object.assign(Object.create(configList[0]), { + registry: `https://other.registry`, + tag: 'some-tag', + }) + ) configList.unshift(Object.assign(Object.create(configList[0]), { tag })) const Publish = t.mock('../../../lib/commands/publish.js', { @@ -460,7 +520,7 @@ t.test('able to publish after if encountered multiple configs', async t => { config: { get: key => configList[0][key], list: configList, - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.same(uri, registry, 'gets credentials for expected registry') 
return { token: 'some.registry.token' } }, @@ -470,13 +530,17 @@ t.test('able to publish after if encountered multiple configs', async t => { await publish.exec([testDir]) }) -t.test('workspaces', (t) => { +t.test('workspaces', t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }, + null, + 2 + ), 'workspace-a': { 'package.json': JSON.stringify({ name: 'workspace-a', @@ -508,7 +572,7 @@ t.test('workspaces', (t) => { }) const Publish = t.mock('../../../lib/commands/publish.js', { '../../../lib/utils/tar.js': { - getContents: (manifest) => ({ + getContents: manifest => ({ id: manifest._id, }), logTar: () => {}, @@ -520,12 +584,12 @@ t.test('workspaces', (t) => { }, }) const npm = mockNpm({ - output: (o) => { + output: o => { outputs.push(o) }, }) npm.localPrefix = testDir - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { return { token: 'some.registry.token' } } const publish = new Publish(npm) @@ -545,14 +609,8 @@ t.test('workspaces', (t) => { }) t.test('invalid workspace', async t => { - await t.rejects( - publish.execWorkspaces([], ['workspace-x']), - /No workspaces found/ - ) - await t.rejects( - publish.execWorkspaces([], ['workspace-x']), - /workspace-x/ - ) + await t.rejects(publish.execWorkspaces([], ['workspace-x']), /No workspaces found/) + await t.rejects(publish.execWorkspaces([], ['workspace-x']), /workspace-x/) }) t.test('json', async t => { @@ -598,7 +656,7 @@ t.test('private workspaces', async t => { }) const mocks = { '../../../lib/utils/tar.js': { - getContents: (manifest) => ({ + getContents: manifest => ({ id: manifest._id, }), logTar: () => {}, @@ -606,22 +664,19 @@ t.test('private workspaces', async t => { libnpmpublish: { publish: (manifest, tarballData, opts) => { if (manifest.private) { - throw Object.assign( - new Error('private pkg'), - { code: 'EPRIVATE' } - ) + throw Object.assign(new Error('private pkg'), { code: 'EPRIVATE' }) } publishes.push(manifest) }, }, } const npm = mockNpm({ - output: (o) => { + output: o => { outputs.push(o) }, }) npm.localPrefix = testDir - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { return { token: 'some.registry.token' } } @@ -635,6 +690,7 @@ t.test('private workspaces', async t => { t.equal(title, 'publish', 'should use publish warn title') t.match( msg, + /* eslint-disable-next-line max-len */ 'Skipping workspace \u001b[32m@npmcli/a\u001b[39m, marked as \u001b[1mprivate\u001b[22m', 'should display skip private workspace warn msg' ) @@ -678,8 +734,9 @@ t.test('private workspaces', async t => { ...mocks, libnpmpublish: { publish: (manifest, tarballData, opts) => { - if (manifest.private) + if (manifest.private) { throw new Error('ERR') + } publishes.push(manifest) }, @@ -691,11 +748,7 @@ t.test('private workspaces', async t => { }) const publish = new Publish(npm) - await t.rejects( - publish.execWorkspaces([], []), - /ERR/, - 'should throw unexpected error' - ) + await t.rejects(publish.execWorkspaces([], []), /ERR/, 'should throw unexpected error') }) t.end() @@ -703,21 +756,25 @@ t.test('private workspaces', async t => { t.test('runs correct lifecycle scripts', async t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: 
'1.0.0', - scripts: { - prepublishOnly: 'echo test prepublishOnly', - prepublish: 'echo test prepublish', // should NOT run this one - publish: 'echo test publish', - postpublish: 'echo test postpublish', + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + scripts: { + prepublishOnly: 'echo test prepublishOnly', + prepublish: 'echo test prepublish', // should NOT run this one + publish: 'echo test publish', + postpublish: 'echo test postpublish', + }, }, - }, null, 2), + null, + 2 + ), }) const scripts = [] const Publish = t.mock('../../../lib/commands/publish.js', { - '@npmcli/run-script': (args) => { + '@npmcli/run-script': args => { scripts.push(args) }, '../../../lib/utils/tar.js': { @@ -739,7 +796,7 @@ t.test('runs correct lifecycle scripts', async t => { t.pass('output is called') }, }) - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') return { token: 'some.registry.token' } } @@ -754,10 +811,14 @@ t.test('runs correct lifecycle scripts', async t => { t.test('does not run scripts on --ignore-scripts', async t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'my-cool-pkg', + version: '1.0.0', + }, + null, + 2 + ), }) const Publish = t.mock('../../../lib/commands/publish.js', { @@ -784,7 +845,7 @@ t.test('does not run scripts on --ignore-scripts', async t => { t.pass('output is called') }, }) - npm.config.getCredentialsByURI = (uri) => { + npm.config.getCredentialsByURI = uri => { t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') return { token: 'some.registry.token' } } diff --git a/test/lib/commands/repo.js b/test/lib/commands/repo.js index 9a7c4a95096cd..4e61047b4e7c7 100644 --- a/test/lib/commands/repo.js +++ b/test/lib/commands/repo.js @@ -233,7 +233,7 @@ t.test('open repo urls', t => { const url = expect[pkg] t.match({ [url]: 1, - }, opened, `opened ${url}`, {opened}) + }, opened, `opened ${url}`, { opened }) t.end() }) }) @@ -263,7 +263,7 @@ t.test('fail if cannot figure out repo url', t => { t.test('open default package if none specified', async t => { npm.localPrefix = pkgDirs await npm.exec('repo', []) - t.equal(opened['https://example.com/thispkg'], 1, 'opened expected url', {opened}) + t.equal(opened['https://example.com/thispkg'], 1, 'opened expected url', { opened }) }) t.test('workspaces', t => { diff --git a/test/lib/commands/run-script.js b/test/lib/commands/run-script.js index 6b3b40055c362..e421c655ef64f 100644 --- a/test/lib/commands/run-script.js +++ b/test/lib/commands/run-script.js @@ -2,12 +2,9 @@ const t = require('tap') const { resolve } = require('path') const { fake: mockNpm } = require('../../fixtures/mock-npm') -const normalizePath = p => p - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') +const normalizePath = p => p.replace(/\\+/g, '/').replace(/\r\n/g, '\n') -const cleanOutput = (str) => normalizePath(str) - .replace(normalizePath(process.cwd()), '{CWD}') +const cleanOutput = str => normalizePath(str).replace(normalizePath(process.cwd()), '{CWD}') const RUN_SCRIPTS = [] const flatOptions = { @@ -23,7 +20,7 @@ const npm = mockNpm({ localPrefix: __dirname, flatOptions, config, - cmd: (c) => { + cmd: c => { return { description: `test ${c} description` } }, output: (...msg) => output.push(msg), @@ -50,11 +47,14 @@ t.afterEach(() => { const getRS = 
windows => { const RunScript = t.mock('../../../lib/commands/run-script.js', { - '@npmcli/run-script': Object.assign(async opts => { - RUN_SCRIPTS.push(opts) - }, { - isServerPackage: require('@npmcli/run-script').isServerPackage, - }), + '@npmcli/run-script': Object.assign( + async opts => { + RUN_SCRIPTS.push(opts) + }, + { + isServerPackage: require('@npmcli/run-script').isServerPackage, + } + ), npmlog, '../../../lib/utils/is-windows-shell.js': windows, }) @@ -69,26 +69,29 @@ t.test('completion', t => { const dir = t.testdir() npm.localPrefix = dir t.test('already have a script name', async t => { - const res = await runScript.completion({conf: {argv: {remain: ['npm', 'run', 'x']}}}) + const res = await runScript.completion({ conf: { argv: { remain: ['npm', 'run', 'x'] } } }) t.equal(res, undefined) t.end() }) t.test('no package.json', async t => { - const res = await runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + const res = await runScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) t.strictSame(res, []) t.end() }) t.test('has package.json, no scripts', async t => { writeFileSync(`${dir}/package.json`, JSON.stringify({})) - const res = await runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + const res = await runScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) t.strictSame(res, []) t.end() }) t.test('has package.json, with scripts', async t => { - writeFileSync(`${dir}/package.json`, JSON.stringify({ - scripts: { hello: 'echo hello', world: 'echo world' }, - })) - const res = await runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + writeFileSync( + `${dir}/package.json`, + JSON.stringify({ + scripts: { hello: 'echo hello', world: 'echo world' }, + }) + ) + const res = await runScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) t.strictSame(res, ['hello', 'world']) t.end() }) @@ -98,14 +101,8 @@ t.test('completion', t => { t.test('fail if no package.json', async t => { t.plan(2) npm.localPrefix = t.testdir() - await t.rejects( - runScript.exec([]), - { code: 'ENOENT' } - ) - await t.rejects( - runScript.exec(['test']), - { code: 'ENOENT' } - ) + await t.rejects(runScript.exec([]), { code: 'ENOENT' }) + await t.rejects(runScript.exec(['test']), { code: 'ENOENT' }) }) t.test('default env, start, and restart scripts', t => { @@ -123,7 +120,7 @@ t.test('default env, start, and restart scripts', t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {}}, + pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {} }, event: 'start', }, ]) @@ -160,12 +157,14 @@ t.test('default env, start, and restart scripts', t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', + pkg: { + name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { env: 'SET', - } }, + }, + }, event: 'env', }, ]) @@ -181,12 +180,14 @@ t.test('default env, start, and restart scripts', t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', + pkg: { + name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { restart: 'npm stop --if-present && npm start', - } }, + }, + }, event: 'restart', }, ]) @@ -236,7 +237,8 @@ t.test('non-default env script', t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', + pkg: { + name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { @@ -258,30 +260,15 @@ t.test('try to run missing script', t => { }), }) t.test('no suggestions', 
async t => { - await t.rejects( - runScript.exec(['notevenclose']), - 'Missing script: "notevenclose"' - ) + await t.rejects(runScript.exec(['notevenclose']), 'Missing script: "notevenclose"') }) t.test('script suggestions', async t => { - await t.rejects( - runScript.exec(['helo']), - /Missing script: "helo"/ - ) - await t.rejects( - runScript.exec(['helo']), - /npm run hello/ - ) + await t.rejects(runScript.exec(['helo']), /Missing script: "helo"/) + await t.rejects(runScript.exec(['helo']), /npm run hello/) }) t.test('bin suggestions', async t => { - await t.rejects( - runScript.exec(['goodneght']), - /Missing script: "goodneght"/ - ) - await t.rejects( - runScript.exec(['goodneght']), - /npm exec goodnight/ - ) + await t.rejects(runScript.exec(['goodneght']), /Missing script: "goodneght"/) + await t.rejects(runScript.exec(['goodneght']), /npm exec goodnight/) }) t.test('with --if-present', async t => { config['if-present'] = true @@ -313,12 +300,14 @@ t.test('run pre/post hooks', async t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', + pkg: { + name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { env: 'env', - } }, + }, + }, event: 'env', }, { event: 'postenv' }, @@ -348,14 +337,16 @@ t.test('skip pre/post hooks when using ignoreScripts', async t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', + pkg: { + name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { preenv: 'echo before the env', postenv: 'echo after the env', env: 'env', - } }, + }, + }, banner: true, event: 'env', }, @@ -392,12 +383,14 @@ t.test('run silent', async t => { scriptShell: undefined, stdio: 'inherit', stdioString: true, - pkg: { name: 'x', + pkg: { + name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: { env: 'env', - } }, + }, + }, event: 'env', banner: false, }, @@ -426,16 +419,20 @@ t.test('list scripts', t => { t.test('no args', async t => { await runScript.exec([]) - t.strictSame(output, [ - ['Lifecycle scripts included in x@1.2.3:'], - [' test\n exit 2'], - [' start\n node server.js'], - [' stop\n node kill-server.js'], - ['\navailable via `npm run-script`:'], - [' preenv\n echo before the env'], - [' postenv\n echo after the env'], - [''], - ], 'basic report') + t.strictSame( + output, + [ + ['Lifecycle scripts included in x@1.2.3:'], + [' test\n exit 2'], + [' start\n node server.js'], + [' stop\n node kill-server.js'], + ['\navailable via `npm run-script`:'], + [' preenv\n echo before the env'], + [' postenv\n echo after the env'], + [''], + ], + 'basic report' + ) }) t.test('silent', async t => { @@ -646,32 +643,28 @@ t.test('workspaces', t => { await runScript.execWorkspaces([], []) t.strictSame(output, [ [ + /* eslint-disable-next-line max-len */ '\u001b[1mScripts\u001b[22m available in \x1B[32ma@1.0.0\x1B[39m via `\x1B[34mnpm run-script\x1B[39m`:', ], [' glorp\n \x1B[2mecho a doing the glerp glop\x1B[22m'], [''], [ + /* eslint-disable-next-line max-len */ '\u001b[1mScripts\u001b[22m available in \x1B[32mb@2.0.0\x1B[39m via `\x1B[34mnpm run-script\x1B[39m`:', ], [' glorp\n \x1B[2mecho b doing the glerp glop\x1B[22m'], [''], - [ - '\x1B[0m\x1B[1mLifecycle scripts\x1B[22m\x1B[0m included in \x1B[32mc@1.0.0\x1B[39m:', - ], + ['\x1B[0m\x1B[1mLifecycle scripts\x1B[22m\x1B[0m included in \x1B[32mc@1.0.0\x1B[39m:'], [' test\n \x1B[2mexit 0\x1B[22m'], [' posttest\n \x1B[2mecho posttest\x1B[22m'], ['\navailable via `\x1B[34mnpm run-script\x1B[39m`:'], [' lorem\n \x1B[2mecho c lorem\x1B[22m'], [''], - [ - '\x1B[0m\x1B[1mLifecycle 
scripts\x1B[22m\x1B[0m included in \x1B[32md@1.0.0\x1B[39m:', - ], + ['\x1B[0m\x1B[1mLifecycle scripts\x1B[22m\x1B[0m included in \x1B[32md@1.0.0\x1B[39m:'], [' test\n \x1B[2mexit 0\x1B[22m'], [' posttest\n \x1B[2mecho posttest\x1B[22m'], [''], - [ - '\x1B[0m\x1B[1mLifecycle scripts\x1B[22m\x1B[0m included in \x1B[32me\x1B[39m:', - ], + ['\x1B[0m\x1B[1mLifecycle scripts\x1B[22m\x1B[0m included in \x1B[32me\x1B[39m:'], [' test\n \x1B[2mexit 0\x1B[22m'], [' start\n \x1B[2mecho start something\x1B[22m'], [''], @@ -684,27 +677,27 @@ t.test('workspaces', t => { t.strictSame(output, [ [ '{\n' + - ' "a": {\n' + - ' "glorp": "echo a doing the glerp glop"\n' + - ' },\n' + - ' "b": {\n' + - ' "glorp": "echo b doing the glerp glop"\n' + - ' },\n' + - ' "c": {\n' + - ' "test": "exit 0",\n' + - ' "posttest": "echo posttest",\n' + - ' "lorem": "echo c lorem"\n' + - ' },\n' + - ' "d": {\n' + - ' "test": "exit 0",\n' + - ' "posttest": "echo posttest"\n' + - ' },\n' + - ' "e": {\n' + - ' "test": "exit 0",\n' + - ' "start": "echo start something"\n' + - ' },\n' + - ' "noscripts": {}\n' + - '}', + ' "a": {\n' + + ' "glorp": "echo a doing the glerp glop"\n' + + ' },\n' + + ' "b": {\n' + + ' "glorp": "echo b doing the glerp glop"\n' + + ' },\n' + + ' "c": {\n' + + ' "test": "exit 0",\n' + + ' "posttest": "echo posttest",\n' + + ' "lorem": "echo c lorem"\n' + + ' },\n' + + ' "d": {\n' + + ' "test": "exit 0",\n' + + ' "posttest": "echo posttest"\n' + + ' },\n' + + ' "e": {\n' + + ' "test": "exit 0",\n' + + ' "start": "echo start something"\n' + + ' },\n' + + ' "noscripts": {}\n' + + '}', ], ]) }) @@ -765,7 +758,7 @@ t.test('workspaces', t => { t.test('missing scripts in all workspaces', async t => { const LOG = [] - npmlog.error = (err) => { + npmlog.error = err => { LOG.push(String(err)) } await t.rejects( @@ -777,51 +770,60 @@ t.test('workspaces', t => { process.exitCode = 0 // clean exit code t.match(RUN_SCRIPTS, []) - t.strictSame(LOG.map(cleanOutput), [ - 'Lifecycle script `missing-script` failed with error:', - 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', - ' in workspace: a@1.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/a', - 'Lifecycle script `missing-script` failed with error:', - 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', - ' in workspace: b@2.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/b', - 'Lifecycle script `missing-script` failed with error:', - 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', - ' in workspace: c@1.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/c', - 'Lifecycle script `missing-script` failed with error:', - 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', - ' in workspace: d@1.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/d', - 'Lifecycle script `missing-script` failed with error:', - 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', - ' in workspace: e', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/e', - 'Lifecycle script `missing-script` failed with error:', - 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', - ' in workspace: noscripts@1.0.0', - ' at location: 
{CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/noscripts', - ], 'should log error msgs for each workspace script') + t.strictSame( + LOG.map(cleanOutput), + [ + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: a@1.0.0', + ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/a', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: b@2.0.0', + ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/b', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: c@1.0.0', + ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/c', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: d@1.0.0', + ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/d', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: e', + ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/e', + 'Lifecycle script `missing-script` failed with error:', + 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: noscripts@1.0.0', + /* eslint-disable-next-line max-len */ + ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/noscripts', + ], + 'should log error msgs for each workspace script' + ) }) t.test('missing scripts in some workspaces', async t => { const LOG = [] - npmlog.error = (err) => { + npmlog.error = err => { LOG.push(String(err)) } await runScript.execWorkspaces(['test'], ['a', 'b', 'c', 'd']) t.match(RUN_SCRIPTS, []) - t.strictSame(LOG.map(cleanOutput), [ - 'Lifecycle script `test` failed with error:', - 'Error: Missing script: "test"\n\nTo see a list of scripts, run:\n npm run', - ' in workspace: a@1.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/a', - 'Lifecycle script `test` failed with error:', - 'Error: Missing script: "test"\n\nTo see a list of scripts, run:\n npm run', - ' in workspace: b@2.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/b', - ], 'should log error msgs for each workspace script') + t.strictSame( + LOG.map(cleanOutput), + [ + 'Lifecycle script `test` failed with error:', + 'Error: Missing script: "test"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: a@1.0.0', + ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/a', + 'Lifecycle script `test` failed with error:', + 'Error: Missing script: "test"\n\nTo see a list of scripts, run:\n npm run', + ' in workspace: b@2.0.0', + ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/b', + ], + 'should log error msgs for each workspace script' + ) }) t.test('no workspaces when filtering by user args', async t => { @@ -866,8 +868,9 @@ t.test('workspaces', t => { t.test('failed workspace run with succeeded runs', async t => { const RunScript = t.mock('../../../lib/commands/run-script.js', { '@npmcli/run-script': 
async opts => { - if (opts.pkg.name === 'a') + if (opts.pkg.name === 'a') { throw new Error('ERR') + } RUN_SCRIPTS.push(opts) }, diff --git a/test/lib/commands/search.js b/test/lib/commands/search.js index a58d5afb9d994..c8dbc1b3b79b2 100644 --- a/test/lib/commands/search.js +++ b/test/lib/commands/search.js @@ -69,8 +69,9 @@ t.test('search ', async t => { }) const search = new Search(npm) - for (const i of libnpmsearchResultFixture) + for (const i of libnpmsearchResultFixture) { src.write(i) + } src.end() @@ -96,8 +97,9 @@ t.test('search --json', async t => { }) const search = new Search(npm) - for (const i of libnpmsearchResultFixture) + for (const i of libnpmsearchResultFixture) { src.write(i) + } src.end() await search.exec(['libnpm']) diff --git a/test/lib/commands/set-script.js b/test/lib/commands/set-script.js index 0684ed3a240d4..592a2431c2e3e 100644 --- a/test/lib/commands/set-script.js +++ b/test/lib/commands/set-script.js @@ -24,14 +24,14 @@ const setScript = new SetScript(npm) t.test('completion', t => { t.test('already have a script name', async t => { npm.localPrefix = t.testdir({}) - const res = await setScript.completion({conf: {argv: {remain: ['npm', 'run', 'x']}}}) + const res = await setScript.completion({ conf: { argv: { remain: ['npm', 'run', 'x'] } } }) t.equal(res, undefined) t.end() }) t.test('no package.json', async t => { npm.localPrefix = t.testdir({}) - const res = await setScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + const res = await setScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) t.strictSame(res, []) t.end() }) @@ -40,7 +40,7 @@ t.test('completion', t => { npm.localPrefix = t.testdir({ 'package.json': JSON.stringify({}), }) - const res = await setScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + const res = await setScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) t.strictSame(res, []) t.end() }) @@ -51,7 +51,7 @@ t.test('completion', t => { scripts: { hello: 'echo hello', world: 'echo world' }, }), }) - const res = await setScript.completion({conf: {argv: {remain: ['npm', 'run']}}}) + const res = await setScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) t.strictSame(res, ['hello', 'world']) t.end() }) @@ -116,7 +116,7 @@ t.test('creates scripts object', async t => { await setScript.exec(['arg1', 'arg2']) const contents = fs.readFileSync(resolve(npm.localPrefix, 'package.json')) - t.ok(parseJSON(contents), {scripts: {arg1: 'arg2'}}) + t.ok(parseJSON(contents), { scripts: { arg1: 'arg2' } }) }) t.test('warns when overwriting', async t => { diff --git a/test/lib/commands/set.js b/test/lib/commands/set.js index f7d2841ea0156..a57ea1a5401dd 100644 --- a/test/lib/commands/set.js +++ b/test/lib/commands/set.js @@ -8,21 +8,25 @@ t.skip('npm set', async t => { await npm.load() t.test('no args', async t => { - t.rejects( - npm.exec('set', []), - /Usage:/, - 'prints usage' - ) + t.rejects(npm.exec('set', []), /Usage:/, 'prints usage') }) t.test('test-config-item', async t => { npm.localPrefix = t.testdir({}) - t.not(npm.config.get('test-config-item', 'project'), 'test config value', 'config is not already new value') + t.not( + npm.config.get('test-config-item', 'project'), + 'test config value', + 'config is not already new value' + ) // This will write to ~/.npmrc! 
// Don't unskip until we can write to project level await npm.exec('set', ['test-config-item=test config value']) t.equal(joinedOutput(), '', 'outputs nothing') - t.equal(npm.config.get('test-config-item', 'project'), 'test config value', 'config is set to new value') + t.equal( + npm.config.get('test-config-item', 'project'), + 'test config value', + 'config is set to new value' + ) }) }) @@ -31,8 +35,9 @@ t.skip('npm set', async t => { let configArgs = null const npm = { exec: async (cmd, args) => { - if (cmd === 'config') + if (cmd === 'config') { configArgs = args + } }, } @@ -40,10 +45,7 @@ const Set = t.mock('../../../lib/commands/set.js') const set = new Set(npm) t.test('npm set - no args', async t => { - await t.rejects( - set.exec([]), - set.usage - ) + await t.rejects(set.exec([]), set.usage) }) t.test('npm set', async t => { diff --git a/test/lib/commands/shrinkwrap.js b/test/lib/commands/shrinkwrap.js index 112aa0a28a29b..efe7e538051f7 100644 --- a/test/lib/commands/shrinkwrap.js +++ b/test/lib/commands/shrinkwrap.js @@ -7,42 +7,40 @@ const { real: mockNpm } = require('../../fixtures/mock-npm') // stringifying to remove escaped values like \\" // This also doesn't reorder the keys of the object // like tap does by default which is nice in this case -t.formatSnapshot = (obj) => JSON.stringify(obj, (k, v) => { - try { - return JSON.parse(v) - } catch (_) {} - return v -}, 2) +t.formatSnapshot = obj => + JSON.stringify( + obj, + (k, v) => { + try { + return JSON.parse(v) + } catch (_) {} + return v + }, + 2 + ) // Run shrinkwrap against a specified testdir with config items // and make some assertions that should always be true. Sets // the results on t.context for use in child tests -const shrinkwrap = async ( - t, - testdir = {}, - config = {}, - mocks = {} -) => { +const shrinkwrap = async (t, testdir = {}, config = {}, mocks = {}) => { const { Npm, logs } = mockNpm(t, mocks) const npm = new Npm() await npm.load() npm.localPrefix = t.testdir(testdir) - if (config.lockfileVersion) + if (config.lockfileVersion) { npm.config.set('lockfile-version', config.lockfileVersion) - if (config.global) + } + if (config.global) { npm.config.set('global', config.global) + } await npm.exec('shrinkwrap', []) const newFile = resolve(npm.localPrefix, 'npm-shrinkwrap.json') const oldFile = resolve(npm.localPrefix, 'package-lock.json') - const notices = logs - .filter(([title]) => title === 'notice') - .map(([,, msg]) => msg) - const warnings = logs - .filter(([title]) => title === 'warn') - .map(([,, msg]) => msg) + const notices = logs.filter(([title]) => title === 'notice').map(([, , msg]) => msg) + const warnings = logs.filter(([title]) => title === 'warn').map(([, , msg]) => msg) t.notOk(fs.existsSync(oldFile), 'package-lock is always deleted') t.same(warnings, [], 'no warnings') @@ -76,29 +74,29 @@ const shrinkwrapMatrix = async (t, file, assertions) => { existingDir = { node_modules: { '.package-lock.json': existing } } } - await t.test('ancient', async (t) => { + await t.test('ancient', async t => { await shrinkwrap(t, ancientDir) t.match(t.context, assertions.ancient) t.matchSnapshot(t.context) }) - await t.test('ancient upgrade', async (t) => { + await t.test('ancient upgrade', async t => { await shrinkwrap(t, ancientDir, upgrade) t.match(t.context, assertions.ancientUpgrade) t.matchSnapshot(t.context) }) if (existingDir) { - await t.test('existing', async (t) => { + await t.test('existing', async t => { await shrinkwrap(t, existingDir) t.match(t.context, assertions.existing) 
t.matchSnapshot(t.context) }) - await t.test('existing upgrade', async (t) => { + await t.test('existing upgrade', async t => { await shrinkwrap(t, existingDir, upgrade) t.match(t.context, assertions.existingUpgrade) t.matchSnapshot(t.context) }) - await t.test('existing downgrade', async (t) => { + await t.test('existing downgrade', async t => { await shrinkwrap(t, existingDir, downgrade) t.match(t.context, assertions.existingDowngrade) t.matchSnapshot(t.context) @@ -107,101 +105,116 @@ const shrinkwrapMatrix = async (t, file, assertions) => { } const NOTICES = { - CREATED: (v = '') => - [`created a lockfile as npm-shrinkwrap.json${v && ` with version ${v}`}`], - RENAMED: (v = '') => - [`package-lock.json has been renamed to npm-shrinkwrap.json${v && ` and updated to version ${v}`}`], - UPDATED: (v = '') => - [`npm-shrinkwrap.json updated to version ${v}`], - SAME: () => - [`npm-shrinkwrap.json up to date`], + CREATED: (v = '') => [`created a lockfile as npm-shrinkwrap.json${v && ` with version ${v}`}`], + RENAMED: (v = '') => [ + `package-lock.json has been renamed to npm-shrinkwrap.json${ + v && ` and updated to version ${v}` + }`, + ], + UPDATED: (v = '') => [`npm-shrinkwrap.json updated to version ${v}`], + SAME: () => [`npm-shrinkwrap.json up to date`], } -t.test('with nothing', t => shrinkwrapMatrix(t, null, { - ancient: { - shrinkwrap: { lockfileVersion: 2 }, - logs: NOTICES.CREATED(2), - }, - ancientUpgrade: { - shrinkwrap: { lockfileVersion: 3 }, - logs: NOTICES.CREATED(3), - }, -})) - -t.test('with package-lock.json', t => shrinkwrapMatrix(t, 'package-lock', { - ancient: { - shrinkwrap: { lockfileVersion: 2 }, - logs: NOTICES.RENAMED(2), - }, - ancientUpgrade: { - shrinkwrap: { lockfileVersion: 3 }, - logs: NOTICES.RENAMED(3), - }, - existing: { - shrinkwrap: { lockfileVersion: 2 }, - logs: NOTICES.RENAMED(), - }, - existingUpgrade: { - shrinkwrap: { lockfileVersion: 3 }, - logs: NOTICES.RENAMED(3), - }, - existingDowngrade: { - shrinkwrap: { lockfileVersion: 1 }, - logs: NOTICES.RENAMED(1), - }, -})) - -t.test('with npm-shrinkwrap.json', t => shrinkwrapMatrix(t, 'npm-shrinkwrap', { - ancient: { - shrinkwrap: { lockfileVersion: 2 }, - logs: NOTICES.UPDATED(2), - }, - ancientUpgrade: { - shrinkwrap: { lockfileVersion: 3 }, - logs: NOTICES.UPDATED(3), - }, - existing: { - shrinkwrap: { lockfileVersion: 2 }, - logs: NOTICES.SAME(), - }, - existingUpgrade: { - shrinkwrap: { lockfileVersion: 3 }, - logs: NOTICES.UPDATED(3), - }, - existingDowngrade: { - shrinkwrap: { lockfileVersion: 1 }, - logs: NOTICES.UPDATED(1), - }, -})) - -t.test('with hidden lockfile', t => shrinkwrapMatrix(t, 'hidden-lockfile', { - ancient: { - shrinkwrap: { lockfileVersion: 1 }, - logs: NOTICES.CREATED(), - }, - ancientUpgrade: { - shrinkwrap: { lockfileVersion: 3 }, - logs: NOTICES.CREATED(), - }, - existing: { - shrinkwrap: { lockfileVersion: 2 }, - logs: NOTICES.CREATED(), - }, - existingUpgrade: { - shrinkwrap: { lockfileVersion: 3 }, - logs: NOTICES.CREATED(3), - }, - existingDowngrade: { - shrinkwrap: { lockfileVersion: 1 }, - logs: NOTICES.CREATED(1), - }, -})) +t.test('with nothing', t => + shrinkwrapMatrix(t, null, { + ancient: { + shrinkwrap: { lockfileVersion: 2 }, + logs: NOTICES.CREATED(2), + }, + ancientUpgrade: { + shrinkwrap: { lockfileVersion: 3 }, + logs: NOTICES.CREATED(3), + }, + }) +) + +t.test('with package-lock.json', t => + shrinkwrapMatrix(t, 'package-lock', { + ancient: { + shrinkwrap: { lockfileVersion: 2 }, + logs: NOTICES.RENAMED(2), + }, + ancientUpgrade: { + shrinkwrap: { 
lockfileVersion: 3 }, + logs: NOTICES.RENAMED(3), + }, + existing: { + shrinkwrap: { lockfileVersion: 2 }, + logs: NOTICES.RENAMED(), + }, + existingUpgrade: { + shrinkwrap: { lockfileVersion: 3 }, + logs: NOTICES.RENAMED(3), + }, + existingDowngrade: { + shrinkwrap: { lockfileVersion: 1 }, + logs: NOTICES.RENAMED(1), + }, + }) +) + +t.test('with npm-shrinkwrap.json', t => + shrinkwrapMatrix(t, 'npm-shrinkwrap', { + ancient: { + shrinkwrap: { lockfileVersion: 2 }, + logs: NOTICES.UPDATED(2), + }, + ancientUpgrade: { + shrinkwrap: { lockfileVersion: 3 }, + logs: NOTICES.UPDATED(3), + }, + existing: { + shrinkwrap: { lockfileVersion: 2 }, + logs: NOTICES.SAME(), + }, + existingUpgrade: { + shrinkwrap: { lockfileVersion: 3 }, + logs: NOTICES.UPDATED(3), + }, + existingDowngrade: { + shrinkwrap: { lockfileVersion: 1 }, + logs: NOTICES.UPDATED(1), + }, + }) +) + +t.test('with hidden lockfile', t => + shrinkwrapMatrix(t, 'hidden-lockfile', { + ancient: { + shrinkwrap: { lockfileVersion: 1 }, + logs: NOTICES.CREATED(), + }, + ancientUpgrade: { + shrinkwrap: { lockfileVersion: 3 }, + logs: NOTICES.CREATED(), + }, + existing: { + shrinkwrap: { lockfileVersion: 2 }, + logs: NOTICES.CREATED(), + }, + existingUpgrade: { + shrinkwrap: { lockfileVersion: 3 }, + logs: NOTICES.CREATED(3), + }, + existingDowngrade: { + shrinkwrap: { lockfileVersion: 1 }, + logs: NOTICES.CREATED(1), + }, + }) +) t.test('throws in global mode', async t => { - t.rejects(shrinkwrap(t, {}, { - global: true, - }), { - message: '`npm shrinkwrap` does not work for global packages', - code: 'ESHRINKWRAPGLOBAL', - }) + t.rejects( + shrinkwrap( + t, + {}, + { + global: true, + } + ), + { + message: '`npm shrinkwrap` does not work for global packages', + code: 'ESHRINKWRAPGLOBAL', + } + ) }) diff --git a/test/lib/commands/team.js b/test/lib/commands/team.js index c374d15d80c1f..96260835fa2b6 100644 --- a/test/lib/commands/team.js +++ b/test/lib/commands/team.js @@ -388,8 +388,8 @@ t.test('completion', t => { }) t.test('npm team unknown subcommand autocomplete', async t => { - t.rejects(completion({conf: {argv: {remain: ['npm', 'team', 'missing-subcommand'] } } }), - {message: 'missing-subcommand not recognized'}, 'should throw a a not recognized error' + t.rejects(completion({ conf: { argv: { remain: ['npm', 'team', 'missing-subcommand'] } } }), + { message: 'missing-subcommand not recognized' }, 'should throw a a not recognized error' ) t.end() diff --git a/test/lib/commands/token.js b/test/lib/commands/token.js index c598c366cf374..6d0dc9d7e0874 100644 --- a/test/lib/commands/token.js +++ b/test/lib/commands/token.js @@ -21,16 +21,17 @@ const Token = t.mock('../../../lib/commands/token.js', { const token = new Token(npm) -const tokenWithMocks = (mockRequests) => { +const tokenWithMocks = mockRequests => { for (const mod in mockRequests) { - if (mod === 'npm') + if (mod === 'npm') { mockRequests.npm = { ...npm, ...mockRequests.npm } - else { - if (typeof mockRequests[mod] === 'function') + } else { + if (typeof mockRequests[mod] === 'function') { mocks[mod] = mockRequests[mod] - else { - for (const key in mockRequests[mod]) + } else { + for (const key in mockRequests[mod]) { mocks[mod][key] = mockRequests[mod][key] + } } } } @@ -38,11 +39,12 @@ const tokenWithMocks = (mockRequests) => { const reset = () => { for (const mod in mockRequests) { if (mod !== 'npm') { - if (typeof mockRequests[mod] === 'function') + if (typeof mockRequests[mod] === 'function') { mocks[mod] = () => {} - else { - for (const key in mockRequests[mod]) + } else 
{ + for (const key in mockRequests[mod]) { delete mocks[mod][key] + } } } } @@ -52,7 +54,7 @@ const tokenWithMocks = (mockRequests) => { return [token, reset] } -t.test('completion', (t) => { +t.test('completion', t => { t.plan(5) const testComp = (argv, expect) => { @@ -64,10 +66,9 @@ t.test('completion', (t) => { testComp(['npm', 'token', 'revoke'], []) testComp(['npm', 'token', 'create'], []) - t.rejects( - token.completion({ conf: { argv: { remain: ['npm', 'token', 'foobar'] } } }), - { message: 'foobar not recognize' } - ) + t.rejects(token.completion({ conf: { argv: { remain: ['npm', 'token', 'foobar'] } } }), { + message: 'foobar not recognize', + }) }) t.test('token foobar', async t => { @@ -76,7 +77,7 @@ t.test('token foobar', async t => { const [, reset] = tokenWithMocks({ log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'shows a gauge') }, }, @@ -85,51 +86,51 @@ t.test('token foobar', async t => { t.teardown(reset) - await t.rejects( - token.exec(['foobar']), - /foobar is not a recognized subcommand/ - ) + await t.rejects(token.exec(['foobar']), /foobar is not a recognized subcommand/) }) t.test('token list', async t => { t.plan(14) const now = new Date().toISOString() - const tokens = [{ - key: 'abcd1234abcd1234', - token: 'efgh5678efgh5678', - cidr_whitelist: null, - readonly: false, - created: now, - updated: now, - }, { - key: 'abcd1256', - token: 'hgfe8765', - cidr_whitelist: ['192.168.1.1/32'], - readonly: true, - created: now, - updated: now, - }] + const tokens = [ + { + key: 'abcd1234abcd1234', + token: 'efgh5678efgh5678', + cidr_whitelist: null, + readonly: false, + created: now, + updated: now, + }, + { + key: 'abcd1256', + token: 'hgfe8765', + cidr_whitelist: ['192.168.1.1/32'], + readonly: true, + created: now, + updated: now, + }, + ] const [token, reset] = tokenWithMocks({ npm: { flatOptions: { registry: 'https://registry.npmjs.org', otp: '123456' }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, }, }, profile: { - listTokens: (conf) => { + listTokens: conf => { t.same(conf.auth, { token: 'thisisnotarealtoken', otp: '123456' }) return tokens }, }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token') }, }, @@ -138,7 +139,7 @@ t.test('token list', async t => { t.equal(msg, 'getting list') }, }, - output: (spec) => { + output: spec => { const lines = spec.split(/\r?\n/) t.match(lines[3], ' abcd123 ', 'includes the trimmed key') t.match(lines[3], ' efgh56… ', 'includes the trimmed token') @@ -161,34 +162,40 @@ t.test('token list json output', async t => { t.plan(7) const now = new Date().toISOString() - const tokens = [{ - key: 'abcd1234abcd1234', - token: 'efgh5678efgh5678', - cidr_whitelist: null, - readonly: false, - created: now, - updated: now, - }] + const tokens = [ + { + key: 'abcd1234abcd1234', + token: 'efgh5678efgh5678', + cidr_whitelist: null, + readonly: false, + created: now, + updated: now, + }, + ] const [token, reset] = tokenWithMocks({ npm: { flatOptions: { registry: 'https://registry.npmjs.org', json: true }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { username: 'foo', password: 'bar' } }, }, }, profile: { - listTokens: (conf) => { - t.same(conf.auth, { basic: { username: 'foo', password: 'bar' } }, 'passes the correct auth') + listTokens: conf 
=> { + t.same( + conf.auth, + { basic: { username: 'foo', password: 'bar' } }, + 'passes the correct auth' + ) return tokens }, }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token') }, }, @@ -197,7 +204,7 @@ t.test('token list json output', async t => { t.equal(msg, 'getting list') }, }, - output: (spec) => { + output: spec => { t.type(spec, 'string', 'is called with a string') const parsed = JSON.parse(spec) t.match(parsed, tokens, 'prints the json parsed tokens') @@ -213,21 +220,24 @@ t.test('token list parseable output', async t => { t.plan(11) const now = new Date().toISOString() - const tokens = [{ - key: 'abcd1234abcd1234', - token: 'efgh5678efgh5678', - cidr_whitelist: null, - readonly: false, - created: now, - updated: now, - }, { - key: 'efgh5678ijkl9101', - token: 'hgfe8765', - cidr_whitelist: ['192.168.1.1/32'], - readonly: true, - created: now, - updated: now, - }] + const tokens = [ + { + key: 'abcd1234abcd1234', + token: 'efgh5678efgh5678', + cidr_whitelist: null, + readonly: false, + created: now, + updated: now, + }, + { + key: 'efgh5678ijkl9101', + token: 'hgfe8765', + cidr_whitelist: ['192.168.1.1/32'], + readonly: true, + created: now, + updated: now, + }, + ] let callCount = 0 @@ -235,21 +245,25 @@ t.test('token list parseable output', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org', parseable: true }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { auth: Buffer.from('foo:bar').toString('base64') } }, }, }, profile: { - listTokens: (conf) => { - t.same(conf.auth, { basic: { username: 'foo', password: 'bar' } }, 'passes the correct auth') + listTokens: conf => { + t.same( + conf.auth, + { basic: { username: 'foo', password: 'bar' } }, + 'passes the correct auth' + ) return tokens }, }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token') }, }, @@ -258,15 +272,34 @@ t.test('token list parseable output', async t => { t.equal(msg, 'getting list') }, }, - output: (spec) => { + output: spec => { ++callCount t.type(spec, 'string', 'is called with a string') - if (callCount === 1) - t.equal(spec, ['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'), 'prints header') - else if (callCount === 2) - t.equal(spec, [tokens[0].key, tokens[0].token, tokens[0].created, tokens[0].readonly, ''].join('\t'), 'prints token info') - else - t.equal(spec, [tokens[1].key, tokens[1].token, tokens[1].created, tokens[1].readonly, tokens[1].cidr_whitelist.join(',')].join('\t'), 'prints token info') + if (callCount === 1) { + t.equal( + spec, + ['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'), + 'prints header' + ) + } else if (callCount === 2) { + t.equal( + spec, + [tokens[0].key, tokens[0].token, tokens[0].created, tokens[0].readonly, ''].join('\t'), + 'prints token info' + ) + } else { + t.equal( + spec, + [ + tokens[1].key, + tokens[1].token, + tokens[1].created, + tokens[1].readonly, + tokens[1].cidr_whitelist.join(','), + ].join('\t'), + 'prints token info' + ) + } }, }) @@ -282,7 +315,7 @@ t.test('token revoke', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org' }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return {} }, @@ -290,7 +323,7 @@ t.test('token revoke', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 
'token', 'starts a gauge') }, }, @@ -306,17 +339,15 @@ t.test('token revoke', async t => { }, }, profile: { - listTokens: (conf) => { + listTokens: conf => { t.same(conf.auth, {}, 'passes the correct empty auth') - return Promise.resolve([ - { key: 'abcd1234' }, - ]) + return Promise.resolve([{ key: 'abcd1234' }]) }, - removeToken: (key) => { + removeToken: key => { t.equal(key, 'abcd1234', 'deletes the correct token') }, }, - output: (spec) => { + output: spec => { t.equal(spec, 'Removed 1 token') }, }) @@ -333,7 +364,7 @@ t.test('token revoke multiple tokens', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org' }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -341,7 +372,7 @@ t.test('token revoke multiple tokens', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -357,16 +388,13 @@ t.test('token revoke multiple tokens', async t => { }, }, profile: { - listTokens: () => Promise.resolve([ - { key: 'abcd1234' }, - { key: 'efgh5678' }, - ]), - removeToken: (key) => { + listTokens: () => Promise.resolve([{ key: 'abcd1234' }, { key: 'efgh5678' }]), + removeToken: key => { // this will run twice t.ok(['abcd1234', 'efgh5678'].includes(key), 'deletes the correct token') }, }, - output: (spec) => { + output: spec => { t.equal(spec, 'Removed 2 tokens') }, }) @@ -383,7 +411,7 @@ t.test('token revoke json output', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org', json: true }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -391,7 +419,7 @@ t.test('token revoke json output', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -407,14 +435,12 @@ t.test('token revoke json output', async t => { }, }, profile: { - listTokens: () => Promise.resolve([ - { key: 'abcd1234' }, - ]), - removeToken: (key) => { + listTokens: () => Promise.resolve([{ key: 'abcd1234' }]), + removeToken: key => { t.equal(key, 'abcd1234', 'deletes the correct token') }, }, - output: (spec) => { + output: spec => { t.type(spec, 'string', 'is given a string') const parsed = JSON.parse(spec) t.same(parsed, ['abcd1234'], 'logs the token as json') @@ -433,7 +459,7 @@ t.test('token revoke parseable output', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org', parseable: true }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -441,7 +467,7 @@ t.test('token revoke parseable output', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -457,14 +483,12 @@ t.test('token revoke parseable output', async t => { }, }, profile: { - listTokens: () => Promise.resolve([ - { key: 'abcd1234' }, - ]), - removeToken: (key) => { + listTokens: () => Promise.resolve([{ key: 'abcd1234' }]), + removeToken: key => { t.equal(key, 'abcd1234', 'deletes the correct token') }, }, - output: (spec) => { + output: spec => { t.equal(spec, 'abcd1234', 'logs the token as a string') }, }) @@ -481,7 +505,7 @@ t.test('token revoke by token', async t => { npm: { flatOptions: 
{ registry: 'https://registry.npmjs.org' }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -489,7 +513,7 @@ t.test('token revoke by token', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -505,14 +529,12 @@ t.test('token revoke by token', async t => { }, }, profile: { - listTokens: () => Promise.resolve([ - { key: 'abcd1234', token: 'efgh5678' }, - ]), - removeToken: (key) => { + listTokens: () => Promise.resolve([{ key: 'abcd1234', token: 'efgh5678' }]), + removeToken: key => { t.equal(key, 'efgh5678', 'passes through user input') }, }, - output: (spec) => { + output: spec => { t.equal(spec, 'Removed 1 token') }, }) @@ -528,7 +550,7 @@ t.test('token revoke requires an id', async t => { const [token, reset] = tokenWithMocks({ log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token') }, }, @@ -537,10 +559,7 @@ t.test('token revoke requires an id', async t => { t.teardown(reset) - await t.rejects( - token.exec(['rm']), - /`` argument is required/ - ) + await t.rejects(token.exec(['rm']), /`` argument is required/) }) t.test('token revoke ambiguous id errors', async t => { @@ -550,7 +569,7 @@ t.test('token revoke ambiguous id errors', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org' }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -558,7 +577,7 @@ t.test('token revoke ambiguous id errors', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -574,19 +593,13 @@ t.test('token revoke ambiguous id errors', async t => { }, }, profile: { - listTokens: () => Promise.resolve([ - { key: 'abcd1234' }, - { key: 'abcd5678' }, - ]), + listTokens: () => Promise.resolve([{ key: 'abcd1234' }, { key: 'abcd5678' }]), }, }) t.teardown(reset) - await t.rejects( - token.exec(['rm', 'abcd']), - /Token ID "abcd" was ambiguous/ - ) + await t.rejects(token.exec(['rm', 'abcd']), /Token ID "abcd" was ambiguous/) }) t.test('token revoke unknown id errors', async t => { @@ -596,7 +609,7 @@ t.test('token revoke unknown id errors', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org' }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -604,7 +617,7 @@ t.test('token revoke unknown id errors', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -620,18 +633,13 @@ t.test('token revoke unknown id errors', async t => { }, }, profile: { - listTokens: () => Promise.resolve([ - { key: 'abcd1234' }, - ]), + listTokens: () => Promise.resolve([{ key: 'abcd1234' }]), }, }) t.teardown(reset) - await t.rejects( - token.exec(['rm', 'efgh']), - /Unknown token id or value "efgh"./ - ) + await t.rejects(token.exec(['rm', 'efgh']), /Unknown token id or value "efgh"./) }) t.test('token create', async t => { @@ -642,9 +650,12 @@ t.test('token create', async t => { const [token, reset] = tokenWithMocks({ npm: { - flatOptions: { registry: 'https://registry.npmjs.org', cidr: ['10.0.0.0/8', '192.168.1.0/24'] }, + flatOptions: { + registry: 
'https://registry.npmjs.org', + cidr: ['10.0.0.0/8', '192.168.1.0/24'], + }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -652,7 +663,7 @@ t.test('token create', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -679,7 +690,7 @@ t.test('token create', async t => { } }, }, - output: (spec) => { + output: spec => { const lines = spec.split(/\r?\n/) t.match(lines[1], 'token') t.match(lines[1], 'efgh5678', 'prints the whole token') @@ -706,7 +717,7 @@ t.test('token create json output', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org', json: true }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -714,7 +725,7 @@ t.test('token create json output', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -741,10 +752,14 @@ t.test('token create json output', async t => { } }, }, - output: (spec) => { + output: spec => { t.type(spec, 'string', 'outputs a string') const parsed = JSON.parse(spec) - t.same(parsed, { token: 'efgh5678', created: now, readonly: false, cidr_whitelist: [] }, 'outputs the correct object') + t.same( + parsed, + { token: 'efgh5678', created: now, readonly: false, cidr_whitelist: [] }, + 'outputs the correct object' + ) }, }) @@ -764,7 +779,7 @@ t.test('token create parseable output', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org', parseable: true }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -772,7 +787,7 @@ t.test('token create parseable output', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -799,16 +814,17 @@ t.test('token create parseable output', async t => { } }, }, - output: (spec) => { + output: spec => { ++callCount - if (callCount === 1) + if (callCount === 1) { t.match(spec, 'token\tefgh5678', 'prints the token') - else if (callCount === 2) + } else if (callCount === 2) { t.match(spec, `created\t${now}`, 'prints the created timestamp') - else if (callCount === 3) + } else if (callCount === 3) { t.match(spec, 'readonly\tfalse', 'prints the readonly flag') - else + } else { t.match(spec, 'cidr_whitelist\t', 'prints the cidr whitelist') + } }, }) @@ -826,7 +842,7 @@ t.test('token create ipv6 cidr', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org', cidr: '::1/128' }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -834,7 +850,7 @@ t.test('token create ipv6 cidr', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, @@ -848,7 +864,10 @@ t.test('token create ipv6 cidr', async t => { await t.rejects( token.exec(['create']), - { code: 'EINVALIDCIDR', message: /CIDR whitelist can only contain IPv4 addresses, ::1\/128 is IPv6/ }, + { + code: 'EINVALIDCIDR', + message: /CIDR whitelist can only contain IPv4 addresses, ::1\/128 is IPv6/, + }, 'returns 
correct error' ) }) @@ -862,7 +881,7 @@ t.test('token create invalid cidr', async t => { npm: { flatOptions: { registry: 'https://registry.npmjs.org', cidr: 'apple/cider' }, config: { - getCredentialsByURI: (uri) => { + getCredentialsByURI: uri => { t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') return { token: 'thisisnotarealtoken' } }, @@ -870,7 +889,7 @@ t.test('token create invalid cidr', async t => { }, log: { gauge: { - show: (name) => { + show: name => { t.equal(name, 'token', 'starts a gauge') }, }, diff --git a/test/lib/commands/unpublish.js b/test/lib/commands/unpublish.js index 7e6b5755c76a8..6ac2067531c80 100644 --- a/test/lib/commands/unpublish.js +++ b/test/lib/commands/unpublish.js @@ -150,10 +150,11 @@ t.test('unpublish @version', async t => { npm.log = { silly (title, key, value) { t.equal(title, 'unpublish', 'should silly log args') - if (key === 'spec') + if (key === 'spec') { t.match(value, { name: 'pkg', rawSpec: '1.0.0' }) - else + } else { t.equal(value, 'pkg@1.0.0', 'should log originally passed arg') + } }, } @@ -337,7 +338,7 @@ t.test('completion', async t => { const testComp = async (t, { unpublish, argv, partialWord, expect, title }) => { const res = await unpublish.completion( - {conf: {argv: {remain: argv}}, partialWord} + { conf: { argv: { remain: argv } }, partialWord } ) t.strictSame(res, expect, title || argv.join(' ')) } diff --git a/test/lib/commands/version.js b/test/lib/commands/version.js index 3b3f76f759be8..6603b581061a6 100644 --- a/test/lib/commands/version.js +++ b/test/lib/commands/version.js @@ -14,8 +14,9 @@ const npm = mockNpm({ prefix: '', version: '1.0.0', output: (...msg) => { - for (const m of msg) + for (const m of msg) { result.push(m) + } }, }) const mocks = { @@ -47,11 +48,13 @@ t.test('no args', async t => { t.same( result, - [{ - 'test-version-no-args': '3.2.1', - node: '1.0.0', - npm: '1.0.0', - }], + [ + { + 'test-version-no-args': '3.2.1', + node: '1.0.0', + npm: '1.0.0', + }, + ], 'should output expected values for various versions in npm' ) }) @@ -70,16 +73,10 @@ t.test('completion', async t => { t.strictSame(res, expect, argv.join(' ')) } - await testComp(['npm', 'version'], [ - 'major', - 'minor', - 'patch', - 'premajor', - 'preminor', - 'prepatch', - 'prerelease', - 'from-git', - ]) + await testComp( + ['npm', 'version'], + ['major', 'minor', 'patch', 'premajor', 'preminor', 'prepatch', 'prerelease', 'from-git'] + ) await testComp(['npm', 'version', 'major'], []) t.end() @@ -93,10 +90,12 @@ t.test('failure reading package.json', async t => { t.same( result, - [{ - npm: '1.0.0', - node: '1.0.0', - }], + [ + { + npm: '1.0.0', + node: '1.0.0', + }, + ], 'should not have package name on returning object' ) }) @@ -108,11 +107,7 @@ t.test('--json option', async t => { Object.defineProperty(process, 'versions', { value: {} }) await version.exec([]) - t.same( - result, - ['{\n "npm": "1.0.0"\n}'], - 'should return json stringified result' - ) + t.same(result, ['{\n "npm": "1.0.0"\n}'], 'should return json stringified result') }) t.test('with one arg', async t => { @@ -144,11 +139,15 @@ t.test('workspaces', async t => { t.test('no args, all workspaces', async t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, + null, + 2 + ), 'workspace-a': { 
'package.json': JSON.stringify({ name: 'workspace-a', @@ -166,21 +165,31 @@ t.test('workspaces', async t => { npm.prefix = testDir const version = new Version(npm) await version.execWorkspaces([], []) - t.same(result, [{ - 'workspaces-test': '1.0.0', - 'workspace-a': '1.0.0', - 'workspace-b': '1.0.0', - npm: '1.0.0', - }], 'outputs includes main package and workspace versions') + t.same( + result, + [ + { + 'workspaces-test': '1.0.0', + 'workspace-a': '1.0.0', + 'workspace-b': '1.0.0', + npm: '1.0.0', + }, + ], + 'outputs includes main package and workspace versions' + ) }) t.test('no args, single workspaces', async t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, + null, + 2 + ), 'workspace-a': { 'package.json': JSON.stringify({ name: 'workspace-a', @@ -198,20 +207,30 @@ t.test('workspaces', async t => { npm.prefix = testDir const version = new Version(npm) await version.execWorkspaces([], ['workspace-a']) - t.same(result, [{ - 'workspaces-test': '1.0.0', - 'workspace-a': '1.0.0', - npm: '1.0.0', - }], 'outputs includes main package and requested workspace versions') + t.same( + result, + [ + { + 'workspaces-test': '1.0.0', + 'workspace-a': '1.0.0', + npm: '1.0.0', + }, + ], + 'outputs includes main package and requested workspace versions' + ) }) t.test('no args, all workspaces, workspace with missing name or version', async t => { const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }, + null, + 2 + ), 'workspace-a': { 'package.json': JSON.stringify({ name: 'workspace-a', @@ -233,21 +252,31 @@ t.test('workspaces', async t => { npm.prefix = testDir const version = new Version(npm) await version.execWorkspaces([], []) - t.same(result, [{ - 'workspaces-test': '1.0.0', - 'workspace-a': '1.0.0', - npm: '1.0.0', - }], 'outputs includes main package and valid workspace versions') + t.same( + result, + [ + { + 'workspaces-test': '1.0.0', + 'workspace-a': '1.0.0', + npm: '1.0.0', + }, + ], + 'outputs includes main package and valid workspace versions' + ) }) t.test('with one arg, all workspaces', async t => { const libNpmVersionArgs = [] const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], - }, null, 2), + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, + null, + 2 + ), 'workspace-a': { 'package.json': JSON.stringify({ name: 'workspace-a', @@ -273,7 +302,11 @@ t.test('workspaces', async t => { const version = new Version(npm) await version.execWorkspaces(['major'], []) - t.same(result, ['workspace-a', 'v2.0.0', 'workspace-b', 'v2.0.0'], 'outputs the new version for only the workspaces prefixed by the tagVersionPrefix') + t.same( + result, + ['workspace-a', 'v2.0.0', 'workspace-b', 'v2.0.0'], + 'outputs the new version for only the workspaces prefixed by the tagVersionPrefix' + ) }) t.test('too many args', async t => { diff --git a/test/lib/commands/view.js 
b/test/lib/commands/view.js index 116930aff4ede..728787ec4aacc 100644 --- a/test/lib/commands/view.js +++ b/test/lib/commands/view.js @@ -21,11 +21,13 @@ const cleanLogs = () => { const yesterday = new Date(Date.now() - 1000 * 60 * 60 * 25) const packument = (nv, opts) => { - if (!opts.fullMetadata) + if (!opts.fullMetadata) { throw new Error('must fetch fullMetadata') + } - if (!opts.preferOnline) + if (!opts.preferOnline) { throw new Error('must fetch with preferOnline') + } const mocks = { red: { @@ -192,8 +194,9 @@ const packument = (nv, opts) => { license: {}, dependencies: (() => { const deps = {} - for (let i = 0; i < 25; i++) + for (let i = 0; i < 25; i++) { deps[i] = '1.0.0' + } return deps })(), @@ -255,8 +258,9 @@ const packument = (nv, opts) => { }, }, } - if (nv.type === 'git') + if (nv.type === 'git') { return mocks[nv.hosted.project] + } return mocks[nv.name] } @@ -528,7 +532,7 @@ t.test('throws when unpublished', async t => { const view = new View(npm) await t.rejects( view.exec(['red']), - { code: 'E404'} + { code: 'E404' } ) }) @@ -654,7 +658,7 @@ t.test('no registry completion', async t => { }, }) const view = new View(npm) - const res = await view.completion({conf: { argv: { remain: ['npm', 'view'] } } }) + const res = await view.completion({ conf: { argv: { remain: ['npm', 'view'] } } }) t.notOk(res, 'there is no package completion') t.end() }) diff --git a/test/lib/load-all-commands.js b/test/lib/load-all-commands.js index e9d61f9c1f69e..f813e50b220e1 100644 --- a/test/lib/load-all-commands.js +++ b/test/lib/load-all-commands.js @@ -20,12 +20,17 @@ t.test('load each command', async t => { for (const cmd of cmdList.sort((a, b) => a.localeCompare(b, 'en'))) { t.test(cmd, async t => { const impl = await npm.cmd(cmd) - if (impl.completion) + if (impl.completion) { t.type(impl.completion, 'function', 'completion, if present, is a function') + } t.type(impl.exec, 'function', 'implementation has an exec function') t.type(impl.execWorkspaces, 'function', 'implementation has an execWorkspaces function') t.equal(util.inspect(impl.exec), '[AsyncFunction: exec]', 'exec function is async') - t.equal(util.inspect(impl.execWorkspaces), '[AsyncFunction: execWorkspaces]', 'execWorkspaces function is async') + t.equal( + util.inspect(impl.execWorkspaces), + '[AsyncFunction: execWorkspaces]', + 'execWorkspaces function is async' + ) t.ok(impl.description, 'implementation has a description') t.ok(impl.name, 'implementation has a name') t.match(impl.usage, cmd, 'usage contains the command') diff --git a/test/lib/load-all.js b/test/lib/load-all.js index b6b2b6adc44f5..fb45331ba92aa 100644 --- a/test/lib/load-all.js +++ b/test/lib/load-all.js @@ -5,9 +5,9 @@ const { real: mockNpm } = require('../fixtures/mock-npm') const full = process.env.npm_lifecycle_event === 'check-coverage' -if (!full) +if (!full) { t.pass('nothing to do here, not checking for full coverage') -else { +} else { const { Npm } = mockNpm(t) const npm = new Npm() diff --git a/test/lib/npm.js b/test/lib/npm.js index dc9640c0629b1..1ccd26e375803 100644 --- a/test/lib/npm.js +++ b/test/lib/npm.js @@ -18,8 +18,9 @@ for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) { ['test', 'run-script'].some(i => i === event), 'should match "npm test" or "npm run test"' ) - } else + } else { t.match(process.env[env], /^(run-script|exec)$/) + } } delete process.env[env] } @@ -42,8 +43,9 @@ const bePosix = () => { const argv = [...process.argv] t.afterEach(() => { - for (const env of Object.keys(process.env).filter(e => 
/^npm_/.test(e))) + for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) { delete process.env[env] + } process.env.npm_config_cache = CACHE process.argv = argv Object.defineProperty(process, 'platform', { diff --git a/test/lib/utils/config/definitions.js b/test/lib/utils/config/definitions.js index 15b43715f45bd..f6813a8bc0bb5 100644 --- a/test/lib/utils/config/definitions.js +++ b/test/lib/utils/config/definitions.js @@ -19,8 +19,9 @@ const definitions = require(defpath) // Tie the definitions to a snapshot so that if they change we are forced to // update snapshots, which rebuilds the docs -for (const key of Object.keys(definitions)) +for (const key of Object.keys(definitions)) { t.matchSnapshot(definitions[key].describe(), `config description for ${key}`) +} const isWin = '../../../../lib/utils/is-windows.js' @@ -184,7 +185,7 @@ t.test('cache', t => { const flat = {} defsNix.cache.flatten('cache', { cache: '/some/cache/value' }, flat) - const {join} = require('path') + const { join } = require('path') t.equal(flat.cache, join('/some/cache/value', '_cacache')) t.equal(flat.npxCache, join('/some/cache/value', '_npx')) @@ -199,8 +200,8 @@ t.test('flatteners that populate flat.omit array', t => { // ignored if setting is not dev or development obj.also = 'ignored' definitions.also.flatten('also', obj, flat) - t.strictSame(obj, {also: 'ignored', omit: [], include: []}, 'nothing done') - t.strictSame(flat, {omit: []}, 'nothing done') + t.strictSame(obj, { also: 'ignored', omit: [], include: [] }, 'nothing done') + t.strictSame(flat, { omit: [] }, 'nothing done') obj.also = 'development' definitions.also.flatten('also', obj, flat) @@ -227,10 +228,10 @@ t.test('flatteners that populate flat.omit array', t => { const flat = {} const obj = { include: ['dev'] } definitions.include.flatten('include', obj, flat) - t.strictSame(flat, {omit: []}, 'not omitting anything') + t.strictSame(flat, { omit: [] }, 'not omitting anything') obj.omit = ['optional', 'dev'] definitions.include.flatten('include', obj, flat) - t.strictSame(flat, {omit: ['optional']}, 'only omitting optional') + t.strictSame(flat, { omit: ['optional'] }, 'only omitting optional') t.end() }) @@ -254,12 +255,12 @@ t.test('flatteners that populate flat.omit array', t => { obj.only = 'prod' definitions.only.flatten('only', obj, flat) - t.strictSame(flat, {omit: ['dev']}, 'omit dev when --only=prod') + t.strictSame(flat, { omit: ['dev'] }, 'omit dev when --only=prod') obj.include = ['dev'] flat.omit = [] definitions.only.flatten('only', obj, flat) - t.strictSame(flat, {omit: []}, 'do not omit when included') + t.strictSame(flat, { omit: [] }, 'do not omit when included') t.end() }) @@ -283,7 +284,7 @@ t.test('flatteners that populate flat.omit array', t => { optional: true, include: ['optional'], }, 'include optional when set') - t.strictSame(flat, {omit: []}, 'nothing to omit in flatOptions') + t.strictSame(flat, { omit: [] }, 'nothing to omit in flatOptions') delete obj.include obj.optional = false @@ -293,21 +294,21 @@ t.test('flatteners that populate flat.omit array', t => { optional: false, include: [], }, 'omit optional when set false') - t.strictSame(flat, {omit: ['optional']}, 'omit optional when set false') + t.strictSame(flat, { omit: ['optional'] }, 'omit optional when set false') t.end() }) t.test('production', t => { const flat = {} - const obj = {production: true} + const obj = { production: true } definitions.production.flatten('production', obj, flat) t.strictSame(obj, { production: true, omit: ['dev'], 
include: [], }, '--production sets --omit=dev') - t.strictSame(flat, {omit: ['dev']}, '--production sets --omit=dev') + t.strictSame(flat, { omit: ['dev'] }, '--production sets --omit=dev') delete obj.omit obj.production = false @@ -328,14 +329,14 @@ t.test('flatteners that populate flat.omit array', t => { include: ['dev'], omit: [], }, 'omit and include dev') - t.strictSame(flat, {omit: []}, 'do not omit dev when included') + t.strictSame(flat, { omit: [] }, 'do not omit dev when included') t.end() }) t.test('dev', t => { const flat = {} - const obj = {dev: true} + const obj = { dev: true } definitions.dev.flatten('dev', obj, flat) t.strictSame(obj, { dev: true, @@ -355,7 +356,7 @@ t.test('cache-max', t => { t.strictSame(flat, {}, 'no effect if not <= 0') obj['cache-max'] = 0 definitions['cache-max'].flatten('cache-max', obj, flat) - t.strictSame(flat, {preferOnline: true}, 'preferOnline if <= 0') + t.strictSame(flat, { preferOnline: true }, 'preferOnline if <= 0') t.end() }) @@ -366,7 +367,7 @@ t.test('cache-min', t => { t.strictSame(flat, {}, 'no effect if not >= 9999') obj['cache-min'] = 9999 definitions['cache-min'].flatten('cache-min', obj, flat) - t.strictSame(flat, {preferOffline: true}, 'preferOffline if >=9999') + t.strictSame(flat, { preferOffline: true }, 'preferOffline if >=9999') t.end() }) @@ -378,19 +379,19 @@ t.test('color', t => { const obj = { color: 'always' } definitions.color.flatten('color', obj, flat) - t.strictSame(flat, {color: true}, 'true when --color=always') + t.strictSame(flat, { color: true }, 'true when --color=always') obj.color = false definitions.color.flatten('color', obj, flat) - t.strictSame(flat, {color: false}, 'true when --no-color') + t.strictSame(flat, { color: false }, 'true when --no-color') process.stdout.isTTY = false obj.color = true definitions.color.flatten('color', obj, flat) - t.strictSame(flat, {color: false}, 'no color when stdout not tty') + t.strictSame(flat, { color: false }, 'no color when stdout not tty') process.stdout.isTTY = true definitions.color.flatten('color', obj, flat) - t.strictSame(flat, {color: true}, '--color turns on color when stdout is tty') + t.strictSame(flat, { color: true }, '--color turns on color when stdout is tty') delete process.env.NO_COLOR const defsAllowColor = t.mock(defpath) @@ -421,7 +422,7 @@ t.test('retry options', t => { const flat = {} obj[config] = 99 definitions[config].flatten(config, obj, flat) - t.strictSame(flat, {retry: {[option]: 99}}, msg) + t.strictSame(flat, { retry: { [option]: 99 } }, msg) delete obj[config] } t.end() @@ -442,7 +443,7 @@ t.test('search options', t => { const flat = {} obj[config] = 99 definitions[config].flatten(config, obj, flat) - t.strictSame(flat, { search: { limit: 20, [option]: 99 }}, msg) + t.strictSame(flat, { search: { limit: 20, [option]: 99 } }, msg) delete obj[config] } @@ -515,18 +516,18 @@ t.test('shrinkwrap/package-lock', t => { const obj = { shrinkwrap: false } const flat = {} definitions.shrinkwrap.flatten('shrinkwrap', obj, flat) - t.strictSame(flat, {packageLock: false}) + t.strictSame(flat, { packageLock: false }) obj.shrinkwrap = true definitions.shrinkwrap.flatten('shrinkwrap', obj, flat) - t.strictSame(flat, {packageLock: true}) + t.strictSame(flat, { packageLock: true }) delete obj.shrinkwrap obj['package-lock'] = false definitions['package-lock'].flatten('package-lock', obj, flat) - t.strictSame(flat, {packageLock: false}) + t.strictSame(flat, { packageLock: false }) obj['package-lock'] = true 
definitions['package-lock'].flatten('package-lock', obj, flat) - t.strictSame(flat, {packageLock: true}) + t.strictSame(flat, { packageLock: true }) t.end() }) @@ -550,7 +551,7 @@ t.test('defaultTag', t => { const obj = { tag: 'next' } const flat = {} definitions.tag.flatten('tag', obj, flat) - t.strictSame(flat, {defaultTag: 'next'}) + t.strictSame(flat, { defaultTag: 'next' }) t.end() }) @@ -558,7 +559,7 @@ t.test('timeout', t => { const obj = { 'fetch-timeout': 123 } const flat = {} definitions['fetch-timeout'].flatten('fetch-timeout', obj, flat) - t.strictSame(flat, {timeout: 123}) + t.strictSame(flat, { timeout: 123 }) t.end() }) @@ -573,10 +574,10 @@ t.test('saveType', t => { t.strictSame(flat, {}, 'remove if false and set to prod') flat.saveType = 'dev' definitions['save-prod'].flatten('save-prod', obj, flat) - t.strictSame(flat, {saveType: 'dev'}, 'ignore if false and not already prod') + t.strictSame(flat, { saveType: 'dev' }, 'ignore if false and not already prod') obj['save-prod'] = true definitions['save-prod'].flatten('save-prod', obj, flat) - t.strictSame(flat, {saveType: 'prod'}, 'set to prod if true') + t.strictSame(flat, { saveType: 'prod' }, 'set to prod if true') t.end() }) @@ -591,10 +592,10 @@ t.test('saveType', t => { flat.saveType = 'prod' obj['save-dev'] = false definitions['save-dev'].flatten('save-dev', obj, flat) - t.strictSame(flat, {saveType: 'prod'}, 'ignore if false and not already dev') + t.strictSame(flat, { saveType: 'prod' }, 'ignore if false and not already dev') obj['save-dev'] = true definitions['save-dev'].flatten('save-dev', obj, flat) - t.strictSame(flat, {saveType: 'dev'}, 'set to dev if true') + t.strictSame(flat, { saveType: 'dev' }, 'set to dev if true') t.end() }) @@ -602,40 +603,40 @@ t.test('saveType', t => { const obj = { 'save-bundle': true } const flat = {} definitions['save-bundle'].flatten('save-bundle', obj, flat) - t.strictSame(flat, {saveBundle: true}, 'set the saveBundle flag') + t.strictSame(flat, { saveBundle: true }, 'set the saveBundle flag') obj['save-bundle'] = false definitions['save-bundle'].flatten('save-bundle', obj, flat) - t.strictSame(flat, {saveBundle: false}, 'unset the saveBundle flag') + t.strictSame(flat, { saveBundle: false }, 'unset the saveBundle flag') obj['save-bundle'] = true obj['save-peer'] = true definitions['save-bundle'].flatten('save-bundle', obj, flat) - t.strictSame(flat, {saveBundle: false}, 'false if save-peer is set') + t.strictSame(flat, { saveBundle: false }, 'false if save-peer is set') t.end() }) t.test('save-peer', t => { - const obj = { 'save-peer': false} + const obj = { 'save-peer': false } const flat = {} definitions['save-peer'].flatten('save-peer', obj, flat) t.strictSame(flat, {}, 'no effect if false and not yet set') obj['save-peer'] = true definitions['save-peer'].flatten('save-peer', obj, flat) - t.strictSame(flat, {saveType: 'peer'}, 'set saveType to peer if unset') + t.strictSame(flat, { saveType: 'peer' }, 'set saveType to peer if unset') flat.saveType = 'optional' definitions['save-peer'].flatten('save-peer', obj, flat) - t.strictSame(flat, {saveType: 'peerOptional'}, 'set to peerOptional if optional already') + t.strictSame(flat, { saveType: 'peerOptional' }, 'set to peerOptional if optional already') definitions['save-peer'].flatten('save-peer', obj, flat) - t.strictSame(flat, {saveType: 'peerOptional'}, 'no effect if already peerOptional') + t.strictSame(flat, { saveType: 'peerOptional' }, 'no effect if already peerOptional') obj['save-peer'] = false 
definitions['save-peer'].flatten('save-peer', obj, flat) - t.strictSame(flat, {saveType: 'optional'}, 'switch peerOptional to optional if false') + t.strictSame(flat, { saveType: 'optional' }, 'switch peerOptional to optional if false') obj['save-peer'] = false flat.saveType = 'peer' @@ -646,25 +647,25 @@ t.test('saveType', t => { }) t.test('save-optional', t => { - const obj = { 'save-optional': false} + const obj = { 'save-optional': false } const flat = {} definitions['save-optional'].flatten('save-optional', obj, flat) t.strictSame(flat, {}, 'no effect if false and not yet set') obj['save-optional'] = true definitions['save-optional'].flatten('save-optional', obj, flat) - t.strictSame(flat, {saveType: 'optional'}, 'set saveType to optional if unset') + t.strictSame(flat, { saveType: 'optional' }, 'set saveType to optional if unset') flat.saveType = 'peer' definitions['save-optional'].flatten('save-optional', obj, flat) - t.strictSame(flat, {saveType: 'peerOptional'}, 'set to peerOptional if peer already') + t.strictSame(flat, { saveType: 'peerOptional' }, 'set to peerOptional if peer already') definitions['save-optional'].flatten('save-optional', obj, flat) - t.strictSame(flat, {saveType: 'peerOptional'}, 'no effect if already peerOptional') + t.strictSame(flat, { saveType: 'peerOptional' }, 'no effect if already peerOptional') obj['save-optional'] = false definitions['save-optional'].flatten('save-optional', obj, flat) - t.strictSame(flat, {saveType: 'peer'}, 'switch peerOptional to peer if false') + t.strictSame(flat, { saveType: 'peer' }, 'switch peerOptional to peer if false') flat.saveType = 'optional' definitions['save-optional'].flatten('save-optional', obj, flat) diff --git a/test/lib/utils/error-message.js b/test/lib/utils/error-message.js index aec4c3a199271..1959b9217a7d0 100644 --- a/test/lib/utils/error-message.js +++ b/test/lib/utils/error-message.js @@ -215,10 +215,11 @@ t.test('args are cleaned', t => { t.test('eacces/eperm', t => { const runTest = (windows, loaded, cachePath, cacheDest) => t => { - if (windows) + if (windows) { beWindows() - else + } else { bePosix() + } const path = `${cachePath ? CACHE : '/not/cache/dir'}/path` const dest = `${cacheDest ? 
CACHE : '/not/cache/dir'}/dest` @@ -229,10 +230,11 @@ t.test('eacces/eperm', t => { stack: 'dummy stack trace', }) verboseLogs.length = 0 - if (loaded) + if (loaded) { t.matchSnapshot(errorMessage(er, npm)) - else + } else { t.matchSnapshot(errorMessage(er, unloadedNpm)) + } t.matchSnapshot(verboseLogs) t.end() @@ -243,7 +245,7 @@ t.test('eacces/eperm', t => { for (const loaded of [true, false]) { for (const cachePath of [true, false]) { for (const cacheDest of [true, false]) { - const m = JSON.stringify({windows, loaded, cachePath, cacheDest}) + const m = JSON.stringify({ windows, loaded, cachePath, cacheDest }) t.test(m, runTest(windows, loaded, cachePath, cacheDest)) } } diff --git a/test/lib/utils/open-url.js b/test/lib/utils/open-url.js index 36724d0adf7bb..e4792ae5f5e35 100644 --- a/test/lib/utils/open-url.js +++ b/test/lib/utils/open-url.js @@ -8,7 +8,7 @@ const npm = { browser: true, }, config: { - get: (k) => npm._config[k], + get: k => npm._config[k], set: (k, v) => { npm._config[k] = v }, @@ -29,7 +29,7 @@ const openUrl = t.mock('../../../lib/utils/open-url.js', { opener, }) -t.test('opens a url', async (t) => { +t.test('opens a url', async t => { t.teardown(() => { openerUrl = null openerOpts = null @@ -41,31 +41,39 @@ t.test('opens a url', async (t) => { t.same(OUTPUT, [], 'printed no output') }) -t.test('returns error for non-https and non-file url', async (t) => { +t.test('returns error for non-https and non-file url', async t => { t.teardown(() => { openerUrl = null openerOpts = null OUTPUT.length = 0 }) - await t.rejects(openUrl(npm, 'ftp://www.npmjs.com', 'npm home'), /Invalid URL/, 'got the correct error') + await t.rejects( + openUrl(npm, 'ftp://www.npmjs.com', 'npm home'), + /Invalid URL/, + 'got the correct error' + ) t.equal(openerUrl, null, 'did not open') t.same(openerOpts, null, 'did not open') t.same(OUTPUT, [], 'printed no output') }) -t.test('returns error for non-parseable url', async (t) => { +t.test('returns error for non-parseable url', async t => { t.teardown(() => { openerUrl = null openerOpts = null OUTPUT.length = 0 }) - await t.rejects(openUrl(npm, 'git+ssh://user@host:repo.git', 'npm home'), /Invalid URL/, 'got the correct error') + await t.rejects( + openUrl(npm, 'git+ssh://user@host:repo.git', 'npm home'), + /Invalid URL/, + 'got the correct error' + ) t.equal(openerUrl, null, 'did not open') t.same(openerOpts, null, 'did not open') t.same(OUTPUT, [], 'printed no output') }) -t.test('encodes non-URL-safe characters in url provided', async (t) => { +t.test('encodes non-URL-safe characters in url provided', async t => { t.teardown(() => { openerUrl = null openerOpts = null @@ -77,7 +85,7 @@ t.test('encodes non-URL-safe characters in url provided', async (t) => { t.same(OUTPUT, [], 'printed no output') }) -t.test('opens a url with the given browser', async (t) => { +t.test('opens a url with the given browser', async t => { npm.config.set('browser', 'chrome') t.teardown(() => { openerUrl = null @@ -91,7 +99,7 @@ t.test('opens a url with the given browser', async (t) => { t.same(OUTPUT, [], 'printed no output') }) -t.test('prints where to go when browser is disabled', async (t) => { +t.test('prints where to go when browser is disabled', async t => { npm.config.set('browser', false) t.teardown(() => { openerUrl = null @@ -107,7 +115,7 @@ t.test('prints where to go when browser is disabled', async (t) => { t.matchSnapshot(OUTPUT[0][0], 'printed expected message') }) -t.test('prints where to go when browser is disabled and json is enabled', async (t) => { 
+t.test('prints where to go when browser is disabled and json is enabled', async t => { npm.config.set('browser', false) npm.config.set('json', true) t.teardown(() => { @@ -125,7 +133,7 @@ t.test('prints where to go when browser is disabled and json is enabled', async t.matchSnapshot(OUTPUT[0][0], 'printed expected message') }) -t.test('prints where to go when given browser does not exist', async (t) => { +t.test('prints where to go when given browser does not exist', async t => { npm.config.set('browser', 'firefox') openerResult = Object.assign(new Error('failed'), { code: 'ENOENT' }) t.teardown(() => { @@ -142,7 +150,7 @@ t.test('prints where to go when given browser does not exist', async (t) => { t.matchSnapshot(OUTPUT[0][0], 'printed expected message') }) -t.test('handles unknown opener error', async (t) => { +t.test('handles unknown opener error', async t => { npm.config.set('browser', 'firefox') openerResult = Object.assign(new Error('failed'), { code: 'ENOBRIAN' }) t.teardown(() => { diff --git a/test/lib/utils/otplease.js b/test/lib/utils/otplease.js index fb9476120e2df..b3711965c2c9c 100644 --- a/test/lib/utils/otplease.js +++ b/test/lib/utils/otplease.js @@ -20,8 +20,9 @@ t.test('prompts for otp for EOTP', async (t) => { let runs = 0 const fn = async (opts) => { - if (++runs === 1) + if (++runs === 1) { throw Object.assign(new Error('nope'), { code: 'EOTP' }) + } t.equal(opts.some, 'prop', 'carried original options') t.equal(opts.otp, '1234', 'received the otp') diff --git a/test/lib/utils/pulse-till-done.js b/test/lib/utils/pulse-till-done.js index c1d7902c0684a..acbf66396a702 100644 --- a/test/lib/utils/pulse-till-done.js +++ b/test/lib/utils/pulse-till-done.js @@ -4,8 +4,9 @@ let pulseStarted = null const npmlog = { gauge: { pulse: () => { - if (pulseStarted) + if (pulseStarted) { pulseStarted() + } }, }, } diff --git a/test/lib/utils/read-user-info.js b/test/lib/utils/read-user-info.js index 5d937ff78a551..35101f1d7029a 100644 --- a/test/lib/utils/read-user-info.js +++ b/test/lib/utils/read-user-info.js @@ -14,14 +14,16 @@ const npmlog = { const npmUserValidate = { username: (username) => { - if (username === 'invalid') + if (username === 'invalid') { return new Error('invalid username') + } return null }, email: (email) => { - if (email.startsWith('invalid')) + if (email.startsWith('invalid')) { return new Error('invalid email') + } return null }, diff --git a/test/lib/utils/reify-finish.js b/test/lib/utils/reify-finish.js index b66d5bcd3b53a..b565034058adb 100644 --- a/test/lib/utils/reify-finish.js +++ b/test/lib/utils/reify-finish.js @@ -10,7 +10,7 @@ const npm = { const builtinConfMock = { loadError: new Error('no builtin config'), - raw: { hasBuiltinConfig: true, x: 'y', nested: { foo: 'bar' }}, + raw: { hasBuiltinConfig: true, x: 'y', nested: { foo: 'bar' } }, } const reifyOutput = () => {} @@ -22,8 +22,9 @@ const fs = { promises: realFs.promises && { ...realFs.promises, writeFile: async (path, data) => { - if (!expectWrite) + if (!expectWrite) { throw new Error('did not expect to write builtin config file') + } return realFs.promises.writeFile(path, data) }, }, @@ -69,7 +70,7 @@ t.test('should write if everything above passes', async t => { await reifyFinish(npm, { options: { global: true }, actualTree: { - inventory: new Map([['node_modules/npm', {path}]]), + inventory: new Map([['node_modules/npm', { path }]]), }, }) // windowwwwwwssss!!!!! 
diff --git a/test/lib/utils/reify-output.js b/test/lib/utils/reify-output.js index 3ffbdf86a2989..9a1bffb4033f9 100644 --- a/test/lib/utils/reify-output.js +++ b/test/lib/utils/reify-output.js @@ -97,8 +97,9 @@ t.test('no message when funding config is false', (t) => { }) settings.fund = false npm.output = out => { - if (out.endsWith('looking for funding')) + if (out.endsWith('looking for funding')) { t.fail('should not print funding info', { actual: out }) + } } reifyOutput(npm, { @@ -283,8 +284,9 @@ t.test('showing and not showing audit report', async t => { delete npm.flatOptions.auditLevel npm.command = command // only set exitCode back if we're passing tests - if (t.passing()) + if (t.passing()) { process.exitCode = exitCode + } }) process.exitCode = 0 @@ -312,8 +314,9 @@ t.test('showing and not showing audit report', async t => { delete npm.flatOptions.auditLevel npm.command = command // only set exitCode back if we're passing tests - if (t.passing()) + if (t.passing()) { process.exitCode = exitCode + } }) process.exitCode = 0 @@ -368,11 +371,13 @@ t.test('packages changed message', t => { ], }, } - for (let i = 0; i < added; i++) + for (let i = 0; i < added; i++) { mock.diff.children.push({ action: 'ADD', ideal: { location: 'loc' } }) + } - for (let i = 0; i < removed; i++) + for (let i = 0; i < removed; i++) { mock.diff.children.push({ action: 'REMOVE', actual: { location: 'loc' } }) + } for (let i = 0; i < changed; i++) { const actual = { location: 'loc' } @@ -395,8 +400,9 @@ t.test('packages changed message', t => { for (const removed of [0, 1, 2]) { for (const changed of [0, 1, 2]) { for (const audited of [0, 1, 2]) { - for (const json of [true, false]) + for (const json of [true, false]) { cases.push([added, removed, changed, audited, json, 'install']) + } } } } @@ -407,8 +413,9 @@ t.test('packages changed message', t => { cases.push([0, 0, 0, 2, false, 'audit']) t.plan(cases.length) - for (const [added, removed, changed, audited, json, command] of cases) + for (const [added, removed, changed, audited, json, command] of cases) { testCase(t, added, removed, changed, audited, json, command) + } t.end() }) diff --git a/test/lib/utils/split-package-names.js b/test/lib/utils/split-package-names.js index 82b8f5578397f..5fe1e6cd8dde3 100644 --- a/test/lib/utils/split-package-names.js +++ b/test/lib/utils/split-package-names.js @@ -11,7 +11,8 @@ t.test('splitPackageNames', t => { ['@npmcli/one/semver', '@npmcli/one/node_modules/semver'], ] - for (const [input, expected] of assertions) + for (const [input, expected] of assertions) { t.equal(splitPackageNames(input), expected, `split ${input} correctly`) + } t.end() }) diff --git a/test/lib/utils/update-notifier.js b/test/lib/utils/update-notifier.js index dc0a64ff46127..78ff93825e489 100644 --- a/test/lib/utils/update-notifier.js +++ b/test/lib/utils/update-notifier.js @@ -21,13 +21,17 @@ const pacote = { process.exit(1) } MANIFEST_REQUEST.push(spec) - if (PACOTE_ERROR) + if (PACOTE_ERROR) { throw PACOTE_ERROR + } return { - version: spec === 'npm@latest' ? CURRENT_VERSION - : /-/.test(spec) ? CURRENT_BETA - : NEXT_VERSION, + version: + spec === 'npm@latest' + ? CURRENT_VERSION + : /-/.test(spec) + ? 
CURRENT_BETA + : NEXT_VERSION, } }, } @@ -36,7 +40,7 @@ const npm = { flatOptions, log: { useColor: () => true }, version: CURRENT_VERSION, - config: { get: (k) => k !== 'global' }, + config: { get: k => k !== 'global' }, command: 'view', argv: ['npm'], } @@ -54,7 +58,9 @@ const fs = { ...require('fs'), stat: (path, cb) => { if (basename(path) !== '_update-notifier-last-checked') { - console.error(new Error('should only write to notifier last checked file')) + console.error( + new Error('should only write to notifier last checked file') + ) process.exit(1) } process.nextTick(() => cb(STAT_ERROR, { mtime: new Date(STAT_MTIME) })) @@ -65,7 +71,9 @@ const fs = { process.exit(1) } if (basename(path) !== '_update-notifier-last-checked') { - console.error(new Error('should only write to notifier last checked file')) + console.error( + new Error('should only write to notifier last checked file') + ) process.exit(1) } process.nextTick(() => cb(WRITE_ERROR)) @@ -93,30 +101,39 @@ const runUpdateNotifier = async npm => { t.test('situations in which we do not notify', t => { t.test('nothing to do if notifier disabled', async t => { - t.equal(await runUpdateNotifier({ - ...npm, - config: { get: (k) => k !== 'update-notifier' }, - }), null) + t.equal( + await runUpdateNotifier({ + ...npm, + config: { get: k => k !== 'update-notifier' }, + }), + null + ) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) t.test('do not suggest update if already updating', async t => { - t.equal(await runUpdateNotifier({ - ...npm, - flatOptions: { ...flatOptions, global: true }, - command: 'install', - argv: ['npm'], - }), null) + t.equal( + await runUpdateNotifier({ + ...npm, + flatOptions: { ...flatOptions, global: true }, + command: 'install', + argv: ['npm'], + }), + null + ) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) t.test('do not suggest update if already updating with spec', async t => { - t.equal(await runUpdateNotifier({ - ...npm, - flatOptions: { ...flatOptions, global: true }, - command: 'install', - argv: ['npm@latest'], - }), null) + t.equal( + await runUpdateNotifier({ + ...npm, + flatOptions: { ...flatOptions, global: true }, + command: 'install', + argv: ['npm@latest'], + }), + null + ) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) @@ -161,14 +178,14 @@ t.test('situations in which we do not notify', t => { t.test('only check weekly for GA releases', async t => { // One week (plus five minutes to account for test environment fuzziness) - STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24 * 7) + (1000 * 60 * 5) + STAT_MTIME = Date.now() - 1000 * 60 * 60 * 24 * 7 + 1000 * 60 * 5 t.equal(await runUpdateNotifier(npm), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) t.test('only check daily for betas', async t => { // One day (plus five minutes to account for test environment fuzziness) - STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24) + (1000 * 60 * 5) + STAT_MTIME = Date.now() - 1000 * 60 * 60 * 24 + 1000 * 60 * 5 t.equal(await runUpdateNotifier({ ...npm, version: HAVE_BETA }), null) t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests') }) @@ -180,42 +197,70 @@ t.test('notification situations', t => { t.test('new beta available', async t => { const version = HAVE_BETA t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot( + await runUpdateNotifier({ ...npmNoColor, version }), + 'no color' + ) 
t.strictSame(MANIFEST_REQUEST, [`npm@^${version}`, `npm@^${version}`]) }) t.test('patch to next version', async t => { const version = NEXT_PATCH t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') - t.strictSame(MANIFEST_REQUEST, ['npm@latest', `npm@^${version}`, 'npm@latest', `npm@^${version}`]) + t.matchSnapshot( + await runUpdateNotifier({ ...npmNoColor, version }), + 'no color' + ) + t.strictSame(MANIFEST_REQUEST, [ + 'npm@latest', + `npm@^${version}`, + 'npm@latest', + `npm@^${version}`, + ]) }) t.test('minor to next version', async t => { const version = NEXT_MINOR t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') - t.strictSame(MANIFEST_REQUEST, ['npm@latest', `npm@^${version}`, 'npm@latest', `npm@^${version}`]) + t.matchSnapshot( + await runUpdateNotifier({ ...npmNoColor, version }), + 'no color' + ) + t.strictSame(MANIFEST_REQUEST, [ + 'npm@latest', + `npm@^${version}`, + 'npm@latest', + `npm@^${version}`, + ]) }) t.test('patch to current', async t => { const version = CURRENT_PATCH t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot( + await runUpdateNotifier({ ...npmNoColor, version }), + 'no color' + ) t.strictSame(MANIFEST_REQUEST, ['npm@latest', 'npm@latest']) }) t.test('minor to current', async t => { const version = CURRENT_MINOR t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot( + await runUpdateNotifier({ ...npmNoColor, version }), + 'no color' + ) t.strictSame(MANIFEST_REQUEST, ['npm@latest', 'npm@latest']) }) t.test('major to current', async t => { const version = CURRENT_MAJOR t.matchSnapshot(await runUpdateNotifier({ ...npm, version }), 'color') - t.matchSnapshot(await runUpdateNotifier({ ...npmNoColor, version }), 'no color') + t.matchSnapshot( + await runUpdateNotifier({ ...npmNoColor, version }), + 'no color' + ) t.strictSame(MANIFEST_REQUEST, ['npm@latest', 'npm@latest']) }) diff --git a/test/lib/workspaces/get-workspaces.js b/test/lib/workspaces/get-workspaces.js index 0f51d95fcb763..0d1bba3144d83 100644 --- a/test/lib/workspaces/get-workspaces.js +++ b/test/lib/workspaces/get-workspaces.js @@ -11,8 +11,9 @@ const cleanOutput = (str, path) => normalizePath(str) const clean = (res, path) => { const cleaned = new Map() - for (const [key, value] of res.entries()) + for (const [key, value] of res.entries()) { cleaned.set(key, cleanOutput(value, path)) + } return cleaned }