tools: only fetch previous versions when necessary
Refactor the logic that works out the previous versions of Node.js for
the API documentation so that the parsing (including the potential HTTPS
fetch of CHANGELOG.md) happens at most once per build, rather than once
per generated API doc as it does today.

Signed-off-by: Richard Lau <riclau@uk.ibm.com>

PR-URL: #32518
Fixes: #32512
Reviewed-By: Joyee Cheung <joyeec9h3@gmail.com>
Reviewed-By: Myles Borins <myles.borins@gmail.com>
richardlau authored and codebytere committed Mar 31, 2020
1 parent 7123c0f commit d577190
Showing 6 changed files with 132 additions and 88 deletions.
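
What this means in practice: a new Makefile target runs tools/doc/versions.js once per build to write out/doc/previous-versions.json, and every per-document run of tools/doc/generate.js simply reads that file back via the new --versions-file option, instead of each run re-parsing (and possibly re-downloading) CHANGELOG.md. A rough sketch of the resulting flow follows; the sample entries are borrowed from the test fixture in test-doctool-html.js, and loadVersions is an illustrative helper name, not code from the commit:

// Once per build (the $(VERSIONS_DATA) rule below):
//   node tools/doc/versions.js out/doc/previous-versions.json
// The file contains an array of previous release lines, e.g.:
//   [ { "num": "10.x", "lts": true }, { "num": "9.x" }, ... ]
//
// Then, for each API doc, generate.js only needs to read it back:
const fs = require('fs').promises;

async function loadVersions(versionsFile) {
  const data = await fs.readFile(versionsFile, 'utf8');
  if (!data.trim()) throw new Error(`${versionsFile} is empty`);
  return JSON.parse(data); // later passed to html.toHTML({ ..., versions })
}

Because the versions data is injected this way, html.js no longer needs to require versions.js, and toHTML()/altDocs() can become synchronous, as the diffs below show.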
11 changes: 9 additions & 2 deletions Makefile
@@ -738,15 +738,22 @@ out/doc/api/assets/%: doc/api_assets/% out/doc/api/assets
run-npm-ci = $(PWD)/$(NPM) ci

LINK_DATA = out/doc/apilinks.json
VERSIONS_DATA = out/doc/previous-versions.json
gen-api = tools/doc/generate.js --node-version=$(FULLVERSION) \
--apilinks=$(LINK_DATA) $< --output-directory=out/doc/api
--apilinks=$(LINK_DATA) $< --output-directory=out/doc/api \
--versions-file=$(VERSIONS_DATA)
gen-apilink = tools/doc/apilinks.js $(LINK_DATA) $(wildcard lib/*.js)

$(LINK_DATA): $(wildcard lib/*.js) tools/doc/apilinks.js
$(call available-node, $(gen-apilink))

# Regenerate previous versions data if the current version changes
$(VERSIONS_DATA): CHANGELOG.md src/node_version.h tools/doc/versions.js
$(call available-node, tools/doc/versions.js $@)

out/doc/api/%.json out/doc/api/%.html: doc/api/%.md tools/doc/generate.js \
tools/doc/markdown.js tools/doc/html.js tools/doc/json.js tools/doc/apilinks.js | $(LINK_DATA)
tools/doc/markdown.js tools/doc/html.js tools/doc/json.js \
tools/doc/apilinks.js $(VERSIONS_DATA) | $(LINK_DATA)
$(call available-node, $(gen-api))

out/doc/api/all.html: $(apidocs_html) tools/doc/allhtml.js \
21 changes: 16 additions & 5 deletions test/doctool/test-doctool-html.js
@@ -36,7 +36,7 @@ const testLinksMapper = {
}
};

async function toHTML({ input, filename, nodeVersion }) {
function toHTML({ input, filename, nodeVersion, versions }) {
const content = unified()
.use(replaceLinks, { filename, linksMapper: testLinksMapper })
.use(markdown)
@@ -49,7 +49,7 @@ async function toHTML({ input, filename, nodeVersion }) {
.use(htmlStringify)
.processSync(input);

return html.toHTML({ input, content, filename, nodeVersion });
return html.toHTML({ input, content, filename, nodeVersion, versions });
}

// Test data is a list of objects with two properties.
@@ -129,16 +129,27 @@ const testData = [
];

const spaces = /\s/g;
const versions = [
{ num: '10.x', lts: true },
{ num: '9.x' },
{ num: '8.x' },
{ num: '7.x' },
{ num: '6.x' },
{ num: '5.x' },
{ num: '4.x' },
{ num: '0.12.x' },
{ num: '0.10.x' }];

testData.forEach(({ file, html }) => {
// Normalize expected data by stripping whitespace.
const expected = html.replace(spaces, '');

readFile(file, 'utf8', common.mustCall(async (err, input) => {
assert.ifError(err);
const output = await toHTML({ input: input,
filename: 'foo',
nodeVersion: process.version });
const output = toHTML({ input: input,
filename: 'foo',
nodeVersion: process.version,
versions: versions });

const actual = output.replace(spaces, '');
// Assert that the input stripped of all whitespace contains the
83 changes: 49 additions & 34 deletions test/doctool/test-doctool-versions.js
@@ -2,8 +2,14 @@

require('../common');
const assert = require('assert');
const { spawnSync } = require('child_process');
const fs = require('fs');
const path = require('path');
const tmpdir = require('../common/tmpdir');
const util = require('util');
const { versions } = require('../../tools/doc/versions.js');

const debuglog = util.debuglog('test');
const versionsTool = path.join('../../tools/doc/versions.js');

// At the time of writing these are the minimum expected versions.
// New versions of Node.js do not have to be explicitly added here.
@@ -21,39 +27,48 @@ const expected = [
'0.10.x',
];

async function test() {
const vers = await versions();
// Coherence checks for each returned version.
for (const version of vers) {
const tested = util.inspect(version);
const parts = version.num.split('.');
const expectedLength = parts[0] === '0' ? 3 : 2;
assert.strictEqual(parts.length, expectedLength,
`'num' from ${tested} should be '<major>.x'.`);
assert.strictEqual(parts[parts.length - 1], 'x',
`'num' from ${tested} doesn't end in '.x'.`);
const isEvenRelease = Number.parseInt(parts[expectedLength - 2]) % 2 === 0;
const hasLtsProperty = version.hasOwnProperty('lts');
if (hasLtsProperty) {
// Odd-numbered versions of Node.js are never LTS.
assert.ok(isEvenRelease, `${tested} should not be an 'lts' release.`);
assert.ok(version.lts, `'lts' from ${tested} should 'true'.`);
}
}
tmpdir.refresh();
const versionsFile = path.join(tmpdir.path, 'versions.json');
debuglog(versionsFile);
const opts = { cwd: tmpdir.path, encoding: 'utf8' };
const cp = spawnSync(process.execPath, [ versionsTool, versionsFile ], opts);
debuglog(cp.stderr);
debuglog(cp.stdout);
assert.strictEqual(cp.stdout, '');
assert.strictEqual(cp.signal, null);
assert.strictEqual(cp.status, 0);
const versions = JSON.parse(fs.readFileSync(versionsFile));
debuglog(versions);

// Check that the minimum number of versions were returned.
// Later versions are allowed, but not checked for here (they were checked
// above).
// Also check for the previous semver major -- From master this will be the
// most recent major release.
const thisMajor = Number.parseInt(process.versions.node.split('.')[0]);
const prevMajorString = `${thisMajor - 1}.x`;
if (!expected.includes(prevMajorString)) {
expected.unshift(prevMajorString);
}
for (const version of expected) {
assert.ok(vers.find((x) => x.num === version),
`Did not find entry for '${version}' in ${util.inspect(vers)}`);
// Coherence checks for each returned version.
for (const version of versions) {
const tested = util.inspect(version);
const parts = version.num.split('.');
const expectedLength = parts[0] === '0' ? 3 : 2;
assert.strictEqual(parts.length, expectedLength,
`'num' from ${tested} should be '<major>.x'.`);
assert.strictEqual(parts[parts.length - 1], 'x',
`'num' from ${tested} doesn't end in '.x'.`);
const isEvenRelease = Number.parseInt(parts[expectedLength - 2]) % 2 === 0;
const hasLtsProperty = version.hasOwnProperty('lts');
if (hasLtsProperty) {
// Odd-numbered versions of Node.js are never LTS.
assert.ok(isEvenRelease, `${tested} should not be an 'lts' release.`);
assert.ok(version.lts, `'lts' from ${tested} should 'true'.`);
}
}
test();

// Check that the minimum number of versions were returned.
// Later versions are allowed, but not checked for here (they were checked
// above).
// Also check for the previous semver major -- From master this will be the
// most recent major release.
const thisMajor = Number.parseInt(process.versions.node.split('.')[0]);
const prevMajorString = `${thisMajor - 1}.x`;
if (!expected.includes(prevMajorString)) {
expected.unshift(prevMajorString);
}
for (const version of expected) {
assert.ok(versions.find((x) => x.num === version),
`Did not find entry for '${version}' in ${util.inspect(versions)}`);
}
11 changes: 10 additions & 1 deletion tools/doc/generate.js
@@ -42,6 +42,7 @@ let filename = null;
let nodeVersion = null;
let outputDir = null;
let apilinks = {};
let versions = {};

async function main() {
for (const arg of args) {
@@ -58,6 +59,13 @@ async function main() {
throw new Error(`${linkFile} is empty`);
}
apilinks = JSON.parse(data);
} else if (arg.startsWith('--versions-file=')) {
const versionsFile = arg.replace(/^--versions-file=/, '');
const data = await fs.readFile(versionsFile, 'utf8');
if (!data.trim()) {
throw new Error(`${versionsFile} is empty`);
}
versions = JSON.parse(data);
}
}

@@ -84,7 +92,8 @@ async function main() {
.use(htmlStringify)
.process(input);

const myHtml = await html.toHTML({ input, content, filename, nodeVersion });
const myHtml = await html.toHTML({ input, content, filename, nodeVersion,
versions });
const basename = path.basename(filename, '.md');
const htmlTarget = path.join(outputDir, `${basename}.html`);
const jsonTarget = path.join(outputDir, `${basename}.json`);
8 changes: 3 additions & 5 deletions tools/doc/html.js
@@ -23,7 +23,6 @@

const common = require('./common.js');
const fs = require('fs');
const getVersions = require('./versions.js');
const unified = require('unified');
const find = require('unist-util-find');
const visit = require('unist-util-visit');
@@ -63,7 +62,7 @@ const gtocHTML = unified()
const templatePath = path.join(docPath, 'template.html');
const template = fs.readFileSync(templatePath, 'utf8');

async function toHTML({ input, content, filename, nodeVersion }) {
function toHTML({ input, content, filename, nodeVersion, versions }) {
filename = path.basename(filename, '.md');

const id = filename.replace(/\W+/g, '-');
@@ -81,7 +80,7 @@ async function toHTML({ input, content, filename, nodeVersion }) {
const docCreated = input.match(
/<!--\s*introduced_in\s*=\s*v([0-9]+)\.([0-9]+)\.[0-9]+\s*-->/);
if (docCreated) {
HTML = HTML.replace('__ALTDOCS__', await altDocs(filename, docCreated));
HTML = HTML.replace('__ALTDOCS__', altDocs(filename, docCreated, versions));
} else {
console.error(`Failed to add alternative version links to ${filename}`);
HTML = HTML.replace('__ALTDOCS__', '');
@@ -391,10 +390,9 @@ function getId(text, idCounters) {
return text;
}

async function altDocs(filename, docCreated) {
function altDocs(filename, docCreated, versions) {
const [, docCreatedMajor, docCreatedMinor] = docCreated.map(Number);
const host = 'https://nodejs.org';
const versions = await getVersions.versions();

const getHref = (versionNum) =>
`${host}/docs/latest-v${versionNum}/api/${filename}.html`;
86 changes: 45 additions & 41 deletions tools/doc/versions.js
@@ -1,11 +1,9 @@
'use strict';

const { readFileSync } = require('fs');
const { readFileSync, writeFileSync } = require('fs');
const path = require('path');
const srcRoot = path.join(__dirname, '..', '..');

let _versions;

const isRelease = () => {
const re = /#define NODE_VERSION_IS_RELEASE 0/;
const file = path.join(srcRoot, 'src', 'node_version.h');
@@ -15,7 +13,7 @@ const isRelease = () => {
const getUrl = (url) => {
return new Promise((resolve, reject) => {
const https = require('https');
const request = https.get(url, { timeout: 5000 }, (response) => {
const request = https.get(url, { timeout: 30000 }, (response) => {
if (response.statusCode !== 200) {
reject(new Error(
`Failed to get ${url}, status code ${response.statusCode}`));
@@ -32,45 +30,51 @@ const getUrl = (url) => {
};

const kNoInternet = !!process.env.NODE_TEST_NO_INTERNET;
const outFile = (process.argv.length > 2 ? process.argv[2] : undefined);

module.exports = {
async versions() {
if (_versions) {
return _versions;
}

// The CHANGELOG.md on release branches may not reference newer semver
// majors of Node.js so fetch and parse the version from the master branch.
const url =
'https://raw.githubusercontent.com/nodejs/node/master/CHANGELOG.md';
let changelog;
const file = path.join(srcRoot, 'CHANGELOG.md');
if (kNoInternet) {
changelog = readFileSync(file, { encoding: 'utf8' });
} else {
try {
changelog = await getUrl(url);
} catch (e) {
// Fail if this is a release build, otherwise fallback to local files.
if (isRelease()) {
throw e;
} else {
console.warn(`Unable to retrieve ${url}. Falling back to ${file}.`);
changelog = readFileSync(file, { encoding: 'utf8' });
}
async function versions() {
// The CHANGELOG.md on release branches may not reference newer semver
// majors of Node.js so fetch and parse the version from the master branch.
const url =
'https://raw.githubusercontent.com/nodejs/node/master/CHANGELOG.md';
let changelog;
const file = path.join(srcRoot, 'CHANGELOG.md');
if (kNoInternet) {
changelog = readFileSync(file, { encoding: 'utf8' });
} else {
try {
changelog = await getUrl(url);
} catch (e) {
// Fail if this is a release build, otherwise fallback to local files.
if (isRelease()) {
throw e;
} else {
console.warn(`Unable to retrieve ${url}. Falling back to ${file}.`);
changelog = readFileSync(file, { encoding: 'utf8' });
}
}
const ltsRE = /Long Term Support/i;
const versionRE = /\* \[Node\.js ([0-9.]+)\]\S+ (.*)\r?\n/g;
_versions = [];
let match;
while ((match = versionRE.exec(changelog)) != null) {
const entry = { num: `${match[1]}.x` };
if (ltsRE.test(match[2])) {
entry.lts = true;
}
_versions.push(entry);
}
const ltsRE = /Long Term Support/i;
const versionRE = /\* \[Node\.js ([0-9.]+)\]\S+ (.*)\r?\n/g;
const _versions = [];
let match;
while ((match = versionRE.exec(changelog)) != null) {
const entry = { num: `${match[1]}.x` };
if (ltsRE.test(match[2])) {
entry.lts = true;
}
return _versions;
_versions.push(entry);
}
};
return _versions;
}

versions().then((v) => {
if (outFile) {
writeFileSync(outFile, JSON.stringify(v));
} else {
console.log(JSON.stringify(v));
}
}).catch((err) => {
console.error(err);
process.exit(1);
});
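
With the module.exports wrapper gone, versions.js now doubles as a small standalone script: it resolves the versions list and either writes it to the file named by its first argument or prints the JSON to stdout. A hedged usage sketch follows; it assumes it is run from the root of a node checkout, and the use of execFileSync and the output path are illustrative, not part of the commit:

const { execFileSync } = require('child_process');

// Write the data to a file (the Makefile rule passes
// out/doc/previous-versions.json as the argument):
execFileSync(process.execPath,
             ['tools/doc/versions.js', 'previous-versions.json']);

// With no argument, the JSON goes to stdout instead:
const out = execFileSync(process.execPath, ['tools/doc/versions.js'],
                         { encoding: 'utf8' });
console.log(`found ${JSON.parse(out).length} previous release lines`);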
