-
-
Notifications
You must be signed in to change notification settings - Fork 232
/
index.js
143 lines (122 loc) · 4.73 KB
/
index.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
import { readFileSync, createReadStream, readdirSync } from 'node:fs';
import { fileURLToPath } from 'node:url';
import Benchmark from 'benchmark';
import * as parse5Upstream from 'parse5';
import { loadTreeConstructionTestData } from 'parse5-test-utils/dist/generate-parsing-tests.js';
import { loadSAXParserTestData } from 'parse5-test-utils/dist/load-sax-parser-test-data.js';
import { treeAdapters, WritableStreamStub } from 'parse5-test-utils/dist/common.js';
import * as parse5 from '../../packages/parse5/dist/index.js';
import { ParserStream as parse5Stream } from '../../packages/parse5-parser-stream/dist/index.js';
// Fixture locations, resolved relative to this module so the benchmark works
// from any current working directory.
const hugePagePath = new URL('../../test/data/huge-page/huge-page.html', import.meta.url);
const treeConstructionPath = new URL('../../test/data/html5lib-tests/tree-construction', import.meta.url);
const saxPath = new URL('../../test/data/sax/', import.meta.url);
//HACK: Benchmark.js recompiles test functions from their source, so they lose
// access to module-scope bindings; shared fixtures must be exposed as globals.
// See https://github.com/bestiejs/benchmark.js/issues/51
/* global workingCopy, WorkingCopyParserStream, upstreamParser, hugePage, microTests, runMicro, runPages, files */
global.workingCopy = parse5;
global.WorkingCopyParserStream = parse5Stream;
global.upstreamParser = parse5Upstream;
// Huge page data: one large HTML document, parsed in a single call per cycle.
global.hugePage = readFileSync(hugePagePath).toString();
// Micro data: every html5lib tree-construction test input, each parsed once
// per benchmark cycle.
global.microTests = loadTreeConstructionTestData([treeConstructionPath], treeAdapters.default).flatMap((test) =>
    //NOTE: this test caused stack overflow in parse5 v1.x, so it is skipped
    test.input === '<button><p><button>'
        ? []
        : [{ html: test.input, fragmentContext: test.fragmentContext }]
);
// Parses every micro test with the given parser: as a fragment when the test
// supplies a context element, as a full document otherwise.
global.runMicro = function (parser) {
    for (const { html, fragmentContext } of microTests) {
        if (fragmentContext) {
            parser.parseFragment(fragmentContext, html);
        } else {
            parser.parse(html);
        }
    }
};
// Pages data: the raw source of every SAX test page.
const pages = loadSAXParserTestData().map(({ src }) => src);
// Parses each full test page with the given parser.
global.runPages = function (parser) {
    pages.forEach((page) => parser.parse(page));
};
// Stream data: absolute filesystem paths to each SAX test page's source file.
// Use fileURLToPath() rather than URL#pathname: pathname keeps percent-encoding
// and produces invalid paths on Windows (leading slash before the drive letter).
global.files = readdirSync(saxPath).map((dirName) => fileURLToPath(new URL(`${dirName}/src.html`, saxPath)));
// Utils
// Looks up the measured ops/sec (`hz`) of the named benchmark in an
// (array-like) Benchmark.Suite. Returns undefined when no benchmark matches.
function getHz(suite, testName) {
    const bench = Array.prototype.find.call(suite, (candidate) => candidate.name === testName);
    return bench?.hz;
}
/**
 * Runs a head-to-head Benchmark.js suite comparing the working copy against
 * the published upstream parser, then prints the resulting speed ratio.
 *
 * @param {object} opts
 * @param {string} opts.name - Suite name, logged when the suite starts.
 * @param {Function} opts.workingCopyFn - Benchmark body for the working copy.
 * @param {Function} opts.upstreamFn - Benchmark body for upstream parse5.
 * @param {boolean} [opts.defer=false] - When true the benchmark functions are
 *   deferred: they receive a `deferred` object and must resolve it themselves.
 */
function runBench({ name, workingCopyFn, upstreamFn, defer = false }) {
    const suite = new Benchmark.Suite(name);
    suite.add('Working copy', workingCopyFn, { defer });
    suite.add('Upstream', upstreamFn, { defer });
    suite.on('start', () => console.log(name));
    suite.on('cycle', (event) => console.log(String(event.target)));
    suite.on('complete', () => {
        const workingCopyHz = getHz(suite, 'Working copy');
        const upstreamHz = getHz(suite, 'Upstream');
        // Report the ratio in whichever direction is >= 1 for readability.
        if (workingCopyHz > upstreamHz) {
            console.log(`Working copy is ${(workingCopyHz / upstreamHz).toFixed(2)}x faster.\n`);
        } else {
            console.log(`Working copy is ${(upstreamHz / workingCopyHz).toFixed(2)}x slower.\n`);
        }
    });
    suite.run();
}
// Benchmarks: each suite pits the working copy against upstream parse5.
runBench({
    name: 'parse5 regression benchmark - MICRO',
    workingCopyFn() {
        runMicro(workingCopy);
    },
    upstreamFn() {
        runMicro(upstreamParser);
    },
});
runBench({
    name: 'parse5 regression benchmark - HUGE',
    workingCopyFn() {
        workingCopy.parse(hugePage);
    },
    upstreamFn() {
        upstreamParser.parse(hugePage);
    },
});
runBench({
    name: 'parse5 regression benchmark - PAGES',
    workingCopyFn() {
        runPages(workingCopy);
    },
    upstreamFn() {
        runPages(upstreamParser);
    },
});
runBench({
    name: 'parse5 regression benchmark - STREAM',
    defer: true,
    // Streams every SAX fixture into a working-copy ParserStream, resolving
    // the deferred benchmark once all streams have emitted 'finish'.
    async workingCopyFn(deferred) {
        await Promise.all(
            files.map((fileName) => {
                const parserStream = new WorkingCopyParserStream();
                createReadStream(fileName, 'utf8').pipe(parserStream);
                return new Promise((resolve) => parserStream.on('finish', resolve));
            })
        );
        deferred.resolve();
    },
    // Upstream parse5 is exercised without a streaming API here: buffer each
    // file through a stub writable, then parse the collected text on 'finish'.
    async upstreamFn(deferred) {
        const pending = files.map(
            (fileName) =>
                new Promise((resolve) => {
                    const sink = new WritableStreamStub();
                    sink.on('finish', () => {
                        upstreamParser.parse(sink.writtenData);
                        resolve();
                    });
                    createReadStream(fileName, 'utf8').pipe(sink);
                })
        );
        await Promise.all(pending);
        deferred.resolve();
    },
});