// bench.js — micro-benchmark helpers: timing, V8 heap-usage spikes, and a
// local per-commit history log.
/**
 * Flattens V8 heap space statistics into a name → used-size map.
 *
 * @param {Array<{space_name: string, space_used_size: number}>} [stats=[]]
 *   Entries as returned by `v8.getHeapSpaceStatistics()`.
 * @returns {Object<string, number>} Map of heap space name to used bytes.
 */
const translateHeapStats = (stats = []) =>
  Object.fromEntries(
    stats.map(({ space_name, space_used_size }) => [
      space_name,
      space_used_size,
    ])
  )
/**
 * Raises each value in `current` to the maximum of itself and the
 * corresponding value in `update`. Mutates `current` in place.
 *
 * @param {Object<string, number>} current - Accumulator; mutated in place.
 * @param {Object<string, number>} update - Freshly sampled values.
 */
const updateMaxEachKey = (current, update) => {
  Object.keys(current).forEach((key) => {
    current[key] = Math.max(current[key], update[key])
  })
}
/**
 * Computes the per-key difference `b[key] - a[key]` for every key of `a`.
 *
 * @param {Object<string, number>} a - Baseline values.
 * @param {Object<string, number>} b - Later values.
 * @returns {Object<string, number>} Growth of each key relative to `a`.
 */
const diffEachKey = (a, b) =>
  Object.fromEntries(Object.keys(a).map((key) => [key, b[key] - a[key]]))
/**
 * Formats positive byte counts as human-readable megabyte strings,
 * dropping keys whose value is zero or negative.
 *
 * @param {Object<string, number>} obj - Map of name to byte delta.
 * @returns {Object<string, string>} Map of name to e.g. `"+1.2345 MB"`.
 */
const toHumanReadable = (obj) => {
  const result = {}
  for (const [key, bytes] of Object.entries(obj)) {
    if (bytes > 0) {
      result[key] = `+${(bytes / (1024 * 1024)).toFixed(4)} MB`
    }
  }
  return result
}
/**
 * Starts sampling V8 heap space usage on an interval, tracking the maximum
 * observed size of each heap space.
 *
 * @param {number} [frequency=10] - Sampling interval in milliseconds.
 * @returns {{collect: Function, getResult: Function}} `collect` takes one
 *   immediate sample; `getResult` stops the sampler, takes a final sample,
 *   and returns the per-space growth since creation in human-readable form.
 */
const recordMemorySpike = (frequency = 10) => {
  const v8 = require('node:v8')
  const initial = translateHeapStats(v8.getHeapSpaceStatistics())
  const result = { ...initial }
  const collect = () =>
    updateMaxEachKey(result, translateHeapStats(v8.getHeapSpaceStatistics()))
  const interval = setInterval(collect, frequency)
  // Don't let the sampling timer keep the process alive if the caller
  // never invokes getResult().
  interval.unref()
  return {
    collect,
    getResult: () => {
      clearInterval(interval)
      collect() // final sample so the very latest state is included
      return toHumanReadable(diffEachKey(initial, result))
    },
  }
}
/**
 * Records `value` in a local per-benchmark log keyed by the current git
 * commit, and returns a summary of the current value plus all prior runs.
 *
 * @param {string} name - Benchmark name; determines the log file path.
 * @param {Object} value - The benchmark result to record.
 * @returns {Object} `{ current: value, ...previouslyLoggedRuns }`.
 */
const history = (name, value) => {
  const fs = require('node:fs')
  const { execSync } = require('node:child_process')
  const filePath = `.bench-${name}.log` // the intention is for the file to be gitignored and only referred locally
  let log = {}
  if (fs.existsSync(filePath)) {
    try {
      log = JSON.parse(fs.readFileSync(filePath, 'utf8'))
    } catch {
      // A corrupt or truncated log must not abort the benchmark run;
      // fall back to a fresh history instead.
      log = {}
    }
  }
  // Snapshot the summary before adding the new entry so the returned
  // object lists `current` alongside only the previously logged runs.
  const summary = {
    current: value,
    ...log,
  }
  const commit =
    'git:' + execSync('git rev-parse --short HEAD').toString().trim()
  log[commit] = value
  fs.writeFileSync(filePath, JSON.stringify(log, null, 2))
  return summary
}
/**
* Benchmarks a synchronous function.
*
* @param {Function} fn - The synchronous function to be benchmarked.
* @param {string} name - The name of the benchmark.
* @param {number} [iterations=1000] - The number of iterations to run the
* benchmark. Default is `1000`
* @returns {Object} - The benchmark result, including the average time per
* iteration and memory spike.
*/
exports.simpleBench = (fn, name, iterations = 1000) => {
const t0 = performance.now()
for (let i = 0; i < iterations; i++) {
fn()
}
const t1 = performance.now()
return {
current: {
[name]: (t1 - t0) / iterations,
},
}
}
/**
* Benchmarks a synchronous function.
*
* @param {Function} fn - The synchronous function to be benchmarked.
* @param {string} name - The name of the benchmark.
* @param {number} [iterations=1000] - The number of iterations to run the
* benchmark. Default is `1000`
* @returns {Object} - The benchmark result, including the average time per
* iteration and memory spike.
*/
exports.bench = (fn, name, iterations = 1000) => {
const s = recordMemorySpike()
const t0 = performance.now()
for (let i = 0; i < iterations; i++) {
fn()
s.collect()
}
const t1 = performance.now()
return history(name, {
[name]: (t1 - t0) / iterations,
MEMORY_SPIKE: s.getResult(),
})
}
/**
* Benchmarks an asynchronous function.
*
* @param {Function} fn - The asynchronous function to be benchmarked.
* @param {string} name - The name of the benchmark.
* @param {number} [iterations=1000] - The number of iterations to run the
* benchmark. Default is `1000`
* @returns {Object} - The benchmark result, including the average time per
* iteration and memory spike.
*/
exports.benchAsync = async (fn, name, iterations = 1000) => {
const s = recordMemorySpike()
const t0 = performance.now()
for (let i = 0; i < iterations; i++) {
await fn()
s.collect()
}
const t1 = performance.now()
return history(name, {
[name]: (t1 - t0) / iterations,
MEMORY_SPIKE: s.getResult(),
})
}
/**
* Benchmarks multiple asynchronous function calls using Promise.all.
*
* @param {Function} fn - The asynchronous function to be benchmarked.
* @param {string} name - The name of the benchmark.
* @param {number} [iterations=1000] - The number of iterations to run the
* benchmark. Default is `1000`
* @returns {Object} - The benchmark result, including the average time per
* iteration and memory spike.
*/
exports.benchAsyncAll = async (fn, name, iterations = 1000) => {
const s = recordMemorySpike()
const calls = new Array(iterations).fill(0)
const t0 = performance.now()
await Promise.all(calls.map(() => fn()))
const t1 = performance.now()
return history(name, {
[name]: (t1 - t0) / iterations,
MEMORY_SPIKE: s.getResult(),
})
}
// Exported so callers can measure memory spikes around arbitrary code,
// not only via the bench helpers in this module.
exports.recordMemorySpike = recordMemorySpike