forked from mcollina/autocannon
/
progressTracker.js
228 lines (192 loc) · 6.72 KB
/
progressTracker.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
'use strict'
const ProgressBar = require('progress')
const Table = require('cli-table3')
const Chalk = require('chalk')
const testColorSupport = require('color-support')
const prettyBytes = require('pretty-bytes')
const format = require('./format')
const percentiles = require('hdr-histogram-percentiles-obj').percentiles
// Rendering defaults merged under any user-supplied opts in track().
const defaults = {
  // write to stderr, matching the progress bar package's default stream
  outputStream: process.stderr,
  renderProgressBar: true,
  renderResultsTable: true,
  renderLatencyTable: false
}
/**
 * Attach progress-bar and results-table rendering to an autocannon tracker.
 *
 * Listens for the instance's 'start', 'tick', 'response', 'reqError' and
 * 'done' events, draws a duration- or amount-based progress bar while the
 * run is live, and prints latency/req/throughput tables when it finishes.
 *
 * @param {Object} instance - autocannon tracker (an EventEmitter carrying
 *   its run options on `instance.opts`)
 * @param {Object} [opts] - rendering options; merged over `defaults`
 *   (outputStream, renderProgressBar, renderResultsTable,
 *   renderLatencyTable, progressBarString)
 * @throws {Error} when no instance is given
 */
function track (instance, opts) {
  if (!instance) {
    throw new Error('instance required for tracking')
  }

  opts = Object.assign({}, defaults, opts)

  // colourise only if the chosen output stream supports it
  const chalk = new Chalk.Instance(testColorSupport({ stream: opts.outputStream, alwaysReturn: true }))
  // this default needs to be set after chalk is setup, because chalk is now local to this func
  opts.progressBarString = opts.progressBarString || `${chalk.green('running')} [:bar] :percent`

  const iOpts = instance.opts
  let durationProgressBar
  let amountProgressBar
  let addedListeners = false

  instance.on('start', () => {
    if (opts.renderProgressBar) {
      const socketPath = iOpts.socketPath ? ` (${iOpts.socketPath})` : ''
      let msg = `${iOpts.connections} connections`
      if (iOpts.pipelining > 1) {
        msg += ` with ${iOpts.pipelining} pipelining factor`
      }
      // an explicit request `amount` takes precedence over `duration`
      if (!iOpts.amount) {
        logToStream(`Running ${iOpts.duration}s test @ ${iOpts.url}${socketPath}\n${msg}\n`)
        durationProgressBar = trackDuration(instance, opts, iOpts)
      } else {
        logToStream(`Running ${iOpts.amount} requests test @ ${iOpts.url}${socketPath}\n${msg}\n`)
        amountProgressBar = trackAmount(instance, opts, iOpts)
      }
      addListener()
    }
  })

  function addListener () {
    // add listeners for progress bar to instance here so they aren't
    // added on restarting, causing listener leaks
    if (addedListeners) {
      return
    }
    addedListeners = true
    // note: Attempted to curry the functions below, but that breaks the functionality
    // as they use the scope/closure of the progress bar variables to allow them to be reset
    if (opts.outputStream.isTTY) {
      if (!iOpts.amount) { // duration progress bar
        instance.on('tick', () => { durationProgressBar.tick() })
        // jump the bar to completion on finish/interrupt
        instance.on('done', () => { durationProgressBar.tick(iOpts.duration - 1) })
        process.once('SIGINT', () => { durationProgressBar.tick(iOpts.duration - 1) })
      } else { // amount progress bar
        instance.on('response', () => { amountProgressBar.tick() })
        instance.on('reqError', () => { amountProgressBar.tick() })
        instance.on('done', () => { amountProgressBar.tick(iOpts.amount - 1) })
        process.once('SIGINT', () => { amountProgressBar.tick(iOpts.amount - 1) })
      }
    }
  }

  instance.on('done', (result) => {
    // the code below this `if` just renders the results table...
    // if the user doesn't want to render the table, we can just return early
    if (!opts.renderResultsTable) return

    // latency table: low values are better
    const shortLatency = new Table({
      head: asColor(chalk.cyan, ['Stat', '2.5%', '50%', '97.5%', '99%', 'Avg', 'Stdev', 'Max'])
    })
    shortLatency.push(asLowRow(chalk.bold('Latency'), asMs(result.latency)))
    logToStream(shortLatency.toString())

    // req/sec + throughput table: high values are better
    const requests = new Table({
      head: asColor(chalk.cyan, ['Stat', '1%', '2.5%', '50%', '97.5%', 'Avg', 'Stdev', 'Min'])
    })
    requests.push(asHighRow(chalk.bold('Req/Sec'), result.requests))
    requests.push(asHighRow(chalk.bold('Bytes/Sec'), asBytes(result.throughput)))
    logToStream(requests.toString())
    logToStream('')
    logToStream('Req/Bytes counts sampled once per second.\n')

    // optional extended latency table: one row per tracked percentile
    if (opts.renderLatencyTable) {
      const latencies = new Table({
        head: asColor(chalk.cyan, ['Percentile', 'Latency (ms)'])
      })
      percentiles.map((perc) => {
        // percentile keys on the result are spelled e.g. `p99_9` for 99.9
        const key = `p${perc}`.replace('.', '_')
        return [
          chalk.bold('' + perc),
          result.latency[key]
        ]
      }).forEach(row => {
        latencies.push(row)
      })
      logToStream(latencies.toString())
      logToStream('')
    }

    // summary footer lines
    if (result.non2xx) {
      logToStream(`${result['2xx']} 2xx responses, ${result.non2xx} non 2xx responses`)
    }
    logToStream(`${format(result.requests.total)} requests in ${result.duration}s, ${prettyBytes(result.throughput.total)} read`)
    if (result.errors) {
      logToStream(`${format(result.errors)} errors (${format(result.timeouts)} timeouts)`)
    }
    if (result.mismatches) {
      logToStream(`${format(result.mismatches)} requests with mismatched body`)
    }
  })

  // write a line (with trailing newline) to the configured output stream
  function logToStream (msg) {
    opts.outputStream.write(msg + '\n')
  }
}
/**
 * Create the duration-based progress bar (one tick per elapsed second).
 * Returns undefined on non-TTY streams to avoid garbled output; see
 * https://github.com/mcollina/autocannon/issues/60
 */
function trackDuration (instance, opts, iOpts) {
  if (!opts.outputStream.isTTY) return

  const bar = new ProgressBar(opts.progressBarString, {
    stream: opts.outputStream,
    total: iOpts.duration,
    incomplete: ' ',
    width: 20,
    clear: true
  })
  bar.tick(0) // draw the empty bar immediately
  return bar
}
/**
 * Create the amount-based progress bar (one tick per completed request).
 * Returns undefined on non-TTY streams to avoid garbled output; see
 * https://github.com/mcollina/autocannon/issues/60
 */
function trackAmount (instance, opts, iOpts) {
  if (!opts.outputStream.isTTY) return

  const bar = new ProgressBar(opts.progressBarString, {
    stream: opts.outputStream,
    total: iOpts.amount,
    incomplete: ' ',
    width: 20,
    clear: true
  })
  bar.tick(0) // draw the empty bar immediately
  return bar
}
// Build a results-table row for a stat where lower values are better
// (latency): 2.5/50/97.5/99 percentiles, then average, stdev and max.
function asLowRow (name, stat) {
  // max may already be a formatted string (e.g. from asMs); otherwise
  // truncate it to two decimal places
  const max = typeof stat.max === 'string'
    ? stat.max
    : Math.floor(stat.max * 100) / 100
  return [name, stat.p2_5, stat.p50, stat.p97_5, stat.p99, stat.average, stat.stddev, max]
}
// Build a results-table row for a stat where higher values are better
// (req/sec, throughput): 1/2.5/50/97.5 percentiles, then average, stdev
// and min.
function asHighRow (name, stat) {
  // min may already be a formatted string (e.g. from asBytes); otherwise
  // truncate it to two decimal places
  const min = typeof stat.min === 'string'
    ? stat.min
    : Math.floor(stat.min * 100) / 100
  return [name, stat.p1, stat.p2_5, stat.p50, stat.p97_5, stat.average, stat.stddev, min]
}
// Apply a colourising function (e.g. chalk.cyan) to every cell of a row.
function asColor (colorise, row) {
  const coloured = []
  for (const cell of row) {
    coloured.push(colorise(cell))
  }
  return coloured
}
// Render every latency stat as an "N ms" string. `max` is additionally
// truncated to two decimal places unless it is already a string.
// Returns a prototype-less object so stat keys can't collide with
// Object.prototype properties.
function asMs (stat) {
  const result = Object.create(null)
  for (const [key, value] of Object.entries(stat)) {
    result[key] = `${value} ms`
  }
  result.max = typeof stat.max === 'string' ? stat.max : `${Math.floor(stat.max * 100) / 100} ms`
  return result
}
// Pretty-print throughput stats as human-readable byte strings.
// The returned object inherits from `stat`, so any key not overridden
// here still resolves (through the prototype chain) to the raw value.
function asBytes (stat) {
  const result = Object.create(stat)
  for (const p of percentiles) {
    // percentile keys are spelled e.g. `p99_9` for 99.9
    const key = `p${p}`.replace('.', '_')
    result[key] = prettyBytes(stat[key])
  }
  for (const field of ['average', 'stddev', 'max', 'min']) {
    result[field] = prettyBytes(stat[field])
  }
  return result
}
// Single export: attach progress/result rendering to an autocannon instance.
module.exports = track