lib: add verbose option (#500)
RafaelGSS committed Nov 23, 2023
1 parent d7c98b0 commit cd74688
Showing 4 changed files with 98 additions and 50 deletions.
README.md: 2 additions & 0 deletions
@@ -136,6 +136,8 @@ Available options:
     Print connection errors to stderr.
 -v/--version
     Print the version number.
+-V/--verbose
+    Print the table with results. default: true.
 -h/--help
     Print this menu.
 ```
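Since the option defaults to `true`, existing CLI output is unchanged. The diff does not spell out how to turn it off; because `verbose` is registered as a minimist-style boolean in the parser change below, `--no-verbose` would presumably be the way to disable the tables from the command line.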
autocannon.js: 4 additions & 2 deletions
@@ -71,6 +71,7 @@ const alias = {
   renderProgressBar: 'progress',
   renderStatusCodes: 'statusCodes',
   title: 'T',
+  verbose: 'V',
   version: 'v',
   forever: 'f',
   idReplacement: 'I',
@@ -98,12 +99,13 @@ const defaults = {
   idReplacement: false,
   excludeErrorStats: false,
   debug: false,
-  workers: 0
+  workers: 0,
+  verbose: true
 }
 
 function parseArguments (argvs) {
   let argv = subarg(argvs, {
-    boolean: ['json', 'n', 'help', 'renderLatencyTable', 'renderProgressBar', 'renderStatusCodes', 'forever', 'idReplacement', 'excludeErrorStats', 'onPort', 'debug', 'ignoreCoordinatedOmission'],
+    boolean: ['json', 'n', 'help', 'renderLatencyTable', 'renderProgressBar', 'renderStatusCodes', 'forever', 'idReplacement', 'excludeErrorStats', 'onPort', 'debug', 'ignoreCoordinatedOmission', 'verbose'],
     alias,
     default: defaults,
     '--': true
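For programmatic use, the same option can be handed to the printer directly. A minimal sketch, assuming the internal printer keeps the `(result, opts)` signature exercised by the new test and can be required from `lib/printResult.js` (the deep require path and export shape are assumptions, not part of this diff):

```js
'use strict'

const autocannon = require('autocannon')
// Assumed path and export shape for the internal printer touched by this commit.
const printResult = require('autocannon/lib/printResult')

// The URL is a placeholder for a locally running server.
autocannon({ url: 'http://localhost:3000', connections: 10, duration: 2 }, (err, result) => {
  if (err) throw err

  // Default behaviour: latency, Req/Sec and Bytes/Sec tables plus the closing summary line.
  process.stderr.write(printResult(result, { verbose: true }))

  // With the new option turned off, the tables are skipped and only the trailing summary remains.
  process.stderr.write(printResult(result, { verbose: false }))
})
```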
lib/printResult.js: 50 additions & 48 deletions
@@ -11,65 +11,67 @@ const defaults = {
   // use stderr as its progressBar's default
   outputStream: process.stderr,
   renderResultsTable: true,
-  renderLatencyTable: false
+  renderLatencyTable: false,
+  verbose: true
 }
 
 const printResult = (result, opts) => {
   opts = Object.assign({}, defaults, opts)
   let strResult = ''
 
-  const chalk = new Chalk.Instance(testColorSupport({ stream: opts.outputStream, alwaysReturn: true }))
+  if (opts.verbose) {
+    const chalk = new Chalk.Instance(testColorSupport({ stream: opts.outputStream, alwaysReturn: true }))
 
-  const shortLatency = new Table({
-    head: asColor(chalk.cyan, ['Stat', '2.5%', '50%', '97.5%', '99%', 'Avg', 'Stdev', 'Max'])
-  })
-  shortLatency.push(asLowRow(chalk.bold('Latency'), asMs(result.latency)))
-  logToLocalStr('\n' + shortLatency.toString())
+    const shortLatency = new Table({
+      head: asColor(chalk.cyan, ['Stat', '2.5%', '50%', '97.5%', '99%', 'Avg', 'Stdev', 'Max'])
+    })
+    shortLatency.push(asLowRow(chalk.bold('Latency'), asMs(result.latency)))
+    logToLocalStr('\n' + shortLatency.toString())
 
-  const requests = new Table({
-    head: asColor(chalk.cyan, ['Stat', '1%', '2.5%', '50%', '97.5%', 'Avg', 'Stdev', 'Min'])
-  })
+    const requests = new Table({
+      head: asColor(chalk.cyan, ['Stat', '1%', '2.5%', '50%', '97.5%', 'Avg', 'Stdev', 'Min'])
+    })
 
-  requests.push(asHighRow(chalk.bold('Req/Sec'), asNumber(result.requests)))
-  requests.push(asHighRow(chalk.bold('Bytes/Sec'), asBytes(result.throughput)))
-  logToLocalStr(requests.toString())
+    requests.push(asHighRow(chalk.bold('Req/Sec'), asNumber(result.requests)))
+    requests.push(asHighRow(chalk.bold('Bytes/Sec'), asBytes(result.throughput)))
+    logToLocalStr(requests.toString())
 
-  if (opts.renderStatusCodes === true) {
-    const statusCodeStats = new Table({
-      head: asColor(chalk.cyan, ['Code', 'Count'])
-    })
-    Object.keys(result.statusCodeStats).forEach(statusCode => {
-      const stats = result.statusCodeStats[statusCode]
-      const colorize = colorizeByStatusCode(chalk, statusCode)
-      statusCodeStats.push([colorize(statusCode), stats.count])
-    })
-    logToLocalStr(statusCodeStats.toString())
-  }
-  logToLocalStr('')
-  if (result.sampleInt === 1000) {
-    logToLocalStr('Req/Bytes counts sampled once per second.')
-  } else {
-    logToLocalStr('Req/Bytes counts sampled every ' + result.sampleInt / 1000 + ' seconds.')
-  }
-  logToLocalStr('# of samples: ' + result.samples)
-  logToLocalStr('')
+    if (opts.renderStatusCodes === true) {
+      const statusCodeStats = new Table({
+        head: asColor(chalk.cyan, ['Code', 'Count'])
+      })
+      Object.keys(result.statusCodeStats).forEach(statusCode => {
+        const stats = result.statusCodeStats[statusCode]
+        const colorize = colorizeByStatusCode(chalk, statusCode)
+        statusCodeStats.push([colorize(statusCode), stats.count])
+      })
+      logToLocalStr(statusCodeStats.toString())
+    }
+    logToLocalStr('')
+    if (result.sampleInt === 1000) {
+      logToLocalStr('Req/Bytes counts sampled once per second.')
+    } else {
+      logToLocalStr('Req/Bytes counts sampled every ' + result.sampleInt / 1000 + ' seconds.')
+    }
+    logToLocalStr('# of samples: ' + result.samples)
+    logToLocalStr('')
 
-  if (opts.renderLatencyTable) {
-    const latencies = new Table({
-      head: asColor(chalk.cyan, ['Percentile', 'Latency (ms)'])
-    })
-    percentiles.map((perc) => {
-      const key = `p${perc}`.replace('.', '_')
-      return [
-        chalk.bold('' + perc),
-        result.latency[key]
-      ]
-    }).forEach(row => {
-      latencies.push(row)
-    })
-    logToLocalStr(latencies.toString())
-    logToLocalStr('')
-  }
+    if (opts.renderLatencyTable) {
+      const latencies = new Table({
+        head: asColor(chalk.cyan, ['Percentile', 'Latency (ms)'])
+      })
+      percentiles.map((perc) => {
+        const key = `p${perc}`.replace('.', '_')
+        return [
+          chalk.bold('' + perc),
+          result.latency[key]
+        ]
+      }).forEach(row => {
+        latencies.push(row)
+      })
+      logToLocalStr(latencies.toString())
+      logToLocalStr('')
+    }
+  }
 
   if (result.non2xx) {
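Net effect of the hunk above: everything that builds the latency, Req/Sec, Bytes/Sec, status-code and percentile tables is now guarded by `opts.verbose`, while the code below the guard (the non-2xx/error handling and the closing summary, truncated here) presumably still runs unconditionally.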
test/printResult.test.js: 42 additions & 0 deletions
@@ -97,3 +97,45 @@ test('verify amount of total requests', (t) => {
   const expectedRequests = connections * pipelining
   t.match(output.includes(`${expectedRequests} requests in`), true)
 })
+
+test('should not print when verbose(V=0) is false', (t) => {
+  t.plan(1)
+
+  const connections = 10
+  const pipelining = 2
+  const result = {
+    connections,
+    pipelining,
+    latency: {},
+    requests: {
+      sent: connections * pipelining
+    },
+    throughput: {
+      average: 3319,
+      mean: 3319,
+      stddev: 0,
+      min: 3318,
+      max: 3318,
+      total: 3318,
+      p0_001: 3319,
+      p0_01: 3319,
+      p0_1: 3319,
+      p1: 3319,
+      p2_5: 3319,
+      p10: 3319,
+      p25: 3319,
+      p50: 3319,
+      p75: 3319,
+      p90: 3319,
+      p97_5: 3319,
+      p99: 3319,
+      p99_9: 3319,
+      p99_99: 3319,
+      p99_999: 3319
+    }
+  }
+
+  // act
+  const output = printResult(result, { verbose: false })
+  t.ok(output.split('\n').length === 2)
+})
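The assertion leans on the new guard: with `verbose: false` no tables are rendered, so the returned string should consist of just the one-line request summary plus its trailing newline, which is why splitting on `'\n'` yields exactly two entries. That reading of the expected output is an inference from the diff, not something the commit states.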
