diff --git a/benchmarks/benchmark.js b/benchmarks/benchmark.js
index 5ceb83ecfb6..c321a5000cb 100644
--- a/benchmarks/benchmark.js
+++ b/benchmarks/benchmark.js
@@ -10,7 +10,7 @@ const { WritableStream } = require('stream/web')
 
 const { Pool, Client, fetch, Agent, setGlobalDispatcher } = require('..')
 
-const iterations = (parseInt(process.env.SAMPLES, 10) || 100) + 1
+const iterations = (parseInt(process.env.SAMPLES, 10) || 10) + 1
 const errorThreshold = parseInt(process.env.ERROR_TRESHOLD, 10) || 3
 const connections = parseInt(process.env.CONNECTIONS, 10) || 50
 const pipelining = parseInt(process.env.PIPELINING, 10) || 10
@@ -129,6 +129,8 @@ function printResults (results) {
     ]
   })
 
+  console.log(results)
+
   // Add the header row
   rows.unshift(['Tests', 'Samples', 'Result', 'Tolerance', 'Difference with slowest'])
 
@@ -162,45 +164,26 @@ function printResults (results) {
   })
 }
 
-cronometro(
-  {
-    'http - no keepalive' () {
-      return makeParallelRequests(resolve => {
-        http.get(httpNoKeepAliveOptions, res => {
-          res
-            .pipe(
-              new Writable({
-                write (chunk, encoding, callback) {
-                  callback()
-                }
-              })
-            )
-            .on('finish', resolve)
-        })
-      })
-    },
-    'http - keepalive' () {
-      return makeParallelRequests(resolve => {
-        http.get(httpKeepAliveOptions, res => {
-          res
-            .pipe(
-              new Writable({
-                write (chunk, encoding, callback) {
-                  callback()
-                }
-              })
-            )
-            .on('finish', resolve)
-        })
+const experiments = {
+  'http - no keepalive' () {
+    return makeParallelRequests(resolve => {
+      http.get(httpNoKeepAliveOptions, res => {
+        res
+          .pipe(
+            new Writable({
+              write (chunk, encoding, callback) {
+                callback()
+              }
+            })
+          )
+          .on('finish', resolve)
       })
-    },
-    'undici - pipeline' () {
-      return makeParallelRequests(resolve => {
-        dispatcher
-          .pipeline(undiciOptions, data => {
-            return data.body
-          })
-          .end()
+    })
+  },
+  'http - keepalive' () {
+    return makeParallelRequests(resolve => {
+      http.get(httpKeepAliveOptions, res => {
+        res
           .pipe(
             new Writable({
               write (chunk, encoding, callback) {
@@ -210,48 +193,73 @@ cronometro(
           )
           .on('finish', resolve)
       })
-    },
-    'undici - request' () {
-      return makeParallelRequests(resolve => {
-        dispatcher.request(undiciOptions).then(({ body }) => {
-          body
-            .pipe(
-              new Writable({
-                write (chunk, encoding, callback) {
-                  callback()
-                }
-              })
-            )
-            .on('finish', resolve)
+    })
+  },
+  'undici - pipeline' () {
+    return makeParallelRequests(resolve => {
+      dispatcher
+        .pipeline(undiciOptions, data => {
+          return data.body
         })
-      })
-    },
-    'undici - stream' () {
-      return makeParallelRequests(resolve => {
-        return dispatcher
-          .stream(undiciOptions, () => {
-            return new Writable({
+        .end()
+        .pipe(
+          new Writable({
+            write (chunk, encoding, callback) {
+              callback()
+            }
+          })
+        )
+        .on('finish', resolve)
+    })
+  },
+  'undici - request' () {
+    return makeParallelRequests(resolve => {
+      dispatcher.request(undiciOptions).then(({ body }) => {
+        body
+          .pipe(
+            new Writable({
               write (chunk, encoding, callback) {
                 callback()
               }
             })
-          })
-          .then(resolve)
-      })
-    },
-    'undici - dispatch' () {
-      return makeParallelRequests(resolve => {
-        dispatcher.dispatch(undiciOptions, new SimpleRequest(resolve))
+          )
+          .on('finish', resolve)
       })
-    },
-    'undici - fetch' () {
-      return makeParallelRequests(resolve => {
-        fetch(dest.url).then(res => {
-          res.body.pipeTo(new WritableStream({ write () {}, close () { resolve() } }))
+    })
+  },
+  'undici - stream' () {
+    return makeParallelRequests(resolve => {
+      return dispatcher
+        .stream(undiciOptions, () => {
+          return new Writable({
+            write (chunk, encoding, callback) {
+              callback()
+            }
+          })
         })
-      })
-    }
+        .then(resolve)
+    })
   },
+  'undici - dispatch' () {
+    return makeParallelRequests(resolve => {
+      dispatcher.dispatch(undiciOptions, new SimpleRequest(resolve))
+    })
+  }
+}
+
+if (process.env.PORT) {
+  // fetch does not support the socket
+  experiments['undici - fetch'] = () => {
+    return makeParallelRequests(resolve => {
+      fetch(dest.url).then(res => {
+        res.body.pipeTo(new WritableStream({ write () {}, close () { resolve() } }))
+      }).catch(console.log)
+    })
+  }
+}
+
+cronometro(
+  experiments,
   {
     iterations,
     errorThreshold,
diff --git a/benchmarks/wait.js b/benchmarks/wait.js
index e2cefa51e67..771f9f2626c 100644
--- a/benchmarks/wait.js
+++ b/benchmarks/wait.js
@@ -6,7 +6,17 @@ const waitOn = require('wait-on')
 
 const socketPath = path.join(os.tmpdir(), 'undici.sock')
 
+let resources
+if (process.env.PORT) {
+  resources = [`http-get://localhost:${process.env.PORT}/`]
+} else {
+  resources = [`http-get://unix:${socketPath}:/`]
+}
+
 waitOn({
-  resources: [`http-get://unix:${socketPath}:/`],
+  resources,
   timeout: 5000
-}).catch(() => process.exit(1))
+}).catch((err) => {
+  console.error(err)
+  process.exit(1)
+})
diff --git a/package.json b/package.json
index f0fd80ce7ca..3be6fe34a5a 100644
--- a/package.json
+++ b/package.json
@@ -54,10 +54,10 @@
     "test:typescript": "tsd",
     "coverage": "nyc --reporter=text --reporter=html npm run test",
     "coverage:ci": "nyc --reporter=lcov npm run test",
-    "bench": "concurrently -k -s first npm:bench:server npm:bench:run",
+    "bench": "PORT=3042 concurrently -k -s first npm:bench:server npm:bench:run",
     "bench:server": "node benchmarks/server.js",
     "prebench:run": "node benchmarks/wait.js",
-    "bench:run": "CONNECTIONS=1 node --experimental-wasm-simd benchmarks/benchmark.js && CONNECTIONS=50 node --experimental-wasm-simd benchmarks/benchmark.js",
+    "bench:run": "CONNECTIONS=1 node --experimental-wasm-simd benchmarks/benchmark.js; CONNECTIONS=50 node --experimental-wasm-simd benchmarks/benchmark.js",
     "serve:website": "docsify serve .",
     "prepare": "husky install",
     "fuzz": "jsfuzz test/fuzzing/fuzz.js corpus"