Skip to content

Commit

Permalink
Improve error handling of streams
Browse files Browse the repository at this point in the history
  • Loading branch information
ehmicky committed Jun 3, 2019
1 parent 0bd5596 commit 7db09f0
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 8 deletions.
23 changes: 18 additions & 5 deletions index.js
Expand Up @@ -128,21 +128,24 @@ function getStream(process, stream, {encoding, buffer, maxBuffer}) {
ret = _getStream.buffer(process[stream], {maxBuffer});
}

if (stream === 'all') {
return ret.catch(() => {});
}

return ret.catch(error => {
error.stream = stream;
error.message = `${stream} ${error.message}`;
throw error;
});
}

function makeError(result, options) {
const {stdout, stderr, signal} = result;
let {error} = result;
const {code, joinedCommand, timedOut, isCanceled, killed, parsed: {options: {timeout}}} = options;
let {error, error: {stream} = {}} = result;
const {code, joinedCommand, timedOut, isCanceled, killed, parsed, parsed: {options: {timeout}}} = options;

const [exitCodeName, exitCode] = getCode(result, code);

const prefix = getErrorPrefix({timedOut, timeout, signal, exitCodeName, exitCode, isCanceled});
const prefix = getErrorPrefix({timedOut, timeout, signal, exitCodeName, exitCode, isCanceled, stream});
const message = `Command ${prefix}: ${joinedCommand}`;

if (error instanceof Error) {
Expand All @@ -162,6 +165,11 @@ function makeError(result, options) {
error.all = result.all;
}

if (stream !== undefined && error.bufferedData) {
error[stream] = handleOutput(parsed.options, error.bufferedData);
delete error.bufferedData;
}

error.failed = true;
error.timedOut = timedOut;
error.isCanceled = isCanceled;
Expand All @@ -185,7 +193,7 @@ function getCode({error = {}}, code) {
return [];
}

function getErrorPrefix({timedOut, timeout, signal, exitCodeName, exitCode, isCanceled}) {
function getErrorPrefix({timedOut, timeout, signal, exitCodeName, exitCode, isCanceled, stream}) {
if (timedOut) {
return `timed out after ${timeout} milliseconds`;
}
Expand All @@ -198,6 +206,10 @@ function getErrorPrefix({timedOut, timeout, signal, exitCodeName, exitCode, isCa
return `was killed with ${signal}`;
}

if (stream !== undefined) {
return `'${stream}' error`;
}

if (exitCode !== undefined) {
return `failed with exit code ${exitCode} (${exitCodeName})`;
}
Expand Down Expand Up @@ -303,6 +315,7 @@ const execa = (file, args, options) => {

if (spawned.stdin) {
spawned.stdin.on('error', error => {
error.stream = 'stdin';
reject(error);
});
}
Expand Down
14 changes: 11 additions & 3 deletions test.js
Expand Up @@ -253,6 +253,12 @@ test('input option can be a Buffer - sync', t => {
t.is(stdout, 'testing12');
});

// An `error` event emitted on the child's stdin stream should reject the
// promise returned by execa, with the `Command 'stdin' error` prefix followed
// by the original error message.
test('stdin errors are handled', async t => {
const subprocess = execa('noop');
subprocess.stdin.emit('error', new Error('test'));
await t.throwsAsync(subprocess, /Command 'stdin' error.*\ntest/);
});

test('child process errors are handled', async t => {
const child = execa('noop');
child.emit('error', new Error('test'));
Expand Down Expand Up @@ -301,13 +307,15 @@ test('child_process.spawnSync() errors are propagated', t => {
});

// Exceeding `maxBuffer` on stdout must reject, and the buffered data captured
// before the limit was hit must still be exposed on the error.
test('maxBuffer affects stdout', async t => {
// NOTE(review): the next assertion and the `const {stdout}` assertion below
// appear to be the pre-change and post-change versions of the same line from
// a diff (old message `stdout maxBuffer exceeded` vs new prefix
// `max-buffer stdout`) — confirm only one of them belongs in this test.
await t.throwsAsync(execa('max-buffer', ['stdout', '11'], {maxBuffer: 10}), /stdout maxBuffer exceeded/);
// Exactly at the limit: no rejection.
await t.notThrowsAsync(execa('max-buffer', ['stdout', '10'], {maxBuffer: 10}));
// One byte over the limit: rejects, but the first 10 buffered bytes are kept.
const {stdout} = await t.throwsAsync(execa('max-buffer', ['stdout', '11'], {maxBuffer: 10}), /max-buffer stdout/);
t.is(stdout, '.'.repeat(10));
});

// Mirror of the stdout test above, for the stderr stream.
test('maxBuffer affects stderr', async t => {
// NOTE(review): this assertion and the `const {stderr}` assertion below look
// like the removed/added pair from a diff (old message vs new
// `max-buffer stderr` prefix, and limits 12 vs 10) — confirm only one belongs.
await t.throwsAsync(execa('max-buffer', ['stderr', '13'], {maxBuffer: 12}), /stderr maxBuffer exceeded/);
await t.notThrowsAsync(execa('max-buffer', ['stderr', '12'], {maxBuffer: 12}));
// Exactly at the limit: no rejection.
await t.notThrowsAsync(execa('max-buffer', ['stderr', '10'], {maxBuffer: 10}));
// One byte over the limit: rejects, but the first 10 buffered bytes are kept.
const {stderr} = await t.throwsAsync(execa('max-buffer', ['stderr', '11'], {maxBuffer: 10}), /max-buffer stderr/);
t.is(stderr, '.'.repeat(10));
});

test('do not buffer stdout when `buffer` set to `false`', async t => {
Expand Down

0 comments on commit 7db09f0

Please sign in to comment.