diff --git a/.travis.yml b/.travis.yml index 2bc4482481..01338c067a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,7 +14,7 @@ matrix: env: CMD=test - node_js: 12 env: CMD=test - - node_js: stable + - node_js: 12 env: CMD=test-browsers addons: sauce_connect: true diff --git a/README.md b/README.md index 8bfcaf3915..19117c1a05 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ npm install --save readable-stream This package is a mirror of the streams implementations in Node.js. -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.15.3/docs/api/stream.html). +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). diff --git a/build/build.js b/build/build.js index b87e6b17b7..3d80f2b5b4 100755 --- a/build/build.js +++ b/build/build.js @@ -131,7 +131,7 @@ pump( //-------------------------------------------------------------------- // Discover, grab and process all test-stream* files on the given release - glob(path.join(testsrcurl, 'test-stream*.js'), function (err, list) { + glob(path.join(testsrcurl, 'test-@(stream|readable)*.js'), function (err, list) { if (err) { throw err } @@ -153,7 +153,6 @@ pump( }) }) - //-------------------------------------------------------------------- // Grab the nodejs/node test/common.js diff --git a/build/files.js b/build/files.js index 7276dfc25a..96d5210947 100644 --- a/build/files.js +++ b/build/files.js @@ -208,6 +208,14 @@ function CorkedRequest(state) { /return createReadableStreamAsyncIterator\(this\);\n};/m , 'return createReadableStreamAsyncIterator(this);\n};\n}' ] + , noAsyncIteratorsFrom1 = [ + /Readable\.from = function *\(iterable, opts\) \{/g + , 'if (typeof Symbol === \'function\' ) {\nReadable.from = function (iterable, opts) {' + ] + , noAsyncIteratorsFrom2 = [ + /return from\(Readable, iterable, opts\);\n};/m + , 'return from(Readable, iterable, opts);\n};\n}' + ] , once = [ /const \{ once \} = require\('internal\/util'\);/ , 'function once(callback) { let called = false; return function(...args) { if (called) return; called = true; callback(...args); }; }' @@ -257,6 +265,8 @@ module.exports['_stream_readable.js'] = [ , numberIE11 , noAsyncIterators1 , noAsyncIterators2 + , noAsyncIteratorsFrom1 + , noAsyncIteratorsFrom2 ] module.exports['_stream_transform.js'] = [ @@ -358,3 +368,14 @@ module.exports['internal/streams/pipeline.js'] = [ 'require(\'.\/end-of-stream\')' ] ] + +module.exports['internal/streams/from.js'] = [ + errorsTwoLevel + , [ + /if \(iterable && iterable\[Symbol.asyncIterator\]\)/ + , `if (iterable && typeof iterable.next === 'function') { + iterator = iterable + } +else if (iterable && iterable[Symbol.asyncIterator])` + ] +] diff --git a/build/test-replacements.js b/build/test-replacements.js index c1aee8330c..eebdec5d67 100644 --- a/build/test-replacements.js +++ b/build/test-replacements.js @@ -55,10 +55,14 @@ module.exports.all = [ ] , bufferShimFix , bufferStaticMethods - , [ + , [ /require\(['"]assert['"]\)/g , 'require(\'assert/\')' ] + , [ + /\/\/ Flags: .*/ + , '' + ] ] module.exports['test-stream2-basic.js'] = [ @@ -105,6 +109,14 @@ module.exports['common.js'] = [ , deepStrictEqual , catchES7 , catchES7OpenClose 
+ , [ + /require\('module'\)\.builtinModules\.includes\('worker_threads'\)/, + 'false' + ] + , [ + /process.argv.length === 2/, + 'false' + ] , [ /^( for \(var x in global\) \{|function leakedGlobals\(\) \{)$/m , ' /**/\n' @@ -452,3 +464,10 @@ module.exports['test-stream-readable-async-iterators.js'] = [ 'const AsyncIteratorPrototype = Object\.getPrototypeOf(function () {})' ] ] + +module.exports['test-readable-from.js'] = [ + [ + /const \{ once \} = require\('events'\);/ + , 'const once = require(\'events.once\');' + ] +] diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 33f478d7e8..192d451488 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -80,17 +80,16 @@ var _require$codes = require('../errors').codes, ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; - -var _require2 = require('../experimentalWarning'), - emitExperimentalWarning = _require2.emitExperimentalWarning; // Lazy loaded to improve the startup performance. + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. var StringDecoder; var createReadableStreamAsyncIterator; +var from; require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; function prependListener(emitter, event, fn) { @@ -144,7 +143,9 @@ function ReadableState(options, stream, isDuplex) { this.resumeScheduled = false; this.paused = true; // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; // has it been destroyed + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') + + this.autoDestroy = !!options.autoDestroy; // has it been destroyed this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. 
@@ -257,16 +258,16 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { if (!skipChunkCheck) er = chunkInvalid(state, chunk); if (er) { - stream.emit('error', er); + errorOrDestroy(stream, er); } else if (state.objectMode || chunk && chunk.length > 0) { if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { chunk = _uint8ArrayToBuffer(chunk); } if (addToFront) { - if (state.endEmitted) stream.emit('error', new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); } else if (state.ended) { - stream.emit('error', new ERR_STREAM_PUSH_AFTER_EOF()); + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); } else if (state.destroyed) { return false; } else { @@ -322,17 +323,32 @@ Readable.prototype.isPaused = function () { Readable.prototype.setEncoding = function (enc) { if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); // if setEncoding(null), decoder.encoding equals utf8 + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + + this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + + var p = this._readableState.buffer.head; + var content = ''; + + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + + this._readableState.buffer.clear(); - this._readableState.encoding = this._readableState.decoder.encoding; + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; return this; -}; // Don't raise the hwm > 8MB +}; // Don't raise the hwm > 1GB -var MAX_HWM = 0x800000; +var MAX_HWM = 0x40000000; function computeNewHighWaterMark(n) { if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. n = MAX_HWM; } else { // Get the next highest power of 2 to prevent increasing hwm excessively in @@ -449,7 +465,7 @@ Readable.prototype.read = function (n) { if (n > 0) ret = fromList(n, state);else ret = null; if (ret === null) { - state.needReadable = true; + state.needReadable = state.length <= state.highWaterMark; n = 0; } else { state.length -= n; @@ -469,6 +485,7 @@ Readable.prototype.read = function (n) { }; function onEofChunk(stream, state) { + debug('onEofChunk'); if (state.ended) return; if (state.decoder) { @@ -503,6 +520,7 @@ function onEofChunk(stream, state) { function emitReadable(stream) { var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); state.needReadable = false; if (!state.emittedReadable) { @@ -518,6 +536,7 @@ function emitReadable_(stream) { if (!state.destroyed && (state.length || state.ended)) { stream.emit('readable'); + state.emittedReadable = false; } // The stream needs another readable event if // 1. It is not flowing, as the flow mechanism will take // care of it. 
@@ -583,7 +602,7 @@ function maybeReadMore_(stream, state) { Readable.prototype._read = function (n) { - this.emit('error', new ERR_METHOD_NOT_IMPLEMENTED('_read()')); + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); }; Readable.prototype.pipe = function (dest, pipeOpts) { @@ -682,7 +701,7 @@ Readable.prototype.pipe = function (dest, pipeOpts) { debug('onerror', er); unpipe(); dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); } // Make sure our error handler is attached before userland ones. @@ -986,8 +1005,6 @@ Readable.prototype.wrap = function (stream) { if (typeof Symbol === 'function') { Readable.prototype[Symbol.asyncIterator] = function () { - emitExperimentalWarning('Readable[Symbol.asyncIterator]'); - if (createReadableStreamAsyncIterator === undefined) { createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); } @@ -1075,9 +1092,29 @@ function endReadableNT(state, stream) { state.endEmitted = true; stream.readable = false; stream.emit('end'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } } } +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + + return from(Readable, iterable, opts); + }; +} + function indexOf(xs, x) { for (var i = 0, l = xs.length; i < l; i++) { if (xs[i] === x) return i; diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js index b35447aedc..a2634d7c24 100644 --- a/lib/_stream_writable.js +++ b/lib/_stream_writable.js @@ -94,6 +94,8 @@ var _require$codes = require('../errors').codes, ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; + require('inherits')(Writable, Stream); function nop() {} @@ -173,7 +175,9 @@ function WritableState(options, stream, isDuplex) { this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; // count buffered requests + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') + + this.autoDestroy = !!options.autoDestroy; // count buffered requests this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always // one allocated and free to use, and we maintain at most two @@ -250,13 +254,13 @@ function Writable(options) { Writable.prototype.pipe = function () { - this.emit('error', new ERR_STREAM_CANNOT_PIPE()); + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); }; function writeAfterEnd(stream, cb) { var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); + errorOrDestroy(stream, er); process.nextTick(cb, er); } // Checks that a user-supplied chunk is valid, especially for the particular // mode the stream is in. 
Currently this means that `null` is never accepted @@ -273,7 +277,7 @@ function validChunk(stream, state, chunk, cb) { } if (er) { - stream.emit('error', er); + errorOrDestroy(stream, er); process.nextTick(cb, er); return false; } @@ -417,13 +421,13 @@ function onwriteError(stream, state, sync, er, cb) { process.nextTick(finishMaybe, stream, state); stream._writableState.errorEmitted = true; - stream.emit('error', er); + errorOrDestroy(stream, er); } else { // the caller expect this to happen before if // it is async cb(er); stream._writableState.errorEmitted = true; - stream.emit('error', er); // this can emit finish, but finish must + errorOrDestroy(stream, er); // this can emit finish, but finish must // always follow error finishMaybe(stream, state); @@ -587,7 +591,7 @@ function callFinal(stream, state) { state.pendingcb--; if (err) { - stream.emit('error', err); + errorOrDestroy(stream, err); } state.prefinished = true; @@ -618,6 +622,16 @@ function finishMaybe(stream, state) { if (state.pendingcb === 0) { state.finished = true; stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } } } diff --git a/lib/internal/streams/buffer_list.js b/lib/internal/streams/buffer_list.js index ebaf5c72aa..98fe97fccd 100644 --- a/lib/internal/streams/buffer_list.js +++ b/lib/internal/streams/buffer_list.js @@ -1,6 +1,8 @@ 'use strict'; -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; var ownKeys = Object.keys(source); if (typeof Object.getOwnPropertySymbols === 'function') { ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function (sym) { return Object.getOwnPropertyDescriptor(source, sym).enumerable; })); } ownKeys.forEach(function (key) { _defineProperty(target, key, source[key]); }); } return target; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? 
arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js index 63ae49928d..3268a16f3b 100644 --- a/lib/internal/streams/destroy.js +++ b/lib/internal/streams/destroy.js @@ -9,8 +9,13 @@ function destroy(err, cb) { if (readableDestroyed || writableDestroyed) { if (cb) { cb(err); - } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { - process.nextTick(emitErrorNT, this, err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } } return this; @@ -29,10 +34,13 @@ function destroy(err, cb) { this._destroy(err || null, function (err) { if (!cb && err) { - process.nextTick(emitErrorAndCloseNT, _this, err); - - if (_this._writableState) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); } } else if (cb) { process.nextTick(emitCloseNT, _this); @@ -79,7 +87,19 @@ function emitErrorNT(self, err) { self.emit('error', err); } +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} + module.exports = { destroy: destroy, - undestroy: undestroy + undestroy: undestroy, + errorOrDestroy: errorOrDestroy }; \ No newline at end of file diff --git a/lib/internal/streams/from-browser.js b/lib/internal/streams/from-browser.js new file mode 100644 index 0000000000..a4ce56f3c9 --- /dev/null +++ b/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js new file mode 100644 index 0000000000..6c41284416 --- /dev/null +++ b/lib/internal/streams/from.js @@ -0,0 +1,64 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } + +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; + +function from(Readable, iterable, opts) { + var iterator; + + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); // Reading boolean to protect against _read + // being called before last iteration completion. 
+ + var reading = false; + + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + + function next() { + return _next2.apply(this, arguments); + } + + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _ref = yield iterator.next(), + value = _ref.value, + done = _ref.done; + + if (done) { + readable.push(null); + } else if (readable.push((yield value))) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + + return readable; +} + +module.exports = from; \ No newline at end of file diff --git a/package.json b/package.json index 6c6bd3b901..d13e7e095e 100644 --- a/package.json +++ b/package.json @@ -20,6 +20,7 @@ "assert": "^1.4.0", "bl": "^2.0.0", "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", "glob": "^7.1.2", "gunzip-maybe": "^1.4.1", "hyperquest": "^2.1.3", @@ -55,6 +56,7 @@ "worker_threads": false, "./errors": "./errors-browser.js", "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" }, "nyc": { diff --git a/test/common/README.md b/test/common/README.md index 7bf22e3c8f..0e773debfc 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -17,6 +17,7 @@ This directory contains modules used to test the Node.js implementation. * [Heap dump checker module](#heap-dump-checker-module) * [HTTP2 module](#http2-module) * [Internet module](#internet-module) +* [tick module](#tick-module) * [tmpdir module](#tmpdir-module) * [WPT module](#wpt-module) @@ -748,6 +749,16 @@ a full `setImmediate()` invocation passes. should not be in scope when `listener.ongc()` is created. +## tick Module + +The `tick` module provides a helper function that can be used to call a callback +after a given number of event loop "ticks". + +### tick(x, cb) + +* `x` [<number>] Number of event loop "ticks". +* `cb` [<Function>] A callback function. + ## tmpdir Module The `tmpdir` module supports the use of a temporary directory for testing. 
diff --git a/test/common/arraystream.js b/test/common/arraystream.js index 903c10377b..167f927dff 100644 --- a/test/common/arraystream.js +++ b/test/common/arraystream.js @@ -32,9 +32,6 @@ var objectKeys = objectKeys || function (obj) { var _require = require('../../'), Stream = _require.Stream; -var _require2 = require('util'), - inherits = _require2.inherits; - function noop() {} // A stream to push an array into a REPL @@ -48,7 +45,8 @@ function ArrayStream() { }; } -inherits(ArrayStream, Stream); +Object.setPrototypeOf(ArrayStream.prototype, Stream.prototype); +Object.setPrototypeOf(ArrayStream, Stream); ArrayStream.prototype.readable = true; ArrayStream.prototype.writable = true; ArrayStream.prototype.pause = noop; diff --git a/test/common/dns.js b/test/common/dns.js index 983cd5bb86..f63b686fd2 100644 --- a/test/common/dns.js +++ b/test/common/dns.js @@ -4,7 +4,7 @@ function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArra function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } -function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } +function _iterableToArrayLimit(arr, i) { if (!(Symbol.iterator in Object(arr) || Object.prototype.toString.call(arr) === "[object Arguments]")) { return; } var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } @@ -98,8 +98,8 @@ function parseDNSPacket(buffer) { var counts = [['questions', buffer.readUInt16BE(4)], ['answers', buffer.readUInt16BE(6)], ['authorityAnswers', buffer.readUInt16BE(8)], ['additionalRecords', buffer.readUInt16BE(10)]]; var offset = 12; - for (var _i = 0; _i < counts.length; _i++) { - var _counts$_i = _slicedToArray(counts[_i], 2), + for (var _i = 0, _counts = counts; _i < _counts.length; _i++) { + var _counts$_i = _slicedToArray(_counts[_i], 2), sectionName = _counts$_i[0], count = _counts$_i[1]; diff --git a/test/common/index.js b/test/common/index.js index f47899958c..56adc0c5ad 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -93,14 +93,72 @@ var _process$binding = process.binding('config'), var noop = function noop() {}; +var hasCrypto = true; + var isMainThread = function () { - try { + if (false) { return require('worker_threads').isMainThread; - } catch (_e) { - // Worker module not enabled β†’ only a single main thread exists. - return true; + } // Worker module not enabled β†’ only a single main thread exists. + + + return true; +}(); // Check for flags. Skip this for workers (both, the `cluster` module and +// `worker_threads`) and child processes. + + +if (false && isMainThread && module.parent && require('cluster').isMaster) { + // The copyright notice is relatively big and the flags could come afterwards. 
+ var bytesToRead = 1500; + var buffer = Buffer.allocUnsafe(bytesToRead); + var fd = fs.openSync(module.parent.filename, 'r'); + var bytesRead = fs.readSync(fd, buffer, 0, bytesToRead); + fs.closeSync(fd); + var source = buffer.toString('utf8', 0, bytesRead); + var flagStart = source.indexOf('// Flags: --') + 10; + + if (flagStart !== 9) { + var flagEnd = source.indexOf('\n', flagStart); // Normalize different EOL. + + if (source[flagEnd - 1] === '\r') { + flagEnd--; + } + + var flags = source.substring(flagStart, flagEnd).replace(/_/g, '-').split(' '); + var args = process.execArgv.map(function (arg) { + return arg.replace(/_/g, '-'); + }); + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = flags[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var flag = _step.value; + + if (!args.includes(flag) && // If the binary was built without-ssl then the crypto flags are + // invalid (bad option). The test itself should handle this case. + hasCrypto && ( // If the binary is build without `intl` the inspect option is + // invalid. The test itself should handle this case. + process.config.variables.v8_enable_inspector !== 0 || !flag.startsWith('--inspect'))) { + throw new Error("Test has to be started with the flag: '".concat(flag, "'")); + } + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator.return != null) { + _iterator.return(); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } } -}(); +} var isWindows = process.platform === 'win32'; var isAIX = process.platform === 'aix'; @@ -110,7 +168,6 @@ var isFreeBSD = process.platform === 'freebsd'; var isOpenBSD = process.platform === 'openbsd'; var isLinux = process.platform === 'linux'; var isOSX = process.platform === 'darwin'; -var isOSXMojave = isOSX && os.release().startsWith('18'); var enoughTestMem = os.totalmem() > 0x70000000; /* 1.75 Gb */ @@ -119,8 +176,7 @@ var cpus = os.cpus().length === 0 ? [{ }] : os.cpus(); var enoughTestCpu = Array.isArray(cpus) && (cpus.length > 1 || cpus[0].speed > 999); var rootDir = isWindows ? 'c:\\' : '/'; -var buildType = 'readable-stream'; -var hasCrypto = true; // If env var is set then enable async_hook hooks for all tests. +var buildType = 'readable-stream'; // If env var is set then enable async_hook hooks for all tests. 
if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) { var destroydIdsList = {}; @@ -404,7 +460,7 @@ function canCreateSymLink() { timout: 1000 }); return output.includes('SeCreateSymbolicLinkPrivilege'); - } catch (_unused) { + } catch (_e) { return false; } } // On non-Windows platforms, this always returns `true` @@ -488,7 +544,7 @@ function isAlive(pid) { try { process.kill(pid, 'SIGCONT'); return true; - } catch (_unused2) { + } catch (_unused) { return false; } } @@ -550,26 +606,26 @@ function expectWarning(nameOrMap, expected, code) { } var Comparison = function Comparison(obj, keys) { - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + var _iteratorError2 = undefined; try { - for (var _iterator = keys[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var key = _step.value; + for (var _iterator2 = keys[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var key = _step2.value; if (key in obj) this[key] = obj[key]; } } catch (err) { - _didIteratorError = true; - _iteratorError = err; + _didIteratorError2 = true; + _iteratorError2 = err; } finally { try { - if (!_iteratorNormalCompletion && _iterator.return != null) { - _iterator.return(); + if (!_iteratorNormalCompletion2 && _iterator2.return != null) { + _iterator2.return(); } } finally { - if (_didIteratorError) { - throw _iteratorError; + if (_didIteratorError2) { + throw _iteratorError2; } } } @@ -621,13 +677,13 @@ function expectsError(fn, settings, exact) { var keys = objectKeys(settings); - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; + var _iteratorNormalCompletion3 = true; + var _didIteratorError3 = false; + var _iteratorError3 = undefined; try { - for (var _iterator2 = keys[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var key = _step2.value; + for (var _iterator3 = keys[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { + var key = _step3.value; if (!require('deep-strict-equal')(error[key], innerSettings[key])) { // Create placeholder objects to create a nice output. 
@@ -651,16 +707,16 @@ function expectsError(fn, settings, exact) { } } } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; + _didIteratorError3 = true; + _iteratorError3 = err; } finally { try { - if (!_iteratorNormalCompletion2 && _iterator2.return != null) { - _iterator2.return(); + if (!_iteratorNormalCompletion3 && _iterator3.return != null) { + _iterator3.return(); } } finally { - if (_didIteratorError2) { - throw _iteratorError2; + if (_didIteratorError3) { + throw _iteratorError3; } } } @@ -701,8 +757,8 @@ function getArrayBufferViews(buf) { var out = []; var arrayBufferViews = [Int8Array, Uint8Array, Uint8ClampedArray, Int16Array, Uint16Array, Int32Array, Uint32Array, Float32Array, Float64Array, DataView]; - for (var _i = 0; _i < arrayBufferViews.length; _i++) { - var type = arrayBufferViews[_i]; + for (var _i = 0, _arrayBufferViews = arrayBufferViews; _i < _arrayBufferViews.length; _i++) { + var type = _arrayBufferViews[_i]; var _type$BYTES_PER_ELEME = type.BYTES_PER_ELEMENT, BYTES_PER_ELEMENT = _type$BYTES_PER_ELEME === void 0 ? 1 : _type$BYTES_PER_ELEME; @@ -740,7 +796,7 @@ function getTTYfd() { if (ttyFd === undefined) { try { return fs.openSync('/dev/tty'); - } catch (_unused3) { + } catch (_unused2) { // There aren't any tty fd's available to use. return -1; } @@ -757,7 +813,7 @@ function runWithInvalidFD(func) { while (fs.fstatSync(fd--) && fd > 0) { ; } - } catch (_unused4) { + } catch (_unused3) { return func(fd); } @@ -792,7 +848,6 @@ module.exports = { isMainThread: isMainThread, isOpenBSD: isOpenBSD, isOSX: isOSX, - isOSXMojave: isOSXMojave, isSunOS: isSunOS, isWindows: isWindows, localIPv6Hosts: localIPv6Hosts, diff --git a/test/common/tick.js b/test/common/tick.js new file mode 100644 index 0000000000..f4f5fb772d --- /dev/null +++ b/test/common/tick.js @@ -0,0 +1,48 @@ +"use strict"; + +/**/ +require('@babel/polyfill'); + +var util = require('util'); + +for (var i in util) { + exports[i] = util[i]; +} +/**/ + + +'use strict'; +/**/ + + +var objectKeys = objectKeys || function (obj) { + var keys = []; + + for (var key in obj) { + keys.push(key); + } + + return keys; +}; +/**/ + + +require('../common'); + +module.exports = function tick(x, cb) { + function ontick() { + if (--x === 0) { + if (typeof cb === 'function') cb(); + } else { + setImmediate(ontick); + } + } + + setImmediate(ontick); +}; + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} \ No newline at end of file diff --git a/test/parallel/test-readable-from.js b/test/parallel/test-readable-from.js new file mode 100644 index 0000000000..83e91f1615 --- /dev/null +++ b/test/parallel/test-readable-from.js @@ -0,0 +1,398 @@ +"use strict"; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } + +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } + +function _awaitAsyncGenerator(value) { return new _AwaitValue(value); } + +function _wrapAsyncGenerator(fn) { return function () { return new 
_AsyncGenerator(fn.apply(this, arguments)); }; } + +function _AsyncGenerator(gen) { var front, back; function send(key, arg) { return new Promise(function (resolve, reject) { var request = { key: key, arg: arg, resolve: resolve, reject: reject, next: null }; if (back) { back = back.next = request; } else { front = back = request; resume(key, arg); } }); } function resume(key, arg) { try { var result = gen[key](arg); var value = result.value; var wrappedAwait = value instanceof _AwaitValue; Promise.resolve(wrappedAwait ? value.wrapped : value).then(function (arg) { if (wrappedAwait) { resume(key === "return" ? "return" : "next", arg); return; } settle(result.done ? "return" : "normal", arg); }, function (err) { resume("throw", err); }); } catch (err) { settle("throw", err); } } function settle(type, value) { switch (type) { case "return": front.resolve({ value: value, done: true }); break; case "throw": front.reject(value); break; default: front.resolve({ value: value, done: false }); break; } front = front.next; if (front) { resume(front.key, front.arg); } else { back = null; } } this._invoke = send; if (typeof gen.return !== "function") { this.return = undefined; } } + +if (typeof Symbol === "function" && Symbol.asyncIterator) { _AsyncGenerator.prototype[Symbol.asyncIterator] = function () { return this; }; } + +_AsyncGenerator.prototype.next = function (arg) { return this._invoke("next", arg); }; + +_AsyncGenerator.prototype.throw = function (arg) { return this._invoke("throw", arg); }; + +_AsyncGenerator.prototype.return = function (arg) { return this._invoke("return", arg); }; + +function _AwaitValue(value) { this.wrapped = value; } + +function _asyncIterator(iterable) { var method; if (typeof Symbol !== "undefined") { if (Symbol.asyncIterator) { method = iterable[Symbol.asyncIterator]; if (method != null) return method.call(iterable); } if (Symbol.iterator) { method = iterable[Symbol.iterator]; if (method != null) return method.call(iterable); } } throw new TypeError("Object is not async iterable"); } + +/**/ +var bufferShim = require('safe-buffer').Buffer; +/**/ + + +var _require = require('../common'), + mustCall = _require.mustCall; + +var once = require('events.once'); + +var _require2 = require('../../'), + Readable = _require2.Readable; + +var _require3 = require('assert/'), + strictEqual = _require3.strictEqual; + +function toReadableBasicSupport() { + return _toReadableBasicSupport.apply(this, arguments); +} + +function _toReadableBasicSupport() { + _toReadableBasicSupport = _asyncToGenerator(function* () { + function generate() { + return _generate.apply(this, arguments); + } + + function _generate() { + _generate = _wrapAsyncGenerator(function* () { + yield 'a'; + yield 'b'; + yield 'c'; + }); + return _generate.apply(this, arguments); + } + + var stream = Readable.from(generate()); + var expected = ['a', 'b', 'c']; + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + + var _iteratorError; + + try { + for (var _iterator = _asyncIterator(stream), _step, _value; _step = yield _iterator.next(), _iteratorNormalCompletion = _step.done, _value = yield _step.value, !_iteratorNormalCompletion; _iteratorNormalCompletion = true) { + var chunk = _value; + strictEqual(chunk, expected.shift()); + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator.return != null) { + yield _iterator.return(); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } + }); + 
return _toReadableBasicSupport.apply(this, arguments); +} + +function toReadableSyncIterator() { + return _toReadableSyncIterator.apply(this, arguments); +} + +function _toReadableSyncIterator() { + _toReadableSyncIterator = _asyncToGenerator(function* () { + function* generate() { + yield 'a'; + yield 'b'; + yield 'c'; + } + + var stream = Readable.from(generate()); + var expected = ['a', 'b', 'c']; + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + + var _iteratorError2; + + try { + for (var _iterator2 = _asyncIterator(stream), _step2, _value2; _step2 = yield _iterator2.next(), _iteratorNormalCompletion2 = _step2.done, _value2 = yield _step2.value, !_iteratorNormalCompletion2; _iteratorNormalCompletion2 = true) { + var chunk = _value2; + strictEqual(chunk, expected.shift()); + } + } catch (err) { + _didIteratorError2 = true; + _iteratorError2 = err; + } finally { + try { + if (!_iteratorNormalCompletion2 && _iterator2.return != null) { + yield _iterator2.return(); + } + } finally { + if (_didIteratorError2) { + throw _iteratorError2; + } + } + } + }); + return _toReadableSyncIterator.apply(this, arguments); +} + +function toReadablePromises() { + return _toReadablePromises.apply(this, arguments); +} + +function _toReadablePromises() { + _toReadablePromises = _asyncToGenerator(function* () { + var promises = [Promise.resolve('a'), Promise.resolve('b'), Promise.resolve('c')]; + var stream = Readable.from(promises); + var expected = ['a', 'b', 'c']; + var _iteratorNormalCompletion3 = true; + var _didIteratorError3 = false; + + var _iteratorError3; + + try { + for (var _iterator3 = _asyncIterator(stream), _step3, _value3; _step3 = yield _iterator3.next(), _iteratorNormalCompletion3 = _step3.done, _value3 = yield _step3.value, !_iteratorNormalCompletion3; _iteratorNormalCompletion3 = true) { + var chunk = _value3; + strictEqual(chunk, expected.shift()); + } + } catch (err) { + _didIteratorError3 = true; + _iteratorError3 = err; + } finally { + try { + if (!_iteratorNormalCompletion3 && _iterator3.return != null) { + yield _iterator3.return(); + } + } finally { + if (_didIteratorError3) { + throw _iteratorError3; + } + } + } + }); + return _toReadablePromises.apply(this, arguments); +} + +function toReadableString() { + return _toReadableString.apply(this, arguments); +} + +function _toReadableString() { + _toReadableString = _asyncToGenerator(function* () { + var stream = Readable.from('abc'); + var expected = ['a', 'b', 'c']; + var _iteratorNormalCompletion4 = true; + var _didIteratorError4 = false; + + var _iteratorError4; + + try { + for (var _iterator4 = _asyncIterator(stream), _step4, _value4; _step4 = yield _iterator4.next(), _iteratorNormalCompletion4 = _step4.done, _value4 = yield _step4.value, !_iteratorNormalCompletion4; _iteratorNormalCompletion4 = true) { + var chunk = _value4; + strictEqual(chunk, expected.shift()); + } + } catch (err) { + _didIteratorError4 = true; + _iteratorError4 = err; + } finally { + try { + if (!_iteratorNormalCompletion4 && _iterator4.return != null) { + yield _iterator4.return(); + } + } finally { + if (_didIteratorError4) { + throw _iteratorError4; + } + } + } + }); + return _toReadableString.apply(this, arguments); +} + +function toReadableOnData() { + return _toReadableOnData.apply(this, arguments); +} + +function _toReadableOnData() { + _toReadableOnData = _asyncToGenerator(function* () { + function generate() { + return _generate2.apply(this, arguments); + } + + function _generate2() { + _generate2 = 
_wrapAsyncGenerator(function* () { + yield 'a'; + yield 'b'; + yield 'c'; + }); + return _generate2.apply(this, arguments); + } + + var stream = Readable.from(generate()); + var iterations = 0; + var expected = ['a', 'b', 'c']; + stream.on('data', function (chunk) { + iterations++; + strictEqual(chunk, expected.shift()); + }); + yield once(stream, 'end'); + strictEqual(iterations, 3); + }); + return _toReadableOnData.apply(this, arguments); +} + +function toReadableOnDataNonObject() { + return _toReadableOnDataNonObject.apply(this, arguments); +} + +function _toReadableOnDataNonObject() { + _toReadableOnDataNonObject = _asyncToGenerator(function* () { + function generate() { + return _generate3.apply(this, arguments); + } + + function _generate3() { + _generate3 = _wrapAsyncGenerator(function* () { + yield 'a'; + yield 'b'; + yield 'c'; + }); + return _generate3.apply(this, arguments); + } + + var stream = Readable.from(generate(), { + objectMode: false + }); + var iterations = 0; + var expected = ['a', 'b', 'c']; + stream.on('data', function (chunk) { + iterations++; + strictEqual(chunk instanceof Buffer, true); + strictEqual(chunk.toString(), expected.shift()); + }); + yield once(stream, 'end'); + strictEqual(iterations, 3); + }); + return _toReadableOnDataNonObject.apply(this, arguments); +} + +function destroysTheStreamWhenThrowing() { + return _destroysTheStreamWhenThrowing.apply(this, arguments); +} + +function _destroysTheStreamWhenThrowing() { + _destroysTheStreamWhenThrowing = _asyncToGenerator(function* () { + function generate() { + return _generate4.apply(this, arguments); + } + + function _generate4() { + _generate4 = _wrapAsyncGenerator(function* () { + throw new Error('kaboom'); + }); + return _generate4.apply(this, arguments); + } + + var stream = Readable.from(generate()); + stream.read(); + + try { + yield once(stream, 'error'); + } catch (err) { + strictEqual(err.message, 'kaboom'); + strictEqual(stream.destroyed, true); + } + }); + return _destroysTheStreamWhenThrowing.apply(this, arguments); +} + +function asTransformStream() { + return _asTransformStream.apply(this, arguments); +} + +function _asTransformStream() { + _asTransformStream = _asyncToGenerator(function* () { + function generate(_x) { + return _generate5.apply(this, arguments); + } + + function _generate5() { + _generate5 = _wrapAsyncGenerator(function* (stream) { + var _iteratorNormalCompletion6 = true; + var _didIteratorError6 = false; + + var _iteratorError6; + + try { + for (var _iterator6 = _asyncIterator(stream), _step6, _value6; _step6 = yield _awaitAsyncGenerator(_iterator6.next()), _iteratorNormalCompletion6 = _step6.done, _value6 = yield _awaitAsyncGenerator(_step6.value), !_iteratorNormalCompletion6; _iteratorNormalCompletion6 = true) { + var chunk = _value6; + yield chunk.toUpperCase(); + } + } catch (err) { + _didIteratorError6 = true; + _iteratorError6 = err; + } finally { + try { + if (!_iteratorNormalCompletion6 && _iterator6.return != null) { + yield _awaitAsyncGenerator(_iterator6.return()); + } + } finally { + if (_didIteratorError6) { + throw _iteratorError6; + } + } + } + }); + return _generate5.apply(this, arguments); + } + + var source = new Readable({ + objectMode: true, + read: function read() { + this.push('a'); + this.push('b'); + this.push('c'); + this.push(null); + } + }); + var stream = Readable.from(generate(source)); + var expected = ['A', 'B', 'C']; + var _iteratorNormalCompletion5 = true; + var _didIteratorError5 = false; + + var _iteratorError5; + + try { + for (var 
_iterator5 = _asyncIterator(stream), _step5, _value5; _step5 = yield _iterator5.next(), _iteratorNormalCompletion5 = _step5.done, _value5 = yield _step5.value, !_iteratorNormalCompletion5; _iteratorNormalCompletion5 = true) { + var chunk = _value5; + strictEqual(chunk, expected.shift()); + } + } catch (err) { + _didIteratorError5 = true; + _iteratorError5 = err; + } finally { + try { + if (!_iteratorNormalCompletion5 && _iterator5.return != null) { + yield _iterator5.return(); + } + } finally { + if (_didIteratorError5) { + throw _iteratorError5; + } + } + } + }); + return _asTransformStream.apply(this, arguments); +} + +Promise.all([toReadableBasicSupport(), toReadableSyncIterator(), toReadablePromises(), toReadableString(), toReadableOnData(), toReadableOnDataNonObject(), destroysTheStreamWhenThrowing(), asTransformStream()]).then(mustCall()); +; + +(function () { + var t = require('tap'); + + t.pass('sync run'); +})(); + +var _list = process.listeners('uncaughtException'); + +process.removeAllListeners('uncaughtException'); + +_list.pop(); + +_list.forEach(function (e) { + return process.on('uncaughtException', e); +}); \ No newline at end of file diff --git a/test/parallel/test-readable-large-hwm.js b/test/parallel/test-readable-large-hwm.js new file mode 100644 index 0000000000..9e47f0f487 --- /dev/null +++ b/test/parallel/test-readable-large-hwm.js @@ -0,0 +1,50 @@ +"use strict"; + +/**/ +var bufferShim = require('safe-buffer').Buffer; +/**/ + + +var common = require('../common'); + +var _require = require('../../'), + Readable = _require.Readable; // Make sure that readable completes +// even when reading larger buffer. + + +var bufferSize = 10 * 1024 * 1024; +var n = 0; +var r = new Readable({ + read: function read() { + // Try to fill readable buffer piece by piece. 
+ r.push(bufferShim.alloc(bufferSize / 10)); + + if (n++ > 10) { + r.push(null); + } + } +}); +r.on('readable', function () { + while (true) { + var ret = r.read(bufferSize); + if (ret === null) break; + } +}); +r.on('end', common.mustCall()); +; + +(function () { + var t = require('tap'); + + t.pass('sync run'); +})(); + +var _list = process.listeners('uncaughtException'); + +process.removeAllListeners('uncaughtException'); + +_list.pop(); + +_list.forEach(function (e) { + return process.on('uncaughtException', e); +}); \ No newline at end of file diff --git a/test/parallel/test-readable-single-end.js b/test/parallel/test-readable-single-end.js new file mode 100644 index 0000000000..b50c36fffa --- /dev/null +++ b/test/parallel/test-readable-single-end.js @@ -0,0 +1,37 @@ +"use strict"; + +/**/ +var bufferShim = require('safe-buffer').Buffer; +/**/ + + +var common = require('../common'); + +var _require = require('../../'), + Readable = _require.Readable; // This test ensures that there will not be an additional empty 'readable' +// event when stream has ended (only 1 event signalling about end) + + +var r = new Readable({ + read: function read() {} +}); +r.push(null); +r.on('readable', common.mustCall()); +r.on('end', common.mustCall()); +; + +(function () { + var t = require('tap'); + + t.pass('sync run'); +})(); + +var _list = process.listeners('uncaughtException'); + +process.removeAllListeners('uncaughtException'); + +_list.pop(); + +_list.forEach(function (e) { + return process.on('uncaughtException', e); +}); \ No newline at end of file diff --git a/test/parallel/test-stream-auto-destroy.js b/test/parallel/test-stream-auto-destroy.js new file mode 100644 index 0000000000..93338b4c20 --- /dev/null +++ b/test/parallel/test-stream-auto-destroy.js @@ -0,0 +1,99 @@ +"use strict"; + +/**/ +var bufferShim = require('safe-buffer').Buffer; +/**/ + + +var common = require('../common'); + +var stream = require('../../'); + +var assert = require('assert/'); + +{ + var r = new stream.Readable({ + autoDestroy: true, + read: function read() { + this.push('hello'); + this.push('world'); + this.push(null); + }, + destroy: common.mustCall(function (err, cb) { + return cb(); + }) + }); + var ended = false; + r.resume(); + r.on('end', common.mustCall(function () { + ended = true; + })); + r.on('close', common.mustCall(function () { + assert(ended); + })); +} +{ + var w = new stream.Writable({ + autoDestroy: true, + write: function write(data, enc, cb) { + cb(null); + }, + destroy: common.mustCall(function (err, cb) { + return cb(); + }) + }); + var finished = false; + w.write('hello'); + w.write('world'); + w.end(); + w.on('finish', common.mustCall(function () { + finished = true; + })); + w.on('close', common.mustCall(function () { + assert(finished); + })); +} +{ + var t = new stream.Transform({ + autoDestroy: true, + transform: function transform(data, enc, cb) { + cb(null, data); + }, + destroy: common.mustCall(function (err, cb) { + return cb(); + }) + }); + var _ended = false; + var _finished = false; + t.write('hello'); + t.write('world'); + t.end(); + t.resume(); + t.on('end', common.mustCall(function () { + _ended = true; + })); + t.on('finish', common.mustCall(function () { + _finished = true; + })); + t.on('close', common.mustCall(function () { + assert(_ended); + assert(_finished); + })); +} +; + +(function () { + var t = require('tap'); + + t.pass('sync run'); +})(); + +var _list = process.listeners('uncaughtException'); + +process.removeAllListeners('uncaughtException'); + +_list.pop(); + 
+_list.forEach(function (e) { + return process.on('uncaughtException', e); +}); \ No newline at end of file diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js index 99f3b5a8bc..85d1aea460 100644 --- a/test/parallel/test-stream-buffer-list.js +++ b/test/parallel/test-stream-buffer-list.js @@ -1,7 +1,5 @@ "use strict"; -// Flags: --expose_internals - /**/ var bufferShim = require('safe-buffer').Buffer; /**/ diff --git a/test/parallel/test-stream-duplex-destroy.js b/test/parallel/test-stream-duplex-destroy.js index 470d686aad..d163430858 100644 --- a/test/parallel/test-stream-duplex-destroy.js +++ b/test/parallel/test-stream-duplex-destroy.js @@ -12,9 +12,6 @@ var _require = require('../../'), var assert = require('assert/'); -var _require2 = require('util'), - inherits = _require2.inherits; - { var duplex = new Duplex({ write: function write(chunk, enc, cb) { @@ -228,13 +225,14 @@ var _require2 = require('util'), _duplex8.destroy(); } { - var MyDuplex = function MyDuplex() { + function MyDuplex() { assert.strictEqual(this.destroyed, false); this.destroyed = false; Duplex.call(this); - }; + } - inherits(MyDuplex, Duplex); + Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype); + Object.setPrototypeOf(MyDuplex, Duplex); new MyDuplex(); } ; diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js index 10881e5fe9..086962c8b1 100644 --- a/test/parallel/test-stream-finished.js +++ b/test/parallel/test-stream-finished.js @@ -74,10 +74,14 @@ var promisify = require('util-promisify'); finished(_rs, common.mustCall()); } { - var run = - /*#__PURE__*/ - function () { - var _ref = _asyncToGenerator(function* () { + var finishedPromise = promisify(finished); + + function run() { + return _run.apply(this, arguments); + } + + function _run() { + _run = _asyncToGenerator(function* () { var rs = fs.createReadStream(__filename); var done = common.mustCall(); var ended = false; @@ -89,13 +93,9 @@ var promisify = require('util-promisify'); assert(ended); done(); }); + return _run.apply(this, arguments); + } - return function run() { - return _ref.apply(this, arguments); - }; - }(); - - var finishedPromise = promisify(finished); run(); } { diff --git a/test/parallel/test-stream-pipe-await-drain-manual-resume.js b/test/parallel/test-stream-pipe-await-drain-manual-resume.js index d36d4f3651..aacd62cdf7 100644 --- a/test/parallel/test-stream-pipe-await-drain-manual-resume.js +++ b/test/parallel/test-stream-pipe-await-drain-manual-resume.js @@ -46,10 +46,28 @@ readable.once('pause', common.mustCall(function () { // should be flushed. 
isCurrentlyBufferingWrites = false; - - for (var _i = 0; _i < queue.length; _i++) { - var queued = queue[_i]; - queued.cb(); + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = queue[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var queued = _step.value; + queued.cb(); + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator.return != null) { + _iterator.return(); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } } })); })); diff --git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js index 934109f832..a761f38c2e 100644 --- a/test/parallel/test-stream-pipe-cleanup.js +++ b/test/parallel/test-stream-pipe-cleanup.js @@ -34,8 +34,6 @@ var stream = require('../../'); var assert = require('assert/'); -var util = require('util'); - (function () { if (/^v0\.8\./.test(process.version)) return; @@ -46,7 +44,8 @@ var util = require('util'); require('stream').Stream.call(this); } - util.inherits(Writable, require('stream').Stream); + Object.setPrototypeOf(Writable.prototype, require('stream').Stream.prototype); + Object.setPrototypeOf(Writable, require('stream').Stream); Writable.prototype.end = function () { this.endCalls++; @@ -62,14 +61,16 @@ var util = require('util'); require('stream').Stream.call(this); } - util.inherits(Readable, require('stream').Stream); + Object.setPrototypeOf(Readable.prototype, require('stream').Stream.prototype); + Object.setPrototypeOf(Readable, require('stream').Stream); function Duplex() { this.readable = true; Writable.call(this); } - util.inherits(Duplex, Writable); + Object.setPrototypeOf(Duplex.prototype, Writable.prototype); + Object.setPrototypeOf(Duplex, Writable); var i = 0; var limit = 100; var w = new Writable(); diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js index 0067440980..b86a4d01fb 100644 --- a/test/parallel/test-stream-pipe-error-handling.js +++ b/test/parallel/test-stream-pipe-error-handling.js @@ -64,10 +64,6 @@ var Stream = require('stream').Stream; assert.strictEqual(_gotErr, _err); } { - var myOnError = function myOnError() { - throw new Error('this should not happen'); - }; - var R = require('../../').Readable; var W = require('../../').Writable; @@ -87,6 +83,10 @@ var Stream = require('stream').Stream; r.pipe(w); w.removeListener('error', myOnError); removed = true; + + function myOnError() { + throw new Error('this should not happen'); + } } { var _R = require('../../').Readable; diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js index fa485dd875..43d0ab0971 100644 --- a/test/parallel/test-stream-pipe-event.js +++ b/test/parallel/test-stream-pipe-event.js @@ -32,15 +32,14 @@ var stream = require('../../'); var assert = require('assert/'); -var util = require('util'); - function Writable() { this.writable = true; require('stream').Stream.call(this); } -util.inherits(Writable, require('stream').Stream); +Object.setPrototypeOf(Writable.prototype, require('stream').Stream.prototype); +Object.setPrototypeOf(Writable, require('stream').Stream); function Readable() { this.readable = true; @@ -48,7 +47,8 @@ function Readable() { require('stream').Stream.call(this); } -util.inherits(Readable, require('stream').Stream); 
+Object.setPrototypeOf(Readable.prototype, require('stream').Stream.prototype); +Object.setPrototypeOf(Readable, require('stream').Stream); var passed = false; var w = new Writable(); w.on('pipe', function (src) { diff --git a/test/parallel/test-stream-pipe-multiple-pipes.js b/test/parallel/test-stream-pipe-multiple-pipes.js index 458b4c8b23..011923a043 100644 --- a/test/parallel/test-stream-pipe-multiple-pipes.js +++ b/test/parallel/test-stream-pipe-multiple-pipes.js @@ -38,11 +38,30 @@ readable.push(input); // The pipe() calls will postpone emission of the 'resume' // so no data will be available to the writable streams until then. process.nextTick(common.mustCall(function () { - for (var _i = 0; _i < writables.length; _i++) { - var target = writables[_i]; - assert.deepStrictEqual(target.output, [input]); - target.on('unpipe', common.mustCall()); - readable.unpipe(target); + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = writables[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var target = _step.value; + assert.deepStrictEqual(target.output, [input]); + target.on('unpipe', common.mustCall()); + readable.unpipe(target); + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator.return != null) { + _iterator.return(); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } } readable.push('something else'); // This does not get through. @@ -51,9 +70,28 @@ process.nextTick(common.mustCall(function () { readable.resume(); // Make sure the 'end' event gets emitted. })); readable.on('end', common.mustCall(function () { - for (var _i2 = 0; _i2 < writables.length; _i2++) { - var target = writables[_i2]; - assert.deepStrictEqual(target.output, [input]); + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + var _iteratorError2 = undefined; + + try { + for (var _iterator2 = writables[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var target = _step2.value; + assert.deepStrictEqual(target.output, [input]); + } + } catch (err) { + _didIteratorError2 = true; + _iteratorError2 = err; + } finally { + try { + if (!_iteratorNormalCompletion2 && _iterator2.return != null) { + _iterator2.return(); + } + } finally { + if (_didIteratorError2) { + throw _iteratorError2; + } + } } })); ; diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js index 081a6b3636..163212c93f 100644 --- a/test/parallel/test-stream-pipe-unpipe-streams.js +++ b/test/parallel/test-stream-pipe-unpipe-streams.js @@ -38,22 +38,6 @@ source.unpipe(dest2); source.unpipe(dest1); assert.strictEqual(source._readableState.pipes, null); { - var checkDestCleanup = function checkDestCleanup(dest) { - var currentDestId = ++destCount; - - _source.pipe(dest); - - var unpipeChecker = common.mustCall(function () { - assert.deepStrictEqual(dest.listeners('unpipe'), [unpipeChecker], "destination{".concat(currentDestId, "} should have a 'unpipe' event ") + 'listener which is `unpipeChecker`'); - dest.removeListener('unpipe', unpipeChecker); - destCheckEventNames.forEach(function (eventName) { - assert.strictEqual(dest.listenerCount(eventName), 0, "destination{".concat(currentDestId, "}'s '").concat(eventName, "' event ") 
+ 'listeners not removed'); - }); - if (--destCount === 0) checkSrcCleanup(); - }); - dest.on('unpipe', unpipeChecker); - }; - // test `cleanup()` if we unpipe all streams. var _source = Readable({ read: function read() {} @@ -78,6 +62,23 @@ assert.strictEqual(source._readableState.pipes, null); assert.strictEqual(_source.listenerCount(eventName), 0, "source's '".concat(eventName, "' event listeners not removed")); }); }); + + function checkDestCleanup(dest) { + var currentDestId = ++destCount; + + _source.pipe(dest); + + var unpipeChecker = common.mustCall(function () { + assert.deepStrictEqual(dest.listeners('unpipe'), [unpipeChecker], "destination{".concat(currentDestId, "} should have a 'unpipe' event ") + 'listener which is `unpipeChecker`'); + dest.removeListener('unpipe', unpipeChecker); + destCheckEventNames.forEach(function (eventName) { + assert.strictEqual(dest.listenerCount(eventName), 0, "destination{".concat(currentDestId, "}'s '").concat(eventName, "' event ") + 'listeners not removed'); + }); + if (--destCount === 0) checkSrcCleanup(); + }); + dest.on('unpipe', unpipeChecker); + } + checkDestCleanup(_dest); checkDestCleanup(_dest2); diff --git a/test/parallel/test-stream-pipeline.js b/test/parallel/test-stream-pipeline.js index 46af44a91f..686f57027f 100644 --- a/test/parallel/test-stream-pipeline.js +++ b/test/parallel/test-stream-pipeline.js @@ -401,10 +401,14 @@ var promisify = require('util-promisify'); })); } { - var run = - /*#__PURE__*/ - function () { - var _ref = _asyncToGenerator(function* () { + var pipelinePromise = promisify(pipeline); + + function run() { + return _run.apply(this, arguments); + } + + function _run() { + _run = _asyncToGenerator(function* () { var read = new Readable({ read: function read() {} }); @@ -422,13 +426,9 @@ var promisify = require('util-promisify'); yield pipelinePromise(read, write); assert(finished); }); + return _run.apply(this, arguments); + } - return function run() { - return _ref.apply(this, arguments); - }; - }(); - - var pipelinePromise = promisify(pipeline); run(); } { diff --git a/test/parallel/test-stream-readable-async-iterators.js b/test/parallel/test-stream-readable-async-iterators.js index 78ab350010..f0fe61ef68 100644 --- a/test/parallel/test-stream-readable-async-iterators.js +++ b/test/parallel/test-stream-readable-async-iterators.js @@ -4,7 +4,7 @@ function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } -function _asyncIterator(iterable) { var method; if (typeof Symbol === "function") { if (Symbol.asyncIterator) { method = iterable[Symbol.asyncIterator]; if (method != null) return method.call(iterable); } if (Symbol.iterator) { method = iterable[Symbol.iterator]; if (method != null) return method.call(iterable); } } throw new TypeError("Object is not async iterable"); } +function _asyncIterator(iterable) { var method; if (typeof Symbol !== "undefined") { if (Symbol.asyncIterator) { method = iterable[Symbol.asyncIterator]; if (method != null) return method.call(iterable); } if (Symbol.iterator) { method = iterable[Symbol.iterator]; if (method != null) return method.call(iterable); } } throw new 
TypeError("Object is not async iterable"); } /**/ var bufferShim = require('safe-buffer').Buffer; diff --git a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js index 95db94393d..439476ab33 100644 --- a/test/parallel/test-stream-readable-destroy.js +++ b/test/parallel/test-stream-readable-destroy.js @@ -12,9 +12,6 @@ var _require = require('../../'), var assert = require('assert/'); -var _require2 = require('util'), - inherits = _require2.inherits; - { var read = new Readable({ read: function read() {} @@ -173,13 +170,14 @@ var _require2 = require('util'), _read7.destroy(); } { - var MyReadable = function MyReadable() { + function MyReadable() { assert.strictEqual(this.destroyed, false); this.destroyed = false; Readable.call(this); - }; + } - inherits(MyReadable, Readable); + Object.setPrototypeOf(MyReadable.prototype, Readable.prototype); + Object.setPrototypeOf(MyReadable, Readable); new MyReadable(); } { diff --git a/test/parallel/test-stream-readable-infinite-read.js b/test/parallel/test-stream-readable-infinite-read.js new file mode 100644 index 0000000000..f51c0d218c --- /dev/null +++ b/test/parallel/test-stream-readable-infinite-read.js @@ -0,0 +1,54 @@ +"use strict"; + +/**/ +var bufferShim = require('safe-buffer').Buffer; +/**/ + + +var common = require('../common'); + +var assert = require('assert/'); + +var _require = require('../../'), + Readable = _require.Readable; + +var buf = bufferShim.alloc(8192); +var readable = new Readable({ + read: common.mustCall(function () { + this.push(buf); + }, 31) +}); +var i = 0; +readable.on('readable', common.mustCall(function () { + if (i++ === 10) { + // We will just terminate now. + process.removeAllListeners('readable'); + return; + } + + var data = readable.read(); // TODO(mcollina): there is something odd in the highWaterMark logic + // investigate. 
+ + if (i === 1) { + assert.strictEqual(data.length, 8192 * 2); + } else { + assert.strictEqual(data.length, 8192 * 3); + } +}, 11)); +; + +(function () { + var t = require('tap'); + + t.pass('sync run'); +})(); + +var _list = process.listeners('uncaughtException'); + +process.removeAllListeners('uncaughtException'); + +_list.pop(); + +_list.forEach(function (e) { + return process.on('uncaughtException', e); +}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-object-multi-push-async.js b/test/parallel/test-stream-readable-object-multi-push-async.js index 49c350f292..8aecbf1546 100644 --- a/test/parallel/test-stream-readable-object-multi-push-async.js +++ b/test/parallel/test-stream-readable-object-multi-push-async.js @@ -15,21 +15,6 @@ var _require = require('../../'), var MAX = 42; var BATCH = 10; { - var fetchData = function fetchData(cb) { - if (i > MAX) { - setTimeout(cb, 10, null, []); - } else { - var array = []; - var max = i + BATCH; - - for (; i < max; i++) { - array.push(i); - } - - setTimeout(cb, 10, null, array); - } - }; - var readable = new Readable({ objectMode: true, read: common.mustCall(function () { @@ -59,6 +44,22 @@ var BATCH = 10; }, Math.floor(MAX / BATCH) + 2) }); var i = 0; + + function fetchData(cb) { + if (i > MAX) { + setTimeout(cb, 10, null, []); + } else { + var array = []; + var max = i + BATCH; + + for (; i < max; i++) { + array.push(i); + } + + setTimeout(cb, 10, null, array); + } + } + readable.on('readable', function () { var data; console.log('readable emitted'); @@ -72,29 +73,13 @@ var BATCH = 10; })); } { - var _fetchData = function _fetchData(cb) { - if (_i > MAX) { - setTimeout(cb, 10, null, []); - } else { - var array = []; - var max = _i + BATCH; - - for (; _i < max; _i++) { - array.push(_i); - } - - setTimeout(cb, 10, null, array); - } - }; - var _readable = new Readable({ objectMode: true, read: common.mustCall(function () { var _this2 = this; console.log('>> READ'); - - _fetchData(function (err, data) { + fetchData(function (err, data) { if (err) { _this2.destroy(err); @@ -119,6 +104,21 @@ var BATCH = 10; var _i = 0; + function fetchData(cb) { + if (_i > MAX) { + setTimeout(cb, 10, null, []); + } else { + var array = []; + var max = _i + BATCH; + + for (; _i < max; _i++) { + array.push(_i); + } + + setTimeout(cb, 10, null, array); + } + } + _readable.on('data', function (data) { console.log('data emitted', data); }); @@ -128,25 +128,13 @@ var BATCH = 10; })); } { - var _fetchData2 = function _fetchData2(cb) { - var array = []; - var max = _i2 + BATCH; - - for (; _i2 < max; _i2++) { - array.push(_i2); - } - - setTimeout(cb, 10, null, array); - }; - var _readable2 = new Readable({ objectMode: true, read: common.mustCall(function () { var _this3 = this; console.log('>> READ'); - - _fetchData2(function (err, data) { + fetchData(function (err, data) { if (err) { _this3.destroy(err); @@ -169,6 +157,17 @@ var BATCH = 10; var _i2 = 0; + function fetchData(cb) { + var array = []; + var max = _i2 + BATCH; + + for (; _i2 < max; _i2++) { + array.push(_i2); + } + + setTimeout(cb, 10, null, array); + } + _readable2.on('data', function (data) { console.log('data emitted', data); }); diff --git a/test/parallel/test-stream-readable-reading-readingMore.js b/test/parallel/test-stream-readable-reading-readingMore.js index ca17d6262c..a17b4c6cdf 100644 --- a/test/parallel/test-stream-readable-reading-readingMore.js +++ b/test/parallel/test-stream-readable-reading-readingMore.js @@ -12,13 +12,6 @@ var assert = require('assert/'); var 
Readable = require('../../').Readable; { - var onStreamEnd = function onStreamEnd() { - // End of stream; state.reading is false - // And so should be readingMore. - assert.strictEqual(state.readingMore, false); - assert.strictEqual(state.reading, false); - }; - var readable = new Readable({ read: function read(size) {} }); @@ -33,6 +26,14 @@ var Readable = require('../../').Readable; assert.strictEqual(state.reading, !state.ended); }, 2)); + + function onStreamEnd() { + // End of stream; state.reading is false + // And so should be readingMore. + assert.strictEqual(state.readingMore, false); + assert.strictEqual(state.reading, false); + } + var expectedReadingMore = [true, false]; readable.on('readable', common.mustCall(function () { // there is only one readingMore scheduled from on('data'), @@ -56,13 +57,6 @@ var Readable = require('../../').Readable; readable.push(null); } { - var _onStreamEnd = function _onStreamEnd() { - // End of stream; state.reading is false - // And so should be readingMore. - assert.strictEqual(_state.readingMore, false); - assert.strictEqual(_state.reading, false); - }; - var _readable = new Readable({ read: function read(size) {} }); @@ -80,7 +74,14 @@ var Readable = require('../../').Readable; assert.strictEqual(_state.reading, !_state.ended); }, 2)); - _readable.on('end', common.mustCall(_onStreamEnd)); + function onStreamEnd() { + // End of stream; state.reading is false + // And so should be readingMore. + assert.strictEqual(_state.readingMore, false); + assert.strictEqual(_state.reading, false); + } + + _readable.on('end', common.mustCall(onStreamEnd)); _readable.push('pushed'); // stop emitting 'data' events @@ -104,13 +105,6 @@ var Readable = require('../../').Readable; _readable.push(null); } { - var _onStreamEnd2 = function _onStreamEnd2() { - // End of stream; state.reading is false - // And so should be readingMore. - assert.strictEqual(_state2.readingMore, false); - assert.strictEqual(_state2.reading, false); - }; - var _readable2 = new Readable({ read: function read(size) {} }); @@ -130,7 +124,14 @@ var Readable = require('../../').Readable; _readable2.removeListener('readable', onReadable); - _readable2.on('end', common.mustCall(_onStreamEnd2)); + function onStreamEnd() { + // End of stream; state.reading is false + // And so should be readingMore. + assert.strictEqual(_state2.readingMore, false); + assert.strictEqual(_state2.reading, false); + } + + _readable2.on('end', common.mustCall(onStreamEnd)); _readable2.push('pushed'); // we are still not flowing, we will be resuming in the next tick diff --git a/test/parallel/test-stream-readable-resume-hwm.js b/test/parallel/test-stream-readable-resume-hwm.js new file mode 100644 index 0000000000..93a4a54226 --- /dev/null +++ b/test/parallel/test-stream-readable-resume-hwm.js @@ -0,0 +1,47 @@ +"use strict"; + +/**/ +var bufferShim = require('safe-buffer').Buffer; +/**/ + + +var common = require('../common'); + +var _require = require('../../'), + Readable = _require.Readable; // readable.resume() should not lead to a ._read() call being scheduled +// when we exceed the high water mark already. + + +var readable = new Readable({ + read: common.mustNotCall(), + highWaterMark: 100 +}); // Fill up the internal buffer so that we definitely exceed the HWM: + +for (var i = 0; i < 10; i++) { + readable.push('a'.repeat(200)); +} // Call resume, and pause after one chunk. +// The .pause() is just so that we don’t empty the buffer fully, which would +// be a valid reason to call ._read(). 
+ + +readable.resume(); +readable.once('data', common.mustCall(function () { + return readable.pause(); +})); +; + +(function () { + var t = require('tap'); + + t.pass('sync run'); +})(); + +var _list = process.listeners('uncaughtException'); + +process.removeAllListeners('uncaughtException'); + +_list.pop(); + +_list.forEach(function (e) { + return process.on('uncaughtException', e); +}); \ No newline at end of file diff --git a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js new file mode 100644 index 0000000000..31bd3263ce --- /dev/null +++ b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js @@ -0,0 +1,101 @@ +"use strict"; + +/**/ +var bufferShim = require('safe-buffer').Buffer; +/**/ + + +require('../common'); + +var _require = require('../../'), + Readable = _require.Readable; + +var assert = require('assert/'); + +{ + // Call .setEncoding() while there are bytes already in the buffer. + var r = new Readable({ + read: function read() {} + }); + r.push(bufferShim.from('a')); + r.push(bufferShim.from('b')); + r.setEncoding('utf8'); + var chunks = []; + r.on('data', function (chunk) { + return chunks.push(chunk); + }); + process.nextTick(function () { + assert.deepStrictEqual(chunks, ['ab']); + }); +} +{ + // Call .setEncoding() while the buffer contains a complete, + // but chunked character. + var _r = new Readable({ + read: function read() {} + }); + + _r.push(bufferShim.from([0xf0])); + + _r.push(bufferShim.from([0x9f])); + + _r.push(bufferShim.from([0x8e])); + + _r.push(bufferShim.from([0x89])); + + _r.setEncoding('utf8'); + + var _chunks = []; + + _r.on('data', function (chunk) { + return _chunks.push(chunk); + }); + + process.nextTick(function () { + assert.deepStrictEqual(_chunks, ['πŸŽ‰']); + }); +} +{ + // Call .setEncoding() while the buffer contains an incomplete character, + // and finish the character later. 
+ var _r2 = new Readable({ + read: function read() {} + }); + + _r2.push(bufferShim.from([0xf0])); + + _r2.push(bufferShim.from([0x9f])); + + _r2.setEncoding('utf8'); + + _r2.push(bufferShim.from([0x8e])); + + _r2.push(bufferShim.from([0x89])); + + var _chunks2 = []; + + _r2.on('data', function (chunk) { + return _chunks2.push(chunk); + }); + + process.nextTick(function () { + assert.deepStrictEqual(_chunks2, ['πŸŽ‰']); + }); +} +; + +(function () { + var t = require('tap'); + + t.pass('sync run'); +})(); + +var _list = process.listeners('uncaughtException'); + +process.removeAllListeners('uncaughtException'); + +_list.pop(); + +_list.forEach(function (e) { + return process.on('uncaughtException', e); +}); \ No newline at end of file diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js index cc00ce8c38..dfc88f8166 100644 --- a/test/parallel/test-stream-writable-destroy.js +++ b/test/parallel/test-stream-writable-destroy.js @@ -12,9 +12,6 @@ var _require = require('../../'), var assert = require('assert/'); -var _require2 = require('util'), - inherits = _require2.inherits; - { var write = new Writable({ write: function write(chunk, enc, cb) { @@ -189,6 +186,28 @@ var _require2 = require('util'), assert.strictEqual(_write7._writableState.errorEmitted, true); assert.strictEqual(_write7.destroyed, true); } +{ + var writable = new Writable({ + destroy: common.mustCall(function (err, cb) { + process.nextTick(cb, new Error('kaboom 1')); + }), + write: function write(chunk, enc, cb) { + cb(); + } + }); + writable.on('close', common.mustCall()); + writable.on('error', common.expectsError({ + type: Error, + message: 'kaboom 2' + })); + writable.destroy(); + assert.strictEqual(writable.destroyed, true); + assert.strictEqual(writable._writableState.errorEmitted, false); // Test case where `writable.destroy()` is called again with an error before + // the `_destroy()` callback is called. + + writable.destroy(new Error('kaboom 2')); + assert.strictEqual(writable._writableState.errorEmitted, true); +} { var _write8 = new Writable({ write: function write(chunk, enc, cb) { @@ -204,13 +223,14 @@ var _require2 = require('util'), _write8.destroy(); } { - var MyWritable = function MyWritable() { + function MyWritable() { assert.strictEqual(this.destroyed, false); this.destroyed = false; Writable.call(this); - }; + } - inherits(MyWritable, Writable); + Object.setPrototypeOf(MyWritable.prototype, Writable.prototype); + Object.setPrototypeOf(MyWritable, Writable); new MyWritable(); } { diff --git a/test/parallel/test-stream-write-destroy.js b/test/parallel/test-stream-write-destroy.js index 00688cd70c..6528b2bacd 100644 --- a/test/parallel/test-stream-write-destroy.js +++ b/test/parallel/test-stream-write-destroy.js @@ -14,11 +14,8 @@ var _require = require('../../'), // writes. 
-var _arr = [false, true]; - -for (var _i = 0; _i < _arr.length; _i++) { +for (var _i = 0, _arr = [false, true]; _i < _arr.length; _i++) { var withPendingData = _arr[_i]; - var _arr2 = [false, true]; var _loop = function _loop() { var useEnd = _arr2[_i2]; @@ -80,7 +77,7 @@ for (var _i = 0; _i < _arr.length; _i++) { assert.strictEqual(finished, !withPendingData && useEnd); }; - for (var _i2 = 0; _i2 < _arr2.length; _i2++) { + for (var _i2 = 0, _arr2 = [false, true]; _i2 < _arr2.length; _i2++) { _loop(); } } diff --git a/test/parallel/test-stream2-basic.js b/test/parallel/test-stream2-basic.js index d7bcd5ab49..f62f6caae5 100644 --- a/test/parallel/test-stream2-basic.js +++ b/test/parallel/test-stream2-basic.js @@ -120,16 +120,6 @@ function (_EE) { }(EE); { - var flow = function flow() { - var res; - - while (null !== (res = r.read(readSize++))) { - reads.push(res.toString()); - } - - r.once('readable', flow); - }; - // Test basic functionality var r = new TestReader(20); var reads = []; @@ -138,6 +128,17 @@ function (_EE) { assert.deepStrictEqual(reads, expect); })); var readSize = 1; + + function flow() { + var res; + + while (null !== (res = r.read(readSize++))) { + reads.push(res.toString()); + } + + r.once('readable', flow); + } + flow(); } { diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js index 1d9e285749..90376973d9 100644 --- a/test/parallel/test-stream2-readable-from-list.js +++ b/test/parallel/test-stream2-readable-from-list.js @@ -20,7 +20,6 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -// Flags: --expose_internals /**/ var bufferShim = require('safe-buffer').Buffer; diff --git a/test/parallel/test-streams-highwatermark.js b/test/parallel/test-streams-highwatermark.js index 66f245de08..b6231c5ba7 100644 --- a/test/parallel/test-streams-highwatermark.js +++ b/test/parallel/test-streams-highwatermark.js @@ -26,11 +26,9 @@ var stream = require('../../'); highWaterMark: ovfl }); assert.strictEqual(writable._writableState.highWaterMark, ovfl); - var _arr = [true, false, '5', {}, -5, NaN]; var _loop = function _loop() { var invalidHwm = _arr[_i]; - var _arr2 = [stream.Readable, stream.Writable]; var _loop2 = function _loop2() { var type = _arr2[_i2]; @@ -45,12 +43,12 @@ var stream = require('../../'); }); }; - for (var _i2 = 0; _i2 < _arr2.length; _i2++) { + for (var _i2 = 0, _arr2 = [stream.Readable, stream.Writable]; _i2 < _arr2.length; _i2++) { _loop2(); } }; - for (var _i = 0; _i < _arr.length; _i++) { + for (var _i = 0, _arr = [true, false, '5', {}, -5, NaN]; _i < _arr.length; _i++) { _loop(); } }