stream: fix readable stream as async iterator function
Since v19.2 it has not been possible to use readable streams
as async iterators (confirmed bug).
This patch fixes the problem by reading the Stream.Duplex property
from the 'streams/duplex' module instead of 'streams/legacy'.

Fixes: #46141
PR-URL: #46147
Reviewed-By: Benjamin Gruenbaum <benjamingr@gmail.com>
Reviewed-By: Robert Nagy <ronagy@icloud.com>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
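To make the failure mode above concrete, here is a minimal sketch of the pattern that regressed on v19.2 (an assumed repro shape, modeled on the regression test added further down, not copied from the linked issue): only 'node:stream/promises' is loaded, and pipeline() is driven with async generator functions so the source is consumed as an async iterator.

'use strict';
// Sketch only: names are illustrative. Only the promises API is loaded;
// 'node:stream' itself is never required, which is the situation the
// require('stream') addition in lib/stream/promises.js below covers.
const { pipeline } = require('node:stream/promises');

async function* produce() {
  yield 'Hello';
  yield 'World';
}

async function* consume(stream) {
  // Consuming the upstream source with for await...of is the
  // "readable stream as async iterator" usage named in the commit title.
  for await (const chunk of stream) {
    console.log(chunk);
  }
}

// Per the commit message and linked issue, this pipeline failed on v19.2
// before the patch; with the fix it resolves after printing 'Hello' and 'World'.
pipeline(produce, consume).catch(console.error);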
ErickWendel authored and juanarbol committed Mar 5, 2023
1 parent cb5bb12 commit 3acfe9b
Showing 2 changed files with 29 additions and 0 deletions.
2 changes: 2 additions & 0 deletions lib/stream/promises.js
@@ -13,6 +13,8 @@ const {
const { pipelineImpl: pl } = require('internal/streams/pipeline');
const { finished } = require('internal/streams/end-of-stream');

require('stream');

function pipeline(...streams) {
  return new Promise((resolve, reject) => {
    let signal;
27 changes: 27 additions & 0 deletions test/parallel/test-stream3-pipeline-async-iterator.js
@@ -0,0 +1,27 @@
/* eslint-disable node-core/require-common-first, require-yield */
'use strict';
const { pipeline } = require('node:stream/promises');
{
  // Ensure that async iterators can act as readable and writable streams
  async function* myCustomReadable() {
    yield 'Hello';
    yield 'World';
  }

  const messages = [];
  async function* myCustomWritable(stream) {
    for await (const chunk of stream) {
      messages.push(chunk);
    }
  }

  (async () => {
    await pipeline(
      myCustomReadable,
      myCustomWritable,
    );
    // Importing here to avoid initializing streams
    require('assert').deepStrictEqual(messages, ['Hello', 'World']);
  })()
  .then(require('../common').mustCall());
}
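For completeness, a related sketch (not part of this commit): the same promises-based pipeline consuming an actual Readable instance as an async iterator, which is the broader capability the commit title refers to. Readable.from and the async-generator destination are standard stream APIs; the variable names are illustrative.

'use strict';
const { Readable } = require('node:stream');
const { pipeline } = require('node:stream/promises');

const readable = Readable.from(['Hello', 'World']);

(async () => {
  const received = [];
  await pipeline(readable, async function* collect(source) {
    // The Readable is consumed here as an async iterator.
    for await (const chunk of source) {
      received.push(chunk);
    }
  });
  console.log(received); // [ 'Hello', 'World' ]
})().catch(console.error);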
