lib: ensure readable stream flows to end
If a readable stream was set up with a `highWaterMark` of 0, the while
loop in the `maybeReadMore_` function would never execute.

The while loop now has an extra or-condition for the case where the
stream is flowing and the buffer is empty. The or-condition is adapted
from the emit-condition of the `addChunk` function (see the sketch
below).
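
For reference, the `addChunk` emit-condition that the new clause is adapted from looks roughly like this (paraphrased from `lib/_stream_readable.js` of this era; it is not part of this diff):

// In addChunk(): a pushed chunk is emitted directly, bypassing the buffer,
// only while the stream is flowing, the buffer is empty and we are not
// inside a synchronous read() call.
if (state.flowing && state.length === 0 && !state.sync) {
  stream.emit('data', chunk);
}

The new loop condition in `maybeReadMore_` reuses the first two checks, `state.flowing && state.length === 0`, and drops the `state.sync` check for the reason given below.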

The `addChunk` function also contains a check for `state.sync`. However,
that part of the check was omitted here because `maybeReadMore_` is
executed via `process.nextTick()`. `state.sync` is set and then unset
within the `read()` function, so it should never be in effect by the time
`maybeReadMore_` runs.
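
To illustrate the failure mode, here is a minimal sketch of the affected pattern (it mirrors the regression test added below; the logging is only illustrative): a `highWaterMark: 0` stream that pushes its data asynchronously. Before this change such a stream would stop calling `_read()` and never reach `'end'` once a `'data'` listener switched it into flowing mode.

'use strict';
const { Readable } = require('stream');

let remaining = 5;
const r = new Readable({
  highWaterMark: 0,
  // The data is produced asynchronously: read() returns without pushing
  // and push() happens on a later tick.
  read() {
    process.nextTick(() => {
      r.push(remaining-- > 0 ? 'a' : null);
    });
  }
});

// Subscribing to 'data' puts the stream into flowing mode. With this fix,
// all five chunks and the 'end' event arrive.
r.on('data', (chunk) => console.log('data:', chunk.toString()));
r.on('end', () => console.log('end'));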

Fixes: #24915

PR-URL: #24918
Reviewed-By: Anna Henningsen <[email protected]>
Reviewed-By: Matteo Collina <[email protected]>
Rantanen authored and BethGriggs committed Dec 17, 2018
1 parent 9bd4267 commit 2456a54
Showing 3 changed files with 157 additions and 4 deletions.
30 changes: 26 additions & 4 deletions lib/_stream_readable.js
@@ -567,16 +567,38 @@ function maybeReadMore(stream, state) {
 }
 
 function maybeReadMore_(stream, state) {
-  var len = state.length;
+  // Attempt to read more data if we should.
+  //
+  // The conditions for reading more data are (one of):
+  // - Not enough data buffered (state.length < state.highWaterMark). The loop
+  //   is responsible for filling the buffer with enough data if such data
+  //   is available. If highWaterMark is 0 and we are not in the flowing mode
+  //   we should _not_ attempt to buffer any extra data. We'll get more data
+  //   when the stream consumer calls read() instead.
+  // - No data in the buffer, and the stream is in flowing mode. In this mode
+  //   the loop below is responsible for ensuring read() is called. Failing to
+  //   call read here would abort the flow and there's no other mechanism for
+  //   continuing the flow if the stream consumer has just subscribed to the
+  //   'data' event.
+  //
+  // In addition to the above conditions to keep reading data, the following
+  // conditions prevent the data from being read:
+  // - The stream has ended (state.ended).
+  // - There is already a pending 'read' operation (state.reading). This is a
+  //   case where the stream has called the implementation-defined _read()
+  //   method, but they are processing the call asynchronously and have _not_
+  //   called push() with new data. In this case we skip performing more
+  //   read()s. The execution ends in this method again after the _read() ends
+  //   up calling push() with more data.
   while (!state.reading && !state.ended &&
-         state.length < state.highWaterMark) {
+         (state.length < state.highWaterMark ||
+          (state.flowing && state.length === 0))) {
+    const len = state.length;
     debug('maybeReadMore read 0');
     stream.read(0);
     if (len === state.length)
       // didn't get any data, stop spinning.
       break;
-    else
-      len = state.length;
   }
   state.readingMore = false;
 }
27 changes: 27 additions & 0 deletions test/parallel/test-stream-readable-hwm-0-async.js
@@ -0,0 +1,27 @@
'use strict';

const common = require('../common');

// This test ensures that Readable stream will continue to call _read
// for streams with highWaterMark === 0 once the stream returns data
// by calling push() asynchronously.

const { Readable } = require('stream');

let count = 5;

const r = new Readable({
  // Called 6 times: First 5 return data, last one signals end of stream.
  read: common.mustCall(() => {
    process.nextTick(common.mustCall(() => {
      if (count--)
        r.push('a');
      else
        r.push(null);
    }));
  }, 6),
  highWaterMark: 0,
});

r.on('end', common.mustCall());
r.on('data', common.mustCall(5));
104 changes: 104 additions & 0 deletions test/parallel/test-stream-readable-hwm-0-no-flow-data.js
@@ -0,0 +1,104 @@
'use strict';

const common = require('../common');

// Ensure that subscribing to the 'data' event will not make the stream flow.
// The 'data' event will require calling read() by hand.
//
// The test is written for the (somewhat rare) highWaterMark: 0 streams to
// specifically catch any regressions that might occur with these streams.

const assert = require('assert');
const { Readable } = require('stream');

const streamData = [ 'a', null ];

// Track the calls so we can assert their order later.
const calls = [];
const r = new Readable({
  read: common.mustCall(() => {
    calls.push('_read:' + streamData[0]);
    process.nextTick(() => {
      calls.push('push:' + streamData[0]);
      r.push(streamData.shift());
    });
  }, streamData.length),
  highWaterMark: 0,

  // Object mode is used here just for testing convenience. It really
  // shouldn't affect the order of events. Just the data and its format.
  objectMode: true,
});

assert.strictEqual(r.readableFlowing, null);
r.on('readable', common.mustCall(() => {
  calls.push('readable');
}, 2));
assert.strictEqual(r.readableFlowing, false);
r.on('data', common.mustCall((data) => {
  calls.push('data:' + data);
}, 1));
r.on('end', common.mustCall(() => {
  calls.push('end');
}));
assert.strictEqual(r.readableFlowing, false);

// The stream emits the events asynchronously but that's not guaranteed to
// happen on the next tick (especially since the _read implementation above
// uses process.nextTick).
//
// We use setImmediate here to give the stream enough time to emit all the
// events it's about to emit.
setImmediate(() => {

  // Only the _read, push, readable calls have happened. No data must be
  // emitted yet.
  assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable']);

  // Calling 'r.read()' should trigger the data event.
  assert.strictEqual(r.read(), 'a');
  assert.deepStrictEqual(
    calls,
    ['_read:a', 'push:a', 'readable', 'data:a']);

  // The next 'read()' will return null because hwm: 0 does not buffer any
  // data and the _read implementation above does the push() asynchronously.
  //
  // Note: This 'null' signals "no data available". It isn't the end-of-stream
  // null value as the stream doesn't know yet that it is about to reach the
  // end.
  //
  // Using setImmediate again to give the stream enough time to emit all the
  // events it wants to emit.
  assert.strictEqual(r.read(), null);
  setImmediate(() => {

    // There's a new 'readable' event after the data has been pushed.
    // The 'end' event will be emitted only after a 'read()'.
    //
    // This is somewhat special for the case where the '_read' implementation
    // calls 'push' asynchronously. If 'push' was synchronous, the 'end' event
    // would be emitted here _before_ we call read().
    assert.deepStrictEqual(
      calls,
      ['_read:a', 'push:a', 'readable', 'data:a',
       '_read:null', 'push:null', 'readable']);

    assert.strictEqual(r.read(), null);

    // While it isn't really specified whether the 'end' event should happen
    // synchronously with read() or not, we'll assert the current behavior
    // ('end' event happening on the next tick after read()) so any changes
    // to it are noted and acknowledged in the future.
    assert.deepStrictEqual(
      calls,
      ['_read:a', 'push:a', 'readable', 'data:a',
       '_read:null', 'push:null', 'readable']);
    process.nextTick(() => {
      assert.deepStrictEqual(
        calls,
        ['_read:a', 'push:a', 'readable', 'data:a',
         '_read:null', 'push:null', 'readable', 'end']);
    });
  });
});
