Commit a47ee80

marcosc90 authored and targos committed
stream: convert string to Buffer when calling unshift(<string>)
`readable.unshift()` can take a string as an argument, but that string was not being converted to a Buffer, which caused a `TypeError: Argument must be a buffer` in some cases. In addition, a string passed to `unshift()` was always coerced to utf8, regardless of its actual encoding. A second optional `encoding` argument was added to `unshift()` to fix the encoding issue.

Fixes: #27192
PR-URL: #27194
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Rich Trott <rtrott@gmail.com>
1 parent 887dd60 commit a47ee80
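A minimal sketch of the behaviour this commit fixes (the stream setup and chunk value are illustrative, not taken from the patch): after the change, a string passed to `readable.unshift()` is stored as a `Buffer`, so later reads see a proper `Buffer` chunk instead of a raw string.

```js
const { Readable } = require('stream');

const readable = new Readable({ read() {} });

readable.on('data', (chunk) => {
  // With this commit, the unshifted string arrives as a Buffer
  // (no encoding was set on the stream), instead of leaking through
  // as a plain string and breaking later Buffer operations.
  console.log(Buffer.isBuffer(chunk)); // true
  console.log(chunk.toString('utf8')); // 'abc'
});

readable.unshift('abc');
```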

4 files changed: +209 −16

doc/api/errors.md

+1 −1 (1 addition, 1 deletion)
@@ -2349,7 +2349,7 @@ such as `process.stdout.on('data')`.
 [`sign.sign()`]: crypto.html#crypto_sign_sign_privatekey_outputencoding
 [`stream.pipe()`]: stream.html#stream_readable_pipe_destination_options
 [`stream.push()`]: stream.html#stream_readable_push_chunk_encoding
-[`stream.unshift()`]: stream.html#stream_readable_unshift_chunk
+[`stream.unshift()`]: stream.html#stream_readable_unshift_chunk_encoding
 [`stream.write()`]: stream.html#stream_writable_write_chunk_encoding_callback
 [`subprocess.kill()`]: child_process.html#child_process_subprocess_kill_signal
 [`subprocess.send()`]: child_process.html#child_process_subprocess_send_message_sendhandle_options_callback

doc/api/stream.md

+3 −1 (3 additions, 1 deletion)
@@ -1195,7 +1195,7 @@ setTimeout(() => {
 }, 1000);
 ```
 
-##### readable.unshift(chunk)
+##### readable.unshift(chunk[, encoding])
 <!-- YAML
 added: v0.9.11
 changes:
@@ -1208,6 +1208,8 @@ changes:
   read queue. For streams not operating in object mode, `chunk` must be a
   string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be
   any JavaScript value other than `null`.
+* `encoding` {string} Encoding of string chunks. Must be a valid
+  `Buffer` encoding, such as `'utf8'` or `'ascii'`.
 
 The `readable.unshift()` method pushes a chunk of data back into the internal
 buffer. This is useful in certain situations where a stream is being consumed by
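A short usage sketch of the documented signature above (values are illustrative): the new `encoding` argument tells the stream how to interpret a string chunk, so a base64 string is decoded correctly instead of being treated as utf8.

```js
const { Readable } = require('stream');

const readable = new Readable({ read() {} });

// 'YWJj' is 'abc' encoded as base64.
const encoded = Buffer.from('abc').toString('base64');

readable.on('data', (chunk) => {
  // The chunk was decoded using the declared encoding, so the
  // underlying bytes are 'abc', not the literal characters 'YWJj'.
  console.log(chunk.toString('utf8')); // 'abc'
});

// New optional second argument: the encoding of the string chunk.
readable.unshift(encoded, 'base64');
```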

lib/_stream_readable.js

+18 −14 (18 additions, 14 deletions)
@@ -207,13 +207,28 @@ Readable.prototype._destroy = function(err, cb) {
 // similar to how Writable.write() returns true if you should
 // write() some more.
 Readable.prototype.push = function(chunk, encoding) {
-  const state = this._readableState;
-  var skipChunkCheck;
+  return readableAddChunk(this, chunk, encoding, false);
+};
+
+// Unshift should *always* be something directly out of read()
+Readable.prototype.unshift = function(chunk, encoding) {
+  return readableAddChunk(this, chunk, encoding, true);
+};
+
+function readableAddChunk(stream, chunk, encoding, addToFront) {
+  debug('readableAddChunk', chunk);
+  const state = stream._readableState;
+
+  let skipChunkCheck;
 
   if (!state.objectMode) {
     if (typeof chunk === 'string') {
       encoding = encoding || state.defaultEncoding;
-      if (encoding !== state.encoding) {
+      if (addToFront && state.encoding && state.encoding !== encoding) {
+        // When unshifting, if state.encoding is set, we have to save
+        // the string in the BufferList with the state encoding
+        chunk = Buffer.from(chunk, encoding).toString(state.encoding);
+      } else if (encoding !== state.encoding) {
         chunk = Buffer.from(chunk, encoding);
         encoding = '';
       }
@@ -223,17 +238,6 @@ Readable.prototype.push = function(chunk, encoding) {
     skipChunkCheck = true;
   }
 
-  return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
-};
-
-// Unshift should *always* be something directly out of read()
-Readable.prototype.unshift = function(chunk) {
-  return readableAddChunk(this, chunk, null, true, false);
-};
-
-function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
-  debug('readableAddChunk', chunk);
-  const state = stream._readableState;
   if (chunk === null) {
     state.reading = false;
     onEofChunk(stream, state);
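To illustrate the branch added above (a sketch, not part of the patch): when the stream already has an encoding set via `setEncoding()`, an unshifted string in a different encoding is re-encoded so it can sit in the internal BufferList in the stream's own encoding, which is what the new test below verifies.

```js
const { Readable } = require('stream');

const readable = new Readable({ read() {} });
readable.setEncoding('base64'); // state.encoding is now 'base64'

readable.on('data', (chunk) => {
  // Chunks are emitted as base64 strings; the unshifted utf8 string
  // was converted with Buffer.from('abc', 'utf8').toString('base64')
  // before being stored, so decoding it recovers 'abc'.
  console.log(Buffer.from(chunk, 'base64').toString('utf8')); // 'abc'
});

// Unshift a utf8 string into a stream whose encoding is base64.
readable.unshift('abc', 'utf8');
```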

New test file: +187 −0 (187 additions, 0 deletions)
@@ -0,0 +1,187 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+const { Readable } = require('stream');
+
+{
+  // Check that strings are saved as Buffer
+  const readable = new Readable({ read() {} });
+
+  const string = 'abc';
+
+  readable.on('data', common.mustCall((chunk) => {
+    assert(Buffer.isBuffer(chunk));
+    assert.strictEqual(chunk.toString('utf8'), string);
+  }, 1));
+
+  readable.unshift(string);
+
+}
+
+{
+  // Check that data goes at the beginning
+  const readable = new Readable({ read() {} });
+  const unshift = 'front';
+  const push = 'back';
+
+  const expected = [unshift, push];
+  readable.on('data', common.mustCall((chunk) => {
+    assert.strictEqual(chunk.toString('utf8'), expected.shift());
+  }, 2));
+
+
+  readable.push(push);
+  readable.unshift(unshift);
+}
+
+{
+  // Check that buffer is saved with correct encoding
+  const readable = new Readable({ read() {} });
+
+  const encoding = 'base64';
+  const string = Buffer.from('abc').toString(encoding);
+
+  readable.on('data', common.mustCall((chunk) => {
+    assert.strictEqual(chunk.toString(encoding), string);
+  }, 1));
+
+  readable.unshift(string, encoding);
+
+}
+
+{
+
+  const streamEncoding = 'base64';
+
+  function checkEncoding(readable) {
+
+    // chunk encodings
+    const encodings = ['utf8', 'binary', 'hex', 'base64'];
+    const expected = [];
+
+    readable.on('data', common.mustCall((chunk) => {
+      const { encoding, string } = expected.pop();
+      assert.strictEqual(chunk.toString(encoding), string);
+    }, encodings.length));
+
+    for (const encoding of encodings) {
+      const string = 'abc';
+
+      // If encoding is the same as the state.encoding the string is
+      // saved as is
+      const expect = encoding !== streamEncoding ?
+        Buffer.from(string, encoding).toString(streamEncoding) : string;
+
+      expected.push({ encoding, string: expect });
+
+      readable.unshift(string, encoding);
+    }
+  }
+
+  const r1 = new Readable({ read() {} });
+  r1.setEncoding(streamEncoding);
+  checkEncoding(r1);
+
+  const r2 = new Readable({ read() {}, encoding: streamEncoding });
+  checkEncoding(r2);
+
+}
+
+{
+  // Both .push & .unshift should have the same behaviour
+  // When setting an encoding, each chunk should be emitted with that encoding
+  const encoding = 'base64';
+
+  function checkEncoding(readable) {
+    const string = 'abc';
+    readable.on('data', common.mustCall((chunk) => {
+      assert.strictEqual(chunk, Buffer.from(string).toString(encoding));
+    }, 2));
+
+    readable.push(string);
+    readable.unshift(string);
+  }
+
+  const r1 = new Readable({ read() {} });
+  r1.setEncoding(encoding);
+  checkEncoding(r1);
+
+  const r2 = new Readable({ read() {}, encoding });
+  checkEncoding(r2);
+
+}
+
+{
+  // Check that error is thrown for invalid chunks
+
+  const readable = new Readable({ read() {} });
+  function checkError(fn) {
+    common.expectsError(fn, {
+      code: 'ERR_INVALID_ARG_TYPE',
+      type: TypeError
+    });
+  }
+
+  checkError(() => readable.unshift([]));
+  checkError(() => readable.unshift({}));
+  checkError(() => readable.unshift(0));
+
+}
+
+{
+  // Check that ObjectMode works
+  const readable = new Readable({ objectMode: true, read() {} });
+
+  const chunks = ['a', 1, {}, []];
+
+  readable.on('data', common.mustCall((chunk) => {
+    assert.strictEqual(chunk, chunks.pop());
+  }, chunks.length));
+
+  for (const chunk of chunks) {
+    readable.unshift(chunk);
+  }
+}
+
+{
+
+  // Should not throw: https://github.com/nodejs/node/issues/27192
+  const highWaterMark = 50;
+  class ArrayReader extends Readable {
+    constructor(opt) {
+      super({ highWaterMark });
+      // The error happened only when pushing above hwm
+      this.buffer = new Array(highWaterMark * 2).fill(0).map(String);
+    }
+    _read(size) {
+      while (this.buffer.length) {
+        const chunk = this.buffer.shift();
+        if (!this.buffer.length) {
+          this.push(chunk);
+          this.push(null);
+          return true;
+        }
+        if (!this.push(chunk))
+          return;
+      }
+    }
+  }
+
+  function onRead() {
+    while (null !== (stream.read())) {
+      // Remove the 'readable' listener before unshifting
+      stream.removeListener('readable', onRead);
+      stream.unshift('a');
+      stream.on('data', (chunk) => {
+        console.log(chunk.length);
+      });
+      break;
+    }
+  }
+
+  const stream = new ArrayReader();
+  stream.once('readable', common.mustCall(onRead));
+  stream.on('end', common.mustCall(() => {}));
+
+}
