diff --git a/.github/workflows/browsers.yml b/.github/workflows/browsers.yml
new file mode 100644
index 0000000000..4bf2ab12eb
--- /dev/null
+++ b/.github/workflows/browsers.yml
@@ -0,0 +1,44 @@
+name: Browsers
+
+on: [push, pull_request]
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+ name: Browsers
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: ['ubuntu-latest', 'windows-latest', 'macos-latest']
+ browser: ['chrome', 'firefox', 'safari', 'edge']
+ bundler: ['browserify', 'esbuild', 'rollup', 'webpack']
+ exclude:
+ - os: ubuntu-latest
+ browser: safari
+ - os: windows-latest
+ browser: safari
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Use Node.js 16
+ uses: actions/setup-node@v3
+ with:
+ node-version: 16
+ - name: Restore cached dependencies
+ uses: actions/cache@v3
+ with:
+ path: node_modules
+ key: node-modules-${{ matrix.os }}-${{ hashFiles('package.json') }}
+ - name: Install dependencies
+ run: npm install
+ - name: Install Browser
+ run: ./node_modules/.bin/playwright install ${{ fromJSON('{"chrome":"chromium","edge":"msedge","firefox":"firefox","safari":"webkit"}')[matrix.browser] }}
+ - name: Bundle code
+ run: npm run test:prepare ${{ matrix.bundler }}
+ - name: Bundle readable-stream code with readable-stream specific bundlers
+ run: npm run test:readable-stream-only ${{ matrix.bundler }}
+ - name: Run Tests on Browsers
+ run: npm run test:browsers ${{ matrix.browser }} ${{ matrix.bundler }}
diff --git a/.github/workflows/bundlers.yml b/.github/workflows/bundlers.yml
new file mode 100644
index 0000000000..2e8bc15689
--- /dev/null
+++ b/.github/workflows/bundlers.yml
@@ -0,0 +1,44 @@
+name: Bundlers
+
+on: [push, pull_request]
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+ name: Bundlers
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest, windows-latest, macos-latest]
+ node-version: [12.x, 14.x, 16.x, 18.x, 20.x]
+ bundler: ['browserify', 'esbuild', 'rollup', 'webpack']
+ exclude:
+ - os: windows-latest
+ node-version: 12.x
+ - os: windows-latest
+ node-version: 14.x
+ - os: macos-latest
+ node-version: 12.x
+ - os: macos-latest
+ node-version: 14.x
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }}
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ - name: Restore cached dependencies
+ uses: actions/cache@v3
+ with:
+ path: node_modules
+ key: node-modules-${{ matrix.os }}-${{ hashFiles('package.json') }}
+ - name: Install dependencies
+ run: npm install
+ - name: Bundle code
+ run: npm run test:prepare ${{ matrix.bundler }}
+ - name: Run Tests on bundler
+ run: npm run test:bundlers ${{ matrix.bundler }}
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 0000000000..2963ccba7a
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,40 @@
+name: Lint
+
+on: [push, pull_request]
+
+permissions:
+ contents: read
+
+jobs:
+ lint:
+ name: Node.js
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest]
+ node-version: [20.x]
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }}
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ - name: Restore cached dependencies
+ uses: actions/cache@v3
+ with:
+ path: node_modules
+ key: node-modules-${{ hashFiles('package.json') }}
+ - name: Install dependencies
+ run: npm install
+ - name: Build
+ run: npm run build
+ - name: Check generated files
+ run: |
+ # Check if diff is empty
+ git diff --exit-code --stat ${{ github.sha }} -- lib/
+ - name: Check format
+ run: npm run test:format
+ - name: Check for lint issues
+ run: npm run lint
diff --git a/.github/workflows/node.yml b/.github/workflows/node.yml
new file mode 100644
index 0000000000..a5df249716
--- /dev/null
+++ b/.github/workflows/node.yml
@@ -0,0 +1,41 @@
+name: Node.js
+
+on: [push, pull_request]
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+ name: Node.js
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest, windows-latest, macos-latest]
+ node-version: [12.x, 14.x, 16.x, 18.x, 20.x, 21.x]
+ exclude:
+ - os: windows-latest
+ node-version: 12.x
+ - os: windows-latest
+ node-version: 14.x
+ - os: macos-latest
+ node-version: 12.x
+ - os: macos-latest
+ node-version: 14.x
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Use Node.js ${{ matrix.node-version }} on ${{ matrix.os }}
+ uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ - name: Restore cached dependencies
+ uses: actions/cache@v3
+ with:
+ path: node_modules
+ key: node-modules-${{ hashFiles('package.json') }}
+ - name: Install dependencies
+ run: npm install
+ - name: Run Tests
+ run: npm run coverage
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000..00f58635bc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,6 @@
+coverage/
+node_modules/
+node-*.tar.gz
+package-lock.json
+tmp/
+readable-stream-test/dist/
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000..f478d58dca
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,38 @@
+# Developer's Certificate of Origin 1.1
+
+By making a contribution to this project, I certify that:
+
+* (a) The contribution was created in whole or in part by me and I
+ have the right to submit it under the open source license
+ indicated in the file; or
+
+* (b) The contribution is based upon previous work that, to the best
+ of my knowledge, is covered under an appropriate open source
+ license and I have the right under that license to submit that
+ work with modifications, whether created in whole or in part
+ by me, under the same open source license (unless I am
+ permitted to submit under a different license), as indicated
+ in the file; or
+
+* (c) The contribution was provided directly to me by some other
+ person who certified (a), (b) or (c) and I have not modified
+ it.
+
+* (d) I understand and agree that this project and the contribution
+ are public and that a record of the contribution (including all
+ personal information I submit with it, including my sign-off) is
+ maintained indefinitely and may be redistributed consistent with
+ this project or the open source license(s) involved.
+
+## Moderation Policy
+
+The [Node.js Moderation Policy] applies to this WG.
+
+## Code of Conduct
+
+The [Node.js Code of Conduct][] applies to this WG.
+
+[Node.js Code of Conduct]:
+https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
+[Node.js Moderation Policy]:
+https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md
diff --git a/GOVERNANCE.md b/GOVERNANCE.md
new file mode 100644
index 0000000000..16ffb93f24
--- /dev/null
+++ b/GOVERNANCE.md
@@ -0,0 +1,136 @@
+### Streams Working Group
+
+The Node.js Streams project is jointly governed by a Working Group
+(WG)
+that is responsible for high-level guidance of the project.
+
+The WG has final authority over this project including:
+
+* Technical direction
+* Project governance and process (including this policy)
+* Contribution policy
+* GitHub repository hosting
+* Conduct guidelines
+* Maintaining the list of additional Collaborators
+
+For the current list of WG members, see the project
+[README.md](./README.md#current-project-team-members).
+
+### Collaborators
+
+The readable-stream GitHub repository is
+maintained by the WG and additional Collaborators who are added by the
+WG on an ongoing basis.
+
+Individuals making significant and valuable contributions are made
+Collaborators and given commit-access to the project. These
+individuals are identified by the WG and their addition as
+Collaborators is discussed during the WG meeting.
+
+_Note:_ If you make a significant contribution and are not considered
+for commit-access, log an issue or contact a WG member directly and it
+will be brought up in the next WG meeting.
+
+Modifications of the contents of the readable-stream repository are
+made on
+a collaborative basis. Anybody with a GitHub account may propose a
+modification via pull request and it will be considered by the project
+Collaborators. All pull requests must be reviewed and accepted by a
+Collaborator with sufficient expertise who is able to take full
+responsibility for the change. In the case of pull requests proposed
+by an existing Collaborator, an additional Collaborator is required
+for sign-off. Consensus should be sought if additional Collaborators
+participate and there is disagreement around a particular
+modification. See _Consensus Seeking Process_ below for further detail
+on the consensus model used for governance.
+
+Collaborators may opt to elevate significant or controversial
+modifications, or modifications that have not found consensus to the
+WG for discussion by assigning the ***WG-agenda*** tag to a pull
+request or issue. The WG should serve as the final arbiter where
+required.
+
+For the current list of Collaborators, see the project
+[README.md](./README.md#members).
+
+### WG Membership
+
+WG seats are not time-limited. There is no fixed size of the WG.
+However, the expected target is between 6 and 12, to ensure adequate
+coverage of important areas of expertise, balanced with the ability to
+make decisions efficiently.
+
+There is no specific set of requirements or qualifications for WG
+membership beyond these rules.
+
+The WG may add additional members to the WG by unanimous consensus.
+
+A WG member may be removed from the WG by voluntary resignation, or by
+unanimous consensus of all other WG members.
+
+Changes to WG membership should be posted in the agenda, and may be
+suggested as any other agenda item (see "WG Meetings" below).
+
+If an addition or removal is proposed during a meeting, and the full
+WG is not in attendance to participate, then the addition or removal
+is added to the agenda for the subsequent meeting. This is to ensure
+that all members are given the opportunity to participate in all
+membership decisions. If a WG member is unable to attend a meeting
+where a planned membership decision is being made, then their consent
+is assumed.
+
+No more than 1/3 of the WG members may be affiliated with the same
+employer. If removal or resignation of a WG member, or a change of
+employment by a WG member, creates a situation where more than 1/3 of
+the WG membership shares an employer, then the situation must be
+immediately remedied by the resignation or removal of one or more WG
+members affiliated with the over-represented employer(s).
+
+### WG Meetings
+
+The WG meets occasionally on a Google Hangout On Air. A designated moderator
+approved by the WG runs the meeting. Each meeting should be
+published to YouTube.
+
+Items are added to the WG agenda that are considered contentious or
+are modifications of governance, contribution policy, WG membership,
+or release process.
+
+The intention of the agenda is not to approve or review all patches;
+that should happen continuously on GitHub and be handled by the larger
+group of Collaborators.
+
+Any community member or contributor can ask that something be added to
+the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
+WG member or the moderator can add the item to the agenda by adding
+the ***WG-agenda*** tag to the issue.
+
+Prior to each WG meeting the moderator will share the Agenda with
+members of the WG. WG members can add any items they like to the
+agenda at the beginning of each meeting. The moderator and the WG
+cannot veto or remove items.
+
+The WG may invite persons or representatives from certain projects to
+participate in a non-voting capacity.
+
+The moderator is responsible for summarizing the discussion of each
+agenda item and sends it as a pull request after the meeting.
+
+### Consensus Seeking Process
+
+The WG follows a
+[Consensus
+Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
+decision-making model.
+
+When an agenda item has appeared to reach a consensus the moderator
+will ask "Does anyone object?" as a final call for dissent from the
+consensus.
+
+If an agenda item cannot reach a consensus a WG member can call for
+either a closing vote or a vote to table the issue to the next
+meeting. The call for a vote must be seconded by a majority of the WG
+or else the discussion will continue. Simple majority wins.
+
+Note that changes to WG membership require a majority consensus. See
+"WG Membership" above.
diff --git a/LICENSE b/LICENSE
index 0c44ae716d..2873b3b2e5 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,27 +1,47 @@
-Copyright (c) Isaac Z. Schlueter ("Author")
-All rights reserved.
-
-The BSD License
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
-BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
-OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
-IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
diff --git a/README.md b/README.md
index 403daaabba..339af1b6e9 100644
--- a/README.md
+++ b/README.md
@@ -1,127 +1,116 @@
# readable-stream
- Stability: 1 - Experimental
-
-An exploration of a new kind of readable streams for Node.js
-
-This is an abstract class designed to be extended. It also provides a
-`wrap` method that you can use to provide the simpler readable API for
-streams that have the "readable stream" interface of Node 0.8 and
-before.
-
-## Usage
-
-```javascript
-var Readable = require('readable-stream');
-var r = new Readable();
-
-r.read = function(n) {
- // your magic goes here.
- // return n bytes, or null if there is nothing to be read.
- // if you return null, then you MUST emit 'readable' at some
- // point in the future if there are bytes available, or 'end'
- // if you are not going to have any more data.
- //
- // You MUST NOT emit either 'end' or 'readable' before
- // returning from this function, but you MAY emit 'end' or
- // 'readable' in process.nextTick().
-};
-
-r.on('end', function() {
- // no more bytes will be provided.
-});
-
-r.on('readable', function() {
- // now is the time to call read() again.
-});
-```
-
-## Justification
-
-Writable streams in node are very straightforward to use and extend.
-The `write` method either returns `true` if the bytes could be
-completely handled and another `write` should be performed, or `false`
-if you would like the user to back off a bit, in which case a `drain`
-event at some point in the future will let them continue writing. The
-`end()` method lets the user indicate that no more bytes will be
-written. That's pretty much the entire required interface for
-writing.
-
-However, readable streams in Node 0.8 and before are rather
-complicated.
-
-1. The `data` events start coming right away, no matter what. There
- is no way to do other actions before consuming data, without
- handling buffering yourself.
-2. If you extend the interface in userland programs, then you must
- implement `pause()` and `resume()` methods, and take care of
- buffering yourself.
-
-So, while writers only have to implement `write()`, `end()`, and
-`drain`, readers have to implement (at minimum):
-
-* `pause()` method
-* `resume()` method
-* `data` event
-* `end` event
-
-If you are using a readable stream, and want to just get the first 10
-bytes, make a decision, and then pass the rest off to somewhere else,
-then you have to handle buffering, pausing, and so on. This is all
-rather brittle and easy to get wrong for all but the most trivial use
-cases.
+**_Node.js core streams for userland_**
-Additionally, this all made the `reader.pipe(writer)` method
-unnecessarily complicated and difficult to extend without breaking
-something. Backpressure and error handling is especially challenging
-and brittle.
+[](https://npm.im/readable-stream)
+[](https://www.npmjs.org/package/readable-stream)
+[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ANode.js)
+[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ABrowsers)
-### Solution
-
-The reader does not have pause/resume methods. If you want to consume
-the bytes, you call `read()`. If bytes are not being consumed, then
-effectively the stream is in a paused state. It exerts backpressure
-on upstream connections, doesn't read from files, etc.
-
-If `read()` returns `null`, then a future `readable` event will be
-fired when there are more bytes ready to be consumed.
-
-This is simpler and conceptually closer to the underlying mechanisms.
-The resulting `pipe()` method is much shorter and simpler.
+```bash
+npm install readable-stream
+```
-### Compatibility
+This package is a mirror of the streams implementations in Node.js 18.19.0.
+
+Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v18.19.0/docs/api/stream.html).
+
+If you want to guarantee a stable streams base, regardless of what version of
+Node you, or the users of your libraries, are using, use **readable-stream** _only_ and avoid the _"stream"_ module in Node-core; for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
+
+As of version 2.0.0 **readable-stream** uses semantic versioning.
+
+## Version 4.x.x
+
+v4.x.x of `readable-stream` is a cut from Node 18. This version supports Node 12, 14, 16 and 18, as well as evergreen browsers.
+The breaking changes introduced by v4 are composed of the combined breaking changes in:
+* [Node v12](https://nodejs.org/en/blog/release/v12.0.0/)
+* [Node v13](https://nodejs.org/en/blog/release/v13.0.0/)
+* [Node v14](https://nodejs.org/en/blog/release/v14.0.0/)
+* [Node v15](https://nodejs.org/en/blog/release/v15.0.0/)
+* [Node v16](https://nodejs.org/en/blog/release/v16.0.0/)
+* [Node v17](https://nodejs.org/en/blog/release/v17.0.0/)
+* [Node v18](https://nodejs.org/en/blog/release/v18.0.0/)
+
+This also includes _many_ new features.
+
+## Version 3.x.x
+
+v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed of the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows:
+
+1. Error codes: https://github.com/nodejs/node/pull/13310,
+ https://github.com/nodejs/node/pull/13291,
+ https://github.com/nodejs/node/pull/16589,
+ https://github.com/nodejs/node/pull/15042,
+ https://github.com/nodejs/node/pull/15665,
+ https://github.com/nodejs/readable-stream/pull/344
+2. 'readable' has precedence over flowing
+ https://github.com/nodejs/node/pull/18994
+3. make virtual methods errors consistent
+ https://github.com/nodejs/node/pull/18813
+4. updated streams error handling
+ https://github.com/nodejs/node/pull/18438
+5. writable.end should return this.
+ https://github.com/nodejs/node/pull/18780
+6. readable continues to read when push('')
+ https://github.com/nodejs/node/pull/18211
+7. add custom inspect to BufferList
+ https://github.com/nodejs/node/pull/17907
+8. always defer 'readable' with nextTick
+ https://github.com/nodejs/node/pull/17979
+
+## Version 2.x.x
+
+v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.
+
+# Usage
+
+You can swap your `require('stream')` with `require('readable-stream')`
+without any changes, if you are just using one of the main classes and
+functions.
+
+```js
+const {
+ Readable,
+ Writable,
+ Transform,
+ Duplex,
+ pipeline,
+ finished
+} = require('readable-stream')
+```
-It's not particularly difficult to wrap older-style streams in this
-new interface, or to wrap this type of stream in the older-style
-interface.
+Note that `require('stream')` will return `Stream`, while
+`require('readable-stream')` will return `Readable`. We discourage using
+whatever is exported directly, but rather use one of the properties as
+shown in the example above.
-The `Readable` class takes an argument which is an old-style stream
-with `data` events and `pause()` and `resume()` methods, and uses that
-as the data source. For example:
+## Usage In Browsers
-```javascript
-var r = new Readable(oldReadableStream);
+You will need a bundler like [`browserify`](https://github.com/browserify/browserify#readme), [`webpack`](https://webpack.js.org/), [`parcel`](https://github.com/parcel-bundler/parcel#readme) or similar. Polyfills are no longer required since version 4.2.0.
-// now you can use r.read(), and it will emit 'readable' events
-```
+# Streams Working Group
-The `Readable` class will also automatically convert into an old-style
-`data`-emitting stream if any listeners are added to the `data` event.
-So, this works fine, though you of course lose a lot of the benefits of
-the new interface:
+`readable-stream` is maintained by the Streams Working Group, which
+oversees the development and maintenance of the Streams API within
+Node.js. The responsibilities of the Streams Working Group include:
-```javascript
-var r = new ReadableThing();
+- Addressing stream issues on the Node.js issue tracker.
+- Authoring and editing stream documentation within the Node.js project.
+- Reviewing changes to stream subclasses within the Node.js project.
+- Redirecting changes to streams from the Node.js project to this
+ project.
+- Assisting in the implementation of stream providers within Node.js.
+- Recommending versions of `readable-stream` to be included in Node.js.
+- Messaging about the future of streams to give the community advance
+ notice of changes.
-r.on('data', function(chunk) {
- // ...
-});
+
-// now pause, resume, etc. are patched into place, and r will
-// continually call read() until it returns null, emitting the
-// returned chunks in 'data' events.
+## Team Members
-r.on('end', function() {
- // ...
-});
-```
+- **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
+- **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
+ - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
+- **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com>
+- **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl>
diff --git a/babel.config.cjs b/babel.config.cjs
new file mode 100644
index 0000000000..d675db2f15
--- /dev/null
+++ b/babel.config.cjs
@@ -0,0 +1,3 @@
+module.exports = {
+ plugins: ['@babel/proposal-nullish-coalescing-operator', '@babel/proposal-optional-chaining']
+}
diff --git a/build/build.mjs b/build/build.mjs
new file mode 100644
index 0000000000..feaff82f85
--- /dev/null
+++ b/build/build.mjs
@@ -0,0 +1,231 @@
+import { transform } from '@babel/core'
+import { createReadStream } from 'node:fs'
+import { mkdir, readdir, readFile, rm, writeFile } from 'node:fs/promises'
+import { dirname, resolve } from 'node:path'
+import process from 'node:process'
+import { finished } from 'node:stream/promises'
+import { fileURLToPath } from 'node:url'
+import prettier from 'prettier'
+import { Parse } from 'tar'
+import { request } from 'undici'
+import prettierConfig from '../prettier.config.cjs'
+import { aliases, skippedSources, sources } from './files.mjs'
+import { footers } from './footers.mjs'
+import { headers } from './headers.mjs'
+import { replacements } from './replacements.mjs'
+
+const baseMatcher = /^(?:lib|test)/
+const strictMatcher = /^(['"]use strict.+)/m
+
+function highlightFile(file, color) {
+ return `\x1b[${color}m${file.replace(process.cwd() + '/', '')}\x1b[0m`
+}
+
+function info(message) {
+ console.log(`\x1b[34m[INFO]\x1b[0m ${message}`)
+}
+
+async function extract(nodeVersion, tarFile) {
+ const sourcesMatcher = sources.map((s) => new RegExp(s))
+
+ info(`Extracting Node.js ${nodeVersion} tar file ...`)
+ const contents = []
+ const tarPrefix = `node-v${nodeVersion}/`
+ const parser = new Parse()
+
+ parser.on('entry', (entry) => {
+ const dst = entry.path.replace(tarPrefix, '')
+
+ if (
+ entry.type === 'Directory' ||
+ skippedSources.includes(dst) ||
+ !baseMatcher.test(dst) ||
+ !sourcesMatcher.some((s) => s.test(dst))
+ ) {
+ return entry.resume()
+ }
+
+ let buffer = Buffer.alloc(0)
+
+ entry.on('data', (chunk) => {
+ buffer = Buffer.concat([buffer, chunk])
+ })
+
+ entry.on('end', () => {
+ const content = buffer.toString('utf-8')
+
+ // Enqueue file
+ contents.push([dst, content])
+
+ // Some special cases when file aliasing is needed
+ if (aliases[dst]) {
+ for (const alias of aliases[dst]) {
+ contents.push([alias, content])
+ }
+ }
+ })
+
+ entry.resume()
+ })
+
+ await finished(tarFile.pipe(parser))
+ info('extraction done')
+ return contents
+}
+
+async function processFiles(contents) {
+ const replacementsKeys = Object.keys(replacements)
+ const headersKeys = Object.keys(headers)
+ const footersKeys = Object.keys(footers)
+
+ prettierConfig.parser = 'babel'
+
+ for (let [path, content] of contents) {
+ const modifications = []
+ const matchingReplacements = replacementsKeys.filter((k) => new RegExp(k).test(path))
+ const matchingHeaders = headersKeys.filter((k) => new RegExp(k).test(path))
+ const matchingFooters = footersKeys.filter((k) => new RegExp(k).test(path))
+
+ // Perform replacements
+ if (matchingReplacements.length) {
+ modifications.push(highlightFile('replacements', 33))
+
+ for (const matching of matchingReplacements) {
+ for (const [from, to] of replacements[matching]) {
+ content = content.replaceAll(new RegExp(from, 'gm'), to)
+ }
+ }
+ }
+
+ // Prepend headers
+ if (matchingHeaders.length) {
+ modifications.push(highlightFile('headers', 33))
+
+ for (const footerKey of matchingHeaders) {
+ for (const header of headers[footerKey]) {
+ if (content.match(strictMatcher)) {
+ content = content.replace(strictMatcher, `$&;${header}`)
+ } else {
+ content = header + content
+ }
+ }
+ }
+ }
+
+ // Append footers
+ if (matchingFooters.length) {
+ modifications.push(highlightFile('footers', 33))
+
+ for (const footerKey of matchingFooters) {
+ for (const footer of footers[footerKey]) {
+ content += footer
+ }
+ }
+ }
+
+ // Process the file through babel and prettier
+ if (path.endsWith('.js')) {
+ modifications.push(highlightFile('babel', 33), highlightFile('prettier', 33))
+ content = prettier.format(await transform(content).code.replaceAll('void 0', 'undefined'), prettierConfig)
+ }
+
+ if (!modifications.length) {
+ modifications.push('no modifications')
+ }
+
+ // Write the file
+ info(`Creating file ${highlightFile(path, 32)} (${modifications.join(', ')}) ...`)
+ await writeFile(path, content, 'utf-8')
+ }
+}
+
+async function downloadNode(nodeVersion) {
+ // Download node
+ const downloadUrl = `https://nodejs.org/dist/v${nodeVersion}/node-v${nodeVersion}.tar.gz`
+ info(`Downloading ${downloadUrl} ...`)
+ const { statusCode, body } = await request(downloadUrl, { pipelining: 0 })
+
+ if (statusCode !== 200) {
+ info(`Downloading failed with HTTP code ${statusCode}.`)
+ process.exit(1)
+ }
+
+ return body
+}
+
+async function main() {
+ const __dirname = fileURLToPath(new URL('.', import.meta.url))
+ const rootDir = resolve(__dirname, '..')
+
+ if (process.cwd() !== rootDir) {
+ console.error('Please run this from the root directory of readable-stream repository.')
+ return process.exit(1)
+ }
+
+ const nodeVersion = process.argv[2]
+
+ if (!nodeVersion?.match(/^\d+\.\d+\.\d+/)) {
+ console.error('Usage: build.js xx.yy.zz [node.tar.gz]')
+ return process.exit(1)
+ }
+
+ // Cleanup existing folder
+ await rm('lib', { recursive: true, force: true })
+ await rm('test', { recursive: true, force: true })
+
+ // Download or open the tar file
+ let tarFile
+
+ if (process.argv[3]) {
+ tarFile = createReadStream(process.argv[3])
+ } else {
+ tarFile = await downloadNode(nodeVersion)
+ }
+
+ // Extract contents
+ const contents = await extract(nodeVersion, tarFile)
+
+ // Update Node version in README.md
+ replacements['README.md'][0][1] = replacements['README.md'][0][1].replace('$2', nodeVersion)
+ replacements['README.md'][1][1] = replacements['README.md'][1][1].replace('$2', nodeVersion)
+
+ // Add custom files
+ contents.push(['lib/ours/browser.js', await readFile('src/browser.js', 'utf-8')])
+ contents.push(['lib/ours/index.js', await readFile('src/index.js', 'utf-8')])
+ contents.push(['lib/ours/errors.js', await readFile('src/errors.js', 'utf-8')])
+ contents.push(['lib/ours/primordials.js', await readFile('src/primordials.js', 'utf-8')])
+ contents.push(['lib/ours/util.js', await readFile('src/util.js', 'utf-8')])
+ contents.push(['lib/ours/util/inspect.js', await readFile('src/util/inspect.js', 'utf-8')])
+
+ for (const file of await readdir('src/test/ours')) {
+ contents.push([`test/ours/${file}`, await readFile(`src/test/ours/${file}`, 'utf-8')])
+ }
+
+ for (const file of await readdir('src/test/browser')) {
+ if (file.endsWith('fixtures')) {
+ continue
+ }
+
+ contents.push([`test/browser/${file}`, await readFile(`src/test/browser/${file}`, 'utf-8')])
+ }
+
+ for (const file of await readdir('src/test/browser/fixtures')) {
+ contents.push([`test/browser/fixtures/${file}`, await readFile(`src/test/browser/fixtures/${file}`, 'utf-8')])
+ }
+
+ contents.push(['README.md', await readFile('./README.md', 'utf-8')])
+
+ // Create paths
+ const paths = new Set(contents.map((c) => dirname(c[0])))
+ paths.delete('.')
+
+ for (const path of paths.values()) {
+ info(`Creating directory ${highlightFile(path, 32)} ...`)
+ await mkdir(path, { recursive: true, force: true })
+ }
+
+ // Perform replacements
+ await processFiles(contents)
+}
+
+await main()
diff --git a/build/files.mjs b/build/files.mjs
new file mode 100644
index 0000000000..64fd2841d0
--- /dev/null
+++ b/build/files.mjs
@@ -0,0 +1,33 @@
+export const sources = [
+ 'lib/_stream_.+',
+ 'lib/internal/streams/.+',
+ 'lib/internal/validators.js',
+ 'lib/stream.js',
+ 'lib/stream/promises.js',
+ 'test/common/fixtures.js',
+ 'test/common/fixtures.mjs',
+ 'test/common/index.js',
+ 'test/common/index.mjs',
+ 'test/common/tmpdir.js',
+ 'test/fixtures/[^/]+.txt',
+ 'test/parallel/test-readable.+',
+ 'test/parallel/test-stream.+'
+]
+
+export const skippedSources = [
+ 'lib/_stream_wrap.js',
+ 'test/parallel/test-stream-consumers.js',
+ 'test/parallel/test-stream-destroy.js',
+ 'test/parallel/test-stream-duplex.js',
+ 'test/parallel/test-stream-readable-strategy-option.js',
+ 'test/parallel/test-stream-map.js',
+ 'test/parallel/test-stream-pipeline.js',
+ 'test/parallel/test-stream-readable-async-iterators.js',
+ 'test/parallel/test-stream-wrap-drain.js',
+ 'test/parallel/test-stream-wrap-encoding.js',
+ 'test/parallel/test-stream-wrap.js',
+ 'test/parallel/test-stream-toWeb-allows-server-response.js',
+ 'test/parallel/test-readable-from-web-enqueue-then-close.js'
+]
+
+export const aliases = {}
diff --git a/build/footers.mjs b/build/footers.mjs
new file mode 100644
index 0000000000..d9316811e6
--- /dev/null
+++ b/build/footers.mjs
@@ -0,0 +1,24 @@
+const testTicksDisableHook = `
+ /* replacement start */
+ process.on('beforeExit', (code) => {
+ hook.disable();
+ });
+ /* replacement end */
+`
+
+const testParallel = `
+ /* replacement start */
+ process.on('beforeExit', (code) => {
+ if(code === 0) {
+ tap.pass('test succeeded');
+ } else {
+ tap.fail(\`test failed - exited code \${code}\`);
+ }
+ });
+ /* replacement end */
+`
+
+export const footers = {
+ 'test/parallel/test-stream-writable-samecb-singletick.js': testTicksDisableHook,
+ 'test/parallel/.+': testParallel
+}
diff --git a/build/headers.mjs b/build/headers.mjs
new file mode 100644
index 0000000000..9f056aea04
--- /dev/null
+++ b/build/headers.mjs
@@ -0,0 +1,47 @@
+const bufferRequire = `
+ /* replacement start */
+
+ const { Buffer } = require('buffer')
+
+ /* replacement end */
+`
+
+const processRequire = `
+ /* replacement start */
+
+ const process = require('process/')
+
+ /* replacement end */
+`
+
+const testPolyfills = `
+ /* replacement start */
+ const AbortController = globalThis.AbortController || require('abort-controller').AbortController;
+ const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal;
+ const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget;
+
+ if(typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function() {
+ const controller = new AbortController();
+ controller.abort();
+
+ return controller.signal;
+ }
+ }
+ /* replacement end */
+`
+
+export const headers = {
+ 'lib/stream.js':
+ [bufferRequire],
+ 'lib/internal/streams/(destroy|duplexify|end-of-stream|from|pipeline|readable|writable).js':
+ [processRequire],
+ 'test/browser/test-stream-(big-packet|pipe-cleanup-pause|pipeline|readable-event|transform-constructor-set-methods|transform-split-objectmode|unshift-empty-chunk|unshift-read-race|writable-change-default-encoding|writable-constructor-set-methods|writable-decoded-encoding|writev).js':
+ [bufferRequire],
+ 'test/browser/test-stream2-(base64-single-char-read-end|compatibility|large-read-stall|pipe-error-handling|readable-empty-buffer-no-eof|readable-from-list|readable-legacy-drain|readable-non-empty-end|readable-wrap|set-encoding|transform|writable).js':
+ [bufferRequire],
+ 'test/browser/test-stream3-pause-then-read.js':
+ [bufferRequire],
+ 'test/parallel/test-stream-(add-abort-signal|drop-take|duplex-destroy|flatMap|forEach|filter|finished|readable-destroy|reduce|toArray|writable-destroy).js':
+ [testPolyfills]
+}
diff --git a/build/replacements.mjs b/build/replacements.mjs
new file mode 100644
index 0000000000..e5d97ecc7e
--- /dev/null
+++ b/build/replacements.mjs
@@ -0,0 +1,399 @@
+const legacyStreamsRequireStream = ["require\\('stream'\\)", "require('./stream')"]
+
+const internalStreamsBufferPolyfill = [
+ "'use strict'",
+ `
+ 'use strict'
+
+ const bufferModule = require('buffer');
+ `
+]
+
+const noNodeColon = ["node:", '']
+
+const internalStreamsAbortControllerPolyfill = [
+ "'use strict'",
+ `
+ 'use strict'
+
+ `
+]
+
+const internalStreamsAbortControllerPolyfill2 = [
+ "'use strict'",
+ `
+ 'use strict'
+
+ const AbortController = globalThis.AbortController || require(\'abort-controller\').AbortController;
+
+ `
+]
+
+const internalStreamsNoRequireBlob = [
+ "const \\{\\n isBlob,\\n\\} = require\\('internal/blob'\\);",
+ `
+ const Blob = globalThis.Blob || bufferModule.Blob;
+ const isBlob = typeof Blob !== 'undefined' ? function isBlob (b) { return b instanceof Blob } : function isBlob(b) { return false; }
+ `
+]
+
+const internalStreamsInspectCustom = ['inspect.custom', "Symbol.for('nodejs.util.inspect.custom')"]
+
+const internalStreamsNoRequireAbortController = [
+ 'const \\{ AbortController \\} = .+',
+ 'const AbortController = globalThis.AbortController || require(\'abort-controller\').AbortController;'
+]
+
+const internalStreamsNoRequireAbortController2 = [
+ 'const \\{ AbortController, AbortSignal \\} = .+',
+ 'const AbortController = globalThis.AbortController || require(\'abort-controller\').AbortController;'
+]
+
+const internalStreamsRequireInternal = ["require\\('internal/([^']+)'\\)", "require('../$1')"]
+
+const internalStreamsRequireErrors = ["require\\('internal/errors'\\)", "require('../../ours/errors')"]
+
+const internalStreamsRequireEventEmitter = ['const EE =', 'const { EventEmitter: EE } =']
+
+const internalStreamsRequirePrimordials = ['= primordials', "= require('../../ours/primordials')"]
+
+const internalStreamsRequireRelativeUtil = [
+ 'const \\{ (once|createDeferredPromise|) \\} = .+;',
+ "const { $1 } = require('../../ours/util');"
+]
+
+const internalStreamsRequireRelativeDuplex = ['instanceof Stream.Duplex', "instanceof require('./duplex')"]
+
+const internalStreamsRequireStream = ["require\\('stream'\\)", "require('../../stream')"]
+
+const internalStreamsRequireStreams = ["require\\('internal/streams/([^']+)'\\)", "require('./$1')"]
+
+const streamSlashPromisesToStreamDotPromises = ["require\\('(node:)?stream/promises'\\)", "require('../../lib/stream').promises"]
+
+const internalStreamsRequireUtil = [
+ "require\\('internal/util(?:/(?:debuglog|inspect))?'\\)",
+ "require('../../ours/util')"
+]
+
+const internalStreamsRequireUtilDebuglog = ["require\\('internal/util/debuglog'\\)", "require('../../ours/util')"]
+
+const internalStreamsRequireWebStream = ["require\\('internal/webstreams/adapters'\\)", '{}']
+
+const internalStreamsWeakHandler = [
+ "const \\{ kWeakHandler \\} = require\\('../event_target'\\);",
+ "require('../event_target');const kWeakHandler = require('../../ours/primordials').Symbol('kWeak');"
+]
+
+const internalStreamsWeakHandler2 = [
+ "const \\{ kWeakHandler, kResistStopPropagation \\} = .*;",
+ "const kWeakHandler = require('../../ours/primordials').Symbol('kWeak');\nconst kResistStopPropagation = require('../../ours/primordials').Symbol('kResistStopPropagation');"
+]
+
+const internalValidatorsNoCoalesceAssignment = [
+ '\\s*(.+) \\?\\?= (.+)',
+ `
+ if (typeof $1 === 'undefined') {
+ $1 = $2
+ }
+ `
+]
+
+const internalValidatorsNoRequireSignals = [
+ "const \\{ signals \\} = internalBinding\\('constants'\\).os;",
+ 'const signals = {};'
+]
+
+const internalValidatorsRequireAssert = ["require\\('internal/assert'\\)", "require('assert')"]
+
+const internalValidatorsRequireAsyncHooks = ["require\\('./async_hooks'\\)", "require('internal/async_hooks')"]
+
+const internalValidatorsRequireErrors = ["require\\('internal/errors'\\)", "require('../ours/errors')"]
+
+const internalValidatorsRequirePrimordials = ['= primordials', "= require('../ours/primordials')"]
+
+const internalValidatorsRequireRelativeUtil = ["require\\('internal/util'\\)", "require('../ours/util')"]
+
+const internalValidatorsRequireUtilTypes = ["require\\('internal/util/types'\\)", "require('../ours/util').types"]
+
+const internalValidatorsNumericSeparator = ['4_294_967_295', '4294967295']
+
+const streamIndexIsUint8Array = [
+ "Stream._isUint8Array = require\\('internal/util/types'\\).isUint8Array;",
+ `
+ Stream._isUint8Array = function isUint8Array(value) {
+ return value instanceof Uint8Array
+ };
+ `
+]
+
+const streamIndexRequireInternal = ["require\\('internal/([^']+)'\\)", "require('./internal/$1')"]
+
+const streamIndexRequireInternalBuffer = ["require\\('internal/buffer'\\)", '{}']
+
+const streamIndexRequireErrors = ["require\\('internal/errors'\\);", "require('./ours/errors');"]
+
+const streamIndexRequirePrimordials = ['= primordials', "= require('./ours/primordials')"]
+
+const streamIndexRequirePromises = ["require\\('stream/promises'\\);", "require('./stream/promises');"]
+
+const streamIndexRequireUtil = ["require\\('internal/util'\\)", "require('./ours/util')"]
+
+const streamIndexUint8ArrayToBuffer = ['new internalBuffer.FastBuffer', 'Buffer.from']
+
+const streamsRequireErrors = ["require\\('internal/errors'\\)", "require('../ours/errors')"]
+
+const streamsRequireInternal = ["require\\('internal/(.+)'\\)", "require('../internal/$1')"]
+
+const streamsRequirePrimordials = ['= primordials', "= require('../ours/primordials')"]
+
+const stringDecoderRequirePackage = ["require\\('string_decoder'\\)", "require('string_decoder/')"]
+
+const testCommonKnownGlobals = [
+ 'let knownGlobals = \\[(\\n\\s+)',
+ `
+ let knownGlobals = [\n
+ typeof AggregateError !== 'undefined' ? AggregateError : require('../../lib/ours/util').AggregateError,
+ typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController,
+ typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal,
+ typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget,
+ typeof navigator !== 'undefined' ? navigator : {},
+ `
+]
+
+const testParallelBindings = [
+ "const \\{ internalBinding \\} = require\\('../../lib/internal/test/binding'\\);",
+ 'const internalBinding = process.binding'
+]
+
+const testParallelHasOwn = ['Object.hasOwn\\(', 'Reflect.has(']
+
+const testParallelIncludeTap = [
+ "('use strict')",
+ `
+ $1
+
+ const tap = require('tap');
+ const silentConsole = { log() {}, error() {} };
+ `
+]
+
+const testParallelImportStreamInMjs = [" from 'stream';", "from '../../lib/ours/index.js';"]
+
+const testParallelImportTapInMjs = ["(from 'assert';)", "$1\nimport tap from 'tap';"]
+
+const testParallelDuplexFromBlob = [
+ "const \\{ Blob \\} = require\\('buffer'\\);",
+ "const Blob = globalThis.Blob || require('buffer').Blob"
+]
+
+const testParallelDuplexSkipWithoutBlob = [
+ "(\\{\n const blob = new Blob\\(\\['blob'\\]\\))",
+ "if (typeof Blob !== 'undefined') $1"
+]
+
+const testParallelFinishedEvent = ["res.on\\('close", "res.on('finish"]
+
+const testParallelFlatMapWinLineSeparator = [
+ "'xyz\\\\n'\\.repeat\\(5\\)",
+ "(process.platform === 'win32' ? 'xyz\\r\\n' : 'xyz\\n').repeat(5)"
+]
+
+const testParallelPreprocessWinLineSeparator = [
+ 'assert.strictEqual\\(streamedData, modelData\\);',
+ "assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\\r\\n/g, '\\n') : modelData);"
+]
+
+const testParallelReadableBufferListInspect = [
+ 'assert.strictEqual\\(\\n\\s+util.inspect\\(\\[ list \\], \\{ compact: false \\}\\),\\n\\s+`\\[\\n\\s+BufferList \\{\\n\\s+head: \\[Object\\],\\n\\s+tail: \\[Object\\],\\n\\s+length: 4\\n\\s+\\}\\n\\]`\\);',
+ `
+ assert.strictEqual(typeof list.head, 'object');
+ assert.strictEqual(typeof list.tail, 'object');
+ assert.strictEqual(list.length, 4);
+ `
+]
+
+const testParallelRequireStream = ["require\\('stream'\\)", "require('../../lib/ours/index')"]
+
+const testParallelRequireStreamConsumer = ["require\\('stream/consumer'\\)", "require('../../lib/stream/consumer')"]
+
+const testParallelRequireStreamInternals = ["require\\('(internal/.+)'\\)", "require('../../lib/$1')"]
+
+const testParallelRequireStreamInternalsLegacy = ["require\\('(_stream_\\w+)'\\)", "require('../../lib/$1')"]
+
+const testParallelRequireStreamPromises = ["require\\('stream/promises'\\)", "require('../../lib/stream/promises')"]
+
+const testParallelRequireStreamWeb = ["require\\('stream/web'\\)", "require('../../lib/stream/web')"]
+
+const testParallelSilentConsole = ['console.(log|error)', 'silentConsole.$1']
+
+const testParallelTimersPromises = [
+ "const { setTimeout } = require\\('timers/promises'\\);",
+ `
+ const st = require('timers').setTimeout;
+
+ function setTimeout(ms) {
+ return new Promise(resolve => {
+ st(resolve, ms);
+ });
+ }
+ `
+]
+
+const testParallelTicksReenableConsoleLog = ['silentConsole.log\\(i\\);', 'console.log(i);']
+
+const testParallelTickSaveHook = ['async_hooks.createHook\\(\\{', 'const hook = async_hooks.createHook({']
+
+const removefromWebReadableMethod = ['Readable.fromWeb = function\\s\\s*\\([^)]*\\)\\s*{[^}]*}', '']
+
+const removetoWebReadableMethod = ['Readable.toWeb = function\\s\\s*\\([^)]*\\)\\s*{[^}]*}', '']
+
+const readmeInfo = ['(This package is a mirror of the streams implementations in Node.js) (\\d+.\\d+.\\d+).', '$1 $2.']
+
+const readmeLink = ['(\\[Node.js website\\]\\(https://nodejs.org/dist/v)(\\d+.\\d+.\\d+)', '$1$2']
+
+const streamRequire = ["require\\('stream'\\)", "require('../../lib/stream.js')"]
+
+const removeWebStreamsFromDuplexFromTest = [
+ 'const { ReadableStream, WritableStream } = .+;',
+ `function makeATestReadableStream(value) {
+ return Readable.from([value])
+}
+function makeATestWritableStream(writeFunc) {
+ return new Writable({
+ write(chunk, enc, cb) {
+ writeFunc(chunk)
+ cb()
+ }
+ })
+}
+`
+]
+
+const duplexFromTestWebStreamNeutralizeReadable = [
+ 'makeATestReadableStream\\(value\\) {',
+ 'makeATestReadableStreamOff(value) {'
+]
+
+const duplexFromTestWebStreamNeutralizeWritable = [
+ 'makeATestWritableStream\\(writeFunc\\) {',
+ 'makeATestWritableStreamOff(writeFunc) {'
+]
+
+const polyfillAddAbortListener = [
+ 'addAbortListener \\?\\?= require\\(\'events\'\\)\\.addAbortListener',
+ 'addAbortListener = addAbortListener || require(\'../../ours/util\').addAbortListener'
+]
+
+const abortSignalAny = [
+ 'AbortSignal.any',
+ 'require(\'../../ours/util\').AbortSignalAny'
+]
+
+const asyncDisposeTest = [
+ 'Symbol.asyncDispose',
+ 'require(\'../../lib/ours/primordials\').SymbolAsyncDispose'
+]
+
+export const replacements = {
+ 'lib/_stream.+': [legacyStreamsRequireStream],
+ 'lib/internal/streams/duplexify.+': [
+ internalStreamsBufferPolyfill,
+ internalStreamsAbortControllerPolyfill,
+ internalStreamsNoRequireBlob,
+ internalStreamsNoRequireAbortController
+ ],
+ 'lib/internal/streams/(operators|pipeline).+': [
+ internalStreamsAbortControllerPolyfill,
+ internalStreamsNoRequireAbortController,
+ internalStreamsNoRequireAbortController2,
+ internalStreamsWeakHandler2,
+ abortSignalAny
+ ],
+ 'lib/internal/streams/add-abort-signal.js': [
+ polyfillAddAbortListener
+ ],
+ 'lib/internal/streams/readable.js': [
+ removefromWebReadableMethod,
+ removetoWebReadableMethod,
+ stringDecoderRequirePackage
+ ],
+ 'lib/internal/streams/.+': [
+ internalStreamsRequireErrors,
+ internalStreamsRequireEventEmitter,
+ internalStreamsRequirePrimordials,
+ internalStreamsRequireRelativeDuplex,
+ internalStreamsRequireRelativeUtil,
+ internalStreamsRequireStream,
+ internalStreamsRequireStreams,
+ internalStreamsRequireUtil,
+ internalStreamsRequireUtilDebuglog,
+ internalStreamsRequireWebStream,
+ internalStreamsRequireInternal,
+ internalStreamsWeakHandler,
+ internalStreamsInspectCustom,
+ polyfillAddAbortListener
+ ],
+ 'lib/internal/validators.js': [
+ internalValidatorsRequireAssert,
+ internalValidatorsRequireAsyncHooks,
+ internalValidatorsRequireErrors,
+ internalValidatorsRequirePrimordials,
+ internalValidatorsRequireRelativeUtil,
+ internalValidatorsRequireUtilTypes,
+ internalValidatorsNoRequireSignals,
+ internalValidatorsNoCoalesceAssignment,
+ internalValidatorsNumericSeparator
+ ],
+ 'lib/stream.js': [
+ streamIndexIsUint8Array,
+ streamIndexUint8ArrayToBuffer,
+ streamIndexRequireInternalBuffer,
+ streamIndexRequireErrors,
+ streamIndexRequirePrimordials,
+ streamIndexRequirePromises,
+ streamIndexRequireUtil,
+ streamIndexRequireInternal
+ ],
+ 'lib/stream/.+': [streamsRequireErrors, streamsRequirePrimordials, streamsRequireInternal, streamRequire],
+ 'test/common/index.js': [testCommonKnownGlobals],
+ 'test/parallel/.+': [
+ testParallelIncludeTap,
+ testParallelRequireStream,
+ testParallelRequireStreamConsumer,
+ testParallelRequireStreamInternals,
+ testParallelRequireStreamInternalsLegacy,
+ testParallelRequireStreamPromises,
+ testParallelRequireStreamWeb,
+ testParallelImportStreamInMjs,
+ testParallelImportTapInMjs,
+ testParallelBindings,
+ testParallelHasOwn,
+ testParallelSilentConsole,
+ testParallelTimersPromises,
+ noNodeColon
+ ],
+ 'test/parallel/test-stream-duplex-from.js': [
+ testParallelDuplexFromBlob,
+ testParallelDuplexSkipWithoutBlob,
+ duplexFromTestWebStreamNeutralizeReadable,
+ duplexFromTestWebStreamNeutralizeWritable,
+ removeWebStreamsFromDuplexFromTest
+ ],
+ 'test/parallel/test-stream-finished.js': [testParallelFinishedEvent],
+ 'test/parallel/test-stream-readable-dispose.js': [asyncDisposeTest],
+ 'test/parallel/test-stream-flatMap.js': [testParallelFlatMapWinLineSeparator],
+ 'test/parallel/test-stream-preprocess.js': [testParallelPreprocessWinLineSeparator],
+ 'test/parallel/test-stream-writable-samecb-singletick.js': [
+ testParallelTicksReenableConsoleLog,
+ testParallelTickSaveHook
+ ],
+ 'test/parallel/test-stream3-pipeline-async-iterator.js': [
+ internalStreamsAbortControllerPolyfill2,
+ streamSlashPromisesToStreamDotPromises
+ ],
+ 'test/parallel/test-stream-compose-operator.js': [
+ internalStreamsAbortControllerPolyfill2
+ ],
+ 'test/parallel/test-stream2-readable-from-list.js': [testParallelReadableBufferListInspect],
+ 'README.md': [readmeInfo, readmeLink]
+}
diff --git a/c8.json b/c8.json
new file mode 100644
index 0000000000..ea07a2272a
--- /dev/null
+++ b/c8.json
@@ -0,0 +1,9 @@
+{
+ "include": ["lib"],
+ "reporter": ["text", "html"],
+ "check-coverage": true,
+ "branches": 50,
+ "functions": 50,
+ "lines": 50,
+ "statements": 50
+}
\ No newline at end of file
diff --git a/eslint-plugin-local/index.mjs b/eslint-plugin-local/index.mjs
new file mode 100644
index 0000000000..ade9e6923e
--- /dev/null
+++ b/eslint-plugin-local/index.mjs
@@ -0,0 +1,9 @@
+'use strict'
+
+import nbi from './no-big-int.mjs'
+
+export default {
+ rules: {
+ 'no-big-int': nbi,
+ },
+}
\ No newline at end of file
diff --git a/eslint-plugin-local/no-big-int.mjs b/eslint-plugin-local/no-big-int.mjs
new file mode 100644
index 0000000000..242c08ab0d
--- /dev/null
+++ b/eslint-plugin-local/no-big-int.mjs
@@ -0,0 +1,26 @@
+'use strict'
+
+export default {
+ meta: {
+ docs: {
+ description: 'disallow `bigint` syntax',
+ category: 'ES2020',
+ recommended: false,
+ },
+ fixable: null,
+ messages: {
+ forbidden: 'ES2020 `bigint` syntax is forbidden.',
+ },
+ schema: [],
+ type: 'problem',
+ },
+ create(context) {
+ return {
+ Literal(node) {
+ if (node.bigint != null) {
+ context.report({ messageId: 'forbidden', node })
+ }
+ },
+ }
+ },
+}
diff --git a/eslint.config.mjs b/eslint.config.mjs
new file mode 100644
index 0000000000..c6b39328f5
--- /dev/null
+++ b/eslint.config.mjs
@@ -0,0 +1,27 @@
+import { FlatCompat } from '@eslint/eslintrc'
+import eslintPluginLocal from './eslint-plugin-local/index.mjs'
+
+const compat = new FlatCompat()
+
+export default [
+ // standard,
+ ...compat.extends('eslint-config-standard'),
+ {
+ files: ['**/**.js', '**/**.mjs'],
+ languageOptions: {
+ sourceType: 'module',
+ ecmaVersion: 'latest'
+ },
+ plugins: { local: eslintPluginLocal },
+ rules: {
+ /*
+ This is inserted to make this compatible with prettier.
+ Once https://github.com/prettier/prettier/issues/3845 and https://github.com/prettier/prettier/issues/3847 are solved this might be not needed any more.
+ */
+ 'space-before-function-paren': 0,
+ curly: [2, 'all'],
+ 'local/no-big-int': 'error',
+ 'no-undef': 'warn'
+ }
+ }
+]
diff --git a/examples/capslock-type.cjs b/examples/capslock-type.cjs
new file mode 100644
index 0000000000..f7b48bdbf2
--- /dev/null
+++ b/examples/capslock-type.cjs
@@ -0,0 +1,31 @@
+'use strict'
+
+const { Transform } = require('../lib/ours/index')
+
+class MyStream extends Transform {
+ _transform(chunk, encoding, callback) {
+ callback(null, Buffer.from(chunk, encoding).toString('utf-8').toUpperCase())
+ }
+}
+
+const s = new MyStream()
+
+process.stdin.resume()
+process.stdin.pipe(s).pipe(process.stdout)
+
+if (process.stdin.setRawMode) {
+ process.stdin.setRawMode(true)
+}
+
+process.stdin.on('data', function (c) {
+ c = c.toString()
+
+ if (c === '\u0003' || c === '\u0004') {
+ process.stdin.pause()
+ s.end()
+ }
+
+ if (c === '\r') {
+ process.stdout.write('\n')
+ }
+})
diff --git a/examples/typer-fsr.js b/examples/typer-fsr.js
deleted file mode 100644
index 7e715844bd..0000000000
--- a/examples/typer-fsr.js
+++ /dev/null
@@ -1,15 +0,0 @@
-var fs = require('fs');
-var FSReadable = require('../fs.js');
-var rst = new FSReadable(__filename);
-
-rst.on('end', function() {
- process.stdin.pause();
-});
-
-process.stdin.setRawMode(true);
-process.stdin.on('data', function() {
- var c = rst.read(3);
- if (!c) return;
- process.stdout.write(c);
-});
-process.stdin.resume();
diff --git a/examples/typer.js b/examples/typer.js
deleted file mode 100644
index a86f8e8643..0000000000
--- a/examples/typer.js
+++ /dev/null
@@ -1,17 +0,0 @@
-var fs = require('fs');
-var fst = fs.createReadStream(__filename);
-var Readable = require('../readable.js');
-var rst = new Readable();
-rst.wrap(fst);
-
-rst.on('end', function() {
- process.stdin.pause();
-});
-
-process.stdin.setRawMode(true);
-process.stdin.on('data', function() {
- var c = rst.read(1);
- if (!c) return;
- process.stdout.write(c);
-});
-process.stdin.resume();
diff --git a/examples/typer.mjs b/examples/typer.mjs
new file mode 100644
index 0000000000..c063b5d187
--- /dev/null
+++ b/examples/typer.mjs
@@ -0,0 +1,24 @@
+import { createReadStream } from 'node:fs'
+import process from 'node:process'
+import { fileURLToPath } from 'node:url'
+import { Readable } from '../lib/ours/index.js'
+
+const fst = createReadStream(fileURLToPath(new URL(import.meta.url)))
+const rst = new Readable()
+
+rst.wrap(fst)
+
+rst.on('end', function () {
+ process.stdin.pause()
+})
+
+console.log("Every time you press a key, you will see more contents of the source file. Let's begin!\n\n")
+process.stdin.setRawMode(true)
+process.stdin.on('data', function () {
+ const c = rst.read(100)
+ if (!c) {
+ return setTimeout(process.exit, 500)
+ }
+ process.stdout.write(c)
+})
+process.stdin.resume()
diff --git a/from-list.js b/from-list.js
deleted file mode 100644
index 75234861f8..0000000000
--- a/from-list.js
+++ /dev/null
@@ -1,59 +0,0 @@
-'use strict';
-
-// Read n bytes from the supplied list of buffers.
-// the length is the sum of all the buffers in the list.
-
-module.exports = fromList;
-
-function fromList(n, list, length) {
- var ret;
-
- // nothing in the list, definitely empty.
- if (list.length === 0) {
- return null;
- }
-
- if (typeof length === 'undefined') {
- // didn't tell us the length of the list.
- // flatten and proceed from there.
- var buf = Buffer.concat(list);
- length = buf.length;
- list.length = 0;
- list.push(buf);
- }
-
- if (length === 0) {
- ret = null;
- } else if (!n || n >= length) {
- // read it all, truncate the array.
- ret = Buffer.concat(list);
- list.length = 0;
- } else {
- // read just some of it.
- if (n < list[0].length) {
- // just take a part of the first list item.
- var buf = list[0];
- ret = buf.slice(0, n);
- list[0] = buf.slice(n);
- } else if (n === list[0].length) {
- // first list is a perfect match
- ret = list.shift();
- } else {
- // complex case.
- // we have enough to cover it, but it spans past the first buffer.
- ret = new Buffer(n);
- var c = 0;
- for (var i = 0, l = list.length; i < l && c < n; i++) {
- var buf = list.shift();
- var cpy = Math.min(n - c, buf.length);
- buf.copy(ret, c, 0, cpy);
- if (cpy < buf.length) {
- list.unshift(buf.slice(cpy));
- }
- c += cpy;
- }
- }
- }
-
- return ret;
-}
diff --git a/fs.js b/fs.js
deleted file mode 100644
index 729228feb8..0000000000
--- a/fs.js
+++ /dev/null
@@ -1,158 +0,0 @@
-'use strict';
-
-module.exports = FSReadable;
-
-// This uses the existing bindings in Node's FS module to
-// implement a read-method style readable stream.
-//
-// In a perfect world, some of this dancing and buffering would
-// not be necessary; we could just open the file using async IO,
-// and then read() synchronously until we raise EWOULDBLOCK.
-//
-// It a just-slightly-less imperfect world, FS readable streams
-// would be the *only* stream that implements this kind of buffering
-// behavior, since TCP and pipes can be reliably implemented in this
-// fashion at a much lower level.
-
-var Readable = require('./readable.js');
-var util = require('util');
-var fs = require('fs');
-var StringDecoder = require('string_decoder').StringDecoder;
-var assert = require('assert');
-
-// a very basic memory pool. this optimization helps revent lots
-// of allocations when there are many fs readable streams happening
-// concurrently.
-var pool;
-var minPoolSpace = 128;
-var poolSize = 40 * 1024;
-function allocNewPool() {
- pool = new Buffer(poolSize);
- pool.used = 0;
-}
-
-util.inherits(FSReadable, Readable);
-
-function FSReadable(path, options) {
- if (!options) options = {};
-
- Readable.apply(this, options);
-
- this.path = path;
- this.flags = 'r';
- this.mode = 438; //=0666
- this.fd = null;
- this.bufferSize = 64 * 1024;
- this.lowWaterMark = 16 * 1024;
-
- Object.keys(options).forEach(function(k) {
- this[k] = options[k];
- }, this);
-
- // cast to an int
- assert(typeof this.bufferSize === 'number');
- this.bufferSize = ~~this.bufferSize;
-
- if (this.encoding) {
- this._decoder = new StringDecoder(this.encoding);
- }
-
- var typeofStart = typeof this.start;
- if (typeofStart !== 'undefined') {
- if (typeofStart !== 'number') {
- throw new TypeError('start must be a Number');
- }
- var typeofEnd = typeof this.end;
- if (typeofEnd === 'undefined') {
- this.end = Infinity;
- } else if (typeofEnd !== 'number') {
- throw new TypeError('end must be a Number');
- }
-
- this.pos = this.start;
- }
-
- if (typeof this.fd !== 'number') {
- this.open();
- }
-}
-
-FSReadable.prototype.open = function() {
- fs.open(this.path, this.flags, this.mode, function(er, fd) {
- if (er) {
- this.destroy();
- this.emit('error', er);
- return;
- }
-
- this.fd = fd;
- this.emit('open', fd);
- }.bind(this));
-}
-
-FSReadable.prototype._read = function(n, cb) {
- if (this.fd === null) {
- this.once('open', this._read.bind(this, n, cb));
- return;
- }
-
- if (this.reading || this.ended || this.destroyed) return;
- this.reading = true;
-
- if (!pool || pool.length - pool.used < minPoolSpace) {
- // discard the old pool. Can't add to the free list because
- // users might have refernces to slices on it.
- pool = null;
- allocNewPool();
- }
-
- var thisPool = pool;
- var toRead = Math.min(pool.length - pool.used, n);
- var start = pool.used;
-
- if (this.pos !== undefined) {
- toRead = Math.min(this.end - this.pos + 1, toRead);
- }
-
- if (toRead <= 0) {
- this.reading = false;
- this.emit('readable');
- return;
- }
-
- fs.read(this.fd, pool, pool.used, toRead, this.pos, onread.bind(this));
-
- function onread(er, bytesRead) {
- this.reading = false;
-
- if (er) {
- this.destroy();
- return cb(er);
- }
-
- var b = null;
- if (bytesRead > 0) {
- b = thisPool.slice(start, start + bytesRead);
- }
- cb(null, b);
- }
-}
-
-FSReadable.prototype.close = function(cb) {
- if (cb) this.once('close', cb);
- if (this.closed || this.fd === null) {
- if (this.fd === null) this.once('open', this.destroy);
- return process.nextTick(this.emit.bind(this, 'close'));
- }
- this.closed = true;
-
- fs.close(this.fd, function(er) {
- if (er) this.emit('error', er);
- else this.emit('close');
- }.bind(this));
-};
-
-FSReadable.prototype.destroy = function() {
- this.destroyed = true;
- fs.close(this.fd, function() {});
-};
diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js
new file mode 100644
index 0000000000..e03c6bf5ff
--- /dev/null
+++ b/lib/_stream_duplex.js
@@ -0,0 +1,4 @@
+'use strict'
+
+// Keep this file as an alias for the full stream module.
+module.exports = require('./stream').Duplex
diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js
new file mode 100644
index 0000000000..1206dc4555
--- /dev/null
+++ b/lib/_stream_passthrough.js
@@ -0,0 +1,4 @@
+'use strict'
+
+// Keep this file as an alias for the full stream module.
+module.exports = require('./stream').PassThrough
diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
new file mode 100644
index 0000000000..49416586f2
--- /dev/null
+++ b/lib/_stream_readable.js
@@ -0,0 +1,4 @@
+'use strict'
+
+// Keep this file as an alias for the full stream module.
+module.exports = require('./stream').Readable
diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js
new file mode 100644
index 0000000000..ef227b12c5
--- /dev/null
+++ b/lib/_stream_transform.js
@@ -0,0 +1,4 @@
+'use strict'
+
+// Keep this file as an alias for the full stream module.
+module.exports = require('./stream').Transform
diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js
new file mode 100644
index 0000000000..00c7b037ce
--- /dev/null
+++ b/lib/_stream_writable.js
@@ -0,0 +1,4 @@
+'use strict'
+
+// Keep this file as an alias for the full stream module.
+module.exports = require('./stream').Writable
diff --git a/lib/internal/streams/add-abort-signal.js b/lib/internal/streams/add-abort-signal.js
new file mode 100644
index 0000000000..3475b6e047
--- /dev/null
+++ b/lib/internal/streams/add-abort-signal.js
@@ -0,0 +1,52 @@
+'use strict'
+
+const { SymbolDispose } = require('../../ours/primordials')
+const { AbortError, codes } = require('../../ours/errors')
+const { isNodeStream, isWebStream, kControllerErrorFunction } = require('./utils')
+const eos = require('./end-of-stream')
+const { ERR_INVALID_ARG_TYPE } = codes
+let addAbortListener
+
+// This method is inlined here for readable-stream
+// It also does not allow for signal to not exist on the stream
+// https://github.com/nodejs/node/pull/36061#discussion_r533718029
+const validateAbortSignal = (signal, name) => {
+ if (typeof signal !== 'object' || !('aborted' in signal)) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
+ }
+}
+module.exports.addAbortSignal = function addAbortSignal(signal, stream) {
+ validateAbortSignal(signal, 'signal')
+ if (!isNodeStream(stream) && !isWebStream(stream)) {
+ throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)
+ }
+ return module.exports.addAbortSignalNoValidate(signal, stream)
+}
+module.exports.addAbortSignalNoValidate = function (signal, stream) {
+ if (typeof signal !== 'object' || !('aborted' in signal)) {
+ return stream
+ }
+ const onAbort = isNodeStream(stream)
+ ? () => {
+ stream.destroy(
+ new AbortError(undefined, {
+ cause: signal.reason
+ })
+ )
+ }
+ : () => {
+ stream[kControllerErrorFunction](
+ new AbortError(undefined, {
+ cause: signal.reason
+ })
+ )
+ }
+ if (signal.aborted) {
+ onAbort()
+ } else {
+ addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
+ const disposable = addAbortListener(signal, onAbort)
+ eos(stream, disposable[SymbolDispose])
+ }
+ return stream
+}
diff --git a/lib/internal/streams/buffer_list.js b/lib/internal/streams/buffer_list.js
new file mode 100644
index 0000000000..b55e35cf9a
--- /dev/null
+++ b/lib/internal/streams/buffer_list.js
@@ -0,0 +1,157 @@
+'use strict'
+
+const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = require('../../ours/primordials')
+const { Buffer } = require('buffer')
+const { inspect } = require('../../ours/util')
+module.exports = class BufferList {
+ constructor() {
+ this.head = null
+ this.tail = null
+ this.length = 0
+ }
+ push(v) {
+ const entry = {
+ data: v,
+ next: null
+ }
+ if (this.length > 0) this.tail.next = entry
+ else this.head = entry
+ this.tail = entry
+ ++this.length
+ }
+ unshift(v) {
+ const entry = {
+ data: v,
+ next: this.head
+ }
+ if (this.length === 0) this.tail = entry
+ this.head = entry
+ ++this.length
+ }
+ shift() {
+ if (this.length === 0) return
+ const ret = this.head.data
+ if (this.length === 1) this.head = this.tail = null
+ else this.head = this.head.next
+ --this.length
+ return ret
+ }
+ clear() {
+ this.head = this.tail = null
+ this.length = 0
+ }
+ join(s) {
+ if (this.length === 0) return ''
+ let p = this.head
+ let ret = '' + p.data
+ while ((p = p.next) !== null) ret += s + p.data
+ return ret
+ }
+ concat(n) {
+ if (this.length === 0) return Buffer.alloc(0)
+ const ret = Buffer.allocUnsafe(n >>> 0)
+ let p = this.head
+ let i = 0
+ while (p) {
+ TypedArrayPrototypeSet(ret, p.data, i)
+ i += p.data.length
+ p = p.next
+ }
+ return ret
+ }
+
+ // Consumes a specified amount of bytes or characters from the buffered data.
+ consume(n, hasStrings) {
+ const data = this.head.data
+ if (n < data.length) {
+ // `slice` is the same for buffers and strings.
+ const slice = data.slice(0, n)
+ this.head.data = data.slice(n)
+ return slice
+ }
+ if (n === data.length) {
+ // First chunk is a perfect match.
+ return this.shift()
+ }
+ // Result spans more than one buffer.
+ return hasStrings ? this._getString(n) : this._getBuffer(n)
+ }
+ first() {
+ return this.head.data
+ }
+ *[SymbolIterator]() {
+ for (let p = this.head; p; p = p.next) {
+ yield p.data
+ }
+ }
+
+ // Consumes a specified amount of characters from the buffered data.
+ _getString(n) {
+ let ret = ''
+ let p = this.head
+ let c = 0
+ do {
+ const str = p.data
+ if (n > str.length) {
+ ret += str
+ n -= str.length
+ } else {
+ if (n === str.length) {
+ ret += str
+ ++c
+ if (p.next) this.head = p.next
+ else this.head = this.tail = null
+ } else {
+ ret += StringPrototypeSlice(str, 0, n)
+ this.head = p
+ p.data = StringPrototypeSlice(str, n)
+ }
+ break
+ }
+ ++c
+ } while ((p = p.next) !== null)
+ this.length -= c
+ return ret
+ }
+
+ // Consumes a specified amount of bytes from the buffered data.
+ _getBuffer(n) {
+ const ret = Buffer.allocUnsafe(n)
+ const retLen = n
+ let p = this.head
+ let c = 0
+ do {
+ const buf = p.data
+ if (n > buf.length) {
+ TypedArrayPrototypeSet(ret, buf, retLen - n)
+ n -= buf.length
+ } else {
+ if (n === buf.length) {
+ TypedArrayPrototypeSet(ret, buf, retLen - n)
+ ++c
+ if (p.next) this.head = p.next
+ else this.head = this.tail = null
+ } else {
+ TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n)
+ this.head = p
+ p.data = buf.slice(n)
+ }
+ break
+ }
+ ++c
+ } while ((p = p.next) !== null)
+ this.length -= c
+ return ret
+ }
+
+ // Make sure the linked list only shows the minimal necessary information.
+ [Symbol.for('nodejs.util.inspect.custom')](_, options) {
+ return inspect(this, {
+ ...options,
+ // Only inspect one level.
+ depth: 0,
+ // It should not recurse.
+ customInspect: false
+ })
+ }
+}
diff --git a/lib/internal/streams/compose.js b/lib/internal/streams/compose.js
new file mode 100644
index 0000000000..b399d540f3
--- /dev/null
+++ b/lib/internal/streams/compose.js
@@ -0,0 +1,194 @@
+'use strict'
+
+const { pipeline } = require('./pipeline')
+const Duplex = require('./duplex')
+const { destroyer } = require('./destroy')
+const {
+ isNodeStream,
+ isReadable,
+ isWritable,
+ isWebStream,
+ isTransformStream,
+ isWritableStream,
+ isReadableStream
+} = require('./utils')
+const {
+ AbortError,
+ codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }
+} = require('../../ours/errors')
+const eos = require('./end-of-stream')
+module.exports = function compose(...streams) {
+ if (streams.length === 0) {
+ throw new ERR_MISSING_ARGS('streams')
+ }
+ if (streams.length === 1) {
+ return Duplex.from(streams[0])
+ }
+ const orgStreams = [...streams]
+ if (typeof streams[0] === 'function') {
+ streams[0] = Duplex.from(streams[0])
+ }
+ if (typeof streams[streams.length - 1] === 'function') {
+ const idx = streams.length - 1
+ streams[idx] = Duplex.from(streams[idx])
+ }
+ for (let n = 0; n < streams.length; ++n) {
+ if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) {
+ // TODO(ronag): Add checks for non streams.
+ continue
+ }
+ if (
+ n < streams.length - 1 &&
+ !(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n]))
+ ) {
+ throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable')
+ }
+ if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) {
+ throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable')
+ }
+ }
+ let ondrain
+ let onfinish
+ let onreadable
+ let onclose
+ let d
+ function onfinished(err) {
+ const cb = onclose
+ onclose = null
+ if (cb) {
+ cb(err)
+ } else if (err) {
+ d.destroy(err)
+ } else if (!readable && !writable) {
+ d.destroy()
+ }
+ }
+ const head = streams[0]
+ const tail = pipeline(streams, onfinished)
+ const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head))
+ const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail))
+
+ // TODO(ronag): Avoid double buffering.
+ // Implement Writable/Readable/Duplex traits.
+ // See, https://github.com/nodejs/node/pull/33515.
+ d = new Duplex({
+ // TODO (ronag): highWaterMark?
+ writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode),
+ readableObjectMode: !!(tail !== null && tail !== undefined && tail.readableObjectMode),
+ writable,
+ readable
+ })
+ if (writable) {
+ if (isNodeStream(head)) {
+ d._write = function (chunk, encoding, callback) {
+ if (head.write(chunk, encoding)) {
+ callback()
+ } else {
+ ondrain = callback
+ }
+ }
+ d._final = function (callback) {
+ head.end()
+ onfinish = callback
+ }
+ head.on('drain', function () {
+ if (ondrain) {
+ const cb = ondrain
+ ondrain = null
+ cb()
+ }
+ })
+ } else if (isWebStream(head)) {
+ const writable = isTransformStream(head) ? head.writable : head
+ const writer = writable.getWriter()
+ d._write = async function (chunk, encoding, callback) {
+ try {
+ await writer.ready
+ writer.write(chunk).catch(() => {})
+ callback()
+ } catch (err) {
+ callback(err)
+ }
+ }
+ d._final = async function (callback) {
+ try {
+ await writer.ready
+ writer.close().catch(() => {})
+ onfinish = callback
+ } catch (err) {
+ callback(err)
+ }
+ }
+ }
+ const toRead = isTransformStream(tail) ? tail.readable : tail
+ eos(toRead, () => {
+ if (onfinish) {
+ const cb = onfinish
+ onfinish = null
+ cb()
+ }
+ })
+ }
+ if (readable) {
+ if (isNodeStream(tail)) {
+ tail.on('readable', function () {
+ if (onreadable) {
+ const cb = onreadable
+ onreadable = null
+ cb()
+ }
+ })
+ tail.on('end', function () {
+ d.push(null)
+ })
+ d._read = function () {
+ while (true) {
+ const buf = tail.read()
+ if (buf === null) {
+ onreadable = d._read
+ return
+ }
+ if (!d.push(buf)) {
+ return
+ }
+ }
+ }
+ } else if (isWebStream(tail)) {
+ const readable = isTransformStream(tail) ? tail.readable : tail
+ const reader = readable.getReader()
+ d._read = async function () {
+ while (true) {
+ try {
+ const { value, done } = await reader.read()
+ if (!d.push(value)) {
+ return
+ }
+ if (done) {
+ d.push(null)
+ return
+ }
+ } catch {
+ return
+ }
+ }
+ }
+ }
+ }
+ d._destroy = function (err, callback) {
+ if (!err && onclose !== null) {
+ err = new AbortError()
+ }
+ onreadable = null
+ ondrain = null
+ onfinish = null
+ if (onclose === null) {
+ callback(err)
+ } else {
+ onclose = callback
+ if (isNodeStream(tail)) {
+ destroyer(tail, err)
+ }
+ }
+ }
+ return d
+}
diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js
new file mode 100644
index 0000000000..38292315ee
--- /dev/null
+++ b/lib/internal/streams/destroy.js
@@ -0,0 +1,290 @@
+'use strict'
+
+/* replacement start */
+
+const process = require('process/')
+
+/* replacement end */
+
+const {
+ aggregateTwoErrors,
+ codes: { ERR_MULTIPLE_CALLBACK },
+ AbortError
+} = require('../../ours/errors')
+const { Symbol } = require('../../ours/primordials')
+const { kIsDestroyed, isDestroyed, isFinished, isServerRequest } = require('./utils')
+const kDestroy = Symbol('kDestroy')
+const kConstruct = Symbol('kConstruct')
+function checkError(err, w, r) {
+ if (err) {
+ // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
+ err.stack // eslint-disable-line no-unused-expressions
+
+ if (w && !w.errored) {
+ w.errored = err
+ }
+ if (r && !r.errored) {
+ r.errored = err
+ }
+ }
+}
+
+// Backwards compat. cb() is undocumented and unused in core but
+// unfortunately might be used by modules.
+function destroy(err, cb) {
+ const r = this._readableState
+ const w = this._writableState
+ // With duplex streams we use the writable side for state.
+ const s = w || r
+ if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {
+ if (typeof cb === 'function') {
+ cb()
+ }
+ return this
+ }
+
+ // We set destroyed to true before firing error callbacks in order
+ // to make it re-entrance safe in case destroy() is called within callbacks
+ checkError(err, w, r)
+ if (w) {
+ w.destroyed = true
+ }
+ if (r) {
+ r.destroyed = true
+ }
+
+ // If still constructing then defer calling _destroy.
+ if (!s.constructed) {
+ this.once(kDestroy, function (er) {
+ _destroy(this, aggregateTwoErrors(er, err), cb)
+ })
+ } else {
+ _destroy(this, err, cb)
+ }
+ return this
+}
+function _destroy(self, err, cb) {
+ let called = false
+ function onDestroy(err) {
+ if (called) {
+ return
+ }
+ called = true
+ const r = self._readableState
+ const w = self._writableState
+ checkError(err, w, r)
+ if (w) {
+ w.closed = true
+ }
+ if (r) {
+ r.closed = true
+ }
+ if (typeof cb === 'function') {
+ cb(err)
+ }
+ if (err) {
+ process.nextTick(emitErrorCloseNT, self, err)
+ } else {
+ process.nextTick(emitCloseNT, self)
+ }
+ }
+ try {
+ self._destroy(err || null, onDestroy)
+ } catch (err) {
+ onDestroy(err)
+ }
+}
+function emitErrorCloseNT(self, err) {
+ emitErrorNT(self, err)
+ emitCloseNT(self)
+}
+function emitCloseNT(self) {
+ const r = self._readableState
+ const w = self._writableState
+ if (w) {
+ w.closeEmitted = true
+ }
+ if (r) {
+ r.closeEmitted = true
+ }
+ if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) {
+ self.emit('close')
+ }
+}
+function emitErrorNT(self, err) {
+ const r = self._readableState
+ const w = self._writableState
+ if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) {
+ return
+ }
+ if (w) {
+ w.errorEmitted = true
+ }
+ if (r) {
+ r.errorEmitted = true
+ }
+ self.emit('error', err)
+}
+function undestroy() {
+ const r = this._readableState
+ const w = this._writableState
+ if (r) {
+ r.constructed = true
+ r.closed = false
+ r.closeEmitted = false
+ r.destroyed = false
+ r.errored = null
+ r.errorEmitted = false
+ r.reading = false
+ r.ended = r.readable === false
+ r.endEmitted = r.readable === false
+ }
+ if (w) {
+ w.constructed = true
+ w.destroyed = false
+ w.closed = false
+ w.closeEmitted = false
+ w.errored = null
+ w.errorEmitted = false
+ w.finalCalled = false
+ w.prefinished = false
+ w.ended = w.writable === false
+ w.ending = w.writable === false
+ w.finished = w.writable === false
+ }
+}
+function errorOrDestroy(stream, err, sync) {
+ // We have tests that rely on errors being emitted
+ // in the same tick, so changing this is semver major.
+ // For now when you opt-in to autoDestroy we allow
+ // the error to be emitted nextTick. In a future
+ // semver major update we should change the default to this.
+
+ const r = stream._readableState
+ const w = stream._writableState
+ if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {
+ return this
+ }
+ if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy))
+ stream.destroy(err)
+ else if (err) {
+ // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
+ err.stack // eslint-disable-line no-unused-expressions
+
+ if (w && !w.errored) {
+ w.errored = err
+ }
+ if (r && !r.errored) {
+ r.errored = err
+ }
+ if (sync) {
+ process.nextTick(emitErrorNT, stream, err)
+ } else {
+ emitErrorNT(stream, err)
+ }
+ }
+}
+function construct(stream, cb) {
+ if (typeof stream._construct !== 'function') {
+ return
+ }
+ const r = stream._readableState
+ const w = stream._writableState
+ if (r) {
+ r.constructed = false
+ }
+ if (w) {
+ w.constructed = false
+ }
+ stream.once(kConstruct, cb)
+ if (stream.listenerCount(kConstruct) > 1) {
+ // Duplex
+ return
+ }
+ process.nextTick(constructNT, stream)
+}
+function constructNT(stream) {
+ let called = false
+ function onConstruct(err) {
+ if (called) {
+ errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK())
+ return
+ }
+ called = true
+ const r = stream._readableState
+ const w = stream._writableState
+ const s = w || r
+ if (r) {
+ r.constructed = true
+ }
+ if (w) {
+ w.constructed = true
+ }
+ if (s.destroyed) {
+ stream.emit(kDestroy, err)
+ } else if (err) {
+ errorOrDestroy(stream, err, true)
+ } else {
+ process.nextTick(emitConstructNT, stream)
+ }
+ }
+ try {
+ stream._construct((err) => {
+ process.nextTick(onConstruct, err)
+ })
+ } catch (err) {
+ process.nextTick(onConstruct, err)
+ }
+}
+function emitConstructNT(stream) {
+ stream.emit(kConstruct)
+}
+function isRequest(stream) {
+ return (stream === null || stream === undefined ? undefined : stream.setHeader) && typeof stream.abort === 'function'
+}
+function emitCloseLegacy(stream) {
+ stream.emit('close')
+}
+function emitErrorCloseLegacy(stream, err) {
+ stream.emit('error', err)
+ process.nextTick(emitCloseLegacy, stream)
+}
+
+// Normalize destroy for legacy.
+function destroyer(stream, err) {
+ if (!stream || isDestroyed(stream)) {
+ return
+ }
+ if (!err && !isFinished(stream)) {
+ err = new AbortError()
+ }
+
+ // TODO: Remove isRequest branches.
+ if (isServerRequest(stream)) {
+ stream.socket = null
+ stream.destroy(err)
+ } else if (isRequest(stream)) {
+ stream.abort()
+ } else if (isRequest(stream.req)) {
+ stream.req.abort()
+ } else if (typeof stream.destroy === 'function') {
+ stream.destroy(err)
+ } else if (typeof stream.close === 'function') {
+ // TODO: Don't lose err?
+ stream.close()
+ } else if (err) {
+ process.nextTick(emitErrorCloseLegacy, stream, err)
+ } else {
+ process.nextTick(emitCloseLegacy, stream)
+ }
+ if (!stream.destroyed) {
+ stream[kIsDestroyed] = true
+ }
+}
+module.exports = {
+ construct,
+ destroyer,
+ destroy,
+ undestroy,
+ errorOrDestroy
+}
diff --git a/lib/internal/streams/duplex.js b/lib/internal/streams/duplex.js
new file mode 100644
index 0000000000..dd08396738
--- /dev/null
+++ b/lib/internal/streams/duplex.js
@@ -0,0 +1,143 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a duplex stream is just a stream that is both readable and writable.
+// Since JS doesn't have multiple prototype inheritance, this class
+// prototypically inherits from Readable, and then parasitically from
+// Writable.
+
+'use strict'
+
+const {
+ ObjectDefineProperties,
+ ObjectGetOwnPropertyDescriptor,
+ ObjectKeys,
+ ObjectSetPrototypeOf
+} = require('../../ours/primordials')
+module.exports = Duplex
+const Readable = require('./readable')
+const Writable = require('./writable')
+ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype)
+ObjectSetPrototypeOf(Duplex, Readable)
+{
+ const keys = ObjectKeys(Writable.prototype)
+ // Allow the keys array to be GC'ed.
+ for (let i = 0; i < keys.length; i++) {
+ const method = keys[i]
+ if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]
+ }
+}
+function Duplex(options) {
+ if (!(this instanceof Duplex)) return new Duplex(options)
+ Readable.call(this, options)
+ Writable.call(this, options)
+ if (options) {
+ this.allowHalfOpen = options.allowHalfOpen !== false
+ if (options.readable === false) {
+ this._readableState.readable = false
+ this._readableState.ended = true
+ this._readableState.endEmitted = true
+ }
+ if (options.writable === false) {
+ this._writableState.writable = false
+ this._writableState.ending = true
+ this._writableState.ended = true
+ this._writableState.finished = true
+ }
+ } else {
+ this.allowHalfOpen = true
+ }
+}
+ObjectDefineProperties(Duplex.prototype, {
+ writable: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable')
+ },
+ writableHighWaterMark: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark')
+ },
+ writableObjectMode: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode')
+ },
+ writableBuffer: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer')
+ },
+ writableLength: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength')
+ },
+ writableFinished: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished')
+ },
+ writableCorked: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked')
+ },
+ writableEnded: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded')
+ },
+ writableNeedDrain: {
+ __proto__: null,
+ ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain')
+ },
+ destroyed: {
+ __proto__: null,
+ get() {
+ if (this._readableState === undefined || this._writableState === undefined) {
+ return false
+ }
+ return this._readableState.destroyed && this._writableState.destroyed
+ },
+ set(value) {
+ // Backward compatibility, the user is explicitly
+ // managing destroyed.
+ if (this._readableState && this._writableState) {
+ this._readableState.destroyed = value
+ this._writableState.destroyed = value
+ }
+ }
+ }
+})
+let webStreamsAdapters
+
+// Lazy to avoid circular references
+function lazyWebStreams() {
+ if (webStreamsAdapters === undefined) webStreamsAdapters = {}
+ return webStreamsAdapters
+}
+Duplex.fromWeb = function (pair, options) {
+ return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options)
+}
+Duplex.toWeb = function (duplex) {
+ return lazyWebStreams().newReadableWritablePairFromDuplex(duplex)
+}
+let duplexify
+Duplex.from = function (body) {
+ if (!duplexify) {
+ duplexify = require('./duplexify')
+ }
+ return duplexify(body, 'body')
+}
diff --git a/lib/internal/streams/duplexify.js b/lib/internal/streams/duplexify.js
new file mode 100644
index 0000000000..05740d70ff
--- /dev/null
+++ b/lib/internal/streams/duplexify.js
@@ -0,0 +1,378 @@
+/* replacement start */
+
+const process = require('process/')
+
+/* replacement end */
+
+;('use strict')
+const bufferModule = require('buffer')
+const {
+ isReadable,
+ isWritable,
+ isIterable,
+ isNodeStream,
+ isReadableNodeStream,
+ isWritableNodeStream,
+ isDuplexNodeStream,
+ isReadableStream,
+ isWritableStream
+} = require('./utils')
+const eos = require('./end-of-stream')
+const {
+ AbortError,
+ codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE }
+} = require('../../ours/errors')
+const { destroyer } = require('./destroy')
+const Duplex = require('./duplex')
+const Readable = require('./readable')
+const Writable = require('./writable')
+const { createDeferredPromise } = require('../../ours/util')
+const from = require('./from')
+const Blob = globalThis.Blob || bufferModule.Blob
+const isBlob =
+ typeof Blob !== 'undefined'
+ ? function isBlob(b) {
+ return b instanceof Blob
+ }
+ : function isBlob(b) {
+ return false
+ }
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const { FunctionPrototypeCall } = require('../../ours/primordials')
+
+// This is needed for pre node 17.
+class Duplexify extends Duplex {
+ constructor(options) {
+ super(options)
+
+ // https://github.com/nodejs/node/pull/34385
+
+ if ((options === null || options === undefined ? undefined : options.readable) === false) {
+ this._readableState.readable = false
+ this._readableState.ended = true
+ this._readableState.endEmitted = true
+ }
+ if ((options === null || options === undefined ? undefined : options.writable) === false) {
+ this._writableState.writable = false
+ this._writableState.ending = true
+ this._writableState.ended = true
+ this._writableState.finished = true
+ }
+ }
+}
+module.exports = function duplexify(body, name) {
+ if (isDuplexNodeStream(body)) {
+ return body
+ }
+ if (isReadableNodeStream(body)) {
+ return _duplexify({
+ readable: body
+ })
+ }
+ if (isWritableNodeStream(body)) {
+ return _duplexify({
+ writable: body
+ })
+ }
+ if (isNodeStream(body)) {
+ return _duplexify({
+ writable: false,
+ readable: false
+ })
+ }
+ if (isReadableStream(body)) {
+ return _duplexify({
+ readable: Readable.fromWeb(body)
+ })
+ }
+ if (isWritableStream(body)) {
+ return _duplexify({
+ writable: Writable.fromWeb(body)
+ })
+ }
+ if (typeof body === 'function') {
+ const { value, write, final, destroy } = fromAsyncGen(body)
+ if (isIterable(value)) {
+ return from(Duplexify, value, {
+ // TODO (ronag): highWaterMark?
+ objectMode: true,
+ write,
+ final,
+ destroy
+ })
+ }
+ const then = value === null || value === undefined ? undefined : value.then
+ if (typeof then === 'function') {
+ let d
+ const promise = FunctionPrototypeCall(
+ then,
+ value,
+ (val) => {
+ if (val != null) {
+ throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val)
+ }
+ },
+ (err) => {
+ destroyer(d, err)
+ }
+ )
+ return (d = new Duplexify({
+ // TODO (ronag): highWaterMark?
+ objectMode: true,
+ readable: false,
+ write,
+ final(cb) {
+ final(async () => {
+ try {
+ await promise
+ process.nextTick(cb, null)
+ } catch (err) {
+ process.nextTick(cb, err)
+ }
+ })
+ },
+ destroy
+ }))
+ }
+ throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value)
+ }
+ if (isBlob(body)) {
+ return duplexify(body.arrayBuffer())
+ }
+ if (isIterable(body)) {
+ return from(Duplexify, body, {
+ // TODO (ronag): highWaterMark?
+ objectMode: true,
+ writable: false
+ })
+ }
+ if (
+ isReadableStream(body === null || body === undefined ? undefined : body.readable) &&
+ isWritableStream(body === null || body === undefined ? undefined : body.writable)
+ ) {
+ return Duplexify.fromWeb(body)
+ }
+ if (
+ typeof (body === null || body === undefined ? undefined : body.writable) === 'object' ||
+ typeof (body === null || body === undefined ? undefined : body.readable) === 'object'
+ ) {
+ const readable =
+ body !== null && body !== undefined && body.readable
+ ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable)
+ ? body === null || body === undefined
+ ? undefined
+ : body.readable
+ : duplexify(body.readable)
+ : undefined
+ const writable =
+ body !== null && body !== undefined && body.writable
+ ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable)
+ ? body === null || body === undefined
+ ? undefined
+ : body.writable
+ : duplexify(body.writable)
+ : undefined
+ return _duplexify({
+ readable,
+ writable
+ })
+ }
+ const then = body === null || body === undefined ? undefined : body.then
+ if (typeof then === 'function') {
+ let d
+ FunctionPrototypeCall(
+ then,
+ body,
+ (val) => {
+ if (val != null) {
+ d.push(val)
+ }
+ d.push(null)
+ },
+ (err) => {
+ destroyer(d, err)
+ }
+ )
+ return (d = new Duplexify({
+ objectMode: true,
+ writable: false,
+ read() {}
+ }))
+ }
+ throw new ERR_INVALID_ARG_TYPE(
+ name,
+ [
+ 'Blob',
+ 'ReadableStream',
+ 'WritableStream',
+ 'Stream',
+ 'Iterable',
+ 'AsyncIterable',
+ 'Function',
+ '{ readable, writable } pair',
+ 'Promise'
+ ],
+ body
+ )
+}
+function fromAsyncGen(fn) {
+ let { promise, resolve } = createDeferredPromise()
+ const ac = new AbortController()
+ const signal = ac.signal
+ const value = fn(
+ (async function* () {
+ while (true) {
+ const _promise = promise
+ promise = null
+ const { chunk, done, cb } = await _promise
+ process.nextTick(cb)
+ if (done) return
+ if (signal.aborted)
+ throw new AbortError(undefined, {
+ cause: signal.reason
+ })
+ ;({ promise, resolve } = createDeferredPromise())
+ yield chunk
+ }
+ })(),
+ {
+ signal
+ }
+ )
+ return {
+ value,
+ write(chunk, encoding, cb) {
+ const _resolve = resolve
+ resolve = null
+ _resolve({
+ chunk,
+ done: false,
+ cb
+ })
+ },
+ final(cb) {
+ const _resolve = resolve
+ resolve = null
+ _resolve({
+ done: true,
+ cb
+ })
+ },
+ destroy(err, cb) {
+ ac.abort()
+ cb(err)
+ }
+ }
+}
+function _duplexify(pair) {
+ const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable
+ const w = pair.writable
+ let readable = !!isReadable(r)
+ let writable = !!isWritable(w)
+ let ondrain
+ let onfinish
+ let onreadable
+ let onclose
+ let d
+ function onfinished(err) {
+ const cb = onclose
+ onclose = null
+ if (cb) {
+ cb(err)
+ } else if (err) {
+ d.destroy(err)
+ }
+ }
+
+ // TODO(ronag): Avoid double buffering.
+ // Implement Writable/Readable/Duplex traits.
+ // See, https://github.com/nodejs/node/pull/33515.
+ d = new Duplexify({
+ // TODO (ronag): highWaterMark?
+ readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode),
+ writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode),
+ readable,
+ writable
+ })
+ if (writable) {
+ eos(w, (err) => {
+ writable = false
+ if (err) {
+ destroyer(r, err)
+ }
+ onfinished(err)
+ })
+ d._write = function (chunk, encoding, callback) {
+ if (w.write(chunk, encoding)) {
+ callback()
+ } else {
+ ondrain = callback
+ }
+ }
+ d._final = function (callback) {
+ w.end()
+ onfinish = callback
+ }
+ w.on('drain', function () {
+ if (ondrain) {
+ const cb = ondrain
+ ondrain = null
+ cb()
+ }
+ })
+ w.on('finish', function () {
+ if (onfinish) {
+ const cb = onfinish
+ onfinish = null
+ cb()
+ }
+ })
+ }
+ if (readable) {
+ eos(r, (err) => {
+ readable = false
+ if (err) {
+ destroyer(r, err)
+ }
+ onfinished(err)
+ })
+ r.on('readable', function () {
+ if (onreadable) {
+ const cb = onreadable
+ onreadable = null
+ cb()
+ }
+ })
+ r.on('end', function () {
+ d.push(null)
+ })
+ d._read = function () {
+ while (true) {
+ const buf = r.read()
+ if (buf === null) {
+ onreadable = d._read
+ return
+ }
+ if (!d.push(buf)) {
+ return
+ }
+ }
+ }
+ }
+ d._destroy = function (err, callback) {
+ if (!err && onclose !== null) {
+ err = new AbortError()
+ }
+ onreadable = null
+ ondrain = null
+ onfinish = null
+ if (onclose === null) {
+ callback(err)
+ } else {
+ onclose = callback
+ destroyer(w, err)
+ destroyer(r, err)
+ }
+ }
+ return d
+}
diff --git a/lib/internal/streams/end-of-stream.js b/lib/internal/streams/end-of-stream.js
new file mode 100644
index 0000000000..94d18321d2
--- /dev/null
+++ b/lib/internal/streams/end-of-stream.js
@@ -0,0 +1,286 @@
+// Ported from https://github.com/mafintosh/end-of-stream with
+// permission from the author, Mathias Buus (@mafintosh).
+
+'use strict'
+
+/* replacement start */
+
+const process = require('process/')
+
+/* replacement end */
+
+const { AbortError, codes } = require('../../ours/errors')
+const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes
+const { kEmptyObject, once } = require('../../ours/util')
+const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = require('../validators')
+const { Promise, PromisePrototypeThen, SymbolDispose } = require('../../ours/primordials')
+const {
+ isClosed,
+ isReadable,
+ isReadableNodeStream,
+ isReadableStream,
+ isReadableFinished,
+ isReadableErrored,
+ isWritable,
+ isWritableNodeStream,
+ isWritableStream,
+ isWritableFinished,
+ isWritableErrored,
+ isNodeStream,
+ willEmitClose: _willEmitClose,
+ kIsClosedPromise
+} = require('./utils')
+let addAbortListener
+function isRequest(stream) {
+ return stream.setHeader && typeof stream.abort === 'function'
+}
+const nop = () => {}
+function eos(stream, options, callback) {
+ var _options$readable, _options$writable
+ if (arguments.length === 2) {
+ callback = options
+ options = kEmptyObject
+ } else if (options == null) {
+ options = kEmptyObject
+ } else {
+ validateObject(options, 'options')
+ }
+ validateFunction(callback, 'callback')
+ validateAbortSignal(options.signal, 'options.signal')
+ callback = once(callback)
+ if (isReadableStream(stream) || isWritableStream(stream)) {
+ return eosWeb(stream, options, callback)
+ }
+ if (!isNodeStream(stream)) {
+ throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)
+ }
+ const readable =
+ (_options$readable = options.readable) !== null && _options$readable !== undefined
+ ? _options$readable
+ : isReadableNodeStream(stream)
+ const writable =
+ (_options$writable = options.writable) !== null && _options$writable !== undefined
+ ? _options$writable
+ : isWritableNodeStream(stream)
+ const wState = stream._writableState
+ const rState = stream._readableState
+ const onlegacyfinish = () => {
+ if (!stream.writable) {
+ onfinish()
+ }
+ }
+
+ // TODO (ronag): Improve soft detection to include core modules and
+ // common ecosystem modules that do properly emit 'close' but fail
+ // this generic check.
+ let willEmitClose =
+ _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable
+ let writableFinished = isWritableFinished(stream, false)
+ const onfinish = () => {
+ writableFinished = true
+ // Stream should not be destroyed here. If it is that
+ // means that user space is doing something differently and
+ // we cannot trust willEmitClose.
+ if (stream.destroyed) {
+ willEmitClose = false
+ }
+ if (willEmitClose && (!stream.readable || readable)) {
+ return
+ }
+ if (!readable || readableFinished) {
+ callback.call(stream)
+ }
+ }
+ let readableFinished = isReadableFinished(stream, false)
+ const onend = () => {
+ readableFinished = true
+ // Stream should not be destroyed here. If it is that
+ // means that user space is doing something differently and
+ // we cannot trust willEmitClose.
+ if (stream.destroyed) {
+ willEmitClose = false
+ }
+ if (willEmitClose && (!stream.writable || writable)) {
+ return
+ }
+ if (!writable || writableFinished) {
+ callback.call(stream)
+ }
+ }
+ const onerror = (err) => {
+ callback.call(stream, err)
+ }
+ let closed = isClosed(stream)
+ const onclose = () => {
+ closed = true
+ const errored = isWritableErrored(stream) || isReadableErrored(stream)
+ if (errored && typeof errored !== 'boolean') {
+ return callback.call(stream, errored)
+ }
+ if (readable && !readableFinished && isReadableNodeStream(stream, true)) {
+ if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
+ }
+ if (writable && !writableFinished) {
+ if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
+ }
+ callback.call(stream)
+ }
+ const onclosed = () => {
+ closed = true
+ const errored = isWritableErrored(stream) || isReadableErrored(stream)
+ if (errored && typeof errored !== 'boolean') {
+ return callback.call(stream, errored)
+ }
+ callback.call(stream)
+ }
+ const onrequest = () => {
+ stream.req.on('finish', onfinish)
+ }
+ if (isRequest(stream)) {
+ stream.on('complete', onfinish)
+ if (!willEmitClose) {
+ stream.on('abort', onclose)
+ }
+ if (stream.req) {
+ onrequest()
+ } else {
+ stream.on('request', onrequest)
+ }
+ } else if (writable && !wState) {
+ // legacy streams
+ stream.on('end', onlegacyfinish)
+ stream.on('close', onlegacyfinish)
+ }
+
+ // Not all streams will emit 'close' after 'aborted'.
+ if (!willEmitClose && typeof stream.aborted === 'boolean') {
+ stream.on('aborted', onclose)
+ }
+ stream.on('end', onend)
+ stream.on('finish', onfinish)
+ if (options.error !== false) {
+ stream.on('error', onerror)
+ }
+ stream.on('close', onclose)
+ if (closed) {
+ process.nextTick(onclose)
+ } else if (
+ (wState !== null && wState !== undefined && wState.errorEmitted) ||
+ (rState !== null && rState !== undefined && rState.errorEmitted)
+ ) {
+ if (!willEmitClose) {
+ process.nextTick(onclosed)
+ }
+ } else if (
+ !readable &&
+ (!willEmitClose || isReadable(stream)) &&
+ (writableFinished || isWritable(stream) === false)
+ ) {
+ process.nextTick(onclosed)
+ } else if (
+ !writable &&
+ (!willEmitClose || isWritable(stream)) &&
+ (readableFinished || isReadable(stream) === false)
+ ) {
+ process.nextTick(onclosed)
+ } else if (rState && stream.req && stream.aborted) {
+ process.nextTick(onclosed)
+ }
+ const cleanup = () => {
+ callback = nop
+ stream.removeListener('aborted', onclose)
+ stream.removeListener('complete', onfinish)
+ stream.removeListener('abort', onclose)
+ stream.removeListener('request', onrequest)
+ if (stream.req) stream.req.removeListener('finish', onfinish)
+ stream.removeListener('end', onlegacyfinish)
+ stream.removeListener('close', onlegacyfinish)
+ stream.removeListener('finish', onfinish)
+ stream.removeListener('end', onend)
+ stream.removeListener('error', onerror)
+ stream.removeListener('close', onclose)
+ }
+ if (options.signal && !closed) {
+ const abort = () => {
+ // Keep it because cleanup removes it.
+ const endCallback = callback
+ cleanup()
+ endCallback.call(
+ stream,
+ new AbortError(undefined, {
+ cause: options.signal.reason
+ })
+ )
+ }
+ if (options.signal.aborted) {
+ process.nextTick(abort)
+ } else {
+ addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
+ const disposable = addAbortListener(options.signal, abort)
+ const originalCallback = callback
+ callback = once((...args) => {
+ disposable[SymbolDispose]()
+ originalCallback.apply(stream, args)
+ })
+ }
+ }
+ return cleanup
+}
+function eosWeb(stream, options, callback) {
+ let isAborted = false
+ let abort = nop
+ if (options.signal) {
+ abort = () => {
+ isAborted = true
+ callback.call(
+ stream,
+ new AbortError(undefined, {
+ cause: options.signal.reason
+ })
+ )
+ }
+ if (options.signal.aborted) {
+ process.nextTick(abort)
+ } else {
+ addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
+ const disposable = addAbortListener(options.signal, abort)
+ const originalCallback = callback
+ callback = once((...args) => {
+ disposable[SymbolDispose]()
+ originalCallback.apply(stream, args)
+ })
+ }
+ }
+ const resolverFn = (...args) => {
+ if (!isAborted) {
+ process.nextTick(() => callback.apply(stream, args))
+ }
+ }
+ PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn)
+ return nop
+}
+function finished(stream, opts) {
+ var _opts
+ let autoCleanup = false
+ if (opts === null) {
+ opts = kEmptyObject
+ }
+ if ((_opts = opts) !== null && _opts !== undefined && _opts.cleanup) {
+ validateBoolean(opts.cleanup, 'cleanup')
+ autoCleanup = opts.cleanup
+ }
+ return new Promise((resolve, reject) => {
+ const cleanup = eos(stream, opts, (err) => {
+ if (autoCleanup) {
+ cleanup()
+ }
+ if (err) {
+ reject(err)
+ } else {
+ resolve()
+ }
+ })
+ })
+}
+module.exports = eos
+module.exports.finished = finished
diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js
new file mode 100644
index 0000000000..c7e7531402
--- /dev/null
+++ b/lib/internal/streams/from.js
@@ -0,0 +1,98 @@
+'use strict'
+
+/* replacement start */
+
+const process = require('process/')
+
+/* replacement end */
+
+const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials')
+const { Buffer } = require('buffer')
+const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require('../../ours/errors').codes
+function from(Readable, iterable, opts) {
+ let iterator
+ if (typeof iterable === 'string' || iterable instanceof Buffer) {
+ return new Readable({
+ objectMode: true,
+ ...opts,
+ read() {
+ this.push(iterable)
+ this.push(null)
+ }
+ })
+ }
+ let isAsync
+ if (iterable && iterable[SymbolAsyncIterator]) {
+ isAsync = true
+ iterator = iterable[SymbolAsyncIterator]()
+ } else if (iterable && iterable[SymbolIterator]) {
+ isAsync = false
+ iterator = iterable[SymbolIterator]()
+ } else {
+ throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable)
+ }
+ const readable = new Readable({
+ objectMode: true,
+ highWaterMark: 1,
+ // TODO(ronag): What options should be allowed?
+ ...opts
+ })
+
+ // Flag to protect against _read
+ // being called before last iteration completion.
+ let reading = false
+ readable._read = function () {
+ if (!reading) {
+ reading = true
+ next()
+ }
+ }
+ readable._destroy = function (error, cb) {
+ PromisePrototypeThen(
+ close(error),
+ () => process.nextTick(cb, error),
+ // nextTick is here in case cb throws
+ (e) => process.nextTick(cb, e || error)
+ )
+ }
+ async function close(error) {
+ const hadError = error !== undefined && error !== null
+ const hasThrow = typeof iterator.throw === 'function'
+ if (hadError && hasThrow) {
+ const { value, done } = await iterator.throw(error)
+ await value
+ if (done) {
+ return
+ }
+ }
+ if (typeof iterator.return === 'function') {
+ const { value } = await iterator.return()
+ await value
+ }
+ }
+ async function next() {
+ for (;;) {
+ try {
+ const { value, done } = isAsync ? await iterator.next() : iterator.next()
+ if (done) {
+ readable.push(null)
+ } else {
+ const res = value && typeof value.then === 'function' ? await value : value
+ if (res === null) {
+ reading = false
+ throw new ERR_STREAM_NULL_VALUES()
+ } else if (readable.push(res)) {
+ continue
+ } else {
+ reading = false
+ }
+ }
+ } catch (err) {
+ readable.destroy(err)
+ }
+ break
+ }
+ }
+ return readable
+}
+module.exports = from
diff --git a/lib/internal/streams/lazy_transform.js b/lib/internal/streams/lazy_transform.js
new file mode 100644
index 0000000000..439461a127
--- /dev/null
+++ b/lib/internal/streams/lazy_transform.js
@@ -0,0 +1,51 @@
+// LazyTransform is a special type of Transform stream that is lazily loaded.
+// This is used for performance with bi-API-ship: when two APIs are available
+// for the stream, one conventional and one non-conventional.
+'use strict'
+
+const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf } = require('../../ours/primordials')
+const stream = require('../../stream')
+const { getDefaultEncoding } = require('../crypto/util')
+module.exports = LazyTransform
+function LazyTransform(options) {
+ this._options = options
+}
+ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype)
+ObjectSetPrototypeOf(LazyTransform, stream.Transform)
+function makeGetter(name) {
+ return function () {
+ stream.Transform.call(this, this._options)
+ this._writableState.decodeStrings = false
+ if (!this._options || !this._options.defaultEncoding) {
+ this._writableState.defaultEncoding = getDefaultEncoding()
+ }
+ return this[name]
+ }
+}
+function makeSetter(name) {
+ return function (val) {
+ ObjectDefineProperty(this, name, {
+ __proto__: null,
+ value: val,
+ enumerable: true,
+ configurable: true,
+ writable: true
+ })
+ }
+}
+ObjectDefineProperties(LazyTransform.prototype, {
+ _readableState: {
+ __proto__: null,
+ get: makeGetter('_readableState'),
+ set: makeSetter('_readableState'),
+ configurable: true,
+ enumerable: true
+ },
+ _writableState: {
+ __proto__: null,
+ get: makeGetter('_writableState'),
+ set: makeSetter('_writableState'),
+ configurable: true,
+ enumerable: true
+ }
+})
diff --git a/lib/internal/streams/legacy.js b/lib/internal/streams/legacy.js
new file mode 100644
index 0000000000..d492f7ff4e
--- /dev/null
+++ b/lib/internal/streams/legacy.js
@@ -0,0 +1,89 @@
+'use strict'
+
+const { ArrayIsArray, ObjectSetPrototypeOf } = require('../../ours/primordials')
+const { EventEmitter: EE } = require('events')
+function Stream(opts) {
+ EE.call(this, opts)
+}
+ObjectSetPrototypeOf(Stream.prototype, EE.prototype)
+ObjectSetPrototypeOf(Stream, EE)
+Stream.prototype.pipe = function (dest, options) {
+ const source = this
+ function ondata(chunk) {
+ if (dest.writable && dest.write(chunk) === false && source.pause) {
+ source.pause()
+ }
+ }
+ source.on('data', ondata)
+ function ondrain() {
+ if (source.readable && source.resume) {
+ source.resume()
+ }
+ }
+ dest.on('drain', ondrain)
+
+ // If the 'end' option is not supplied, dest.end() will be called when
+ // source gets the 'end' or 'close' events. Only dest.end() once.
+ if (!dest._isStdio && (!options || options.end !== false)) {
+ source.on('end', onend)
+ source.on('close', onclose)
+ }
+ let didOnEnd = false
+ function onend() {
+ if (didOnEnd) return
+ didOnEnd = true
+ dest.end()
+ }
+ function onclose() {
+ if (didOnEnd) return
+ didOnEnd = true
+ if (typeof dest.destroy === 'function') dest.destroy()
+ }
+
+ // Don't leave dangling pipes when there are errors.
+ function onerror(er) {
+ cleanup()
+ if (EE.listenerCount(this, 'error') === 0) {
+ this.emit('error', er)
+ }
+ }
+ prependListener(source, 'error', onerror)
+ prependListener(dest, 'error', onerror)
+
+ // Remove all the event listeners that were added.
+ function cleanup() {
+ source.removeListener('data', ondata)
+ dest.removeListener('drain', ondrain)
+ source.removeListener('end', onend)
+ source.removeListener('close', onclose)
+ source.removeListener('error', onerror)
+ dest.removeListener('error', onerror)
+ source.removeListener('end', cleanup)
+ source.removeListener('close', cleanup)
+ dest.removeListener('close', cleanup)
+ }
+ source.on('end', cleanup)
+ source.on('close', cleanup)
+ dest.on('close', cleanup)
+ dest.emit('pipe', source)
+
+ // Allow for unix-like usage: A.pipe(B).pipe(C)
+ return dest
+}
+function prependListener(emitter, event, fn) {
+ // Sadly this is not cacheable as some libraries bundle their own
+ // event emitter implementation with them.
+ if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn)
+
+ // This is a hack to make sure that our error handler is attached before any
+ // userland ones. NEVER DO THIS. This is here only because this code needs
+ // to continue to work with older versions of Node.js that do not include
+ // the prependListener() method. The goal is to eventually remove this hack.
+ if (!emitter._events || !emitter._events[event]) emitter.on(event, fn)
+ else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn)
+ else emitter._events[event] = [fn, emitter._events[event]]
+}
+module.exports = {
+ Stream,
+ prependListener
+}
diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js
new file mode 100644
index 0000000000..7eff11047e
--- /dev/null
+++ b/lib/internal/streams/operators.js
@@ -0,0 +1,457 @@
+'use strict'
+
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const {
+ codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE },
+ AbortError
+} = require('../../ours/errors')
+const { validateAbortSignal, validateInteger, validateObject } = require('../validators')
+const kWeakHandler = require('../../ours/primordials').Symbol('kWeak')
+const kResistStopPropagation = require('../../ours/primordials').Symbol('kResistStopPropagation')
+const { finished } = require('./end-of-stream')
+const staticCompose = require('./compose')
+const { addAbortSignalNoValidate } = require('./add-abort-signal')
+const { isWritable, isNodeStream } = require('./utils')
+const { deprecate } = require('../../ours/util')
+const {
+ ArrayPrototypePush,
+ Boolean,
+ MathFloor,
+ Number,
+ NumberIsNaN,
+ Promise,
+ PromiseReject,
+ PromiseResolve,
+ PromisePrototypeThen,
+ Symbol
+} = require('../../ours/primordials')
+const kEmpty = Symbol('kEmpty')
+const kEof = Symbol('kEof')
+function compose(stream, options) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+ if (isNodeStream(stream) && !isWritable(stream)) {
+ throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable')
+ }
+ const composedStream = staticCompose(this, stream)
+ if (options !== null && options !== undefined && options.signal) {
+ // Not validating as we already validated before
+ addAbortSignalNoValidate(options.signal, composedStream)
+ }
+ return composedStream
+}
+function map(fn, options) {
+ if (typeof fn !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
+ }
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+ let concurrency = 1
+ if ((options === null || options === undefined ? undefined : options.concurrency) != null) {
+ concurrency = MathFloor(options.concurrency)
+ }
+ let highWaterMark = concurrency - 1
+ if ((options === null || options === undefined ? undefined : options.highWaterMark) != null) {
+ highWaterMark = MathFloor(options.highWaterMark)
+ }
+ validateInteger(concurrency, 'options.concurrency', 1)
+ validateInteger(highWaterMark, 'options.highWaterMark', 0)
+ highWaterMark += concurrency
+ return async function* map() {
+ const signal = require('../../ours/util').AbortSignalAny(
+ [options === null || options === undefined ? undefined : options.signal].filter(Boolean)
+ )
+ const stream = this
+ const queue = []
+ const signalOpt = {
+ signal
+ }
+ let next
+ let resume
+ let done = false
+ let cnt = 0
+ function onCatch() {
+ done = true
+ afterItemProcessed()
+ }
+ function afterItemProcessed() {
+ cnt -= 1
+ maybeResume()
+ }
+ function maybeResume() {
+ if (resume && !done && cnt < concurrency && queue.length < highWaterMark) {
+ resume()
+ resume = null
+ }
+ }
+ async function pump() {
+ try {
+ for await (let val of stream) {
+ if (done) {
+ return
+ }
+ if (signal.aborted) {
+ throw new AbortError()
+ }
+ try {
+ val = fn(val, signalOpt)
+ if (val === kEmpty) {
+ continue
+ }
+ val = PromiseResolve(val)
+ } catch (err) {
+ val = PromiseReject(err)
+ }
+ cnt += 1
+ PromisePrototypeThen(val, afterItemProcessed, onCatch)
+ queue.push(val)
+ if (next) {
+ next()
+ next = null
+ }
+ if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) {
+ await new Promise((resolve) => {
+ resume = resolve
+ })
+ }
+ }
+ queue.push(kEof)
+ } catch (err) {
+ const val = PromiseReject(err)
+ PromisePrototypeThen(val, afterItemProcessed, onCatch)
+ queue.push(val)
+ } finally {
+ done = true
+ if (next) {
+ next()
+ next = null
+ }
+ }
+ }
+ pump()
+ try {
+ while (true) {
+ while (queue.length > 0) {
+ const val = await queue[0]
+ if (val === kEof) {
+ return
+ }
+ if (signal.aborted) {
+ throw new AbortError()
+ }
+ if (val !== kEmpty) {
+ yield val
+ }
+ queue.shift()
+ maybeResume()
+ }
+ await new Promise((resolve) => {
+ next = resolve
+ })
+ }
+ } finally {
+ done = true
+ if (resume) {
+ resume()
+ resume = null
+ }
+ }
+ }.call(this)
+}
+function asIndexedPairs(options = undefined) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+ return async function* asIndexedPairs() {
+ let index = 0
+ for await (const val of this) {
+ var _options$signal
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal = options.signal) !== null &&
+ _options$signal !== undefined &&
+ _options$signal.aborted
+ ) {
+ throw new AbortError({
+ cause: options.signal.reason
+ })
+ }
+ yield [index++, val]
+ }
+ }.call(this)
+}
+async function some(fn, options = undefined) {
+ for await (const unused of filter.call(this, fn, options)) {
+ return true
+ }
+ return false
+}
+async function every(fn, options = undefined) {
+ if (typeof fn !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
+ }
+ // https://en.wikipedia.org/wiki/De_Morgan%27s_laws
+ return !(await some.call(
+ this,
+ async (...args) => {
+ return !(await fn(...args))
+ },
+ options
+ ))
+}
+async function find(fn, options) {
+ for await (const result of filter.call(this, fn, options)) {
+ return result
+ }
+ return undefined
+}
+async function forEach(fn, options) {
+ if (typeof fn !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
+ }
+ async function forEachFn(value, options) {
+ await fn(value, options)
+ return kEmpty
+ }
+ // eslint-disable-next-line no-unused-vars
+ for await (const unused of map.call(this, forEachFn, options));
+}
+function filter(fn, options) {
+ if (typeof fn !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
+ }
+ async function filterFn(value, options) {
+ if (await fn(value, options)) {
+ return value
+ }
+ return kEmpty
+ }
+ return map.call(this, filterFn, options)
+}
+
+// Specific to provide better error to reduce since the argument is only
+// missing if the stream has no items in it - but the code is still appropriate
+class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS {
+ constructor() {
+ super('reduce')
+ this.message = 'Reduce of an empty stream requires an initial value'
+ }
+}
+async function reduce(reducer, initialValue, options) {
+ var _options$signal2
+ if (typeof reducer !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer)
+ }
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+ let hasInitialValue = arguments.length > 1
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal2 = options.signal) !== null &&
+ _options$signal2 !== undefined &&
+ _options$signal2.aborted
+ ) {
+ const err = new AbortError(undefined, {
+ cause: options.signal.reason
+ })
+ this.once('error', () => {}) // The error is already propagated
+ await finished(this.destroy(err))
+ throw err
+ }
+ const ac = new AbortController()
+ const signal = ac.signal
+ if (options !== null && options !== undefined && options.signal) {
+ const opts = {
+ once: true,
+ [kWeakHandler]: this,
+ [kResistStopPropagation]: true
+ }
+ options.signal.addEventListener('abort', () => ac.abort(), opts)
+ }
+ let gotAnyItemFromStream = false
+ try {
+ for await (const value of this) {
+ var _options$signal3
+ gotAnyItemFromStream = true
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal3 = options.signal) !== null &&
+ _options$signal3 !== undefined &&
+ _options$signal3.aborted
+ ) {
+ throw new AbortError()
+ }
+ if (!hasInitialValue) {
+ initialValue = value
+ hasInitialValue = true
+ } else {
+ initialValue = await reducer(initialValue, value, {
+ signal
+ })
+ }
+ }
+ if (!gotAnyItemFromStream && !hasInitialValue) {
+ throw new ReduceAwareErrMissingArgs()
+ }
+ } finally {
+ ac.abort()
+ }
+ return initialValue
+}
+async function toArray(options) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+ const result = []
+ for await (const val of this) {
+ var _options$signal4
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal4 = options.signal) !== null &&
+ _options$signal4 !== undefined &&
+ _options$signal4.aborted
+ ) {
+ throw new AbortError(undefined, {
+ cause: options.signal.reason
+ })
+ }
+ ArrayPrototypePush(result, val)
+ }
+ return result
+}
+function flatMap(fn, options) {
+ const values = map.call(this, fn, options)
+ return async function* flatMap() {
+ for await (const val of values) {
+ yield* val
+ }
+ }.call(this)
+}
+function toIntegerOrInfinity(number) {
+ // We coerce here to align with the spec
+ // https://github.com/tc39/proposal-iterator-helpers/issues/169
+ number = Number(number)
+ if (NumberIsNaN(number)) {
+ return 0
+ }
+ if (number < 0) {
+ throw new ERR_OUT_OF_RANGE('number', '>= 0', number)
+ }
+ return number
+}
+function drop(number, options = undefined) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+ number = toIntegerOrInfinity(number)
+ return async function* drop() {
+ var _options$signal5
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal5 = options.signal) !== null &&
+ _options$signal5 !== undefined &&
+ _options$signal5.aborted
+ ) {
+ throw new AbortError()
+ }
+ for await (const val of this) {
+ var _options$signal6
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal6 = options.signal) !== null &&
+ _options$signal6 !== undefined &&
+ _options$signal6.aborted
+ ) {
+ throw new AbortError()
+ }
+ if (number-- <= 0) {
+ yield val
+ }
+ }
+ }.call(this)
+}
+function take(number, options = undefined) {
+ if (options != null) {
+ validateObject(options, 'options')
+ }
+ if ((options === null || options === undefined ? undefined : options.signal) != null) {
+ validateAbortSignal(options.signal, 'options.signal')
+ }
+ number = toIntegerOrInfinity(number)
+ return async function* take() {
+ var _options$signal7
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal7 = options.signal) !== null &&
+ _options$signal7 !== undefined &&
+ _options$signal7.aborted
+ ) {
+ throw new AbortError()
+ }
+ for await (const val of this) {
+ var _options$signal8
+ if (
+ options !== null &&
+ options !== undefined &&
+ (_options$signal8 = options.signal) !== null &&
+ _options$signal8 !== undefined &&
+ _options$signal8.aborted
+ ) {
+ throw new AbortError()
+ }
+ if (number-- > 0) {
+ yield val
+ }
+
+ // Don't get another item from iterator in case we reached the end
+ if (number <= 0) {
+ return
+ }
+ }
+ }.call(this)
+}
+module.exports.streamReturningOperators = {
+ asIndexedPairs: deprecate(asIndexedPairs, 'readable.asIndexedPairs will be removed in a future version.'),
+ drop,
+ filter,
+ flatMap,
+ map,
+ take,
+ compose
+}
+module.exports.promiseReturningOperators = {
+ every,
+ forEach,
+ reduce,
+ toArray,
+ some,
+ find
+}
diff --git a/lib/internal/streams/passthrough.js b/lib/internal/streams/passthrough.js
new file mode 100644
index 0000000000..ed4f486c3b
--- /dev/null
+++ b/lib/internal/streams/passthrough.js
@@ -0,0 +1,39 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a passthrough stream.
+// basically just the most minimal sort of Transform stream.
+// Every written chunk gets output as-is.
+
+'use strict'
+
+const { ObjectSetPrototypeOf } = require('../../ours/primordials')
+module.exports = PassThrough
+const Transform = require('./transform')
+ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype)
+ObjectSetPrototypeOf(PassThrough, Transform)
+function PassThrough(options) {
+ if (!(this instanceof PassThrough)) return new PassThrough(options)
+ Transform.call(this, options)
+}
+PassThrough.prototype._transform = function (chunk, encoding, cb) {
+ cb(null, chunk)
+}
diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js
new file mode 100644
index 0000000000..a2bab88006
--- /dev/null
+++ b/lib/internal/streams/pipeline.js
@@ -0,0 +1,471 @@
// 'use strict' must be the FIRST statement of the module to be a directive.
// Previously it appeared as `;('use strict')` after the require() below, where
// it is a parenthesized expression statement, not a directive — so strict mode
// was silently not enabled. Moved here, matching readable.js.
'use strict'

/* replacement start */

const process = require('process/')

/* replacement end */
// Ported from https://github.com/mafintosh/pump with
// permission from the author, Mathias Buus (@mafintosh).
+const { ArrayIsArray, Promise, SymbolAsyncIterator, SymbolDispose } = require('../../ours/primordials')
+const eos = require('./end-of-stream')
+const { once } = require('../../ours/util')
+const destroyImpl = require('./destroy')
+const Duplex = require('./duplex')
+const {
+ aggregateTwoErrors,
+ codes: {
+ ERR_INVALID_ARG_TYPE,
+ ERR_INVALID_RETURN_VALUE,
+ ERR_MISSING_ARGS,
+ ERR_STREAM_DESTROYED,
+ ERR_STREAM_PREMATURE_CLOSE
+ },
+ AbortError
+} = require('../../ours/errors')
+const { validateFunction, validateAbortSignal } = require('../validators')
+const {
+ isIterable,
+ isReadable,
+ isReadableNodeStream,
+ isNodeStream,
+ isTransformStream,
+ isWebStream,
+ isReadableStream,
+ isReadableFinished
+} = require('./utils')
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+let PassThrough
+let Readable
+let addAbortListener
/**
 * Wraps `stream` so the pipeline can tear it down exactly once.
 * Returns a `destroy(err)` that is a no-op after the stream has already
 * finished or closed, plus the eos `cleanup` function for listener removal.
 */
function destroyer(stream, reading, writing) {
  // Set once the stream has settled (closed, or eos fired without error).
  let settled = false
  stream.on('close', () => {
    settled = true
  })
  const cleanup = eos(stream, { readable: reading, writable: writing }, (err) => {
    settled = !err
  })
  return {
    destroy(err) {
      if (settled) return
      settled = true
      destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe'))
    },
    cleanup
  }
}
/**
 * Removes and returns the trailing completion callback from `streams`,
 * validating that it is actually a function.
 */
function popCallback(streams) {
  // Streams should never be an empty array. It should always contain at least
  // a single stream. Therefore optimize for the average case instead of
  // checking for length === 0 as well.
  // Fix: the reported name previously read 'streams[stream.length - 1]'
  // (typo: `stream` instead of `streams`) in the thrown error message.
  validateFunction(streams[streams.length - 1], 'streams[streams.length - 1]')
  return streams.pop()
}
/**
 * Normalizes `val` into something usable with `for await`.
 * Iterables/async iterables pass through untouched; legacy readable node
 * streams are wrapped via fromReadable().
 * @throws {ERR_INVALID_ARG_TYPE} when `val` is neither iterable nor a
 *   readable node stream.
 */
function makeAsyncIterable(val) {
  if (isIterable(val)) {
    return val
  } else if (isReadableNodeStream(val)) {
    // Legacy streams are not Iterable.
    return fromReadable(val)
  }
  throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val)
}
// Adapts a legacy readable node stream into an async iterable.
// `Readable` is required lazily to avoid a circular dependency with
// readable.js; delegates to Readable's own async-iterator implementation.
async function* fromReadable(val) {
  if (!Readable) {
    Readable = require('./readable')
  }
  yield* Readable.prototype[SymbolAsyncIterator].call(val)
}
/**
 * Pumps an async iterable into a node Writable while honoring backpressure
 * (write() returning false pauses iteration until 'drain').
 * `finish` is called exactly once: with no argument on success, or with the
 * (possibly aggregated) error on failure.
 * @param {AsyncIterable} iterable source of chunks
 * @param {*} writable destination node Writable
 * @param {Function} finish completion callback
 * @param {{end: boolean}} opts whether to end() the destination when done
 */
async function pumpToNode(iterable, writable, finish, { end }) {
  let error
  let onresolve = null
  // Records the first error (from eos) and wakes any pending wait().
  const resume = (err) => {
    if (err) {
      error = err
    }
    if (onresolve) {
      const callback = onresolve
      onresolve = null
      callback()
    }
  }
  // Resolves on the next resume() call, or rejects immediately/later if an
  // error has been recorded.
  const wait = () =>
    new Promise((resolve, reject) => {
      if (error) {
        reject(error)
      } else {
        onresolve = () => {
          if (error) {
            reject(error)
          } else {
            resolve()
          }
        }
      }
    })
  writable.on('drain', resume)
  // eos also calls resume, so a destination failure interrupts the pump.
  const cleanup = eos(
    writable,
    {
      readable: false
    },
    resume
  )
  try {
    if (writable.writableNeedDrain) {
      await wait()
    }
    for await (const chunk of iterable) {
      if (!writable.write(chunk)) {
        await wait()
      }
    }
    if (end) {
      writable.end()
      await wait()
    }
    finish()
  } catch (err) {
    // If eos reported a different error than the loop threw, surface both.
    finish(error !== err ? aggregateTwoErrors(error, err) : err)
  } finally {
    cleanup()
    writable.off('drain', resume)
  }
}
/**
 * Pumps a source (async iterable / readable) into a WHATWG writable stream,
 * awaiting `writer.ready` for backpressure.
 * @param {*} readable source of chunks
 * @param {*} writable WHATWG WritableStream (or TransformStream, whose
 *   `.writable` side is used)
 * @param {Function} finish completion callback (err?) — called exactly once
 * @param {{end: boolean}} opts whether to close() the writer when done
 */
async function pumpToWeb(readable, writable, finish, { end }) {
  if (isTransformStream(writable)) {
    writable = writable.writable
  }
  // https://streams.spec.whatwg.org/#example-manual-write-with-backpressure
  const writer = writable.getWriter()
  try {
    for await (const chunk of readable) {
      await writer.ready
      // Per the spec example above, the per-write rejection is deliberately
      // ignored; failures are observed through `writer.ready` instead.
      writer.write(chunk).catch(() => {})
    }
    await writer.ready
    if (end) {
      await writer.close()
    }
    finish()
  } catch (err) {
    try {
      await writer.abort(err)
      finish(err)
    } catch (err) {
      // Note: this inner `err` shadows the outer one — if abort() itself
      // fails, its error is the one reported.
      finish(err)
    }
  }
}
/**
 * Public callback-style `pipeline(...streams, callback)`.
 * The trailing argument must be the completion callback; it is popped off
 * `streams` and wrapped with once() so it fires at most one time.
 */
function pipeline(...streams) {
  return pipelineImpl(streams, once(popCallback(streams)))
}
/**
 * Core pipeline machinery. Wires each element of `streams` (node streams,
 * web streams, iterables, async generator functions) to the next, tracks
 * pending pumps, and invokes `callback(error, value)` exactly once when the
 * whole chain settles or fails.
 * @param {Array} streams pipeline elements (a single nested array is unwrapped)
 * @param {Function} callback completion callback
 * @param {{signal?: AbortSignal, end?: boolean}} [opts]
 * @returns {*} the last stream in the chain (or a PassThrough proxy)
 */
function pipelineImpl(streams, callback, opts) {
  if (streams.length === 1 && ArrayIsArray(streams[0])) {
    streams = streams[0]
  }
  if (streams.length < 2) {
    throw new ERR_MISSING_ARGS('streams')
  }
  const ac = new AbortController()
  const signal = ac.signal
  const outerSignal = opts === null || opts === undefined ? undefined : opts.signal

  // Need to cleanup event listeners if last stream is readable
  // https://github.com/nodejs/node/issues/35452
  const lastStreamCleanup = []
  validateAbortSignal(outerSignal, 'options.signal')
  function abort() {
    finishImpl(new AbortError())
  }
  addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
  let disposable
  if (outerSignal) {
    disposable = addAbortListener(outerSignal, abort)
  }
  let error
  let value
  const destroys = []
  // Number of in-flight pumps/sinks; the callback fires when it drains to 0.
  let finishCount = 0
  function finish(err) {
    finishImpl(err, --finishCount === 0)
  }
  function finishImpl(err, final) {
    var _disposable
    if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) {
      error = err
    }
    if (!error && !final) {
      return
    }
    while (destroys.length) {
      destroys.shift()(error)
    }
    ;(_disposable = disposable) === null || _disposable === undefined ? undefined : _disposable[SymbolDispose]()
    ac.abort()
    if (final) {
      if (!error) {
        lastStreamCleanup.forEach((fn) => fn())
      }
      process.nextTick(callback, error, value)
    }
  }
  // `ret` threads the tail of the chain built so far through the loop.
  let ret
  for (let i = 0; i < streams.length; i++) {
    const stream = streams[i]
    const reading = i < streams.length - 1
    const writing = i > 0
    const end = reading || (opts === null || opts === undefined ? undefined : opts.end) !== false
    const isLastStream = i === streams.length - 1
    if (isNodeStream(stream)) {
      if (end) {
        const { destroy, cleanup } = destroyer(stream, reading, writing)
        destroys.push(destroy)
        if (isReadable(stream) && isLastStream) {
          lastStreamCleanup.push(cleanup)
        }
      }

      // Catch stream errors that occur after pipe/pump has completed.
      function onError(err) {
        if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
          finish(err)
        }
      }
      stream.on('error', onError)
      if (isReadable(stream) && isLastStream) {
        lastStreamCleanup.push(() => {
          stream.removeListener('error', onError)
        })
      }
    }
    if (i === 0) {
      if (typeof stream === 'function') {
        ret = stream({
          signal
        })
        if (!isIterable(ret)) {
          throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret)
        }
      } else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) {
        ret = stream
      } else {
        ret = Duplex.from(stream)
      }
    } else if (typeof stream === 'function') {
      if (isTransformStream(ret)) {
        var _ret
        ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable)
      } else {
        ret = makeAsyncIterable(ret)
      }
      ret = stream(ret, {
        signal
      })
      if (reading) {
        if (!isIterable(ret, true)) {
          throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret)
        }
      } else {
        var _ret2
        if (!PassThrough) {
          PassThrough = require('./passthrough')
        }

        // If the last argument to pipeline is not a stream
        // we must create a proxy stream so that pipeline(...)
        // always returns a stream which can be further
        // composed through `.pipe(stream)`.

        const pt = new PassThrough({
          objectMode: true
        })

        // Handle Promises/A+ spec, `then` could be a getter that throws on
        // second use.
        const then = (_ret2 = ret) === null || _ret2 === undefined ? undefined : _ret2.then
        if (typeof then === 'function') {
          finishCount++
          then.call(
            ret,
            (val) => {
              value = val
              if (val != null) {
                pt.write(val)
              }
              if (end) {
                pt.end()
              }
              process.nextTick(finish)
            },
            (err) => {
              pt.destroy(err)
              process.nextTick(finish, err)
            }
          )
        } else if (isIterable(ret, true)) {
          finishCount++
          pumpToNode(ret, pt, finish, {
            end
          })
        } else if (isReadableStream(ret) || isTransformStream(ret)) {
          const toRead = ret.readable || ret
          finishCount++
          pumpToNode(toRead, pt, finish, {
            end
          })
        } else {
          throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret)
        }
        ret = pt
        const { destroy, cleanup } = destroyer(ret, false, true)
        destroys.push(destroy)
        if (isLastStream) {
          lastStreamCleanup.push(cleanup)
        }
      }
    } else if (isNodeStream(stream)) {
      if (isReadableNodeStream(ret)) {
        // pipe() registers finish for BOTH the source and destination side.
        finishCount += 2
        const cleanup = pipe(ret, stream, finish, {
          end
        })
        if (isReadable(stream) && isLastStream) {
          lastStreamCleanup.push(cleanup)
        }
      } else if (isTransformStream(ret) || isReadableStream(ret)) {
        const toRead = ret.readable || ret
        finishCount++
        pumpToNode(toRead, stream, finish, {
          end
        })
      } else if (isIterable(ret)) {
        finishCount++
        pumpToNode(ret, stream, finish, {
          end
        })
      } else {
        throw new ERR_INVALID_ARG_TYPE(
          'val',
          ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],
          ret
        )
      }
      ret = stream
    } else if (isWebStream(stream)) {
      if (isReadableNodeStream(ret)) {
        finishCount++
        pumpToWeb(makeAsyncIterable(ret), stream, finish, {
          end
        })
      } else if (isReadableStream(ret) || isIterable(ret)) {
        finishCount++
        pumpToWeb(ret, stream, finish, {
          end
        })
      } else if (isTransformStream(ret)) {
        finishCount++
        pumpToWeb(ret.readable, stream, finish, {
          end
        })
      } else {
        throw new ERR_INVALID_ARG_TYPE(
          'val',
          ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],
          ret
        )
      }
      ret = stream
    } else {
      ret = Duplex.from(stream)
    }
  }
  // If either signal was already aborted, tear down asynchronously so the
  // caller still receives the returned stream first.
  if (
    (signal !== null && signal !== undefined && signal.aborted) ||
    (outerSignal !== null && outerSignal !== undefined && outerSignal.aborted)
  ) {
    process.nextTick(abort)
  }
  return ret
}
/**
 * Node-stream to node-stream pump used by pipelineImpl.
 * Registers `finish` for both the source and destination sides (which is why
 * pipelineImpl does `finishCount += 2`) and returns the destination-side eos
 * cleanup function.
 */
function pipe(src, dst, finish, { end }) {
  let ended = false
  dst.on('close', () => {
    if (!ended) {
      // Finish if the destination closes before the source has completed.
      finish(new ERR_STREAM_PREMATURE_CLOSE())
    }
  })
  src.pipe(dst, {
    end: false
  }) // If end is true we already will have a listener to end dst.

  if (end) {
    // Compat. Before node v10.12.0 stdio used to throw an error so
    // pipe() did/does not end() stdio destinations.
    // Now they allow it but "secretly" don't close the underlying fd.

    function endFn() {
      ended = true
      dst.end()
    }
    if (isReadableFinished(src)) {
      // End the destination if the source has already ended.
      process.nextTick(endFn)
    } else {
      src.once('end', endFn)
    }
  } else {
    finish()
  }
  // Source-side completion tracking.
  eos(
    src,
    {
      readable: true,
      writable: false
    },
    (err) => {
      const rState = src._readableState
      if (
        err &&
        err.code === 'ERR_STREAM_PREMATURE_CLOSE' &&
        rState &&
        rState.ended &&
        !rState.errored &&
        !rState.errorEmitted
      ) {
        // Some readable streams will emit 'close' before 'end'. However, since
        // this is on the readable side 'end' should still be emitted if the
        // stream has been ended and no error emitted. This should be allowed in
        // favor of backwards compatibility. Since the stream is piped to a
        // destination this should not result in any observable difference.
        // We don't need to check if this is a writable premature close since
        // eos will only fail with premature close on the reading side for
        // duplex streams.
        src.once('end', finish).once('error', finish)
      } else {
        finish(err)
      }
    }
  )
  // Destination-side completion tracking; its cleanup is the return value.
  return eos(
    dst,
    {
      readable: false,
      writable: true
    },
    finish
  )
}
+module.exports = {
+ pipelineImpl,
+ pipeline
+}
diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js
new file mode 100644
index 0000000000..90c7316056
--- /dev/null
+++ b/lib/internal/streams/readable.js
@@ -0,0 +1,1290 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+/* replacement start */
+
+const process = require('process/')
+
+/* replacement end */
+
+const {
+ ArrayPrototypeIndexOf,
+ NumberIsInteger,
+ NumberIsNaN,
+ NumberParseInt,
+ ObjectDefineProperties,
+ ObjectKeys,
+ ObjectSetPrototypeOf,
+ Promise,
+ SafeSet,
+ SymbolAsyncDispose,
+ SymbolAsyncIterator,
+ Symbol
+} = require('../../ours/primordials')
+module.exports = Readable
+Readable.ReadableState = ReadableState
+const { EventEmitter: EE } = require('events')
+const { Stream, prependListener } = require('./legacy')
+const { Buffer } = require('buffer')
+const { addAbortSignal } = require('./add-abort-signal')
+const eos = require('./end-of-stream')
+let debug = require('../../ours/util').debuglog('stream', (fn) => {
+ debug = fn
+})
+const BufferList = require('./buffer_list')
+const destroyImpl = require('./destroy')
+const { getHighWaterMark, getDefaultHighWaterMark } = require('./state')
+const {
+ aggregateTwoErrors,
+ codes: {
+ ERR_INVALID_ARG_TYPE,
+ ERR_METHOD_NOT_IMPLEMENTED,
+ ERR_OUT_OF_RANGE,
+ ERR_STREAM_PUSH_AFTER_EOF,
+ ERR_STREAM_UNSHIFT_AFTER_END_EVENT
+ },
+ AbortError
+} = require('../../ours/errors')
+const { validateObject } = require('../validators')
+const kPaused = Symbol('kPaused')
+const { StringDecoder } = require('string_decoder/')
+const from = require('./from')
+ObjectSetPrototypeOf(Readable.prototype, Stream.prototype)
+ObjectSetPrototypeOf(Readable, Stream)
+const nop = () => {}
+const { errorOrDestroy } = destroyImpl
// Bit positions for the ReadableState.state bit field. Each flag mirrors a
// legacy boolean property; accessors over these bits are installed on
// ReadableState.prototype further down via makeBitMapDescriptor().
const kObjectMode = 1 << 0
const kEnded = 1 << 1
const kEndEmitted = 1 << 2
const kReading = 1 << 3
const kConstructed = 1 << 4
const kSync = 1 << 5
const kNeedReadable = 1 << 6
const kEmittedReadable = 1 << 7
const kReadableListening = 1 << 8
const kResumeScheduled = 1 << 9
const kErrorEmitted = 1 << 10
const kEmitClose = 1 << 11
const kAutoDestroy = 1 << 12
const kDestroyed = 1 << 13
const kClosed = 1 << 14
const kCloseEmitted = 1 << 15
const kMultiAwaitDrain = 1 << 16
const kReadingMore = 1 << 17
const kDataEmitted = 1 << 18
+
// TODO(benjamingr) it is likely slower to do it this way than with free functions
/**
 * Builds a non-enumerable accessor property descriptor that reads and writes
 * a single bit of `this.state`, so legacy boolean properties can be backed by
 * one integer field.
 * @param {number} bit mask with exactly the relevant bit set
 */
function makeBitMapDescriptor(bit) {
  return {
    enumerable: false,
    get() {
      return (this.state & bit) !== 0
    },
    set(value) {
      // Set or clear only the targeted bit; all other bits are preserved.
      this.state = value ? this.state | bit : this.state & ~bit
    }
  }
}
// Install the legacy boolean properties as non-enumerable accessors over the
// ReadableState.state bit field (see the k* constants above).
ObjectDefineProperties(ReadableState.prototype, {
  objectMode: makeBitMapDescriptor(kObjectMode),
  ended: makeBitMapDescriptor(kEnded),
  endEmitted: makeBitMapDescriptor(kEndEmitted),
  reading: makeBitMapDescriptor(kReading),
  // Stream is still being constructed and cannot be
  // destroyed until construction finished or failed.
  // Async construction is opt in, therefore we start as
  // constructed.
  constructed: makeBitMapDescriptor(kConstructed),
  // A flag to be able to tell if the event 'readable'/'data' is emitted
  // immediately, or on a later tick. We set this to true at first, because
  // any actions that shouldn't happen until "later" should generally also
  // not happen before the first read call.
  sync: makeBitMapDescriptor(kSync),
  // Whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  needReadable: makeBitMapDescriptor(kNeedReadable),
  emittedReadable: makeBitMapDescriptor(kEmittedReadable),
  readableListening: makeBitMapDescriptor(kReadableListening),
  resumeScheduled: makeBitMapDescriptor(kResumeScheduled),
  // True if the error was already emitted and should not be thrown again.
  errorEmitted: makeBitMapDescriptor(kErrorEmitted),
  emitClose: makeBitMapDescriptor(kEmitClose),
  autoDestroy: makeBitMapDescriptor(kAutoDestroy),
  // Has it been destroyed.
  destroyed: makeBitMapDescriptor(kDestroyed),
  // Indicates whether the stream has finished destroying.
  closed: makeBitMapDescriptor(kClosed),
  // True if close has been emitted or would have been emitted
  // depending on emitClose.
  closeEmitted: makeBitMapDescriptor(kCloseEmitted),
  multiAwaitDrain: makeBitMapDescriptor(kMultiAwaitDrain),
  // If true, a maybeReadMore has been scheduled.
  readingMore: makeBitMapDescriptor(kReadingMore),
  dataEmitted: makeBitMapDescriptor(kDataEmitted)
})
/**
 * State container for the readable side of a stream.
 * @param {object} [options] stream options (objectMode, highWaterMark,
 *   encoding, defaultEncoding, emitClose, autoDestroy, ...)
 * @param {*} stream the owning stream
 * @param {boolean} [isDuplex] precomputed `stream instanceof Duplex`;
 *   recomputed here when not supplied as a boolean.
 */
function ReadableState(options, stream, isDuplex) {
  // Duplex streams are both readable and writable, but share
  // the same options object.
  // However, some cases require setting options to different
  // values for the readable and the writable sides of the duplex stream.
  // These options can be provided separately as readableXXX and writableXXX.
  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex')

  // Bit map field to store ReadableState more efficiently with 1 bit per field
  // instead of a V8 slot per field.
  this.state = kEmitClose | kAutoDestroy | kConstructed | kSync
  // Object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away.
  if (options && options.objectMode) this.state |= kObjectMode
  if (isDuplex && options && options.readableObjectMode) this.state |= kObjectMode

  // The point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  this.highWaterMark = options
    ? getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex)
    : getDefaultHighWaterMark(false)

  // A linked list is used to store data chunks instead of an array because the
  // linked list can remove elements from the beginning faster than
  // array.shift().
  this.buffer = new BufferList()
  this.length = 0
  this.pipes = []
  this.flowing = null
  this[kPaused] = null

  // Should close be emitted on destroy. Defaults to true.
  if (options && options.emitClose === false) this.state &= ~kEmitClose

  // Should .destroy() be called after 'end' (and potentially 'finish').
  if (options && options.autoDestroy === false) this.state &= ~kAutoDestroy

  // Indicates whether the stream has errored. When true no further
  // _read calls, 'data' or 'readable' events should occur. This is needed
  // since when autoDestroy is disabled we need a way to tell whether the
  // stream has failed.
  this.errored = null

  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'

  // Ref the piped dest which we need a drain event on it
  // type: null | Writable | Set.
  this.awaitDrainWriters = null
  this.decoder = null
  this.encoding = null
  if (options && options.encoding) {
    this.decoder = new StringDecoder(options.encoding)
    this.encoding = options.encoding
  }
}
/**
 * Readable stream constructor. Callable with or without `new`.
 * @param {object} [options] may supply `read`, `destroy`, `construct` hooks
 *   and an AbortSignal via `options.signal`.
 */
function Readable(options) {
  if (!(this instanceof Readable)) return new Readable(options)

  // Checking for a Stream.Duplex instance is faster here instead of inside
  // the ReadableState constructor, at least with V8 6.5.
  const isDuplex = this instanceof require('./duplex')
  this._readableState = new ReadableState(options, this, isDuplex)
  if (options) {
    // User-supplied hooks override the prototype stubs.
    if (typeof options.read === 'function') this._read = options.read
    if (typeof options.destroy === 'function') this._destroy = options.destroy
    if (typeof options.construct === 'function') this._construct = options.construct
    // NOTE(review): the signal is only wired here for non-duplex streams —
    // presumably the duplex constructor handles it itself; confirm in duplex.js.
    if (options.signal && !isDuplex) addAbortSignal(options.signal, this)
  }
  Stream.call(this, options)
  // Once (possibly async) construction finishes, start reading if a
  // 'readable' event is already being awaited.
  destroyImpl.construct(this, () => {
    if (this._readableState.needReadable) {
      maybeReadMore(this, this._readableState)
    }
  })
}
// Destruction plumbing is shared with the writable side via destroyImpl.
Readable.prototype.destroy = destroyImpl.destroy
Readable.prototype._undestroy = destroyImpl.undestroy
// Default _destroy: nothing to release; just forward the error (if any).
Readable.prototype._destroy = function (err, cb) {
  cb(err)
}
// Invoked by EventEmitter when a listener's promise rejects
// (captureRejections mode): tear the stream down with that error.
Readable.prototype[EE.captureRejectionSymbol] = function (err) {
  this.destroy(err)
}
// `await using` support: destroy (with AbortError unless already ended) and
// resolve once the stream has fully settled.
Readable.prototype[SymbolAsyncDispose] = function () {
  let error
  if (!this.destroyed) {
    error = this.readableEnded ? null : new AbortError()
    this.destroy(error)
  }
  return new Promise((resolve, reject) => eos(this, (err) => (err && err !== error ? reject(err) : resolve(null))))
}

// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function (chunk, encoding) {
  return readableAddChunk(this, chunk, encoding, false)
}

// Unshift should *always* be something directly out of read().
Readable.prototype.unshift = function (chunk, encoding) {
  return readableAddChunk(this, chunk, encoding, true)
}
/**
 * Shared implementation behind push() and unshift().
 * Normalizes/validates the chunk, handles EOF (chunk === null), buffers or
 * emits the chunk, and reports whether the caller may keep pushing.
 * @returns {boolean} true while below the highWaterMark (or the buffer is
 *   empty) and the stream has not ended.
 */
function readableAddChunk(stream, chunk, encoding, addToFront) {
  debug('readableAddChunk', chunk)
  const state = stream._readableState
  let err
  if ((state.state & kObjectMode) === 0) {
    if (typeof chunk === 'string') {
      encoding = encoding || state.defaultEncoding
      if (state.encoding !== encoding) {
        if (addToFront && state.encoding) {
          // When unshifting, if state.encoding is set, we have to save
          // the string in the BufferList with the state encoding.
          chunk = Buffer.from(chunk, encoding).toString(state.encoding)
        } else {
          chunk = Buffer.from(chunk, encoding)
          encoding = ''
        }
      }
    } else if (chunk instanceof Buffer) {
      encoding = ''
    } else if (Stream._isUint8Array(chunk)) {
      chunk = Stream._uint8ArrayToBuffer(chunk)
      encoding = ''
    } else if (chunk != null) {
      err = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)
    }
  }
  if (err) {
    errorOrDestroy(stream, err)
  } else if (chunk === null) {
    // push(null) signals EOF.
    state.state &= ~kReading
    onEofChunk(stream, state)
  } else if ((state.state & kObjectMode) !== 0 || (chunk && chunk.length > 0)) {
    if (addToFront) {
      if ((state.state & kEndEmitted) !== 0) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT())
      else if (state.destroyed || state.errored) return false
      else addChunk(stream, state, chunk, true)
    } else if (state.ended) {
      errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF())
    } else if (state.destroyed || state.errored) {
      return false
    } else {
      state.state &= ~kReading
      if (state.decoder && !encoding) {
        chunk = state.decoder.write(chunk)
        if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false)
        else maybeReadMore(stream, state)
      } else {
        addChunk(stream, state, chunk, false)
      }
    }
  } else if (!addToFront) {
    // Empty, non-object-mode chunk: nothing to buffer, but schedule more reads.
    state.state &= ~kReading
    maybeReadMore(stream, state)
  }

  // We can push more data if we are below the highWaterMark.
  // Also, if we have no data yet, we can stand some more bytes.
  // This is to work around cases where hwm=0, such as the repl.
  return !state.ended && (state.length < state.highWaterMark || state.length === 0)
}
// Delivers a chunk either directly as a 'data' event (fast path: flowing,
// empty buffer, not inside a synchronous _read, and at least one 'data'
// listener) or by appending/prepending it to the internal buffer.
function addChunk(stream, state, chunk, addToFront) {
  if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount('data') > 0) {
    // Use the guard to avoid creating `Set()` repeatedly
    // when we have multiple pipes.
    if ((state.state & kMultiAwaitDrain) !== 0) {
      state.awaitDrainWriters.clear()
    } else {
      state.awaitDrainWriters = null
    }
    state.dataEmitted = true
    stream.emit('data', chunk)
  } else {
    // Update the buffer info.
    state.length += state.objectMode ? 1 : chunk.length
    if (addToFront) state.buffer.unshift(chunk)
    else state.buffer.push(chunk)
    if ((state.state & kNeedReadable) !== 0) emitReadable(stream)
  }
  maybeReadMore(stream, state)
}
// Paused means either explicitly paused (kPaused) or flow disabled.
Readable.prototype.isPaused = function () {
  const state = this._readableState
  return state[kPaused] === true || state.flowing === false
}

// Backwards compatibility.
/**
 * Switches the stream to string mode with the given encoding, and re-encodes
 * any Buffers already sitting in the internal buffer.
 */
Readable.prototype.setEncoding = function (enc) {
  const decoder = new StringDecoder(enc)
  this._readableState.decoder = decoder
  // If setEncoding(null), decoder.encoding equals utf8.
  this._readableState.encoding = this._readableState.decoder.encoding
  const buffer = this._readableState.buffer
  // Iterate over current buffer to convert already stored Buffers:
  let content = ''
  for (const data of buffer) {
    content += decoder.write(data)
  }
  buffer.clear()
  if (content !== '') buffer.push(content)
  // Buffered length is now measured in characters of the single joined string.
  this._readableState.length = content.length
  return this
}
+
// Don't raise the hwm > 1GB.
const MAX_HWM = 0x40000000
/**
 * Rounds `n` up to the next power of two (bit-smearing trick) so the
 * highWaterMark grows in chunky steps rather than tiny increments.
 * @throws {ERR_OUT_OF_RANGE} when `n` exceeds the 1 GiB cap.
 */
function computeNewHighWaterMark(n) {
  if (n > MAX_HWM) {
    throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n)
  }
  // Smear the highest set bit of (n - 1) into every lower position, then add
  // one to land on the next power of two.
  let v = n - 1
  v |= v >>> 1
  v |= v >>> 2
  v |= v >>> 4
  v |= v >>> 8
  v |= v >>> 16
  return v + 1
}
+
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
// Decides how many bytes (or objects) read(n) may return right now:
// 0 when nothing can be returned, 1 per read in object mode, the full
// buffered length (or first chunk while flowing) for read() with no n,
// n itself when enough is buffered, and everything remaining once ended.
function howMuchToRead(n, state) {
  if (n <= 0 || (state.length === 0 && state.ended)) return 0
  if ((state.state & kObjectMode) !== 0) return 1
  if (NumberIsNaN(n)) {
    // Only flow one buffer at a time.
    if (state.flowing && state.length) return state.buffer.first().length
    return state.length
  }
  if (n <= state.length) return n
  return state.ended ? state.length : 0
}
+
// You can override either this method, or the async _read(n) below.
/**
 * Pulls up to `n` bytes (or one object per call in object mode) out of the
 * internal buffer, calling `_read()` when more data should be requested from
 * the underlying resource.
 * @param {number} [n] amount requested; undefined/NaN means "whatever is ready"
 * @returns {*} a chunk, or null when nothing can be returned right now
 */
Readable.prototype.read = function (n) {
  debug('read', n)
  // Same as parseInt(undefined, 10), however V8 7.3 performance regressed
  // in this scenario, so we are doing it manually.
  if (n === undefined) {
    n = NaN
  } else if (!NumberIsInteger(n)) {
    n = NumberParseInt(n, 10)
  }
  const state = this._readableState
  const nOrig = n

  // If we're asking for more than the current hwm, then raise the hwm.
  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n)
  if (n !== 0) state.state &= ~kEmittedReadable

  // If we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (
    n === 0 &&
    state.needReadable &&
    ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)
  ) {
    debug('read: emitReadable', state.length, state.ended)
    if (state.length === 0 && state.ended) endReadable(this)
    else emitReadable(this)
    return null
  }
  n = howMuchToRead(n, state)

  // If we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0) endReadable(this)
    return null
  }

  // All the actual chunk generation logic needs to be
  // *below* the call to _read. The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous. Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  let doRead = (state.state & kNeedReadable) !== 0
  debug('need readable', doRead)

  // If we currently have less than the highWaterMark, then also read some.
  if (state.length === 0 || state.length - n < state.highWaterMark) {
    doRead = true
    debug('length less than watermark', doRead)
  }

  // However, if we've ended, then there's no point, if we're already
  // reading, then it's unnecessary, if we're constructing we have to wait,
  // and if we're destroyed or errored, then it's not allowed,
  if (state.ended || state.reading || state.destroyed || state.errored || !state.constructed) {
    doRead = false
    debug('reading, ended or constructing', doRead)
  } else if (doRead) {
    debug('do read')
    state.state |= kReading | kSync
    // If the length is currently zero, then we *need* a readable event.
    if (state.length === 0) state.state |= kNeedReadable

    // Call internal read method
    try {
      this._read(state.highWaterMark)
    } catch (err) {
      errorOrDestroy(this, err)
    }
    state.state &= ~kSync

    // If _read pushed data synchronously, then `reading` will be false,
    // and we need to re-evaluate how much data we can return to the user.
    if (!state.reading) n = howMuchToRead(nOrig, state)
  }
  let ret
  if (n > 0) ret = fromList(n, state)
  else ret = null
  if (ret === null) {
    state.needReadable = state.length <= state.highWaterMark
    n = 0
  } else {
    state.length -= n
    if (state.multiAwaitDrain) {
      state.awaitDrainWriters.clear()
    } else {
      state.awaitDrainWriters = null
    }
  }
  if (state.length === 0) {
    // If we have nothing in the buffer, then we want to know
    // as soon as we *do* get something into the buffer.
    if (!state.ended) state.needReadable = true

    // If we tried to read() past the EOF, then emit end on the next tick.
    if (nOrig !== n && state.ended) endReadable(this)
  }
  if (ret !== null && !state.errorEmitted && !state.closeEmitted) {
    state.dataEmitted = true
    this.emit('data', ret)
  }
  return ret
}
// Handles push(null): flushes any partial characters held by the decoder,
// marks the stream ended, and emits 'readable' (sync or deferred depending
// on whether we are inside a synchronous _read).
function onEofChunk(stream, state) {
  debug('onEofChunk')
  if (state.ended) return
  if (state.decoder) {
    const chunk = state.decoder.end()
    if (chunk && chunk.length) {
      state.buffer.push(chunk)
      state.length += state.objectMode ? 1 : chunk.length
    }
  }
  state.ended = true
  if (state.sync) {
    // If we are sync, wait until next tick to emit the data.
    // Otherwise we risk emitting data in the flow()
    // the readable code triggers during a read() call.
    emitReadable(stream)
  } else {
    // Emit 'readable' now to make sure it gets picked up.
    state.needReadable = false
    state.emittedReadable = true
    // We have to emit readable now that we are EOF. Modules
    // in the ecosystem (e.g. dicer) rely on this event being sync.
    emitReadable_(stream)
  }
}
+
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
// Schedules (at most one pending) 'readable' emission on the next tick.
function emitReadable(stream) {
  const state = stream._readableState
  debug('emitReadable', state.needReadable, state.emittedReadable)
  state.needReadable = false
  if (!state.emittedReadable) {
    debug('emitReadable', state.flowing)
    state.emittedReadable = true
    process.nextTick(emitReadable_, stream)
  }
}
// Actually emits 'readable' (when there is data or the stream ended, and it
// has not been destroyed or errored), then keeps the flow machinery moving.
function emitReadable_(stream) {
  const state = stream._readableState
  debug('emitReadable_', state.destroyed, state.length, state.ended)
  if (!state.destroyed && !state.errored && (state.length || state.ended)) {
    stream.emit('readable')
    state.emittedReadable = false
  }

  // The stream needs another readable event if:
  // 1. It is not flowing, as the flow mechanism will take
  //    care of it.
  // 2. It is not ended.
  // 3. It is below the highWaterMark, so we can schedule
  //    another readable later.
  state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark
  flow(stream)
}
+
+// At this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data. That may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+ if (!state.readingMore && state.constructed) {
+ state.readingMore = true
+ process.nextTick(maybeReadMore_, stream, state)
+ }
+}
+function maybeReadMore_(stream, state) {
+ // Attempt to read more data if we should.
+ //
+ // The conditions for reading more data are (one of):
+ // - Not enough data buffered (state.length < state.highWaterMark). The loop
+ // is responsible for filling the buffer with enough data if such data
+ // is available. If highWaterMark is 0 and we are not in the flowing mode
+ // we should _not_ attempt to buffer any extra data. We'll get more data
+ // when the stream consumer calls read() instead.
+ // - No data in the buffer, and the stream is in flowing mode. In this mode
+ // the loop below is responsible for ensuring read() is called. Failing to
+ // call read here would abort the flow and there's no other mechanism for
+ // continuing the flow if the stream consumer has just subscribed to the
+ // 'data' event.
+ //
+ // In addition to the above conditions to keep reading data, the following
+ // conditions prevent the data from being read:
+ // - The stream has ended (state.ended).
+ // - There is already a pending 'read' operation (state.reading). This is a
+ // case where the stream has called the implementation defined _read()
+ // method, but they are processing the call asynchronously and have _not_
+ // called push() with new data. In this case we skip performing more
+ // read()s. The execution ends in this method again after the _read() ends
+ // up calling push() with more data.
+ while (
+ !state.reading &&
+ !state.ended &&
+ (state.length < state.highWaterMark || (state.flowing && state.length === 0))
+ ) {
+ const len = state.length
+ debug('maybeReadMore read 0')
+ stream.read(0)
+ if (len === state.length)
+ // Didn't get any data, stop spinning.
+ break
+ }
+ state.readingMore = false
+}
+
+// Abstract method, to be overridden in specific implementation classes.
+// Call cb(er, data) where data is <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function (n) {
+ throw new ERR_METHOD_NOT_IMPLEMENTED('_read()')
+}
+Readable.prototype.pipe = function (dest, pipeOpts) {
+ const src = this
+ const state = this._readableState
+ if (state.pipes.length === 1) {
+ if (!state.multiAwaitDrain) {
+ state.multiAwaitDrain = true
+ state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? [state.awaitDrainWriters] : [])
+ }
+ }
+ state.pipes.push(dest)
+ debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts)
+ const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr
+ const endFn = doEnd ? onend : unpipe
+ if (state.endEmitted) process.nextTick(endFn)
+ else src.once('end', endFn)
+ dest.on('unpipe', onunpipe)
+ function onunpipe(readable, unpipeInfo) {
+ debug('onunpipe')
+ if (readable === src) {
+ if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
+ unpipeInfo.hasUnpiped = true
+ cleanup()
+ }
+ }
+ }
+ function onend() {
+ debug('onend')
+ dest.end()
+ }
+ let ondrain
+ let cleanedUp = false
+ function cleanup() {
+ debug('cleanup')
+ // Cleanup event handlers once the pipe is broken.
+ dest.removeListener('close', onclose)
+ dest.removeListener('finish', onfinish)
+ if (ondrain) {
+ dest.removeListener('drain', ondrain)
+ }
+ dest.removeListener('error', onerror)
+ dest.removeListener('unpipe', onunpipe)
+ src.removeListener('end', onend)
+ src.removeListener('end', unpipe)
+ src.removeListener('data', ondata)
+ cleanedUp = true
+
+ // If the reader is waiting for a drain event from this
+ // specific writer, then it would cause it to never start
+ // flowing again.
+ // So, if this is awaiting a drain, then we just call it now.
+ // If we don't know, then assume that we are waiting for one.
+ if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain()
+ }
+ function pause() {
+ // If the user unpiped during `dest.write()`, it is possible
+ // to get stuck in a permanently paused state if that write
+ // also returned false.
+ // => Check whether `dest` is still a piping destination.
+ if (!cleanedUp) {
+ if (state.pipes.length === 1 && state.pipes[0] === dest) {
+ debug('false write response, pause', 0)
+ state.awaitDrainWriters = dest
+ state.multiAwaitDrain = false
+ } else if (state.pipes.length > 1 && state.pipes.includes(dest)) {
+ debug('false write response, pause', state.awaitDrainWriters.size)
+ state.awaitDrainWriters.add(dest)
+ }
+ src.pause()
+ }
+ if (!ondrain) {
+ // When the dest drains, it reduces the awaitDrain counter
+ // on the source. This would be more elegant with a .once()
+ // handler in flow(), but adding and removing repeatedly is
+ // too slow.
+ ondrain = pipeOnDrain(src, dest)
+ dest.on('drain', ondrain)
+ }
+ }
+ src.on('data', ondata)
+ function ondata(chunk) {
+ debug('ondata')
+ const ret = dest.write(chunk)
+ debug('dest.write', ret)
+ if (ret === false) {
+ pause()
+ }
+ }
+
+ // If the dest has an error, then stop piping into it.
+ // However, don't suppress the throwing behavior for this.
+ function onerror(er) {
+ debug('onerror', er)
+ unpipe()
+ dest.removeListener('error', onerror)
+ if (dest.listenerCount('error') === 0) {
+ const s = dest._writableState || dest._readableState
+ if (s && !s.errorEmitted) {
+ // User incorrectly emitted 'error' directly on the stream.
+ errorOrDestroy(dest, er)
+ } else {
+ dest.emit('error', er)
+ }
+ }
+ }
+
+ // Make sure our error handler is attached before userland ones.
+ prependListener(dest, 'error', onerror)
+
+ // Both close and finish should trigger unpipe, but only once.
+ function onclose() {
+ dest.removeListener('finish', onfinish)
+ unpipe()
+ }
+ dest.once('close', onclose)
+ function onfinish() {
+ debug('onfinish')
+ dest.removeListener('close', onclose)
+ unpipe()
+ }
+ dest.once('finish', onfinish)
+ function unpipe() {
+ debug('unpipe')
+ src.unpipe(dest)
+ }
+
+ // Tell the dest that it's being piped to.
+ dest.emit('pipe', src)
+
+ // Start the flow if it hasn't been started already.
+
+ if (dest.writableNeedDrain === true) {
+ pause()
+ } else if (!state.flowing) {
+ debug('pipe resume')
+ src.resume()
+ }
+ return dest
+}
+function pipeOnDrain(src, dest) {
+ return function pipeOnDrainFunctionResult() {
+ const state = src._readableState
+
+    // `ondrain` may be invoked directly rather than via dest's
+    // 'drain' event, so `this` may not be a reference to dest;
+    // we use the captured `dest` here instead.
+ if (state.awaitDrainWriters === dest) {
+ debug('pipeOnDrain', 1)
+ state.awaitDrainWriters = null
+ } else if (state.multiAwaitDrain) {
+ debug('pipeOnDrain', state.awaitDrainWriters.size)
+ state.awaitDrainWriters.delete(dest)
+ }
+ if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && src.listenerCount('data')) {
+ src.resume()
+ }
+ }
+}
+Readable.prototype.unpipe = function (dest) {
+ const state = this._readableState
+ const unpipeInfo = {
+ hasUnpiped: false
+ }
+
+ // If we're not piping anywhere, then do nothing.
+ if (state.pipes.length === 0) return this
+ if (!dest) {
+ // remove all.
+ const dests = state.pipes
+ state.pipes = []
+ this.pause()
+ for (let i = 0; i < dests.length; i++)
+ dests[i].emit('unpipe', this, {
+ hasUnpiped: false
+ })
+ return this
+ }
+
+ // Try to find the right one.
+ const index = ArrayPrototypeIndexOf(state.pipes, dest)
+ if (index === -1) return this
+ state.pipes.splice(index, 1)
+ if (state.pipes.length === 0) this.pause()
+ dest.emit('unpipe', this, unpipeInfo)
+ return this
+}
+
+// Set up data events if they are asked for
+// Ensure readable listeners eventually get something.
+Readable.prototype.on = function (ev, fn) {
+ const res = Stream.prototype.on.call(this, ev, fn)
+ const state = this._readableState
+ if (ev === 'data') {
+ // Update readableListening so that resume() may be a no-op
+ // a few lines down. This is needed to support once('readable').
+ state.readableListening = this.listenerCount('readable') > 0
+
+    // Try to start flowing on next tick if stream isn't explicitly paused.
+ if (state.flowing !== false) this.resume()
+ } else if (ev === 'readable') {
+ if (!state.endEmitted && !state.readableListening) {
+ state.readableListening = state.needReadable = true
+ state.flowing = false
+ state.emittedReadable = false
+ debug('on readable', state.length, state.reading)
+ if (state.length) {
+ emitReadable(this)
+ } else if (!state.reading) {
+ process.nextTick(nReadingNextTick, this)
+ }
+ }
+ }
+ return res
+}
+Readable.prototype.addListener = Readable.prototype.on
+Readable.prototype.removeListener = function (ev, fn) {
+ const res = Stream.prototype.removeListener.call(this, ev, fn)
+ if (ev === 'readable') {
+ // We need to check if there is someone still listening to
+ // readable and reset the state. However this needs to happen
+ // after readable has been emitted but before I/O (nextTick) to
+ // support once('readable', fn) cycles. This means that calling
+ // resume within the same tick will have no
+ // effect.
+ process.nextTick(updateReadableListening, this)
+ }
+ return res
+}
+Readable.prototype.off = Readable.prototype.removeListener
+Readable.prototype.removeAllListeners = function (ev) {
+ const res = Stream.prototype.removeAllListeners.apply(this, arguments)
+ if (ev === 'readable' || ev === undefined) {
+ // We need to check if there is someone still listening to
+ // readable and reset the state. However this needs to happen
+ // after readable has been emitted but before I/O (nextTick) to
+ // support once('readable', fn) cycles. This means that calling
+ // resume within the same tick will have no
+ // effect.
+ process.nextTick(updateReadableListening, this)
+ }
+ return res
+}
+function updateReadableListening(self) {
+ const state = self._readableState
+ state.readableListening = self.listenerCount('readable') > 0
+ if (state.resumeScheduled && state[kPaused] === false) {
+ // Flowing needs to be set to true now, otherwise
+ // the upcoming resume will not flow.
+ state.flowing = true
+
+ // Crude way to check if we should resume.
+ } else if (self.listenerCount('data') > 0) {
+ self.resume()
+ } else if (!state.readableListening) {
+ state.flowing = null
+ }
+}
+function nReadingNextTick(self) {
+ debug('readable nexttick read 0')
+ self.read(0)
+}
+
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
+Readable.prototype.resume = function () {
+ const state = this._readableState
+ if (!state.flowing) {
+ debug('resume')
+ // We flow only if there is no one listening
+ // for readable, but we still have to call
+ // resume().
+ state.flowing = !state.readableListening
+ resume(this, state)
+ }
+ state[kPaused] = false
+ return this
+}
+function resume(stream, state) {
+ if (!state.resumeScheduled) {
+ state.resumeScheduled = true
+ process.nextTick(resume_, stream, state)
+ }
+}
+function resume_(stream, state) {
+ debug('resume', state.reading)
+ if (!state.reading) {
+ stream.read(0)
+ }
+ state.resumeScheduled = false
+ stream.emit('resume')
+ flow(stream)
+ if (state.flowing && !state.reading) stream.read(0)
+}
+Readable.prototype.pause = function () {
+ debug('call pause flowing=%j', this._readableState.flowing)
+ if (this._readableState.flowing !== false) {
+ debug('pause')
+ this._readableState.flowing = false
+ this.emit('pause')
+ }
+ this._readableState[kPaused] = true
+ return this
+}
+function flow(stream) {
+ const state = stream._readableState
+ debug('flow', state.flowing)
+ while (state.flowing && stream.read() !== null);
+}
+
+// Wrap an old-style stream as the async data source.
+// This is *not* part of the readable stream interface.
+// It is an ugly unfortunate mess of history.
+Readable.prototype.wrap = function (stream) {
+ let paused = false
+
+ // TODO (ronag): Should this.destroy(err) emit
+ // 'error' on the wrapped stream? Would require
+ // a static factory method, e.g. Readable.wrap(stream).
+
+ stream.on('data', (chunk) => {
+ if (!this.push(chunk) && stream.pause) {
+ paused = true
+ stream.pause()
+ }
+ })
+ stream.on('end', () => {
+ this.push(null)
+ })
+ stream.on('error', (err) => {
+ errorOrDestroy(this, err)
+ })
+ stream.on('close', () => {
+ this.destroy()
+ })
+ stream.on('destroy', () => {
+ this.destroy()
+ })
+ this._read = () => {
+ if (paused && stream.resume) {
+ paused = false
+ stream.resume()
+ }
+ }
+
+ // Proxy all the other methods. Important when wrapping filters and duplexes.
+ const streamKeys = ObjectKeys(stream)
+ for (let j = 1; j < streamKeys.length; j++) {
+ const i = streamKeys[j]
+ if (this[i] === undefined && typeof stream[i] === 'function') {
+ this[i] = stream[i].bind(stream)
+ }
+ }
+ return this
+}
+Readable.prototype[SymbolAsyncIterator] = function () {
+ return streamToAsyncIterator(this)
+}
+Readable.prototype.iterator = function (options) {
+ if (options !== undefined) {
+ validateObject(options, 'options')
+ }
+ return streamToAsyncIterator(this, options)
+}
+function streamToAsyncIterator(stream, options) {
+ if (typeof stream.read !== 'function') {
+ stream = Readable.wrap(stream, {
+ objectMode: true
+ })
+ }
+ const iter = createAsyncIterator(stream, options)
+ iter.stream = stream
+ return iter
+}
+async function* createAsyncIterator(stream, options) {
+ let callback = nop
+ function next(resolve) {
+ if (this === stream) {
+ callback()
+ callback = nop
+ } else {
+ callback = resolve
+ }
+ }
+ stream.on('readable', next)
+ let error
+ const cleanup = eos(
+ stream,
+ {
+ writable: false
+ },
+ (err) => {
+ error = err ? aggregateTwoErrors(error, err) : null
+ callback()
+ callback = nop
+ }
+ )
+ try {
+ while (true) {
+ const chunk = stream.destroyed ? null : stream.read()
+ if (chunk !== null) {
+ yield chunk
+ } else if (error) {
+ throw error
+ } else if (error === null) {
+ return
+ } else {
+ await new Promise(next)
+ }
+ }
+ } catch (err) {
+ error = aggregateTwoErrors(error, err)
+ throw error
+ } finally {
+ if (
+ (error || (options === null || options === undefined ? undefined : options.destroyOnReturn) !== false) &&
+ (error === undefined || stream._readableState.autoDestroy)
+ ) {
+ destroyImpl.destroyer(stream, null)
+ } else {
+ stream.off('readable', next)
+ cleanup()
+ }
+ }
+}
+
+// Making it explicit these properties are not enumerable
+// because otherwise some prototype manipulation in
+// userland will fail.
+ObjectDefineProperties(Readable.prototype, {
+ readable: {
+ __proto__: null,
+ get() {
+ const r = this._readableState
+ // r.readable === false means that this is part of a Duplex stream
+ // where the readable side was disabled upon construction.
+ // Compat. The user might manually disable readable side through
+ // deprecated setter.
+ return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted
+ },
+ set(val) {
+ // Backwards compat.
+ if (this._readableState) {
+ this._readableState.readable = !!val
+ }
+ }
+ },
+ readableDidRead: {
+ __proto__: null,
+ enumerable: false,
+ get: function () {
+ return this._readableState.dataEmitted
+ }
+ },
+ readableAborted: {
+ __proto__: null,
+ enumerable: false,
+ get: function () {
+ return !!(
+ this._readableState.readable !== false &&
+ (this._readableState.destroyed || this._readableState.errored) &&
+ !this._readableState.endEmitted
+ )
+ }
+ },
+ readableHighWaterMark: {
+ __proto__: null,
+ enumerable: false,
+ get: function () {
+ return this._readableState.highWaterMark
+ }
+ },
+ readableBuffer: {
+ __proto__: null,
+ enumerable: false,
+ get: function () {
+ return this._readableState && this._readableState.buffer
+ }
+ },
+ readableFlowing: {
+ __proto__: null,
+ enumerable: false,
+ get: function () {
+ return this._readableState.flowing
+ },
+ set: function (state) {
+ if (this._readableState) {
+ this._readableState.flowing = state
+ }
+ }
+ },
+ readableLength: {
+ __proto__: null,
+ enumerable: false,
+ get() {
+ return this._readableState.length
+ }
+ },
+ readableObjectMode: {
+ __proto__: null,
+ enumerable: false,
+ get() {
+ return this._readableState ? this._readableState.objectMode : false
+ }
+ },
+ readableEncoding: {
+ __proto__: null,
+ enumerable: false,
+ get() {
+ return this._readableState ? this._readableState.encoding : null
+ }
+ },
+ errored: {
+ __proto__: null,
+ enumerable: false,
+ get() {
+ return this._readableState ? this._readableState.errored : null
+ }
+ },
+ closed: {
+ __proto__: null,
+ get() {
+ return this._readableState ? this._readableState.closed : false
+ }
+ },
+ destroyed: {
+ __proto__: null,
+ enumerable: false,
+ get() {
+ return this._readableState ? this._readableState.destroyed : false
+ },
+ set(value) {
+ // We ignore the value if the stream
+ // has not been initialized yet.
+ if (!this._readableState) {
+ return
+ }
+
+ // Backward compatibility, the user is explicitly
+ // managing destroyed.
+ this._readableState.destroyed = value
+ }
+ },
+ readableEnded: {
+ __proto__: null,
+ enumerable: false,
+ get() {
+ return this._readableState ? this._readableState.endEmitted : false
+ }
+ }
+})
+ObjectDefineProperties(ReadableState.prototype, {
+ // Legacy getter for `pipesCount`.
+ pipesCount: {
+ __proto__: null,
+ get() {
+ return this.pipes.length
+ }
+ },
+ // Legacy property for `paused`.
+ paused: {
+ __proto__: null,
+ get() {
+ return this[kPaused] !== false
+ },
+ set(value) {
+ this[kPaused] = !!value
+ }
+ }
+})
+
+// Exposed for testing purposes only.
+Readable._fromList = fromList
+
+// Pluck off n bytes from an array of buffers.
+// Length is the combined lengths of all the buffers in the list.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromList(n, state) {
+ // nothing buffered.
+ if (state.length === 0) return null
+ let ret
+ if (state.objectMode) ret = state.buffer.shift()
+ else if (!n || n >= state.length) {
+ // Read it all, truncate the list.
+ if (state.decoder) ret = state.buffer.join('')
+ else if (state.buffer.length === 1) ret = state.buffer.first()
+ else ret = state.buffer.concat(state.length)
+ state.buffer.clear()
+ } else {
+ // read part of list.
+ ret = state.buffer.consume(n, state.decoder)
+ }
+ return ret
+}
+function endReadable(stream) {
+ const state = stream._readableState
+ debug('endReadable', state.endEmitted)
+ if (!state.endEmitted) {
+ state.ended = true
+ process.nextTick(endReadableNT, state, stream)
+ }
+}
+function endReadableNT(state, stream) {
+ debug('endReadableNT', state.endEmitted, state.length)
+
+ // Check that we didn't get one last unshift.
+ if (!state.errored && !state.closeEmitted && !state.endEmitted && state.length === 0) {
+ state.endEmitted = true
+ stream.emit('end')
+ if (stream.writable && stream.allowHalfOpen === false) {
+ process.nextTick(endWritableNT, stream)
+ } else if (state.autoDestroy) {
+ // In case of duplex streams we need a way to detect
+ // if the writable side is ready for autoDestroy as well.
+ const wState = stream._writableState
+ const autoDestroy =
+ !wState ||
+ (wState.autoDestroy &&
+ // We don't expect the writable to ever 'finish'
+ // if writable is explicitly set to false.
+ (wState.finished || wState.writable === false))
+ if (autoDestroy) {
+ stream.destroy()
+ }
+ }
+ }
+}
+function endWritableNT(stream) {
+ const writable = stream.writable && !stream.writableEnded && !stream.destroyed
+ if (writable) {
+ stream.end()
+ }
+}
+Readable.from = function (iterable, opts) {
+ return from(Readable, iterable, opts)
+}
+let webStreamsAdapters
+
+// Lazy to avoid circular references
+function lazyWebStreams() {
+ if (webStreamsAdapters === undefined) webStreamsAdapters = {}
+ return webStreamsAdapters
+}
+Readable.fromWeb = function (readableStream, options) {
+ return lazyWebStreams().newStreamReadableFromReadableStream(readableStream, options)
+}
+Readable.toWeb = function (streamReadable, options) {
+ return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable, options)
+}
+Readable.wrap = function (src, options) {
+ var _ref, _src$readableObjectMo
+ return new Readable({
+ objectMode:
+ (_ref =
+ (_src$readableObjectMo = src.readableObjectMode) !== null && _src$readableObjectMo !== undefined
+ ? _src$readableObjectMo
+ : src.objectMode) !== null && _ref !== undefined
+ ? _ref
+ : true,
+ ...options,
+ destroy(err, callback) {
+ destroyImpl.destroyer(src, err)
+ callback(err)
+ }
+ }).wrap(src)
+}
diff --git a/lib/internal/streams/state.js b/lib/internal/streams/state.js
new file mode 100644
index 0000000000..79294a04bc
--- /dev/null
+++ b/lib/internal/streams/state.js
@@ -0,0 +1,39 @@
+'use strict'
+
+const { MathFloor, NumberIsInteger } = require('../../ours/primordials')
+const { validateInteger } = require('../validators')
+const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes
+let defaultHighWaterMarkBytes = 16 * 1024
+let defaultHighWaterMarkObjectMode = 16
+function highWaterMarkFrom(options, isDuplex, duplexKey) {
+ return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null
+}
+function getDefaultHighWaterMark(objectMode) {
+ return objectMode ? defaultHighWaterMarkObjectMode : defaultHighWaterMarkBytes
+}
+function setDefaultHighWaterMark(objectMode, value) {
+ validateInteger(value, 'value', 0)
+ if (objectMode) {
+ defaultHighWaterMarkObjectMode = value
+ } else {
+ defaultHighWaterMarkBytes = value
+ }
+}
+function getHighWaterMark(state, options, duplexKey, isDuplex) {
+ const hwm = highWaterMarkFrom(options, isDuplex, duplexKey)
+ if (hwm != null) {
+ if (!NumberIsInteger(hwm) || hwm < 0) {
+ const name = isDuplex ? `options.${duplexKey}` : 'options.highWaterMark'
+ throw new ERR_INVALID_ARG_VALUE(name, hwm)
+ }
+ return MathFloor(hwm)
+ }
+
+ // Default value
+ return getDefaultHighWaterMark(state.objectMode)
+}
+module.exports = {
+ getHighWaterMark,
+ getDefaultHighWaterMark,
+ setDefaultHighWaterMark
+}
diff --git a/lib/internal/streams/transform.js b/lib/internal/streams/transform.js
new file mode 100644
index 0000000000..fa9413a447
--- /dev/null
+++ b/lib/internal/streams/transform.js
@@ -0,0 +1,180 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// a transform stream is a readable/writable stream where you do
+// something with the data. Sometimes it's called a "filter",
+// but that's not a great name for it, since that implies a thing where
+// some bits pass through, and others are simply ignored. (That would
+// be a valid example of a transform, of course.)
+//
+// While the output is causally related to the input, it's not a
+// necessarily symmetric or synchronous transformation. For example,
+// a zlib stream might take multiple plain-text writes(), and then
+// emit a single compressed chunk some time in the future.
+//
+// Here's how this works:
+//
+// The Transform stream has all the aspects of the readable and writable
+// stream classes. When you write(chunk), that calls _write(chunk,cb)
+// internally, and returns false if there's a lot of pending writes
+// buffered up. When you call read(), that calls _read(n) until
+// there's enough pending readable data buffered up.
+//
+// In a transform stream, the written data is placed in a buffer. When
+// _read(n) is called, it transforms the queued up data, calling the
+// buffered _write cb's as it consumes chunks. If consuming a single
+// written chunk would result in multiple output chunks, then the first
+// outputted bit calls the readcb, and subsequent chunks just go into
+// the read buffer, and will cause it to emit 'readable' if necessary.
+//
+// This way, back-pressure is actually determined by the reading side,
+// since _read has to be called to start processing a new chunk. However,
+// a pathological inflate type of transform can cause excessive buffering
+// here. For example, imagine a stream where every byte of input is
+// interpreted as an integer from 0-255, and then results in that many
+// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
+// 1kb of data being output. In this case, you could write a very small
+// amount of input, and end up with a very large amount of output. In
+// such a pathological inflating mechanism, there'd be no way to tell
+// the system to stop doing the transform. A single 4MB write could
+// cause the system to run out of memory.
+//
+// However, even in such a pathological case, only a single written chunk
+// would be consumed, and then the rest would wait (un-transformed) until
+// the results of the previous transformed chunk were consumed.
+
+'use strict'
+
+const { ObjectSetPrototypeOf, Symbol } = require('../../ours/primordials')
+module.exports = Transform
+const { ERR_METHOD_NOT_IMPLEMENTED } = require('../../ours/errors').codes
+const Duplex = require('./duplex')
+const { getHighWaterMark } = require('./state')
+ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype)
+ObjectSetPrototypeOf(Transform, Duplex)
+const kCallback = Symbol('kCallback')
+function Transform(options) {
+ if (!(this instanceof Transform)) return new Transform(options)
+
+ // TODO (ronag): This should preferably always be
+ // applied but would be semver-major. Or even better;
+ // make Transform a Readable with the Writable interface.
+ const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null
+ if (readableHighWaterMark === 0) {
+ // A Duplex will buffer both on the writable and readable side while
+ // a Transform just wants to buffer hwm number of elements. To avoid
+ // buffering twice we disable buffering on the writable side.
+ options = {
+ ...options,
+ highWaterMark: null,
+ readableHighWaterMark,
+ // TODO (ronag): 0 is not optimal since we have
+ // a "bug" where we check needDrain before calling _write and not after.
+ // Refs: https://github.com/nodejs/node/pull/32887
+ // Refs: https://github.com/nodejs/node/pull/35941
+ writableHighWaterMark: options.writableHighWaterMark || 0
+ }
+ }
+ Duplex.call(this, options)
+
+ // We have implemented the _read method, and done the other things
+ // that Readable wants before the first _read call, so unset the
+ // sync guard flag.
+ this._readableState.sync = false
+ this[kCallback] = null
+ if (options) {
+ if (typeof options.transform === 'function') this._transform = options.transform
+ if (typeof options.flush === 'function') this._flush = options.flush
+ }
+
+ // When the writable side finishes, then flush out anything remaining.
+ // Backwards compat. Some Transform streams incorrectly implement _final
+ // instead of or in addition to _flush. By using 'prefinish' instead of
+ // implementing _final we continue supporting this unfortunate use case.
+ this.on('prefinish', prefinish)
+}
+function final(cb) {
+ if (typeof this._flush === 'function' && !this.destroyed) {
+ this._flush((er, data) => {
+ if (er) {
+ if (cb) {
+ cb(er)
+ } else {
+ this.destroy(er)
+ }
+ return
+ }
+ if (data != null) {
+ this.push(data)
+ }
+ this.push(null)
+ if (cb) {
+ cb()
+ }
+ })
+ } else {
+ this.push(null)
+ if (cb) {
+ cb()
+ }
+ }
+}
+function prefinish() {
+ if (this._final !== final) {
+ final.call(this)
+ }
+}
+Transform.prototype._final = final
+Transform.prototype._transform = function (chunk, encoding, callback) {
+ throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()')
+}
+Transform.prototype._write = function (chunk, encoding, callback) {
+ const rState = this._readableState
+ const wState = this._writableState
+ const length = rState.length
+ this._transform(chunk, encoding, (err, val) => {
+ if (err) {
+ callback(err)
+ return
+ }
+ if (val != null) {
+ this.push(val)
+ }
+ if (
+ wState.ended ||
+ // Backwards compat.
+ length === rState.length ||
+ // Backwards compat.
+ rState.length < rState.highWaterMark
+ ) {
+ callback()
+ } else {
+ this[kCallback] = callback
+ }
+ })
+}
+Transform.prototype._read = function () {
+ if (this[kCallback]) {
+ const callback = this[kCallback]
+ this[kCallback] = null
+ callback()
+ }
+}
diff --git a/lib/internal/streams/utils.js b/lib/internal/streams/utils.js
new file mode 100644
index 0000000000..f0d7884e4a
--- /dev/null
+++ b/lib/internal/streams/utils.js
@@ -0,0 +1,327 @@
+'use strict'
+
+const { SymbolAsyncIterator, SymbolIterator, SymbolFor } = require('../../ours/primordials')
+
+// We need to use SymbolFor to make these globally available
+// for interop with readable-stream, i.e. readable-stream
+// and node core need to be able to read/write private state
+// from each other for proper interoperability.
+const kIsDestroyed = SymbolFor('nodejs.stream.destroyed')
+const kIsErrored = SymbolFor('nodejs.stream.errored')
+const kIsReadable = SymbolFor('nodejs.stream.readable')
+const kIsWritable = SymbolFor('nodejs.stream.writable')
+const kIsDisturbed = SymbolFor('nodejs.stream.disturbed')
+const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise')
+const kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction')
+// Duck-type check for a readable Node.js stream: has .pipe/.on (and, in
+// strict mode, .pause/.resume). A Duplex counts unless its readable side
+// was explicitly disabled (_readableState.readable === false).
+function isReadableNodeStream(obj, strict = false) {
+  var _obj$_readableState
+  return !!(
+    (
+      obj &&
+      typeof obj.pipe === 'function' &&
+      typeof obj.on === 'function' &&
+      (!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) &&
+      (!obj._writableState ||
+        ((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined
+          ? undefined
+          : _obj$_readableState.readable) !== false) &&
+      // Duplex
+      (!obj._writableState || obj._readableState)
+    ) // Writable has .pipe.
+  )
+}
+// Duck-type check for a writable Node.js stream: has .write/.on. A Duplex
+// counts unless its writable side was explicitly disabled
+// (_writableState.writable === false).
+function isWritableNodeStream(obj) {
+  var _obj$_writableState
+  return !!(
+    (
+      obj &&
+      typeof obj.write === 'function' &&
+      typeof obj.on === 'function' &&
+      (!obj._readableState ||
+        ((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined
+          ? undefined
+          : _obj$_writableState.writable) !== false)
+    ) // Duplex
+  )
+}
+// Duck-type check for a Duplex: readable surface (.pipe + _readableState)
+// plus a writable surface (.write).
+function isDuplexNodeStream(obj) {
+  return !!(
+    obj &&
+    typeof obj.pipe === 'function' &&
+    obj._readableState &&
+    typeof obj.on === 'function' &&
+    typeof obj.write === 'function'
+  )
+}
+// Loose check for any Node.js-style stream: internal state object present,
+// or an EventEmitter-ish object exposing .write or .pipe.
+// Note: returns a truthy/falsy value, not strictly a boolean.
+function isNodeStream(obj) {
+  return (
+    obj &&
+    (obj._readableState ||
+      obj._writableState ||
+      (typeof obj.write === 'function' && typeof obj.on === 'function') ||
+      (typeof obj.pipe === 'function' && typeof obj.on === 'function'))
+  )
+}
+// Duck-type check for a WHATWG ReadableStream (pipeThrough/getReader/
+// cancel), explicitly excluding Node streams.
+function isReadableStream(obj) {
+  return !!(
+    obj &&
+    !isNodeStream(obj) &&
+    typeof obj.pipeThrough === 'function' &&
+    typeof obj.getReader === 'function' &&
+    typeof obj.cancel === 'function'
+  )
+}
+// Duck-type check for a WHATWG WritableStream (getWriter/abort).
+function isWritableStream(obj) {
+  return !!(obj && !isNodeStream(obj) && typeof obj.getWriter === 'function' && typeof obj.abort === 'function')
+}
+// Duck-type check for a WHATWG TransformStream ({ readable, writable } pair).
+function isTransformStream(obj) {
+  return !!(obj && !isNodeStream(obj) && typeof obj.readable === 'object' && typeof obj.writable === 'object')
+}
+// Any of the three WHATWG (web) stream kinds.
+function isWebStream(obj) {
+  return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj)
+}
+// Whether obj is (a)sync iterable. isAsync === true checks only async,
+// false checks only sync, undefined accepts either.
+function isIterable(obj, isAsync) {
+  if (obj == null) return false
+  if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function'
+  if (isAsync === false) return typeof obj[SymbolIterator] === 'function'
+  return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function'
+}
+// Whether the stream has been destroyed. Returns null when `stream` is not
+// a Node stream; also honours the interop symbol kIsDestroyed.
+function isDestroyed(stream) {
+  if (!isNodeStream(stream)) return null
+  const wState = stream._writableState
+  const rState = stream._readableState
+  const state = wState || rState
+  return !!(stream.destroyed || stream[kIsDestroyed] || (state !== null && state !== undefined && state.destroyed))
+}
+
+// Have been end():d. Returns null when state cannot be determined,
+// false when the writable side errored.
+function isWritableEnded(stream) {
+  if (!isWritableNodeStream(stream)) return null
+  if (stream.writableEnded === true) return true
+  const wState = stream._writableState
+  if (wState !== null && wState !== undefined && wState.errored) return false
+  if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null
+  return wState.ended
+}
+
+// Have emitted 'finish'. With strict === false, an ended stream whose
+// buffer is fully drained also counts as finished.
+function isWritableFinished(stream, strict) {
+  if (!isWritableNodeStream(stream)) return null
+  if (stream.writableFinished === true) return true
+  const wState = stream._writableState
+  if (wState !== null && wState !== undefined && wState.errored) return false
+  if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null
+  return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0))
+}
+
+// Have been push(null):d. Returns null when state cannot be determined,
+// false when the readable side errored.
+function isReadableEnded(stream) {
+  if (!isReadableNodeStream(stream)) return null
+  if (stream.readableEnded === true) return true
+  const rState = stream._readableState
+  if (!rState || rState.errored) return false
+  if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null
+  return rState.ended
+}
+
+// Have emitted 'end'. With strict === false, an ended stream whose
+// buffer is fully drained also counts as finished.
+function isReadableFinished(stream, strict) {
+  if (!isReadableNodeStream(stream)) return null
+  const rState = stream._readableState
+  if (rState !== null && rState !== undefined && rState.errored) return false
+  if (typeof (rState === null || rState === undefined ? undefined : rState.endEmitted) !== 'boolean') return null
+  return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0))
+}
+// Tri-state readability: honours the interop override symbol kIsReadable,
+// returns null when indeterminate, false once destroyed/finished.
+function isReadable(stream) {
+  if (stream && stream[kIsReadable] != null) return stream[kIsReadable]
+  if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null
+  if (isDestroyed(stream)) return false
+  return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream)
+}
+// Tri-state writability: mirror of isReadable for the writable side.
+function isWritable(stream) {
+  if (stream && stream[kIsWritable] != null) return stream[kIsWritable]
+  if (typeof (stream === null || stream === undefined ? undefined : stream.writable) !== 'boolean') return null
+  if (isDestroyed(stream)) return false
+  return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream)
+}
+// Whether the stream is fully done. opts.readable/opts.writable === false
+// excludes that side from the check. Destroyed streams count as finished.
+function isFinished(stream, opts) {
+  if (!isNodeStream(stream)) {
+    return null
+  }
+  if (isDestroyed(stream)) {
+    return true
+  }
+  if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) {
+    return false
+  }
+  if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) {
+    return false
+  }
+  return true
+}
+// The error the writable side failed with, or null. Prefers the public
+// writableErrored getter, falling back to _writableState.errored.
+// (Babel-expanded `stream._writableState?.errored ?? null`.)
+function isWritableErrored(stream) {
+  var _stream$_writableStat, _stream$_writableStat2
+  if (!isNodeStream(stream)) {
+    return null
+  }
+  if (stream.writableErrored) {
+    return stream.writableErrored
+  }
+  return (_stream$_writableStat =
+    (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined
+      ? undefined
+      : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined
+    ? _stream$_writableStat
+    : null
+}
+// The error the readable side failed with, or null. Mirror of
+// isWritableErrored for _readableState.
+function isReadableErrored(stream) {
+  var _stream$_readableStat, _stream$_readableStat2
+  if (!isNodeStream(stream)) {
+    return null
+  }
+  if (stream.readableErrored) {
+    return stream.readableErrored
+  }
+  return (_stream$_readableStat =
+    (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined
+      ? undefined
+      : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined
+    ? _stream$_readableStat
+    : null
+}
+// Whether the stream is closed: public .closed getter first, then the
+// internal states' closed flags, then http OutgoingMessage's _closed.
+// Returns null when it cannot be determined.
+function isClosed(stream) {
+  if (!isNodeStream(stream)) {
+    return null
+  }
+  if (typeof stream.closed === 'boolean') {
+    return stream.closed
+  }
+  const wState = stream._writableState
+  const rState = stream._readableState
+  if (
+    typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' ||
+    typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean'
+  ) {
+    return (
+      (wState === null || wState === undefined ? undefined : wState.closed) ||
+      (rState === null || rState === undefined ? undefined : rState.closed)
+    )
+  }
+  if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) {
+    return stream._closed
+  }
+  return null
+}
+// Duck-type checks against private fields of Node's http classes, so this
+// library can special-case them without importing the http module.
+function isOutgoingMessage(stream) {
+  return (
+    typeof stream._closed === 'boolean' &&
+    typeof stream._defaultKeepAlive === 'boolean' &&
+    typeof stream._removedConnection === 'boolean' &&
+    typeof stream._removedContLen === 'boolean'
+  )
+}
+// http.ServerResponse: an OutgoingMessage that tracks the 100-continue flag.
+function isServerResponse(stream) {
+  return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream)
+}
+// http.IncomingMessage on the server side (no upgrade/connect request).
+function isServerRequest(stream) {
+  var _stream$req
+  return (
+    typeof stream._consuming === 'boolean' &&
+    typeof stream._dumped === 'boolean' &&
+    ((_stream$req = stream.req) === null || _stream$req === undefined ? undefined : _stream$req.upgradeOrConnect) ===
+      undefined
+  )
+}
+// Whether we can rely on a future 'close' event: either a server response
+// (no state object) or a not-yet-closed stream with autoDestroy+emitClose.
+function willEmitClose(stream) {
+  if (!isNodeStream(stream)) return null
+  const wState = stream._writableState
+  const rState = stream._readableState
+  const state = wState || rState
+  return (
+    (!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false)
+  )
+}
+// Whether the stream body has been read from or aborted. Honours the
+// interop symbol kIsDisturbed before the public getters.
+function isDisturbed(stream) {
+  var _stream$kIsDisturbed
+  return !!(
+    stream &&
+    ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined
+      ? _stream$kIsDisturbed
+      : stream.readableDidRead || stream.readableAborted)
+  )
+}
+// Whether the stream errored. Babel-expanded nullish chain equivalent to:
+//   stream[kIsErrored] ?? stream.readableErrored ?? stream.writableErrored
+//     ?? _readableState?.errorEmitted ?? _writableState?.errorEmitted
+//     ?? _readableState?.errored ?? _writableState?.errored
+function isErrored(stream) {
+  var _ref,
+    _ref2,
+    _ref3,
+    _ref4,
+    _ref5,
+    _stream$kIsErrored,
+    _stream$_readableStat3,
+    _stream$_writableStat3,
+    _stream$_readableStat4,
+    _stream$_writableStat4
+  return !!(
+    stream &&
+    ((_ref =
+      (_ref2 =
+        (_ref3 =
+          (_ref4 =
+            (_ref5 =
+              (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined
+                ? _stream$kIsErrored
+                : stream.readableErrored) !== null && _ref5 !== undefined
+              ? _ref5
+              : stream.writableErrored) !== null && _ref4 !== undefined
+            ? _ref4
+            : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined
+            ? undefined
+            : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined
+          ? _ref3
+          : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined
+          ? undefined
+          : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined
+        ? _ref2
+        : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined
+        ? undefined
+        : _stream$_readableStat4.errored) !== null && _ref !== undefined
+      ? _ref
+      : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined
+      ? undefined
+      : _stream$_writableStat4.errored)
+  )
+}
+// Public API: state predicates plus the SymbolFor-based interop symbols
+// shared with Node core.
+module.exports = {
+  isDestroyed,
+  kIsDestroyed,
+  isDisturbed,
+  kIsDisturbed,
+  isErrored,
+  kIsErrored,
+  isReadable,
+  kIsReadable,
+  kIsClosedPromise,
+  kControllerErrorFunction,
+  kIsWritable,
+  isClosed,
+  isDuplexNodeStream,
+  isFinished,
+  isIterable,
+  isReadableNodeStream,
+  isReadableStream,
+  isReadableEnded,
+  isReadableFinished,
+  isReadableErrored,
+  isNodeStream,
+  isWebStream,
+  isWritable,
+  isWritableNodeStream,
+  isWritableStream,
+  isWritableEnded,
+  isWritableFinished,
+  isWritableErrored,
+  isServerRequest,
+  isServerResponse,
+  willEmitClose,
+  isTransformStream
+}
diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js
new file mode 100644
index 0000000000..b4ecf0e21d
--- /dev/null
+++ b/lib/internal/streams/writable.js
@@ -0,0 +1,819 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// A bit simpler than readable streams.
+// Implement an async ._write(chunk, encoding, cb), and it'll handle all
+// the drain event emission and buffering.
+
+'use strict'
+
+/* replacement start */
+
+const process = require('process/')
+
+/* replacement end */
+
+const {
+ ArrayPrototypeSlice,
+ Error,
+ FunctionPrototypeSymbolHasInstance,
+ ObjectDefineProperty,
+ ObjectDefineProperties,
+ ObjectSetPrototypeOf,
+ StringPrototypeToLowerCase,
+ Symbol,
+ SymbolHasInstance
+} = require('../../ours/primordials')
+module.exports = Writable
+Writable.WritableState = WritableState
+const { EventEmitter: EE } = require('events')
+const Stream = require('./legacy').Stream
+const { Buffer } = require('buffer')
+const destroyImpl = require('./destroy')
+const { addAbortSignal } = require('./add-abort-signal')
+const { getHighWaterMark, getDefaultHighWaterMark } = require('./state')
+const {
+ ERR_INVALID_ARG_TYPE,
+ ERR_METHOD_NOT_IMPLEMENTED,
+ ERR_MULTIPLE_CALLBACK,
+ ERR_STREAM_CANNOT_PIPE,
+ ERR_STREAM_DESTROYED,
+ ERR_STREAM_ALREADY_FINISHED,
+ ERR_STREAM_NULL_VALUES,
+ ERR_STREAM_WRITE_AFTER_END,
+ ERR_UNKNOWN_ENCODING
+} = require('../../ours/errors').codes
+const { errorOrDestroy } = destroyImpl
+// Writable inherits from the legacy (EventEmitter-based) Stream.
+ObjectSetPrototypeOf(Writable.prototype, Stream.prototype)
+ObjectSetPrototypeOf(Writable, Stream)
+// Shared no-op used as the default write callback.
+function nop() {}
+// Callbacks queued by end(cb) waiting for 'finish'.
+const kOnFinished = Symbol('kOnFinished')
+// Internal per-stream state for the writable side. `isDuplex` selects the
+// writable-prefixed option names (e.g. writableObjectMode) on shared
+// Duplex options.
+function WritableState(options, stream, isDuplex) {
+  // Duplex streams are both readable and writable, but share
+  // the same options object.
+  // However, some cases require setting options to different
+  // values for the readable and the writable sides of the duplex stream,
+  // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
+  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex')
+
+  // Object stream flag to indicate whether or not this stream
+  // contains buffers or objects.
+  this.objectMode = !!(options && options.objectMode)
+  if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode)
+
+  // The point at which write() starts returning false
+  // Note: 0 is a valid value, means that we always return false if
+  // the entire buffer is not flushed immediately on write().
+  this.highWaterMark = options
+    ? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex)
+    : getDefaultHighWaterMark(false)
+
+  // if _final has been called.
+  this.finalCalled = false
+
+  // drain event flag.
+  this.needDrain = false
+  // At the start of calling end()
+  this.ending = false
+  // When end() has been called, and returned.
+  this.ended = false
+  // When 'finish' is emitted.
+  this.finished = false
+
+  // Has it been destroyed
+  this.destroyed = false
+
+  // Should we decode strings into buffers before passing to _write?
+  // this is here so that some node-core streams can optimize string
+  // handling at a lower level.
+  const noDecode = !!(options && options.decodeStrings === false)
+  this.decodeStrings = !noDecode
+
+  // Crypto is kind of old and crusty. Historically, its default string
+  // encoding is 'binary' so we have to make this configurable.
+  // Everything else in the universe uses 'utf8', though.
+  this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'
+
+  // Not an actual buffer we keep track of, but a measurement
+  // of how much we're waiting to get pushed to some underlying
+  // socket or file.
+  this.length = 0
+
+  // A flag to see when we're in the middle of a write.
+  this.writing = false
+
+  // When true all writes will be buffered until .uncork() call.
+  this.corked = 0
+
+  // A flag to be able to tell if the onwrite cb is called immediately,
+  // or on a later tick. We set this to true at first, because any
+  // actions that shouldn't happen until "later" should generally also
+  // not happen before the first write call.
+  this.sync = true
+
+  // A flag to know if we're processing previously buffered items, which
+  // may call the _write() callback in the same tick, so that we don't
+  // end up in an overlapped onwrite situation.
+  this.bufferProcessing = false
+
+  // The callback that's passed to _write(chunk, cb).
+  this.onwrite = onwrite.bind(undefined, stream)
+
+  // The callback that the user supplies to write(chunk, encoding, cb).
+  this.writecb = null
+
+  // The amount that is being written when _write is called.
+  this.writelen = 0
+
+  // Storage for data passed to the afterWrite() callback in case of
+  // synchronous _write() completion.
+  this.afterWriteTickInfo = null
+  resetBuffer(this)
+
+  // Number of pending user-supplied write callbacks
+  // this must be 0 before 'finish' can be emitted.
+  this.pendingcb = 0
+
+  // Stream is still being constructed and cannot be
+  // destroyed until construction finished or failed.
+  // Async construction is opt in, therefore we start as
+  // constructed.
+  this.constructed = true
+
+  // Emit prefinish if the only thing we're waiting for is _write cbs
+  // This is relevant for synchronous Transform streams.
+  this.prefinished = false
+
+  // True if the error was already emitted and should not be thrown again.
+  this.errorEmitted = false
+
+  // Should close be emitted on destroy. Defaults to true.
+  this.emitClose = !options || options.emitClose !== false
+
+  // Should .destroy() be called after 'finish' (and potentially 'end').
+  this.autoDestroy = !options || options.autoDestroy !== false
+
+  // Indicates whether the stream has errored. When true all write() calls
+  // should return false. This is needed since when autoDestroy
+  // is disabled we need a way to tell whether the stream has failed.
+  this.errored = null
+
+  // Indicates whether the stream has finished destroying.
+  this.closed = false
+
+  // True if close has been emitted or would have been emitted
+  // depending on emitClose.
+  this.closeEmitted = false
+  this[kOnFinished] = []
+}
+// Clear the pending-write queue and its bookkeeping flags.
+function resetBuffer(state) {
+  state.buffered = []
+  state.bufferedIndex = 0
+  state.allBuffers = true
+  state.allNoop = true
+}
+// Legacy introspection API: copy of queued-but-unwritten chunk entries.
+WritableState.prototype.getBuffer = function getBuffer() {
+  return ArrayPrototypeSlice(this.buffered, this.bufferedIndex)
+}
+// Legacy counter: number of queued-but-unwritten chunks.
+ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', {
+  __proto__: null,
+  get() {
+    return this.buffered.length - this.bufferedIndex
+  }
+})
+// Writable constructor. Accepts options { write, writev, destroy, final,
+// construct, signal, ... } and works with or without `new`.
+function Writable(options) {
+  // Writable ctor is applied to Duplexes, too.
+  // `realHasInstance` is necessary because using plain `instanceof`
+  // would return false, as no `_writableState` property is attached.
+
+  // Trying to use the custom `instanceof` for Writable here will also break the
+  // Node.js LazyTransform implementation, which has a non-trivial getter for
+  // `_writableState` that would lead to infinite recursion.
+
+  // Checking for a Stream.Duplex instance is faster here instead of inside
+  // the WritableState constructor, at least with V8 6.5.
+  const isDuplex = this instanceof require('./duplex')
+  if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options)
+  this._writableState = new WritableState(options, this, isDuplex)
+  if (options) {
+    if (typeof options.write === 'function') this._write = options.write
+    if (typeof options.writev === 'function') this._writev = options.writev
+    if (typeof options.destroy === 'function') this._destroy = options.destroy
+    if (typeof options.final === 'function') this._final = options.final
+    if (typeof options.construct === 'function') this._construct = options.construct
+    if (options.signal) addAbortSignal(options.signal, this)
+  }
+  Stream.call(this, options)
+  // Once async construction completes, flush anything buffered meanwhile.
+  destroyImpl.construct(this, () => {
+    const state = this._writableState
+    if (!state.writing) {
+      clearBuffer(this, state)
+    }
+    finishMaybe(this, state)
+  })
+}
+// Custom instanceof: also accept any object carrying a WritableState,
+// e.g. streams created by another copy of this library.
+ObjectDefineProperty(Writable, SymbolHasInstance, {
+  __proto__: null,
+  value: function (object) {
+    if (FunctionPrototypeSymbolHasInstance(this, object)) return true
+    if (this !== Writable) return false
+    return object && object._writableState instanceof WritableState
+  }
+})
+
+// Otherwise people can pipe Writable streams, which is just wrong.
+Writable.prototype.pipe = function () {
+  errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE())
+}
+// Shared implementation behind write() and end(chunk): normalizes the
+// (chunk, encoding, cb) arguments, validates/coerces the chunk, rejects
+// writes after end() or destroy, then queues or performs the write.
+// Returns writeOrBuffer's backpressure boolean, or an Error on bad state.
+function _write(stream, chunk, encoding, cb) {
+  const state = stream._writableState
+  if (typeof encoding === 'function') {
+    cb = encoding
+    encoding = state.defaultEncoding
+  } else {
+    if (!encoding) encoding = state.defaultEncoding
+    else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
+    if (typeof cb !== 'function') cb = nop
+  }
+  if (chunk === null) {
+    throw new ERR_STREAM_NULL_VALUES()
+  } else if (!state.objectMode) {
+    if (typeof chunk === 'string') {
+      if (state.decodeStrings !== false) {
+        chunk = Buffer.from(chunk, encoding)
+        encoding = 'buffer'
+      }
+    } else if (chunk instanceof Buffer) {
+      encoding = 'buffer'
+    } else if (Stream._isUint8Array(chunk)) {
+      chunk = Stream._uint8ArrayToBuffer(chunk)
+      encoding = 'buffer'
+    } else {
+      throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)
+    }
+  }
+  let err
+  if (state.ending) {
+    err = new ERR_STREAM_WRITE_AFTER_END()
+  } else if (state.destroyed) {
+    err = new ERR_STREAM_DESTROYED('write')
+  }
+  if (err) {
+    // Report asynchronously so callers see consistent ordering.
+    process.nextTick(cb, err)
+    errorOrDestroy(stream, err, true)
+    return err
+  }
+  state.pendingcb++
+  return writeOrBuffer(stream, state, chunk, encoding, cb)
+}
+// Public write(): true means "keep writing", false requests backpressure.
+Writable.prototype.write = function (chunk, encoding, cb) {
+  return _write(this, chunk, encoding, cb) === true
+}
+// cork() buffers all writes until a matching number of uncork() calls.
+Writable.prototype.cork = function () {
+  this._writableState.corked++
+}
+// uncork() decrements the cork count and flushes once fully uncorked.
+Writable.prototype.uncork = function () {
+  const state = this._writableState
+  if (state.corked) {
+    state.corked--
+    if (!state.writing) clearBuffer(this, state)
+  }
+}
+// Set the encoding applied to string chunks when none is given to write().
+// Throws ERR_UNKNOWN_ENCODING for unsupported names; returns `this`.
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+  // node::ParseEncoding() requires lower case.
+  if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding)
+  if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
+  this._writableState.defaultEncoding = encoding
+  return this
+}
+
+// If we're already writing something, then just put this
+// in the queue, and wait our turn. Otherwise, call _write
+// If we return false, then we need a drain event, so set that flag.
+function writeOrBuffer(stream, state, chunk, encoding, callback) {
+  const len = state.objectMode ? 1 : chunk.length
+  state.length += len
+
+  // stream._write resets state.length
+  const ret = state.length < state.highWaterMark
+  // We must ensure that previous needDrain will not be reset to false.
+  if (!ret) state.needDrain = true
+  if (state.writing || state.corked || state.errored || !state.constructed) {
+    state.buffered.push({
+      chunk,
+      encoding,
+      callback
+    })
+    // Track whether a batched _writev fast path is still possible.
+    if (state.allBuffers && encoding !== 'buffer') {
+      state.allBuffers = false
+    }
+    if (state.allNoop && callback !== nop) {
+      state.allNoop = false
+    }
+  } else {
+    state.writelen = len
+    state.writecb = callback
+    state.writing = true
+    state.sync = true
+    stream._write(chunk, encoding, state.onwrite)
+    state.sync = false
+  }
+
+  // Return false if errored or destroyed in order to break
+  // any synchronous while(stream.write(data)) loops.
+  return ret && !state.errored && !state.destroyed
+}
+// Dispatch a single write (or a batched _writev) to the user
+// implementation, recording the in-flight length and callback first.
+function doWrite(stream, state, writev, len, chunk, encoding, cb) {
+  state.writelen = len
+  state.writecb = cb
+  state.writing = true
+  state.sync = true
+  if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'))
+  else if (writev) stream._writev(chunk, state.onwrite)
+  else stream._write(chunk, encoding, state.onwrite)
+  state.sync = false
+}
+// Error path of onwrite(): deliver the error to the write's callback,
+// fail any buffered writes, then error/destroy the stream.
+function onwriteError(stream, state, er, cb) {
+  --state.pendingcb
+  cb(er)
+  // Ensure callbacks are invoked even when autoDestroy is
+  // not enabled. Passing `er` here doesn't make sense since
+  // it's related to one specific write, not to the buffered
+  // writes.
+  errorBuffer(state)
+  // This can emit error, but error must always follow cb.
+  errorOrDestroy(stream, er)
+}
+// Completion callback for _write/_writev (bound per-stream in
+// WritableState). Handles double-invocation, errors, buffered flushing,
+// and defers user callbacks to the next tick for synchronous completions.
+function onwrite(stream, er) {
+  const state = stream._writableState
+  const sync = state.sync
+  const cb = state.writecb
+  // writecb was already consumed: the implementation called back twice.
+  if (typeof cb !== 'function') {
+    errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK())
+    return
+  }
+  state.writing = false
+  state.writecb = null
+  state.length -= state.writelen
+  state.writelen = 0
+  if (er) {
+    // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
+    er.stack // eslint-disable-line no-unused-expressions
+
+    if (!state.errored) {
+      state.errored = er
+    }
+
+    // In case of duplex streams we need to notify the readable side of the
+    // error.
+    if (stream._readableState && !stream._readableState.errored) {
+      stream._readableState.errored = er
+    }
+    if (sync) {
+      process.nextTick(onwriteError, stream, state, er, cb)
+    } else {
+      onwriteError(stream, state, er, cb)
+    }
+  } else {
+    if (state.buffered.length > state.bufferedIndex) {
+      clearBuffer(stream, state)
+    }
+    if (sync) {
+      // It is a common case that the callback passed to .write() is always
+      // the same. In that case, we do not schedule a new nextTick(), but
+      // rather just increase a counter, to improve performance and avoid
+      // memory allocations.
+      if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) {
+        state.afterWriteTickInfo.count++
+      } else {
+        state.afterWriteTickInfo = {
+          count: 1,
+          cb,
+          stream,
+          state
+        }
+        process.nextTick(afterWriteTick, state.afterWriteTickInfo)
+      }
+    } else {
+      afterWrite(stream, state, 1, cb)
+    }
+  }
+}
+// nextTick trampoline for afterWrite; clears the coalesced-tick record.
+function afterWriteTick({ stream, state, count, cb }) {
+  state.afterWriteTickInfo = null
+  return afterWrite(stream, state, count, cb)
+}
+// Post-write bookkeeping: emit 'drain' when the buffer emptied, invoke
+// `count` coalesced user callbacks, then check for 'finish'.
+function afterWrite(stream, state, count, cb) {
+  const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain
+  if (needDrain) {
+    state.needDrain = false
+    stream.emit('drain')
+  }
+  while (count-- > 0) {
+    state.pendingcb--
+    cb()
+  }
+  if (state.destroyed) {
+    errorBuffer(state)
+  }
+  finishMaybe(stream, state)
+}
+
+// If there's something in the buffer waiting, then invoke callbacks.
+// Fails every queued write and pending end() callback with the stream's
+// error (or ERR_STREAM_DESTROYED), then resets the buffer.
+function errorBuffer(state) {
+  if (state.writing) {
+    return
+  }
+  for (let n = state.bufferedIndex; n < state.buffered.length; ++n) {
+    var _state$errored
+    const { chunk, callback } = state.buffered[n]
+    const len = state.objectMode ? 1 : chunk.length
+    state.length -= len
+    callback(
+      (_state$errored = state.errored) !== null && _state$errored !== undefined
+        ? _state$errored
+        : new ERR_STREAM_DESTROYED('write')
+    )
+  }
+  const onfinishCallbacks = state[kOnFinished].splice(0)
+  for (let i = 0; i < onfinishCallbacks.length; i++) {
+    var _state$errored2
+    onfinishCallbacks[i](
+      (_state$errored2 = state.errored) !== null && _state$errored2 !== undefined
+        ? _state$errored2
+        : new ERR_STREAM_DESTROYED('end')
+    )
+  }
+  resetBuffer(state)
+}
+
+// If there's something in the buffer waiting, then process it.
+// Uses a single batched _writev when available and more than one chunk is
+// queued; otherwise writes chunks one at a time until a write goes async.
+function clearBuffer(stream, state) {
+  if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) {
+    return
+  }
+  const { buffered, bufferedIndex, objectMode } = state
+  const bufferedLength = buffered.length - bufferedIndex
+  if (!bufferedLength) {
+    return
+  }
+  let i = bufferedIndex
+  state.bufferProcessing = true
+  if (bufferedLength > 1 && stream._writev) {
+    state.pendingcb -= bufferedLength - 1
+    const callback = state.allNoop
+      ? nop
+      : (err) => {
+          for (let n = i; n < buffered.length; ++n) {
+            buffered[n].callback(err)
+          }
+        }
+    // Make a copy of `buffered` if it's going to be used by `callback` above,
+    // since `doWrite` will mutate the array.
+    const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i)
+    chunks.allBuffers = state.allBuffers
+    doWrite(stream, state, true, state.length, chunks, '', callback)
+    resetBuffer(state)
+  } else {
+    do {
+      const { chunk, encoding, callback } = buffered[i]
+      buffered[i++] = null
+      const len = objectMode ? 1 : chunk.length
+      doWrite(stream, state, false, len, chunk, encoding, callback)
+    } while (i < buffered.length && !state.writing)
+    if (i === buffered.length) {
+      resetBuffer(state)
+    } else if (i > 256) {
+      // Compact the array occasionally so consumed slots don't accumulate.
+      buffered.splice(0, i)
+      state.bufferedIndex = 0
+    } else {
+      state.bufferedIndex = i
+    }
+  }
+  state.bufferProcessing = false
+}
+// Default _write: delegate to _writev (wrapping the chunk in a one-entry
+// batch) when the subclass provided one; otherwise the subclass must
+// implement _write itself.
+Writable.prototype._write = function (chunk, encoding, cb) {
+  if (this._writev) {
+    this._writev(
+      [
+        {
+          chunk,
+          encoding
+        }
+      ],
+      cb
+    )
+  } else {
+    throw new ERR_METHOD_NOT_IMPLEMENTED('_write()')
+  }
+}
+// No batched-write implementation by default.
+Writable.prototype._writev = null
+// Public end([chunk[, encoding]][, cb]): optionally write a final chunk,
+// fully uncork, mark the stream as ending, and schedule `cb` for 'finish'
+// (or with an error if the stream already finished/destroyed).
+Writable.prototype.end = function (chunk, encoding, cb) {
+  const state = this._writableState
+  if (typeof chunk === 'function') {
+    cb = chunk
+    chunk = null
+    encoding = null
+  } else if (typeof encoding === 'function') {
+    cb = encoding
+    encoding = null
+  }
+  let err
+  if (chunk !== null && chunk !== undefined) {
+    const ret = _write(this, chunk, encoding)
+    if (ret instanceof Error) {
+      err = ret
+    }
+  }
+
+  // .end() fully uncorks.
+  if (state.corked) {
+    state.corked = 1
+    this.uncork()
+  }
+  if (err) {
+    // Do nothing...
+  } else if (!state.errored && !state.ending) {
+    // This is forgiving in terms of unnecessary calls to end() and can hide
+    // logic errors. However, usually such errors are harmless and causing a
+    // hard error can be disproportionately destructive. It is not always
+    // trivial for the user to determine whether end() needs to be called
+    // or not.
+
+    state.ending = true
+    finishMaybe(this, state, true)
+    state.ended = true
+  } else if (state.finished) {
+    err = new ERR_STREAM_ALREADY_FINISHED('end')
+  } else if (state.destroyed) {
+    err = new ERR_STREAM_DESTROYED('end')
+  }
+  if (typeof cb === 'function') {
+    if (err || state.finished) {
+      process.nextTick(cb, err)
+    } else {
+      state[kOnFinished].push(cb)
+    }
+  }
+  return this
+}
+// True when the stream is ready to emit 'finish': ended, fully drained,
+// and not destroyed/errored/still writing.
+function needFinish(state) {
+  return (
+    state.ending &&
+    !state.destroyed &&
+    state.constructed &&
+    state.length === 0 &&
+    !state.errored &&
+    state.buffered.length === 0 &&
+    !state.finished &&
+    !state.writing &&
+    !state.errorEmitted &&
+    !state.closeEmitted
+  )
+}
+function callFinal(stream, state) {
+ let called = false
+ function onFinish(err) {
+ if (called) {
+ errorOrDestroy(stream, err !== null && err !== undefined ? err : ERR_MULTIPLE_CALLBACK())
+ return
+ }
+ called = true
+ state.pendingcb--
+ if (err) {
+ const onfinishCallbacks = state[kOnFinished].splice(0)
+ for (let i = 0; i < onfinishCallbacks.length; i++) {
+ onfinishCallbacks[i](err)
+ }
+ errorOrDestroy(stream, err, state.sync)
+ } else if (needFinish(state)) {
+ state.prefinished = true
+ stream.emit('prefinish')
+ // Backwards compat. Don't check state.sync here.
+ // Some streams assume 'finish' will be emitted
+ // asynchronously relative to _final callback.
+ state.pendingcb++
+ process.nextTick(finish, stream, state)
+ }
+ }
+ state.sync = true
+ state.pendingcb++
+ try {
+ stream._final(onFinish)
+ } catch (err) {
+ onFinish(err)
+ }
+ state.sync = false
+}
+// Run _final (via callFinal) if one exists, otherwise emit 'prefinish'
+// directly. Guarded so it only ever runs once per stream.
+function prefinish(stream, state) {
+  if (!state.prefinished && !state.finalCalled) {
+    if (typeof stream._final === 'function' && !state.destroyed) {
+      state.finalCalled = true
+      callFinal(stream, state)
+    } else {
+      state.prefinished = true
+      stream.emit('prefinish')
+    }
+  }
+}
+// Emit 'finish' if the stream is ready. When `sync` is true the emission
+// is deferred to the next tick (re-checking readiness then), so 'finish'
+// never fires synchronously from inside end().
+function finishMaybe(stream, state, sync) {
+  if (needFinish(state)) {
+    prefinish(stream, state)
+    if (state.pendingcb === 0) {
+      if (sync) {
+        state.pendingcb++
+        process.nextTick(
+          (stream, state) => {
+            if (needFinish(state)) {
+              finish(stream, state)
+            } else {
+              state.pendingcb--
+            }
+          },
+          stream,
+          state
+        )
+      } else if (needFinish(state)) {
+        state.pendingcb++
+        finish(stream, state)
+      }
+    }
+  }
+}
+function finish(stream, state) { // emit 'finish', drain end() callbacks, maybe autoDestroy
+  state.pendingcb--
+  state.finished = true
+  const onfinishCallbacks = state[kOnFinished].splice(0) // take ownership of pending callbacks
+  for (let i = 0; i < onfinishCallbacks.length; i++) {
+    onfinishCallbacks[i]()
+  }
+  stream.emit('finish')
+  if (state.autoDestroy) {
+    // In case of duplex streams we need a way to detect
+    // if the readable side is ready for autoDestroy as well.
+    const rState = stream._readableState
+    const autoDestroy =
+      !rState ||
+      (rState.autoDestroy &&
+        // We don't expect the readable to ever 'end'
+        // if readable is explicitly set to false.
+        (rState.endEmitted || rState.readable === false))
+    if (autoDestroy) {
+      stream.destroy()
+    }
+  }
+}
+ObjectDefineProperties(Writable.prototype, {
+ closed: {
+ __proto__: null,
+ get() {
+ return this._writableState ? this._writableState.closed : false
+ }
+ },
+ destroyed: {
+ __proto__: null,
+ get() {
+ return this._writableState ? this._writableState.destroyed : false
+ },
+ set(value) {
+ // Backward compatibility, the user is explicitly managing destroyed.
+ if (this._writableState) {
+ this._writableState.destroyed = value
+ }
+ }
+ },
+ writable: {
+ __proto__: null,
+ get() {
+ const w = this._writableState
+ // w.writable === false means that this is part of a Duplex stream
+ // where the writable side was disabled upon construction.
+ // Compat. The user might manually disable writable side through
+ // deprecated setter.
+ return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended
+ },
+ set(val) {
+ // Backwards compatible.
+ if (this._writableState) {
+ this._writableState.writable = !!val
+ }
+ }
+ },
+ writableFinished: {
+ __proto__: null,
+ get() {
+ return this._writableState ? this._writableState.finished : false
+ }
+ },
+ writableObjectMode: {
+ __proto__: null,
+ get() {
+ return this._writableState ? this._writableState.objectMode : false
+ }
+ },
+ writableBuffer: {
+ __proto__: null,
+ get() {
+ return this._writableState && this._writableState.getBuffer()
+ }
+ },
+ writableEnded: {
+ __proto__: null,
+ get() {
+ return this._writableState ? this._writableState.ending : false
+ }
+ },
+ writableNeedDrain: {
+ __proto__: null,
+ get() {
+ const wState = this._writableState
+ if (!wState) return false
+ return !wState.destroyed && !wState.ending && wState.needDrain
+ }
+ },
+ writableHighWaterMark: {
+ __proto__: null,
+ get() {
+ return this._writableState && this._writableState.highWaterMark
+ }
+ },
+ writableCorked: {
+ __proto__: null,
+ get() {
+ return this._writableState ? this._writableState.corked : 0
+ }
+ },
+ writableLength: {
+ __proto__: null,
+ get() {
+ return this._writableState && this._writableState.length
+ }
+ },
+ errored: {
+ __proto__: null,
+ enumerable: false,
+ get() {
+ return this._writableState ? this._writableState.errored : null
+ }
+ },
+ writableAborted: {
+ __proto__: null,
+ enumerable: false,
+ get: function () {
+ return !!(
+ this._writableState.writable !== false &&
+ (this._writableState.destroyed || this._writableState.errored) &&
+ !this._writableState.finished
+ )
+ }
+ }
+})
+const destroy = destroyImpl.destroy // shared destroy implementation
+Writable.prototype.destroy = function (err, cb) {
+  const state = this._writableState
+
+  // Invoke pending callbacks.
+  if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) {
+    process.nextTick(errorBuffer, state) // fail queued writes and end() callbacks asynchronously
+  }
+  destroy.call(this, err, cb)
+  return this
+}
+Writable.prototype._undestroy = destroyImpl.undestroy
+Writable.prototype._destroy = function (err, cb) {
+ cb(err)
+}
+Writable.prototype[EE.captureRejectionSymbol] = function (err) {
+ this.destroy(err)
+}
+let webStreamsAdapters
+
+// Lazy to avoid circular references
+function lazyWebStreams() {
+  if (webStreamsAdapters === undefined) webStreamsAdapters = {} // NOTE(review): stub — no adapters are ever registered here
+  return webStreamsAdapters
+}
+Writable.fromWeb = function (writableStream, options) {
+  return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options) // throws TypeError while the adapter stub is empty
+}
+Writable.toWeb = function (streamWritable) {
+  return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable) // throws TypeError while the adapter stub is empty
+}
diff --git a/lib/internal/validators.js b/lib/internal/validators.js
new file mode 100644
index 0000000000..f90068445c
--- /dev/null
+++ b/lib/internal/validators.js
@@ -0,0 +1,530 @@
+/* eslint jsdoc/require-jsdoc: "error" */
+
+'use strict'
+
+const {
+ ArrayIsArray,
+ ArrayPrototypeIncludes,
+ ArrayPrototypeJoin,
+ ArrayPrototypeMap,
+ NumberIsInteger,
+ NumberIsNaN,
+ NumberMAX_SAFE_INTEGER,
+ NumberMIN_SAFE_INTEGER,
+ NumberParseInt,
+ ObjectPrototypeHasOwnProperty,
+ RegExpPrototypeExec,
+ String,
+ StringPrototypeToUpperCase,
+ StringPrototypeTrim
+} = require('../ours/primordials')
+const {
+ hideStackFrames,
+ codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL }
+} = require('../ours/errors')
+const { normalizeEncoding } = require('../ours/util')
+const { isAsyncFunction, isArrayBufferView } = require('../ours/util').types
+const signals = {}
+
+/**
+ * @param {*} value
+ * @returns {boolean}
+ */
+function isInt32(value) {
+ return value === (value | 0)
+}
+
+/**
+ * @param {*} value
+ * @returns {boolean}
+ */
+function isUint32(value) {
+ return value === value >>> 0
+}
+const octalReg = /^[0-7]+$/
+const modeDesc = 'must be a 32-bit unsigned integer or an octal string'
+
+/**
+ * Parse and validate values that will be converted into mode_t (the S_*
+ * constants). Only valid numbers and octal strings are allowed. They could be
+ * converted to 32-bit unsigned integers or non-negative signed integers in the
+ * C++ land, but any value higher than 0o777 will result in platform-specific
+ * behaviors.
+ * @param {*} value Values to be validated
+ * @param {string} name Name of the argument
+ * @param {number} [def] If specified, will be returned for invalid values
+ * @returns {number}
+ */
+function parseFileMode(value, name, def) {
+ if (typeof value === 'undefined') {
+ value = def
+ }
+ if (typeof value === 'string') {
+ if (RegExpPrototypeExec(octalReg, value) === null) {
+ throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc)
+ }
+ value = NumberParseInt(value, 8)
+ }
+ validateUint32(value, name)
+ return value
+}
+
+/**
+ * @callback validateInteger
+ * @param {*} value
+ * @param {string} name
+ * @param {number} [min]
+ * @param {number} [max]
+ * @returns {asserts value is number}
+ */
+
+/** @type {validateInteger} */
+const validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => {
+ if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
+ if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
+ if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
+})
+
+/**
+ * @callback validateInt32
+ * @param {*} value
+ * @param {string} name
+ * @param {number} [min]
+ * @param {number} [max]
+ * @returns {asserts value is number}
+ */
+
+/** @type {validateInt32} */
+const validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => {
+ // The defaults for min and max correspond to the limits of 32-bit integers.
+ if (typeof value !== 'number') {
+ throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
+ }
+ if (!NumberIsInteger(value)) {
+ throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
+ }
+ if (value < min || value > max) {
+ throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
+ }
+})
+
+/**
+ * @callback validateUint32
+ * @param {*} value
+ * @param {string} name
+ * @param {number|boolean} [positive=false]
+ * @returns {asserts value is number}
+ */
+
+/** @type {validateUint32} */
+const validateUint32 = hideStackFrames((value, name, positive = false) => {
+  if (typeof value !== 'number') {
+    throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
+  }
+  if (!NumberIsInteger(value)) {
+    throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
+  }
+  const min = positive ? 1 : 0
+  // max is 2 ** 32 - 1 (4294967295), the largest unsigned 32-bit integer
+  const max = 4294967295
+  if (value < min || value > max) {
+    throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
+  }
+})
+
+/**
+ * @callback validateString
+ * @param {*} value
+ * @param {string} name
+ * @returns {asserts value is string}
+ */
+
+/** @type {validateString} */
+function validateString(value, name) {
+ if (typeof value !== 'string') throw new ERR_INVALID_ARG_TYPE(name, 'string', value)
+}
+
+/**
+ * @callback validateNumber
+ * @param {*} value
+ * @param {string} name
+ * @param {number} [min]
+ * @param {number} [max]
+ * @returns {asserts value is number}
+ */
+
+/** @type {validateNumber} */
+function validateNumber(value, name, min = undefined, max) {
+ if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
+ if (
+ (min != null && value < min) ||
+ (max != null && value > max) ||
+ ((min != null || max != null) && NumberIsNaN(value))
+ ) {
+ throw new ERR_OUT_OF_RANGE(
+ name,
+ `${min != null ? `>= ${min}` : ''}${min != null && max != null ? ' && ' : ''}${max != null ? `<= ${max}` : ''}`,
+ value
+ )
+ }
+}
+
+/**
+ * @callback validateOneOf
+ * @template T
+ * @param {T} value
+ * @param {string} name
+ * @param {T[]} oneOf
+ */
+
+/** @type {validateOneOf} */
+const validateOneOf = hideStackFrames((value, name, oneOf) => {
+ if (!ArrayPrototypeIncludes(oneOf, value)) {
+ const allowed = ArrayPrototypeJoin(
+ ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? `'${v}'` : String(v))),
+ ', '
+ )
+ const reason = 'must be one of: ' + allowed
+ throw new ERR_INVALID_ARG_VALUE(name, value, reason)
+ }
+})
+
+/**
+ * @callback validateBoolean
+ * @param {*} value
+ * @param {string} name
+ * @returns {asserts value is boolean}
+ */
+
+/** @type {validateBoolean} */
+function validateBoolean(value, name) {
+ if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value)
+}
+
+/**
+ * @param {any} options
+ * @param {string} key
+ * @param {any} defaultValue value returned when `key` is not an own property
+ * @returns {any} the owned property value, or `defaultValue`
+ */
+function getOwnPropertyValueOrDefault(options, key, defaultValue) {
+  return options == null || !ObjectPrototypeHasOwnProperty(options, key) ? defaultValue : options[key]
+}
+
+/**
+ * @callback validateObject
+ * @param {*} value
+ * @param {string} name
+ * @param {{
+ * allowArray?: boolean,
+ * allowFunction?: boolean,
+ * nullable?: boolean
+ * }} [options]
+ */
+
+/** @type {validateObject} */
+const validateObject = hideStackFrames((value, name, options = null) => {
+ const allowArray = getOwnPropertyValueOrDefault(options, 'allowArray', false)
+ const allowFunction = getOwnPropertyValueOrDefault(options, 'allowFunction', false)
+ const nullable = getOwnPropertyValueOrDefault(options, 'nullable', false)
+ if (
+ (!nullable && value === null) ||
+ (!allowArray && ArrayIsArray(value)) ||
+ (typeof value !== 'object' && (!allowFunction || typeof value !== 'function'))
+ ) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Object', value)
+ }
+})
+
+/**
+ * @callback validateDictionary - We are using the Web IDL Standard definition
+ * of "dictionary" here, which means any value
+ * whose Type is either Undefined, Null, or
+ * Object (which includes functions).
+ * @param {*} value
+ * @param {string} name
+ * @see https://webidl.spec.whatwg.org/#es-dictionary
+ * @see https://tc39.es/ecma262/#table-typeof-operator-results
+ */
+
+/** @type {validateDictionary} */
+const validateDictionary = hideStackFrames((value, name) => {
+ if (value != null && typeof value !== 'object' && typeof value !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value)
+ }
+})
+
+/**
+ * @callback validateArray
+ * @param {*} value
+ * @param {string} name
+ * @param {number} [minLength]
+ * @returns {asserts value is any[]}
+ */
+
+/** @type {validateArray} */
+const validateArray = hideStackFrames((value, name, minLength = 0) => {
+ if (!ArrayIsArray(value)) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Array', value)
+ }
+ if (value.length < minLength) {
+ const reason = `must be longer than ${minLength}`
+ throw new ERR_INVALID_ARG_VALUE(name, value, reason)
+ }
+})
+
+/**
+ * @callback validateStringArray
+ * @param {*} value
+ * @param {string} name
+ * @returns {asserts value is string[]}
+ */
+
+/** @type {validateStringArray} */
+function validateStringArray(value, name) {
+ validateArray(value, name)
+ for (let i = 0; i < value.length; i++) {
+ validateString(value[i], `${name}[${i}]`)
+ }
+}
+
+/**
+ * @callback validateBooleanArray
+ * @param {*} value
+ * @param {string} name
+ * @returns {asserts value is boolean[]}
+ */
+
+/** @type {validateBooleanArray} */
+function validateBooleanArray(value, name) {
+ validateArray(value, name)
+ for (let i = 0; i < value.length; i++) {
+ validateBoolean(value[i], `${name}[${i}]`)
+ }
+}
+
+/**
+ * @callback validateAbortSignalArray
+ * @param {*} value
+ * @param {string} name
+ * @returns {asserts value is AbortSignal[]}
+ */
+
+/** @type {validateAbortSignalArray} */
+function validateAbortSignalArray(value, name) {
+ validateArray(value, name)
+ for (let i = 0; i < value.length; i++) {
+ const signal = value[i]
+ const indexedName = `${name}[${i}]`
+ if (signal == null) {
+ throw new ERR_INVALID_ARG_TYPE(indexedName, 'AbortSignal', signal)
+ }
+ validateAbortSignal(signal, indexedName)
+ }
+}
+
+/**
+ * @param {*} signal
+ * @param {string} [name='signal']
+ * @returns {asserts signal is keyof signals}
+ */
+function validateSignalName(signal, name = 'signal') {
+ validateString(signal, name)
+ if (signals[signal] === undefined) {
+ if (signals[StringPrototypeToUpperCase(signal)] !== undefined) {
+ throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)')
+ }
+ throw new ERR_UNKNOWN_SIGNAL(signal)
+ }
+}
+
+/**
+ * @callback validateBuffer
+ * @param {*} buffer
+ * @param {string} [name='buffer']
+ * @returns {asserts buffer is ArrayBufferView}
+ */
+
+/** @type {validateBuffer} */
+const validateBuffer = hideStackFrames((buffer, name = 'buffer') => {
+ if (!isArrayBufferView(buffer)) {
+ throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer)
+ }
+})
+
+/**
+ * @param {string} data
+ * @param {string} encoding
+ */
+function validateEncoding(data, encoding) {
+ const normalizedEncoding = normalizeEncoding(encoding)
+ const length = data.length
+ if (normalizedEncoding === 'hex' && length % 2 !== 0) {
+ throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`)
+ }
+}
+
+/**
+ * Check that the port number is not NaN when coerced to a number,
+ * is an integer and that it falls within the legal range of port numbers.
+ * @param {*} port
+ * @param {string} [name='Port']
+ * @param {boolean} [allowZero=true]
+ * @returns {number}
+ */
+function validatePort(port, name = 'Port', allowZero = true) {
+ if (
+ (typeof port !== 'number' && typeof port !== 'string') ||
+ (typeof port === 'string' && StringPrototypeTrim(port).length === 0) ||
+ +port !== +port >>> 0 ||
+ port > 0xffff ||
+ (port === 0 && !allowZero)
+ ) {
+ throw new ERR_SOCKET_BAD_PORT(name, port, allowZero)
+ }
+ return port | 0
+}
+
+/**
+ * @callback validateAbortSignal
+ * @param {*} signal
+ * @param {string} name
+ */
+
+/** @type {validateAbortSignal} */
+const validateAbortSignal = hideStackFrames((signal, name) => {
+ if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
+ }
+})
+
+/**
+ * @callback validateFunction
+ * @param {*} value
+ * @param {string} name
+ * @returns {asserts value is Function}
+ */
+
+/** @type {validateFunction} */
+const validateFunction = hideStackFrames((value, name) => {
+ if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
+})
+
+/**
+ * @callback validatePlainFunction
+ * @param {*} value
+ * @param {string} name
+ * @returns {asserts value is Function}
+ */
+
+/** @type {validatePlainFunction} */
+const validatePlainFunction = hideStackFrames((value, name) => {
+ if (typeof value !== 'function' || isAsyncFunction(value)) throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
+})
+
+/**
+ * @callback validateUndefined
+ * @param {*} value
+ * @param {string} name
+ * @returns {asserts value is undefined}
+ */
+
+/** @type {validateUndefined} */
+const validateUndefined = hideStackFrames((value, name) => {
+ if (value !== undefined) throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value)
+})
+
+/**
+ * @template T
+ * @param {T} value
+ * @param {string} name
+ * @param {T[]} union
+ */
+function validateUnion(value, name, union) {
+ if (!ArrayPrototypeIncludes(union, value)) {
+ throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value)
+ }
+}
+
+/*
+ The rules for the Link header field are described here:
+ https://www.rfc-editor.org/rfc/rfc8288.html#section-3
+
+ This regex validates any string surrounded by angle brackets
+ (not necessarily a valid URI reference) followed by zero or more
+ link-params separated by semicolons.
+*/
+const linkValueRegExp = /^(?:<[^>]*>)(?:\s*;\s*[^;"\s]+(?:=(")?[^;"\s]*\1)?)*$/
+
+/**
+ * @param {any} value
+ * @param {string} name
+ */
+function validateLinkHeaderFormat(value, name) {
+  if (typeof value === 'undefined' || !RegExpPrototypeExec(linkValueRegExp, value)) {
+    throw new ERR_INVALID_ARG_VALUE(
+      name,
+      value,
+      'must be an array or string of format "</styles.css>; rel=preload; as=style"'
+    )
+  }
+}
+
+/**
+ * @param {any} hints
+ * @return {string}
+ */
+function validateLinkHeaderValue(hints) {
+ if (typeof hints === 'string') {
+ validateLinkHeaderFormat(hints, 'hints')
+ return hints
+ } else if (ArrayIsArray(hints)) {
+ const hintsLength = hints.length
+ let result = ''
+ if (hintsLength === 0) {
+ return result
+ }
+ for (let i = 0; i < hintsLength; i++) {
+ const link = hints[i]
+ validateLinkHeaderFormat(link, 'hints')
+ result += link
+ if (i !== hintsLength - 1) {
+ result += ', '
+ }
+ }
+ return result
+ }
+ throw new ERR_INVALID_ARG_VALUE(
+ 'hints',
+ hints,
+ 'must be an array or string of format "; rel=preload; as=style"'
+ )
+}
+module.exports = {
+ isInt32,
+ isUint32,
+ parseFileMode,
+ validateArray,
+ validateStringArray,
+ validateBooleanArray,
+ validateAbortSignalArray,
+ validateBoolean,
+ validateBuffer,
+ validateDictionary,
+ validateEncoding,
+ validateFunction,
+ validateInt32,
+ validateInteger,
+ validateNumber,
+ validateObject,
+ validateOneOf,
+ validatePlainFunction,
+ validatePort,
+ validateSignalName,
+ validateString,
+ validateUint32,
+ validateUndefined,
+ validateUnion,
+ validateAbortSignal,
+ validateLinkHeaderValue
+}
diff --git a/lib/ours/browser.js b/lib/ours/browser.js
new file mode 100644
index 0000000000..39acef3d7d
--- /dev/null
+++ b/lib/ours/browser.js
@@ -0,0 +1,35 @@
+'use strict'
+
+const CustomStream = require('../stream')
+const promises = require('../stream/promises')
+const originalDestroy = CustomStream.Readable.destroy
+module.exports = CustomStream.Readable
+
+// Explicit export naming is needed for ESM
+module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
+module.exports._isUint8Array = CustomStream._isUint8Array
+module.exports.isDisturbed = CustomStream.isDisturbed
+module.exports.isErrored = CustomStream.isErrored
+module.exports.isReadable = CustomStream.isReadable
+module.exports.Readable = CustomStream.Readable
+module.exports.Writable = CustomStream.Writable
+module.exports.Duplex = CustomStream.Duplex
+module.exports.Transform = CustomStream.Transform
+module.exports.PassThrough = CustomStream.PassThrough
+module.exports.addAbortSignal = CustomStream.addAbortSignal
+module.exports.finished = CustomStream.finished
+module.exports.destroy = CustomStream.destroy // NOTE(review): dead store — overwritten on the next line
+module.exports.destroy = originalDestroy // restores Readable.destroy saved before the exports were rewired
+module.exports.pipeline = CustomStream.pipeline
+module.exports.compose = CustomStream.compose
+Object.defineProperty(CustomStream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+})
+module.exports.Stream = CustomStream.Stream
+
+// Allow default importing
+module.exports.default = module.exports
diff --git a/lib/ours/errors.js b/lib/ours/errors.js
new file mode 100644
index 0000000000..979957e3c1
--- /dev/null
+++ b/lib/ours/errors.js
@@ -0,0 +1,343 @@
+'use strict'
+
+const { format, inspect } = require('./util/inspect')
+const { AggregateError: CustomAggregateError } = require('./primordials')
+
+/*
+ This file is a reduced and adapted version of the main lib/internal/errors.js file defined at
+
+ https://github.com/nodejs/node/blob/main/lib/internal/errors.js
+
+ Don't try to replace with the original file and keep it up to date (starting from E(...) definitions)
+ with the upstream file.
+*/
+
+const AggregateError = globalThis.AggregateError || CustomAggregateError
+const kIsNodeError = Symbol('kIsNodeError')
+const kTypes = [
+ 'string',
+ 'function',
+ 'number',
+ 'object',
+ // Accept 'Function' and 'Object' as alternative to the lower cased version.
+ 'Function',
+ 'Object',
+ 'boolean',
+ 'bigint',
+ 'symbol'
+]
+const classRegExp = /^([A-Z][a-z0-9]*)+$/
+const nodeInternalPrefix = '__node_internal_'
+const codes = {}
+function assert(value, message) {
+ if (!value) {
+ throw new codes.ERR_INTERNAL_ASSERTION(message)
+ }
+}
+
+// Only use this for integers! Decimal numbers do not work with this function.
+function addNumericalSeparator(val) {
+ let res = ''
+ let i = val.length
+ const start = val[0] === '-' ? 1 : 0
+ for (; i >= start + 4; i -= 3) {
+ res = `_${val.slice(i - 3, i)}${res}`
+ }
+ return `${val.slice(0, i)}${res}`
+}
+function getMessage(key, msg, args) {
+ if (typeof msg === 'function') {
+ assert(
+ msg.length <= args.length,
+ // Default options do not count.
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`
+ )
+ return msg(...args)
+ }
+ const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length
+ assert(
+ expectedLength === args.length,
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`
+ )
+ if (args.length === 0) {
+ return msg
+ }
+ return format(msg, ...args)
+}
+function E(code, message, Base) {
+ if (!Base) {
+ Base = Error
+ }
+ class NodeError extends Base {
+ constructor(...args) {
+ super(getMessage(code, message, args))
+ }
+ toString() {
+ return `${this.name} [${code}]: ${this.message}`
+ }
+ }
+ Object.defineProperties(NodeError.prototype, {
+ name: {
+ value: Base.name,
+ writable: true,
+ enumerable: false,
+ configurable: true
+ },
+ toString: {
+ value() {
+ return `${this.name} [${code}]: ${this.message}`
+ },
+ writable: true,
+ enumerable: false,
+ configurable: true
+ }
+ })
+ NodeError.prototype.code = code
+ NodeError.prototype[kIsNodeError] = true
+ codes[code] = NodeError
+}
+function hideStackFrames(fn) {
+ // We rename the functions that will be hidden to cut off the stacktrace
+ // at the outermost one
+ const hidden = nodeInternalPrefix + fn.name
+ Object.defineProperty(fn, 'name', {
+ value: hidden
+ })
+ return fn
+}
+function aggregateTwoErrors(innerError, outerError) {
+ if (innerError && outerError && innerError !== outerError) {
+ if (Array.isArray(outerError.errors)) {
+ // If `outerError` is already an `AggregateError`.
+ outerError.errors.push(innerError)
+ return outerError
+ }
+ const err = new AggregateError([outerError, innerError], outerError.message)
+ err.code = outerError.code
+ return err
+ }
+ return innerError || outerError
+}
+class AbortError extends Error {
+ constructor(message = 'The operation was aborted', options = undefined) {
+ if (options !== undefined && typeof options !== 'object') {
+ throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options)
+ }
+ super(message, options)
+ this.code = 'ABORT_ERR'
+ this.name = 'AbortError'
+ }
+}
+E('ERR_ASSERTION', '%s', Error)
+E(
+ 'ERR_INVALID_ARG_TYPE',
+ (name, expected, actual) => {
+ assert(typeof name === 'string', "'name' must be a string")
+ if (!Array.isArray(expected)) {
+ expected = [expected]
+ }
+ let msg = 'The '
+ if (name.endsWith(' argument')) {
+ // For cases like 'first argument'
+ msg += `${name} `
+ } else {
+ msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} `
+ }
+ msg += 'must be '
+ const types = []
+ const instances = []
+ const other = []
+ for (const value of expected) {
+ assert(typeof value === 'string', 'All expected entries have to be of type string')
+ if (kTypes.includes(value)) {
+ types.push(value.toLowerCase())
+ } else if (classRegExp.test(value)) {
+ instances.push(value)
+ } else {
+ assert(value !== 'object', 'The value "object" should be written as "Object"')
+ other.push(value)
+ }
+ }
+
+ // Special handle `object` in case other instances are allowed to outline
+ // the differences between each other.
+ if (instances.length > 0) {
+ const pos = types.indexOf('object')
+ if (pos !== -1) {
+ types.splice(types, pos, 1)
+ instances.push('Object')
+ }
+ }
+ if (types.length > 0) {
+ switch (types.length) {
+ case 1:
+ msg += `of type ${types[0]}`
+ break
+ case 2:
+ msg += `one of type ${types[0]} or ${types[1]}`
+ break
+ default: {
+ const last = types.pop()
+ msg += `one of type ${types.join(', ')}, or ${last}`
+ }
+ }
+ if (instances.length > 0 || other.length > 0) {
+ msg += ' or '
+ }
+ }
+ if (instances.length > 0) {
+ switch (instances.length) {
+ case 1:
+ msg += `an instance of ${instances[0]}`
+ break
+ case 2:
+ msg += `an instance of ${instances[0]} or ${instances[1]}`
+ break
+ default: {
+ const last = instances.pop()
+ msg += `an instance of ${instances.join(', ')}, or ${last}`
+ }
+ }
+ if (other.length > 0) {
+ msg += ' or '
+ }
+ }
+ switch (other.length) {
+ case 0:
+ break
+ case 1:
+ if (other[0].toLowerCase() !== other[0]) {
+ msg += 'an '
+ }
+ msg += `${other[0]}`
+ break
+ case 2:
+ msg += `one of ${other[0]} or ${other[1]}`
+ break
+ default: {
+ const last = other.pop()
+ msg += `one of ${other.join(', ')}, or ${last}`
+ }
+ }
+ if (actual == null) {
+ msg += `. Received ${actual}`
+ } else if (typeof actual === 'function' && actual.name) {
+ msg += `. Received function ${actual.name}`
+ } else if (typeof actual === 'object') {
+ var _actual$constructor
+ if (
+ (_actual$constructor = actual.constructor) !== null &&
+ _actual$constructor !== undefined &&
+ _actual$constructor.name
+ ) {
+ msg += `. Received an instance of ${actual.constructor.name}`
+ } else {
+ const inspected = inspect(actual, {
+ depth: -1
+ })
+ msg += `. Received ${inspected}`
+ }
+ } else {
+ let inspected = inspect(actual, {
+ colors: false
+ })
+ if (inspected.length > 25) {
+ inspected = `${inspected.slice(0, 25)}...`
+ }
+ msg += `. Received type ${typeof actual} (${inspected})`
+ }
+ return msg
+ },
+ TypeError
+)
+E(
+ 'ERR_INVALID_ARG_VALUE',
+ (name, value, reason = 'is invalid') => {
+ let inspected = inspect(value)
+ if (inspected.length > 128) {
+ inspected = inspected.slice(0, 128) + '...'
+ }
+ const type = name.includes('.') ? 'property' : 'argument'
+ return `The ${type} '${name}' ${reason}. Received ${inspected}`
+ },
+ TypeError
+)
+E(
+ 'ERR_INVALID_RETURN_VALUE',
+ (input, name, value) => {
+ var _value$constructor
+ const type =
+ value !== null &&
+ value !== undefined &&
+ (_value$constructor = value.constructor) !== null &&
+ _value$constructor !== undefined &&
+ _value$constructor.name
+ ? `instance of ${value.constructor.name}`
+ : `type ${typeof value}`
+ return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`
+ },
+ TypeError
+)
+E(
+ 'ERR_MISSING_ARGS',
+ (...args) => {
+ assert(args.length > 0, 'At least one arg needs to be specified')
+ let msg
+ const len = args.length
+ args = (Array.isArray(args) ? args : [args]).map((a) => `"${a}"`).join(' or ')
+ switch (len) {
+ case 1:
+ msg += `The ${args[0]} argument`
+ break
+ case 2:
+ msg += `The ${args[0]} and ${args[1]} arguments`
+ break
+ default:
+ {
+ const last = args.pop()
+ msg += `The ${args.join(', ')}, and ${last} arguments`
+ }
+ break
+ }
+ return `${msg} must be specified`
+ },
+ TypeError
+)
+E(
+ 'ERR_OUT_OF_RANGE',
+ (str, range, input) => {
+ assert(range, 'Missing "range" argument')
+ let received
+ if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {
+ received = addNumericalSeparator(String(input))
+ } else if (typeof input === 'bigint') {
+ received = String(input)
+ const limit = BigInt(2) ** BigInt(32)
+ if (input > limit || input < -limit) {
+ received = addNumericalSeparator(received)
+ }
+ received += 'n'
+ } else {
+ received = inspect(input)
+ }
+ return `The value of "${str}" is out of range. It must be ${range}. Received ${received}`
+ },
+ RangeError
+)
+E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error)
+E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error)
+E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error)
+E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error)
+E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error)
+E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError)
+E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error)
+E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error)
+E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error)
+E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error)
+E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError)
+module.exports = {
+ AbortError,
+ aggregateTwoErrors: hideStackFrames(aggregateTwoErrors),
+ hideStackFrames,
+ codes
+}
diff --git a/lib/ours/index.js b/lib/ours/index.js
new file mode 100644
index 0000000000..6cdd2d7855
--- /dev/null
+++ b/lib/ours/index.js
@@ -0,0 +1,65 @@
+'use strict'
+
+const Stream = require('stream')
+if (Stream && process.env.READABLE_STREAM === 'disable') {
+ const promises = Stream.promises
+
+ // Explicit export naming is needed for ESM
+ module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer
+ module.exports._isUint8Array = Stream._isUint8Array
+ module.exports.isDisturbed = Stream.isDisturbed
+ module.exports.isErrored = Stream.isErrored
+ module.exports.isReadable = Stream.isReadable
+ module.exports.Readable = Stream.Readable
+ module.exports.Writable = Stream.Writable
+ module.exports.Duplex = Stream.Duplex
+ module.exports.Transform = Stream.Transform
+ module.exports.PassThrough = Stream.PassThrough
+ module.exports.addAbortSignal = Stream.addAbortSignal
+ module.exports.finished = Stream.finished
+ module.exports.destroy = Stream.destroy
+ module.exports.pipeline = Stream.pipeline
+ module.exports.compose = Stream.compose
+ Object.defineProperty(Stream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+ })
+ module.exports.Stream = Stream.Stream
+} else {
+ const CustomStream = require('../stream')
+ const promises = require('../stream/promises')
+ const originalDestroy = CustomStream.Readable.destroy
+ module.exports = CustomStream.Readable
+
+ // Explicit export naming is needed for ESM
+ module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
+ module.exports._isUint8Array = CustomStream._isUint8Array
+ module.exports.isDisturbed = CustomStream.isDisturbed
+ module.exports.isErrored = CustomStream.isErrored
+ module.exports.isReadable = CustomStream.isReadable
+ module.exports.Readable = CustomStream.Readable
+ module.exports.Writable = CustomStream.Writable
+ module.exports.Duplex = CustomStream.Duplex
+ module.exports.Transform = CustomStream.Transform
+ module.exports.PassThrough = CustomStream.PassThrough
+ module.exports.addAbortSignal = CustomStream.addAbortSignal
+ module.exports.finished = CustomStream.finished
+  module.exports.destroy = CustomStream.destroy // NOTE(review): dead store — overwritten on the next line
+  module.exports.destroy = originalDestroy // restores Readable.destroy saved above
+ module.exports.pipeline = CustomStream.pipeline
+ module.exports.compose = CustomStream.compose
+ Object.defineProperty(CustomStream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+ })
+ module.exports.Stream = CustomStream.Stream
+}
+
+// Allow default importing
+module.exports.default = module.exports
diff --git a/lib/ours/primordials.js b/lib/ours/primordials.js
new file mode 100644
index 0000000000..81856fcfaa
--- /dev/null
+++ b/lib/ours/primordials.js
@@ -0,0 +1,124 @@
+'use strict'
+
+/*
+ This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at
+
+ https://github.com/nodejs/node/blob/main/lib/internal/per_context/primordials.js
+
+ Don't try to replace this with the original file; instead, keep it up to date with the upstream file.
+*/
+
+// This is a simplified version of AggregateError
+class AggregateError extends Error {
+ constructor(errors) {
+ if (!Array.isArray(errors)) {
+ throw new TypeError(`Expected input to be an Array, got ${typeof errors}`)
+ }
+ let message = ''
+ for (let i = 0; i < errors.length; i++) {
+ message += ` ${errors[i].stack}\n`
+ }
+ super(message)
+ this.name = 'AggregateError'
+ this.errors = errors
+ }
+}
+module.exports = {
+ AggregateError,
+ ArrayIsArray(self) {
+ return Array.isArray(self)
+ },
+ ArrayPrototypeIncludes(self, el) {
+ return self.includes(el)
+ },
+ ArrayPrototypeIndexOf(self, el) {
+ return self.indexOf(el)
+ },
+ ArrayPrototypeJoin(self, sep) {
+ return self.join(sep)
+ },
+ ArrayPrototypeMap(self, fn) {
+ return self.map(fn)
+ },
+ ArrayPrototypePop(self, el) {
+ return self.pop(el)
+ },
+ ArrayPrototypePush(self, el) {
+ return self.push(el)
+ },
+ ArrayPrototypeSlice(self, start, end) {
+ return self.slice(start, end)
+ },
+ Error,
+ FunctionPrototypeCall(fn, thisArgs, ...args) {
+ return fn.call(thisArgs, ...args)
+ },
+ FunctionPrototypeSymbolHasInstance(self, instance) {
+ return Function.prototype[Symbol.hasInstance].call(self, instance)
+ },
+ MathFloor: Math.floor,
+ Number,
+ NumberIsInteger: Number.isInteger,
+ NumberIsNaN: Number.isNaN,
+ NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER,
+ NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER,
+ NumberParseInt: Number.parseInt,
+ ObjectDefineProperties(self, props) {
+ return Object.defineProperties(self, props)
+ },
+ ObjectDefineProperty(self, name, prop) {
+ return Object.defineProperty(self, name, prop)
+ },
+ ObjectGetOwnPropertyDescriptor(self, name) {
+ return Object.getOwnPropertyDescriptor(self, name)
+ },
+ ObjectKeys(obj) {
+ return Object.keys(obj)
+ },
+ ObjectSetPrototypeOf(target, proto) {
+ return Object.setPrototypeOf(target, proto)
+ },
+ Promise,
+ PromisePrototypeCatch(self, fn) {
+ return self.catch(fn)
+ },
+ PromisePrototypeThen(self, thenFn, catchFn) {
+ return self.then(thenFn, catchFn)
+ },
+ PromiseReject(err) {
+ return Promise.reject(err)
+ },
+ PromiseResolve(val) {
+ return Promise.resolve(val)
+ },
+ ReflectApply: Reflect.apply,
+ RegExpPrototypeTest(self, value) {
+ return self.test(value)
+ },
+ SafeSet: Set,
+ String,
+ StringPrototypeSlice(self, start, end) {
+ return self.slice(start, end)
+ },
+ StringPrototypeToLowerCase(self) {
+ return self.toLowerCase()
+ },
+ StringPrototypeToUpperCase(self) {
+ return self.toUpperCase()
+ },
+ StringPrototypeTrim(self) {
+ return self.trim()
+ },
+ Symbol,
+ SymbolFor: Symbol.for,
+ SymbolAsyncIterator: Symbol.asyncIterator,
+ SymbolHasInstance: Symbol.hasInstance,
+ SymbolIterator: Symbol.iterator,
+ SymbolDispose: Symbol.dispose || Symbol('Symbol.dispose'),
+ SymbolAsyncDispose: Symbol.asyncDispose || Symbol('Symbol.asyncDispose'),
+ TypedArrayPrototypeSet(self, buf, len) {
+ return self.set(buf, len)
+ },
+ Boolean,
+ Uint8Array
+}
diff --git a/lib/ours/util.js b/lib/ours/util.js
new file mode 100644
index 0000000000..b560361ffa
--- /dev/null
+++ b/lib/ours/util.js
@@ -0,0 +1,148 @@
+'use strict'
+
+const bufferModule = require('buffer')
+const { format, inspect } = require('./util/inspect')
+const {
+ codes: { ERR_INVALID_ARG_TYPE }
+} = require('./errors')
+const { kResistStopPropagation, AggregateError, SymbolDispose } = require('./primordials')
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor
+const Blob = globalThis.Blob || bufferModule.Blob
+/* eslint-disable indent */
+const isBlob =
+ typeof Blob !== 'undefined'
+ ? function isBlob(b) {
+ // eslint-disable-next-line indent
+ return b instanceof Blob
+ }
+ : function isBlob(b) {
+ return false
+ }
+/* eslint-enable indent */
+
+const validateAbortSignal = (signal, name) => {
+ if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
+ }
+}
+const validateFunction = (value, name) => {
+ if (typeof value !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
+ }
+}
+module.exports = {
+ AggregateError,
+ kEmptyObject: Object.freeze({}),
+ once(callback) {
+ let called = false
+ return function (...args) {
+ if (called) {
+ return
+ }
+ called = true
+ callback.apply(this, args)
+ }
+ },
+ createDeferredPromise: function () {
+ let resolve
+ let reject
+
+ // eslint-disable-next-line promise/param-names
+ const promise = new Promise((res, rej) => {
+ resolve = res
+ reject = rej
+ })
+ return {
+ promise,
+ resolve,
+ reject
+ }
+ },
+ promisify(fn) {
+ return new Promise((resolve, reject) => {
+ fn((err, ...args) => {
+ if (err) {
+ return reject(err)
+ }
+ return resolve(...args)
+ })
+ })
+ },
+ debuglog() {
+ return function () {}
+ },
+ format,
+ inspect,
+ types: {
+ isAsyncFunction(fn) {
+ return fn instanceof AsyncFunction
+ },
+ isArrayBufferView(arr) {
+ return ArrayBuffer.isView(arr)
+ }
+ },
+ isBlob,
+ deprecate(fn, message) {
+ return fn
+ },
+ addAbortListener:
+ require('events').addAbortListener ||
+ function addAbortListener(signal, listener) {
+ if (signal === undefined) {
+ throw new ERR_INVALID_ARG_TYPE('signal', 'AbortSignal', signal)
+ }
+ validateAbortSignal(signal, 'signal')
+ validateFunction(listener, 'listener')
+ let removeEventListener
+ if (signal.aborted) {
+ queueMicrotask(() => listener())
+ } else {
+ signal.addEventListener('abort', listener, {
+ __proto__: null,
+ once: true,
+ [kResistStopPropagation]: true
+ })
+ removeEventListener = () => {
+ signal.removeEventListener('abort', listener)
+ }
+ }
+ return {
+ __proto__: null,
+ [SymbolDispose]() {
+ var _removeEventListener
+ ;(_removeEventListener = removeEventListener) === null || _removeEventListener === undefined
+ ? undefined
+ : _removeEventListener()
+ }
+ }
+ },
+ AbortSignalAny:
+ AbortSignal.any ||
+ function AbortSignalAny(signals) {
+ // Fast path if there is only one signal.
+ if (signals.length === 1) {
+ return signals[0]
+ }
+ const ac = new AbortController()
+ const abort = () => ac.abort()
+ signals.forEach((signal) => {
+ validateAbortSignal(signal, 'signals')
+ signal.addEventListener('abort', abort, {
+ once: true
+ })
+ })
+ ac.signal.addEventListener(
+ 'abort',
+ () => {
+ signals.forEach((signal) => signal.removeEventListener('abort', abort))
+ },
+ {
+ once: true
+ }
+ )
+ return ac.signal
+ }
+}
+module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom')
diff --git a/lib/ours/util/inspect.js b/lib/ours/util/inspect.js
new file mode 100644
index 0000000000..e00844570f
--- /dev/null
+++ b/lib/ours/util/inspect.js
@@ -0,0 +1,55 @@
+'use strict'
+
+/*
+ This file is a reduced and adapted version of the main lib/internal/util/inspect.js file defined at
+
+ https://github.com/nodejs/node/blob/main/lib/internal/util/inspect.js
+
+ Don't try to replace this with the original file; instead, keep it up to date with the upstream file.
+*/
+module.exports = {
+ format(format, ...args) {
+ // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args
+ return format.replace(/%([sdifj])/g, function (...[_unused, type]) {
+ const replacement = args.shift()
+ if (type === 'f') {
+ return replacement.toFixed(6)
+ } else if (type === 'j') {
+ return JSON.stringify(replacement)
+ } else if (type === 's' && typeof replacement === 'object') {
+ const ctor = replacement.constructor !== Object ? replacement.constructor.name : ''
+ return `${ctor} {}`.trim()
+ } else {
+ return replacement.toString()
+ }
+ })
+ },
+ inspect(value) {
+ // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options
+ switch (typeof value) {
+ case 'string':
+ if (value.includes("'")) {
+ if (!value.includes('"')) {
+ return `"${value}"`
+ } else if (!value.includes('`') && !value.includes('${')) {
+ return `\`${value}\``
+ }
+ }
+ return `'${value}'`
+ case 'number':
+ if (isNaN(value)) {
+ return 'NaN'
+ } else if (Object.is(value, -0)) {
+ return String(value)
+ }
+ return value
+ case 'bigint':
+ return `${String(value)}n`
+ case 'boolean':
+ case 'undefined':
+ return String(value)
+ case 'object':
+ return '{}'
+ }
+ }
+}
diff --git a/lib/stream.js b/lib/stream.js
new file mode 100644
index 0000000000..1e2cab315a
--- /dev/null
+++ b/lib/stream.js
@@ -0,0 +1,143 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+/* replacement start */
+
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { ObjectDefineProperty, ObjectKeys, ReflectApply } = require('./ours/primordials')
+const {
+ promisify: { custom: customPromisify }
+} = require('./ours/util')
+const { streamReturningOperators, promiseReturningOperators } = require('./internal/streams/operators')
+const {
+ codes: { ERR_ILLEGAL_CONSTRUCTOR }
+} = require('./ours/errors')
+const compose = require('./internal/streams/compose')
+const { setDefaultHighWaterMark, getDefaultHighWaterMark } = require('./internal/streams/state')
+const { pipeline } = require('./internal/streams/pipeline')
+const { destroyer } = require('./internal/streams/destroy')
+const eos = require('./internal/streams/end-of-stream')
+const internalBuffer = {}
+const promises = require('./stream/promises')
+const utils = require('./internal/streams/utils')
+const Stream = (module.exports = require('./internal/streams/legacy').Stream)
+Stream.isDestroyed = utils.isDestroyed
+Stream.isDisturbed = utils.isDisturbed
+Stream.isErrored = utils.isErrored
+Stream.isReadable = utils.isReadable
+Stream.isWritable = utils.isWritable
+Stream.Readable = require('./internal/streams/readable')
+for (const key of ObjectKeys(streamReturningOperators)) {
+ const op = streamReturningOperators[key]
+ function fn(...args) {
+ if (new.target) {
+ throw ERR_ILLEGAL_CONSTRUCTOR()
+ }
+ return Stream.Readable.from(ReflectApply(op, this, args))
+ }
+ ObjectDefineProperty(fn, 'name', {
+ __proto__: null,
+ value: op.name
+ })
+ ObjectDefineProperty(fn, 'length', {
+ __proto__: null,
+ value: op.length
+ })
+ ObjectDefineProperty(Stream.Readable.prototype, key, {
+ __proto__: null,
+ value: fn,
+ enumerable: false,
+ configurable: true,
+ writable: true
+ })
+}
+for (const key of ObjectKeys(promiseReturningOperators)) {
+ const op = promiseReturningOperators[key]
+ function fn(...args) {
+ if (new.target) {
+ throw ERR_ILLEGAL_CONSTRUCTOR()
+ }
+ return ReflectApply(op, this, args)
+ }
+ ObjectDefineProperty(fn, 'name', {
+ __proto__: null,
+ value: op.name
+ })
+ ObjectDefineProperty(fn, 'length', {
+ __proto__: null,
+ value: op.length
+ })
+ ObjectDefineProperty(Stream.Readable.prototype, key, {
+ __proto__: null,
+ value: fn,
+ enumerable: false,
+ configurable: true,
+ writable: true
+ })
+}
+Stream.Writable = require('./internal/streams/writable')
+Stream.Duplex = require('./internal/streams/duplex')
+Stream.Transform = require('./internal/streams/transform')
+Stream.PassThrough = require('./internal/streams/passthrough')
+Stream.pipeline = pipeline
+const { addAbortSignal } = require('./internal/streams/add-abort-signal')
+Stream.addAbortSignal = addAbortSignal
+Stream.finished = eos
+Stream.destroy = destroyer
+Stream.compose = compose
+Stream.setDefaultHighWaterMark = setDefaultHighWaterMark
+Stream.getDefaultHighWaterMark = getDefaultHighWaterMark
+ObjectDefineProperty(Stream, 'promises', {
+ __proto__: null,
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+})
+ObjectDefineProperty(pipeline, customPromisify, {
+ __proto__: null,
+ enumerable: true,
+ get() {
+ return promises.pipeline
+ }
+})
+ObjectDefineProperty(eos, customPromisify, {
+ __proto__: null,
+ enumerable: true,
+ get() {
+ return promises.finished
+ }
+})
+
+// Backwards-compat with node 0.4.x
+Stream.Stream = Stream
+Stream._isUint8Array = function isUint8Array(value) {
+ return value instanceof Uint8Array
+}
+Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) {
+ return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
+}
diff --git a/lib/stream/promises.js b/lib/stream/promises.js
new file mode 100644
index 0000000000..5d4ce15f49
--- /dev/null
+++ b/lib/stream/promises.js
@@ -0,0 +1,43 @@
+'use strict'
+
+const { ArrayPrototypePop, Promise } = require('../ours/primordials')
+const { isIterable, isNodeStream, isWebStream } = require('../internal/streams/utils')
+const { pipelineImpl: pl } = require('../internal/streams/pipeline')
+const { finished } = require('../internal/streams/end-of-stream')
+require('../../lib/stream.js')
+function pipeline(...streams) {
+ return new Promise((resolve, reject) => {
+ let signal
+ let end
+ const lastArg = streams[streams.length - 1]
+ if (
+ lastArg &&
+ typeof lastArg === 'object' &&
+ !isNodeStream(lastArg) &&
+ !isIterable(lastArg) &&
+ !isWebStream(lastArg)
+ ) {
+ const options = ArrayPrototypePop(streams)
+ signal = options.signal
+ end = options.end
+ }
+ pl(
+ streams,
+ (err, value) => {
+ if (err) {
+ reject(err)
+ } else {
+ resolve(value)
+ }
+ },
+ {
+ signal,
+ end
+ }
+ )
+ })
+}
+module.exports = {
+ finished,
+ pipeline
+}
diff --git a/package.json b/package.json
index 9bba6a765f..98b48e686f 100644
--- a/package.json
+++ b/package.json
@@ -1,24 +1,88 @@
{
"name": "readable-stream",
- "version": "0.0.2",
- "description": "An exploration of a new kind of readable streams for Node.js",
- "main": "readable.js",
- "dependencies": {},
- "devDependencies": {
- "tap": "~0.2.6"
- },
- "scripts": {
- "test": "tap test/*.js"
- },
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/readable-stream"
- },
+ "version": "4.7.0",
+ "description": "Node.js Streams, a user-land copy of the stream library from Node.js",
+ "homepage": "https://github.com/nodejs/readable-stream",
+ "license": "MIT",
+ "licenses": [
+ {
+ "type": "MIT",
+ "url": "https://choosealicense.com/licenses/mit/"
+ }
+ ],
"keywords": [
"readable",
"stream",
"pipe"
],
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "BSD"
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/nodejs/readable-stream"
+ },
+ "bugs": {
+ "url": "https://github.com/nodejs/readable-stream/issues"
+ },
+ "main": "lib/ours/index.js",
+ "files": [
+ "lib",
+ "LICENSE",
+ "README.md"
+ ],
+ "browser": {
+ "util": "./lib/ours/util.js",
+ "./lib/ours/index.js": "./lib/ours/browser.js"
+ },
+ "scripts": {
+ "build": "node build/build.mjs 18.19.0",
+ "postbuild": "prettier -w lib test",
+ "test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
+ "test:prepare": "node test/browser/runner-prepare.mjs",
+ "test:browsers": "node test/browser/runner-browser.mjs",
+ "test:bundlers": "node test/browser/runner-node.mjs",
+ "test:readable-stream-only": "node readable-stream-test/runner-prepare.mjs",
+ "coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
+ "format": "prettier -w src lib test",
+ "test:format": "prettier -c src lib test",
+ "lint": "eslint src"
+ },
+ "dependencies": {
+ "abort-controller": "^3.0.0",
+ "buffer": "^6.0.3",
+ "events": "^3.3.0",
+ "process": "^0.11.10",
+ "string_decoder": "^1.3.0"
+ },
+ "devDependencies": {
+ "@babel/core": "^7.17.10",
+ "@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7",
+ "@babel/plugin-proposal-optional-chaining": "^7.16.7",
+ "@eslint/eslintrc": "^3.2.0",
+ "@rollup/plugin-commonjs": "^22.0.0",
+ "@rollup/plugin-inject": "^4.0.4",
+ "@rollup/plugin-node-resolve": "^13.3.0",
+ "@sinonjs/fake-timers": "^9.1.2",
+ "browserify": "^17.0.0",
+ "c8": "^7.11.2",
+ "esbuild": "^0.19.9",
+ "esbuild-plugin-alias": "^0.2.1",
+ "eslint": "^8.15.0",
+ "eslint-config-standard": "^17.0.0",
+ "eslint-plugin-import": "^2.26.0",
+ "eslint-plugin-n": "^15.2.0",
+ "eslint-plugin-promise": "^6.0.0",
+ "playwright": "^1.21.1",
+ "prettier": "^2.6.2",
+ "rollup": "^2.72.1",
+ "rollup-plugin-polyfill-node": "^0.9.0",
+ "tap": "^16.2.0",
+ "tap-mocha-reporter": "^5.0.3",
+ "tape": "^5.5.3",
+ "tar": "^6.1.11",
+ "undici": "^5.1.1",
+ "webpack": "^5.72.1",
+ "webpack-cli": "^4.9.2"
+ },
+ "engines": {
+ "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+ }
}
diff --git a/passthrough.js b/passthrough.js
deleted file mode 100644
index e3289265b9..0000000000
--- a/passthrough.js
+++ /dev/null
@@ -1,56 +0,0 @@
-'use strict';
-// a passthrough stream.
-// whatever you .write(), you can then .read() later.
-// this is not very useful on its own, but it's a handy
-// base class for certain sorts of simple filters and
-// transforms.
-
-module.exports = PassThrough;
-
-var Readable = require('./readable.js');
-var util = require('util');
-
-util.inherits(PassThrough, Readable);
-
-var fromList = require('./from-list.js');
-
-function PassThrough() {
- Readable.apply(this);
-
- this.buffer = [];
- this.length = 0;
-}
-
-// override this:
-PassThrough.prototype.transform = function(c) {
- return c;
-};
-
-PassThrough.prototype.write = function(c) {
- var needEmitReadable = this.length === 0;
-
- c = this.transform(c);
- if (!c || !c.length) return true;
-
- this.buffer.push(c);
- this.length += c.length;
- if (needEmitReadable) this.emit('readable');
- return (this.length === 0);
-};
-
-PassThrough.prototype.end = function(c) {
- this.ended = true;
- if (c && c.length) this.write(c);
- else if (!this.length) this.emit('end');
-};
-
-PassThrough.prototype.read = function(n) {
- if (!n || n >= this.length) n = this.length;
- var ret = fromList(n, this.buffer, this.length);
- this.length = Math.max(this.length - n, 0);
- if (this.length === 0) {
- var ev = this.ended ? 'end' : 'drain';
- process.nextTick(this.emit.bind(this, ev));
- }
- return ret;
-};
diff --git a/prettier.config.cjs b/prettier.config.cjs
new file mode 100644
index 0000000000..3f8a79d266
--- /dev/null
+++ b/prettier.config.cjs
@@ -0,0 +1,7 @@
+module.exports = {
+ printWidth: 120,
+ semi: false,
+ singleQuote: true,
+ bracketSpacing: true,
+ trailingComma: 'none'
+}
diff --git a/readable-stream-test/import.js b/readable-stream-test/import.js
new file mode 100644
index 0000000000..288ff4bf0d
--- /dev/null
+++ b/readable-stream-test/import.js
@@ -0,0 +1,3 @@
+import * as all from '../lib/ours'
+import * as allBrowser from '../lib/ours/browser'
+import * as allRoot from '../lib/ours'
diff --git a/readable-stream-test/runner-prepare.mjs b/readable-stream-test/runner-prepare.mjs
new file mode 100644
index 0000000000..f364ea15b1
--- /dev/null
+++ b/readable-stream-test/runner-prepare.mjs
@@ -0,0 +1,77 @@
+import { exec } from 'child_process'
+import util from '../lib/ours/util.js'
+
+function info(message) {
+ console.log(`\x1b[34m[INFO]\x1b[0m ${message}`)
+}
+
+function error(message) {
+ console.log(`\x1b[31m[ERROR]\x1b[0m ${message}`)
+}
+
+async function run(command) {
+ info(`Executing \x1b[33m${command}\x1b[0m ...`)
+ const { promise, reject, resolve } = util.createDeferredPromise()
+
+ let hasOutput = false
+ function logOutput(chunk) {
+ if (!hasOutput) {
+ hasOutput = true
+ console.log('')
+ }
+
+ console.log(chunk.toString('utf-8').trim().replace(/^/gm, ' '))
+ }
+
+ try {
+ const process = exec(command, { stdio: 'pipe' }, (error) => {
+ if (error) {
+ return reject(error)
+ }
+
+ resolve(error)
+ })
+
+ process.stdout.on('data', logOutput)
+ process.stderr.on('data', logOutput)
+ await promise
+
+ if (hasOutput) {
+ console.log('')
+ }
+ } catch (e) {
+ if (hasOutput) {
+ console.log('')
+ }
+
+ error(`Command failed with status code ${e.code}.`)
+ process.exit(1)
+ }
+}
+
+async function main() {
+ const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+ const bundler = process.argv[2] || process.env.BUNDLER
+
+ if (!validBundlers.includes(bundler)) {
+ error(`Usage: node runner-prepare.mjs [${validBundlers.join('|')}]`)
+ error('You can also use the BUNDLER environment variable.')
+ process.exit(1)
+ }
+
+ switch (bundler) {
+ case 'browserify':
+ break
+ case 'esbuild':
+ break
+ case 'rollup':
+ break
+ case 'webpack':
+ await run('webpack -c readable-stream-test/webpack.config.mjs')
+ }
+}
+
+main().catch((e) => {
+ error(e)
+ process.exit(1)
+})
diff --git a/readable-stream-test/webpack.config.mjs b/readable-stream-test/webpack.config.mjs
new file mode 100644
index 0000000000..375c11475a
--- /dev/null
+++ b/readable-stream-test/webpack.config.mjs
@@ -0,0 +1,13 @@
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../')
+
+export default {
+ mode: 'development',
+ entry: './readable-stream-test/import.js',
+ output: {
+ path: resolve(rootDir, 'readable-stream-test', 'dist'),
+ filename: 'import.js'
+ }
+}
diff --git a/readable.js b/readable.js
deleted file mode 100644
index 38a6a2ee5d..0000000000
--- a/readable.js
+++ /dev/null
@@ -1,229 +0,0 @@
-'use strict';
-
-module.exports = Readable;
-
-var Stream = require('stream');
-var util = require('util');
-var fromList = require('./from-list.js');
-
-util.inherits(Readable, Stream);
-
-function Readable(options) {
- options = options || {};
- this.bufferSize = options.bufferSize || 16 * 1024;
- this.lowWaterMark = options.lowWaterMark || 1024;
- this.buffer = [];
- this.length = 0;
- this._pipes = [];
- this._flowing = false;
- Stream.apply(this);
-}
-
-// you can override either this method, or _read(n, cb) below.
-Readable.prototype.read = function(n) {
- if (this.length === 0 && this.ended) {
- process.nextTick(this.emit.bind(this, 'end'));
- return null;
- }
-
- if (isNaN(n) || n <= 0) n = this.length;
- n = Math.min(n, this.length);
-
- var ret = n > 0 ? fromList(n, this.buffer, this.length) : null;
- this.length -= n;
-
- if (!this.ended && this.length < this.lowWaterMark) {
- this._read(this.bufferSize, function onread(er, chunk) {
- if (er) return this.emit('error', er);
-
- if (!chunk || !chunk.length) {
- this.ended = true;
- if (this.length === 0) this.emit('end');
- return;
- }
-
- this.length += chunk.length;
- this.buffer.push(chunk);
- if (this.length < this.lowWaterMark) {
- this._read(this.bufferSize, onread.bind(this));
- }
- this.emit('readable');
- }.bind(this));
- }
-
- return ret;
-};
-
-// abstract method. to be overridden in specific implementation classes.
-Readable.prototype._read = function(n, cb) {
- process.nextTick(cb.bind(this, new Error('not implemented')));
-};
-
-Readable.prototype.pipe = function(dest, opt) {
- var src = this;
- src._pipes.push(dest);
- if ((!opt || opt.end !== false) &&
- dest !== process.stdout &&
- dest !== process.stderr) {
- src.once('end', onend);
- dest.on('unpipe', function(readable) {
- if (readable === src) {
- src.removeListener('end', onend);
- }
- });
- }
-
- dest.emit('pipe', src);
- if (!src._flowing) process.nextTick(flow.bind(src));
- return dest;
-
- function onend() {
- dest.end();
- }
-};
-
-function flow(src) {
- if (!src) src = this;
- var chunk;
- var dest;
- var needDrain = 0;
- while (chunk = src.read()) {
- src._pipes.forEach(function(dest, i, list) {
- var written = dest.write(chunk);
- if (false === written) {
- needDrain++;
- dest.once('drain', ondrain);
- }
- });
- if (needDrain > 0) return;
- }
-
- src.once('readable', flow);
-
- function ondrain() {
- needDrain--;
- if (needDrain === 0) {
- flow(src);
- }
- }
-}
-
-Readable.prototype.unpipe = function(dest) {
- if (!dest) {
- // remove all of them.
- this._pipes.forEach(function(dest, i, list) {
- dest.emit('unpipe', this);
- }, this);
- this._pipes.length = 0;
- } else {
- var i = this._pipes.indexOf(dest);
- if (i !== -1) {
- dest.emit('unpipe', this);
- this._pipes.splice(i, 1);
- }
- }
- return this;
-};
-
-// kludge for on('data', fn) consumers. Sad.
-// This is *not* part of the new readable stream interface.
-// It is an ugly unfortunate mess of history.
-Readable.prototype.on = function(ev, fn) {
- if (ev === 'data') emitDataEvents(this);
- return Stream.prototype.on.call(this, ev, fn);
-};
-Readable.prototype.addListener = Readable.prototype.on;
-
-function emitDataEvents(stream) {
- var paused = false;
- var readable = false;
-
- // convert to an old-style stream.
- stream.readable = true;
- stream.pipe = Stream.prototype.pipe;
- stream.on = stream.addEventListener = Stream.prototype.on;
-
- stream.on('readable', function() {
- readable = true;
- var c;
- while (!paused && (c = stream.read())) {
- stream.emit('data', c);
- }
- if (c === null) readable = false;
- });
-
- stream.pause = function() {
- paused = true;
- };
-
- stream.resume = function() {
- paused = false;
- if (readable) stream.emit('readable');
- };
-}
-
-// wrap an old-style stream
-// This is *not* part of the readable stream interface.
-// It is an ugly unfortunate mess of history.
-Readable.prototype.wrap = function(stream) {
- this.buffer = [];
- this.length = 0;
- var paused = false;
- var ended = false;
-
- stream.on('end', function() {
- ended = true;
- if (this.length === 0) {
- this.emit('end');
- }
- }.bind(this));
-
- stream.on('data', function(chunk) {
- this.buffer.push(chunk);
- this.length += chunk.length;
- this.emit('readable');
- // if not consumed, then pause the stream.
- if (this.length > this.lowWaterMark && !paused) {
- paused = true;
- stream.pause();
- }
- }.bind(this));
-
- // proxy all the other methods.
- // important when wrapping filters and duplexes.
- for (var i in stream) {
- if (typeof stream[i] === 'function' &&
- typeof this[i] === 'undefined') {
- this[i] = function(method) { return function() {
- return stream[method].apply(stream, arguments);
- }}(i);
- }
- }
-
- // proxy certain important events.
- var events = ['error', 'close', 'destroy', 'pause', 'resume'];
- events.forEach(function(ev) {
- stream.on(ev, this.emit.bind(this, ev));
- }.bind(this));
-
- // consume some bytes. if not all is consumed, then
- // pause the underlying stream.
- this.read = function(n) {
- if (this.length === 0) return null;
-
- if (isNaN(n) || n <= 0) n = this.length;
-
- var ret = fromList(n, this.buffer, this.length);
- this.length = Math.max(0, this.length - n);
-
- if (this.length < this.lowWaterMark && paused) {
- stream.resume();
- paused = false;
- }
-
- if (this.length === 0 && ended) {
- process.nextTick(this.emit.bind(this, 'end'));
- }
- return ret;
- };
-};
diff --git a/src/browser.js b/src/browser.js
new file mode 100644
index 0000000000..ac901c17e7
--- /dev/null
+++ b/src/browser.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const CustomStream = require('../stream')
+const promises = require('../stream/promises')
+const originalDestroy = CustomStream.Readable.destroy
+
+module.exports = CustomStream.Readable
+
+// Explicit export naming is needed for ESM
+module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
+module.exports._isUint8Array = CustomStream._isUint8Array
+module.exports.isDisturbed = CustomStream.isDisturbed
+module.exports.isErrored = CustomStream.isErrored
+module.exports.isReadable = CustomStream.isReadable
+module.exports.Readable = CustomStream.Readable
+module.exports.Writable = CustomStream.Writable
+module.exports.Duplex = CustomStream.Duplex
+module.exports.Transform = CustomStream.Transform
+module.exports.PassThrough = CustomStream.PassThrough
+module.exports.addAbortSignal = CustomStream.addAbortSignal
+module.exports.finished = CustomStream.finished
+module.exports.destroy = CustomStream.destroy
+module.exports.destroy = originalDestroy
+module.exports.pipeline = CustomStream.pipeline
+module.exports.compose = CustomStream.compose
+
+Object.defineProperty(CustomStream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+})
+
+module.exports.Stream = CustomStream.Stream
+
+// Allow default importing
+module.exports.default = module.exports
diff --git a/src/errors.js b/src/errors.js
new file mode 100644
index 0000000000..4c391da26c
--- /dev/null
+++ b/src/errors.js
@@ -0,0 +1,371 @@
+'use strict'
+
+const { format, inspect } = require('./util/inspect')
+const { AggregateError: CustomAggregateError } = require('./primordials')
+
+/*
+ This file is a reduced and adapted version of the main lib/internal/errors.js file defined at
+
+ https://github.com/nodejs/node/blob/main/lib/internal/errors.js
+
+ Don't try to replace with the original file and keep it up to date (starting from E(...) definitions)
+ with the upstream file.
+*/
+
+const AggregateError = globalThis.AggregateError || CustomAggregateError
+
+const kIsNodeError = Symbol('kIsNodeError')
+const kTypes = [
+ 'string',
+ 'function',
+ 'number',
+ 'object',
+ // Accept 'Function' and 'Object' as alternative to the lower cased version.
+ 'Function',
+ 'Object',
+ 'boolean',
+ 'bigint',
+ 'symbol'
+]
+const classRegExp = /^([A-Z][a-z0-9]*)+$/
+const nodeInternalPrefix = '__node_internal_'
+const codes = {}
+
+function assert(value, message) {
+ if (!value) {
+ throw new codes.ERR_INTERNAL_ASSERTION(message)
+ }
+}
+
+// Only use this for integers! Decimal numbers do not work with this function.
+function addNumericalSeparator(val) {
+ let res = ''
+ let i = val.length
+ const start = val[0] === '-' ? 1 : 0
+ for (; i >= start + 4; i -= 3) {
+ res = `_${val.slice(i - 3, i)}${res}`
+ }
+ return `${val.slice(0, i)}${res}`
+}
+
+function getMessage(key, msg, args) {
+ if (typeof msg === 'function') {
+ assert(
+ msg.length <= args.length, // Default options do not count.
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`
+ )
+
+ return msg(...args)
+ }
+
+ const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length
+
+ assert(
+ expectedLength === args.length,
+ `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`
+ )
+
+ if (args.length === 0) {
+ return msg
+ }
+
+ return format(msg, ...args)
+}
+
+function E(code, message, Base) {
+ if (!Base) {
+ Base = Error
+ }
+
+ class NodeError extends Base {
+ constructor(...args) {
+ super(getMessage(code, message, args))
+ }
+
+ toString() {
+ return `${this.name} [${code}]: ${this.message}`
+ }
+ }
+
+ Object.defineProperties(NodeError.prototype, {
+ name: {
+ value: Base.name,
+ writable: true,
+ enumerable: false,
+ configurable: true
+ },
+ toString: {
+ value() {
+ return `${this.name} [${code}]: ${this.message}`
+ },
+ writable: true,
+ enumerable: false,
+ configurable: true
+ }
+ })
+ NodeError.prototype.code = code
+ NodeError.prototype[kIsNodeError] = true
+
+ codes[code] = NodeError
+}
+
+function hideStackFrames(fn) {
+ // We rename the functions that will be hidden to cut off the stacktrace
+ // at the outermost one
+ const hidden = nodeInternalPrefix + fn.name
+ Object.defineProperty(fn, 'name', { value: hidden })
+ return fn
+}
+
+function aggregateTwoErrors(innerError, outerError) {
+ if (innerError && outerError && innerError !== outerError) {
+ if (Array.isArray(outerError.errors)) {
+ // If `outerError` is already an `AggregateError`.
+ outerError.errors.push(innerError)
+ return outerError
+ }
+
+ const err = new AggregateError([outerError, innerError], outerError.message)
+ err.code = outerError.code
+ return err
+ }
+
+ return innerError || outerError
+}
+
+class AbortError extends Error {
+ constructor(message = 'The operation was aborted', options = undefined) {
+ if (options !== undefined && typeof options !== 'object') {
+ throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options)
+ }
+
+ super(message, options)
+ this.code = 'ABORT_ERR'
+ this.name = 'AbortError'
+ }
+}
+
+E('ERR_ASSERTION', '%s', Error)
+
+E(
+ 'ERR_INVALID_ARG_TYPE',
+ (name, expected, actual) => {
+ assert(typeof name === 'string', "'name' must be a string")
+
+ if (!Array.isArray(expected)) {
+ expected = [expected]
+ }
+
+ let msg = 'The '
+ if (name.endsWith(' argument')) {
+ // For cases like 'first argument'
+ msg += `${name} `
+ } else {
+ msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} `
+ }
+
+ msg += 'must be '
+
+ const types = []
+ const instances = []
+ const other = []
+
+ for (const value of expected) {
+ assert(typeof value === 'string', 'All expected entries have to be of type string')
+
+ if (kTypes.includes(value)) {
+ types.push(value.toLowerCase())
+ } else if (classRegExp.test(value)) {
+ instances.push(value)
+ } else {
+ assert(value !== 'object', 'The value "object" should be written as "Object"')
+ other.push(value)
+ }
+ }
+
+  // Special handle `object` in case other instances are allowed to outline
+  // the differences between each other.
+  if (instances.length > 0) {
+    const pos = types.indexOf('object')
+
+    if (pos !== -1) {
+      // Remove the single 'object' entry; it is represented by 'Object' below.
+      types.splice(pos, 1)
+      instances.push('Object')
+    }
+  }
+
+ if (types.length > 0) {
+ switch (types.length) {
+ case 1:
+ msg += `of type ${types[0]}`
+ break
+ case 2:
+ msg += `one of type ${types[0]} or ${types[1]}`
+ break
+ default: {
+ const last = types.pop()
+ msg += `one of type ${types.join(', ')}, or ${last}`
+ }
+ }
+
+ if (instances.length > 0 || other.length > 0) {
+ msg += ' or '
+ }
+ }
+
+ if (instances.length > 0) {
+ switch (instances.length) {
+ case 1:
+ msg += `an instance of ${instances[0]}`
+ break
+ case 2:
+ msg += `an instance of ${instances[0]} or ${instances[1]}`
+ break
+ default: {
+ const last = instances.pop()
+ msg += `an instance of ${instances.join(', ')}, or ${last}`
+ }
+ }
+
+ if (other.length > 0) {
+ msg += ' or '
+ }
+ }
+
+ switch (other.length) {
+ case 0:
+ break
+ case 1:
+ if (other[0].toLowerCase() !== other[0]) {
+ msg += 'an '
+ }
+
+ msg += `${other[0]}`
+ break
+ case 2:
+ msg += `one of ${other[0]} or ${other[1]}`
+ break
+ default: {
+ const last = other.pop()
+ msg += `one of ${other.join(', ')}, or ${last}`
+ }
+ }
+
+ if (actual == null) {
+ msg += `. Received ${actual}`
+ } else if (typeof actual === 'function' && actual.name) {
+ msg += `. Received function ${actual.name}`
+ } else if (typeof actual === 'object') {
+ if (actual.constructor?.name) {
+ msg += `. Received an instance of ${actual.constructor.name}`
+ } else {
+ const inspected = inspect(actual, { depth: -1 })
+ msg += `. Received ${inspected}`
+ }
+ } else {
+ let inspected = inspect(actual, { colors: false })
+ if (inspected.length > 25) {
+ inspected = `${inspected.slice(0, 25)}...`
+ }
+ msg += `. Received type ${typeof actual} (${inspected})`
+ }
+ return msg
+ },
+ TypeError
+)
+
+E(
+ 'ERR_INVALID_ARG_VALUE',
+ (name, value, reason = 'is invalid') => {
+ let inspected = inspect(value)
+ if (inspected.length > 128) {
+ inspected = inspected.slice(0, 128) + '...'
+ }
+ const type = name.includes('.') ? 'property' : 'argument'
+ return `The ${type} '${name}' ${reason}. Received ${inspected}`
+ },
+ TypeError
+)
+
+E(
+ 'ERR_INVALID_RETURN_VALUE',
+ (input, name, value) => {
+ const type = value?.constructor?.name ? `instance of ${value.constructor.name}` : `type ${typeof value}`
+ return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`
+ },
+ TypeError
+)
+
+E(
+  'ERR_MISSING_ARGS',
+  (...args) => {
+    assert(args.length > 0, 'At least one arg needs to be specified')
+
+    // Keep args as an array of quoted names so the case arms can index it
+    // and the default arm can pop/join; msg must start as a string.
+    let msg = 'The '
+    const len = args.length
+    args = args.map((a) => `"${a}"`)
+    switch (len) {
+      case 1:
+        msg += `${args[0]} argument`
+        break
+      case 2:
+        msg += `${args[0]} and ${args[1]} arguments`
+        break
+      default:
+        {
+          const last = args.pop()
+          msg += `${args.join(', ')}, and ${last} arguments`
+        }
+        break
+    }
+
+    return `${msg} must be specified`
+  },
+  TypeError
+)
+
+E(
+ 'ERR_OUT_OF_RANGE',
+ (str, range, input) => {
+ assert(range, 'Missing "range" argument')
+
+ let received
+
+ if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {
+ received = addNumericalSeparator(String(input))
+ } else if (typeof input === 'bigint') {
+ received = String(input)
+ const limit = BigInt(2) ** BigInt(32)
+ if (input > limit || input < -limit) {
+ received = addNumericalSeparator(received)
+ }
+
+ received += 'n'
+ } else {
+ received = inspect(input)
+ }
+
+ return `The value of "${str}" is out of range. It must be ${range}. Received ${received}`
+ },
+ RangeError
+)
+
+E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error)
+E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error)
+E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error)
+E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error)
+E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error)
+E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError)
+E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error)
+E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error)
+E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error)
+E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error)
+E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError)
+
+module.exports = {
+ AbortError,
+ aggregateTwoErrors: hideStackFrames(aggregateTwoErrors),
+ hideStackFrames,
+ codes
+}
diff --git a/src/index.js b/src/index.js
new file mode 100644
index 0000000000..662b7565ac
--- /dev/null
+++ b/src/index.js
@@ -0,0 +1,71 @@
+'use strict'
+
+const Stream = require('stream')
+
+if (Stream && process.env.READABLE_STREAM === 'disable') {
+ const promises = Stream.promises
+
+ // Explicit export naming is needed for ESM
+ module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer
+ module.exports._isUint8Array = Stream._isUint8Array
+ module.exports.isDisturbed = Stream.isDisturbed
+ module.exports.isErrored = Stream.isErrored
+ module.exports.isReadable = Stream.isReadable
+ module.exports.Readable = Stream.Readable
+ module.exports.Writable = Stream.Writable
+ module.exports.Duplex = Stream.Duplex
+ module.exports.Transform = Stream.Transform
+ module.exports.PassThrough = Stream.PassThrough
+ module.exports.addAbortSignal = Stream.addAbortSignal
+ module.exports.finished = Stream.finished
+ module.exports.destroy = Stream.destroy
+ module.exports.pipeline = Stream.pipeline
+ module.exports.compose = Stream.compose
+
+ Object.defineProperty(Stream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+ })
+
+ module.exports.Stream = Stream.Stream
+} else {
+ const CustomStream = require('../stream')
+ const promises = require('../stream/promises')
+ const originalDestroy = CustomStream.Readable.destroy
+
+ module.exports = CustomStream.Readable
+
+ // Explicit export naming is needed for ESM
+ module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
+ module.exports._isUint8Array = CustomStream._isUint8Array
+ module.exports.isDisturbed = CustomStream.isDisturbed
+ module.exports.isErrored = CustomStream.isErrored
+ module.exports.isReadable = CustomStream.isReadable
+ module.exports.Readable = CustomStream.Readable
+ module.exports.Writable = CustomStream.Writable
+ module.exports.Duplex = CustomStream.Duplex
+ module.exports.Transform = CustomStream.Transform
+ module.exports.PassThrough = CustomStream.PassThrough
+ module.exports.addAbortSignal = CustomStream.addAbortSignal
+ module.exports.finished = CustomStream.finished
+ module.exports.destroy = CustomStream.destroy
+ module.exports.destroy = originalDestroy
+ module.exports.pipeline = CustomStream.pipeline
+ module.exports.compose = CustomStream.compose
+
+ Object.defineProperty(CustomStream, 'promises', {
+ configurable: true,
+ enumerable: true,
+ get() {
+ return promises
+ }
+ })
+
+ module.exports.Stream = CustomStream.Stream
+}
+
+// Allow default importing
+module.exports.default = module.exports
diff --git a/src/primordials.js b/src/primordials.js
new file mode 100644
index 0000000000..ad25a91783
--- /dev/null
+++ b/src/primordials.js
@@ -0,0 +1,127 @@
+'use strict'
+
+/*
+ This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at
+
+ https://github.com/nodejs/node/blob/main/lib/internal/per_context/primordials.js
+
+ Don't try to replace with the original file and keep it up to date with the upstream file.
+*/
+
+// This is a simplified version of AggregateError
+class AggregateError extends Error {
+ constructor(errors) {
+ if (!Array.isArray(errors)) {
+ throw new TypeError(`Expected input to be an Array, got ${typeof errors}`)
+ }
+
+ let message = ''
+ for (let i = 0; i < errors.length; i++) {
+ message += ` ${errors[i].stack}\n`
+ }
+
+ super(message)
+ this.name = 'AggregateError'
+ this.errors = errors
+ }
+}
+
+module.exports = {
+ AggregateError,
+ ArrayIsArray(self) {
+ return Array.isArray(self)
+ },
+ ArrayPrototypeIncludes(self, el) {
+ return self.includes(el)
+ },
+ ArrayPrototypeIndexOf(self, el) {
+ return self.indexOf(el)
+ },
+ ArrayPrototypeJoin(self, sep) {
+ return self.join(sep)
+ },
+ ArrayPrototypeMap(self, fn) {
+ return self.map(fn)
+ },
+ ArrayPrototypePop(self, el) {
+ return self.pop(el)
+ },
+ ArrayPrototypePush(self, el) {
+ return self.push(el)
+ },
+ ArrayPrototypeSlice(self, start, end) {
+ return self.slice(start, end)
+ },
+ Error,
+ FunctionPrototypeCall(fn, thisArgs, ...args) {
+ return fn.call(thisArgs, ...args)
+ },
+ FunctionPrototypeSymbolHasInstance(self, instance) {
+ return Function.prototype[Symbol.hasInstance].call(self, instance)
+ },
+ MathFloor: Math.floor,
+ Number,
+ NumberIsInteger: Number.isInteger,
+ NumberIsNaN: Number.isNaN,
+ NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER,
+ NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER,
+ NumberParseInt: Number.parseInt,
+ ObjectDefineProperties(self, props) {
+ return Object.defineProperties(self, props)
+ },
+ ObjectDefineProperty(self, name, prop) {
+ return Object.defineProperty(self, name, prop)
+ },
+ ObjectGetOwnPropertyDescriptor(self, name) {
+ return Object.getOwnPropertyDescriptor(self, name)
+ },
+ ObjectKeys(obj) {
+ return Object.keys(obj)
+ },
+ ObjectSetPrototypeOf(target, proto) {
+ return Object.setPrototypeOf(target, proto)
+ },
+ Promise,
+ PromisePrototypeCatch(self, fn) {
+ return self.catch(fn)
+ },
+ PromisePrototypeThen(self, thenFn, catchFn) {
+ return self.then(thenFn, catchFn)
+ },
+ PromiseReject(err) {
+ return Promise.reject(err)
+ },
+ PromiseResolve(val) {
+ return Promise.resolve(val)
+ },
+ ReflectApply: Reflect.apply,
+ RegExpPrototypeTest(self, value) {
+ return self.test(value)
+ },
+ SafeSet: Set,
+ String,
+ StringPrototypeSlice(self, start, end) {
+ return self.slice(start, end)
+ },
+ StringPrototypeToLowerCase(self) {
+ return self.toLowerCase()
+ },
+ StringPrototypeToUpperCase(self) {
+ return self.toUpperCase()
+ },
+ StringPrototypeTrim(self) {
+ return self.trim()
+ },
+ Symbol,
+ SymbolFor: Symbol.for,
+ SymbolAsyncIterator: Symbol.asyncIterator,
+ SymbolHasInstance: Symbol.hasInstance,
+ SymbolIterator: Symbol.iterator,
+ SymbolDispose: Symbol.dispose || Symbol('Symbol.dispose'),
+ SymbolAsyncDispose: Symbol.asyncDispose || Symbol('Symbol.asyncDispose'),
+ TypedArrayPrototypeSet(self, buf, len) {
+ return self.set(buf, len)
+ },
+ Boolean,
+ Uint8Array
+}
diff --git a/src/test/browser/fixtures/esbuild-browsers-shims.mjs b/src/test/browser/fixtures/esbuild-browsers-shims.mjs
new file mode 100644
index 0000000000..cd789d3e51
--- /dev/null
+++ b/src/test/browser/fixtures/esbuild-browsers-shims.mjs
@@ -0,0 +1,7 @@
+import * as processModule from 'process'
+
+export const process = processModule
+
+export function setImmediate(fn, ...args) {
+ setTimeout(() => fn(...args), 1)
+}
diff --git a/src/test/browser/fixtures/esbuild.browser.config.mjs b/src/test/browser/fixtures/esbuild.browser.config.mjs
new file mode 100644
index 0000000000..6dd371dd9a
--- /dev/null
+++ b/src/test/browser/fixtures/esbuild.browser.config.mjs
@@ -0,0 +1,23 @@
+import { build } from 'esbuild'
+import alias from 'esbuild-plugin-alias'
+import { createRequire } from 'module'
+
+const require = createRequire(import.meta.url)
+
+build({
+ entryPoints: ['test/browser/test-browser.js'],
+ outfile: 'tmp/esbuild/suite.browser.js',
+ bundle: true,
+ platform: 'browser',
+ plugins: [
+ alias({
+ crypto: require.resolve('crypto-browserify'),
+ path: require.resolve('path-browserify'),
+ stream: require.resolve('stream-browserify')
+ })
+ ],
+ define: {
+ global: 'globalThis'
+ },
+ inject: ['test/browser/fixtures/esbuild-browsers-shims.mjs']
+}).catch(() => process.exit(1))
diff --git a/src/test/browser/fixtures/esbuild.node.config.mjs b/src/test/browser/fixtures/esbuild.node.config.mjs
new file mode 100644
index 0000000000..21f70ad284
--- /dev/null
+++ b/src/test/browser/fixtures/esbuild.node.config.mjs
@@ -0,0 +1,8 @@
+import { build } from 'esbuild'
+
+build({
+ entryPoints: ['test/browser/test-browser.js'],
+ outfile: 'tmp/esbuild/suite.node.js',
+ bundle: true,
+ platform: 'node'
+}).catch(() => process.exit(1))
diff --git a/src/test/browser/fixtures/index.html b/src/test/browser/fixtures/index.html
new file mode 100644
index 0000000000..16b329e8e6
--- /dev/null
+++ b/src/test/browser/fixtures/index.html
@@ -0,0 +1,72 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/test/browser/fixtures/prepare.sh b/src/test/browser/fixtures/prepare.sh
new file mode 100644
index 0000000000..56380d61f4
--- /dev/null
+++ b/src/test/browser/fixtures/prepare.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -x -e
+
+[ "$BUNDLER" == "" ] && BUNDLER=$1
+
+if [ "$BUNDLER" != "" ]; then
+ rm -rf tmp/$BUNDLER
+ mkdir -p tmp/$BUNDLER
+ cp test/browser/fixtures/index.html tmp/$BUNDLER
+fi
+
+case $BUNDLER in
+ browserify)
+ browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js
+ browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js
+ ;;
+ esbuild)
+ node src/test/browser/fixtures/esbuild.browser.config.mjs
+ node src/test/browser/fixtures/esbuild.node.config.mjs
+ ;;
+ rollup)
+ rollup -c test/browser/fixtures/rollup.browser.config.mjs
+ rollup -c test/browser/fixtures/rollup.node.config.mjs
+ ;;
+ webpack)
+ webpack -c test/browser/fixtures/webpack.browser.config.mjs
+ webpack -c test/browser/fixtures/webpack.node.config.mjs
+ ;;
+ *)
+ echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup or webpack."
+ exit 1
+ ;;
+esac
\ No newline at end of file
diff --git a/src/test/browser/fixtures/rollup.browser.config.mjs b/src/test/browser/fixtures/rollup.browser.config.mjs
new file mode 100644
index 0000000000..4a3080c354
--- /dev/null
+++ b/src/test/browser/fixtures/rollup.browser.config.mjs
@@ -0,0 +1,26 @@
+import commonjs from '@rollup/plugin-commonjs'
+import inject from '@rollup/plugin-inject'
+import nodeResolve from '@rollup/plugin-node-resolve'
+import { resolve } from 'path'
+import nodePolyfill from 'rollup-plugin-polyfill-node'
+
+export default {
+ input: ['test/browser/test-browser.js'],
+ output: {
+ intro: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }',
+ file: 'tmp/rollup/suite.browser.js',
+ format: 'iife',
+ name: 'readableStreamTestSuite'
+ },
+ plugins: [
+ commonjs(),
+ nodePolyfill(),
+ inject({
+ process: resolve('node_modules/process/browser.js')
+ }),
+ nodeResolve({
+ browser: true,
+ preferBuiltins: false
+ })
+ ]
+}
diff --git a/src/test/browser/fixtures/rollup.node.config.mjs b/src/test/browser/fixtures/rollup.node.config.mjs
new file mode 100644
index 0000000000..7eac856bce
--- /dev/null
+++ b/src/test/browser/fixtures/rollup.node.config.mjs
@@ -0,0 +1,19 @@
+import commonjs from '@rollup/plugin-commonjs'
+import nodeResolve from '@rollup/plugin-node-resolve'
+
+export default {
+ input: ['test/browser/test-browser.js'],
+ output: {
+ file: 'tmp/rollup/suite.node.js',
+ format: 'cjs',
+ name: 'readableStreamTestSuite',
+ exports: 'auto'
+ },
+ plugins: [
+ commonjs(),
+ nodeResolve({
+ browser: false,
+ preferBuiltins: true
+ })
+ ]
+}
diff --git a/src/test/browser/fixtures/webpack.browser.config.mjs b/src/test/browser/fixtures/webpack.browser.config.mjs
new file mode 100644
index 0000000000..a2d889e0da
--- /dev/null
+++ b/src/test/browser/fixtures/webpack.browser.config.mjs
@@ -0,0 +1,35 @@
+import { createRequire } from 'module'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import webpack from 'webpack'
+
+const require = createRequire(import.meta.url)
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../')
+
+export default {
+ entry: './test/browser/test-browser.js',
+ output: {
+ filename: 'suite.browser.js',
+ path: resolve(rootDir, 'tmp/webpack')
+ },
+ mode: 'production',
+ target: 'web',
+ performance: false,
+ plugins: [
+ new webpack.BannerPlugin({
+ banner: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }',
+ raw: true
+ }),
+ new webpack.ProvidePlugin({
+ process: require.resolve('process')
+ })
+ ],
+ resolve: {
+ aliasFields: ['browser'],
+ fallback: {
+ crypto: require.resolve('crypto-browserify'),
+ path: require.resolve('path-browserify'),
+ stream: require.resolve('stream-browserify')
+ }
+ }
+}
diff --git a/src/test/browser/fixtures/webpack.node.config.mjs b/src/test/browser/fixtures/webpack.node.config.mjs
new file mode 100644
index 0000000000..3b20bdef47
--- /dev/null
+++ b/src/test/browser/fixtures/webpack.node.config.mjs
@@ -0,0 +1,15 @@
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../')
+
+export default {
+ entry: './test/browser/test-browser.js',
+ output: {
+ filename: 'suite.node.js',
+ path: resolve(rootDir, 'tmp/webpack')
+ },
+ mode: 'production',
+ target: 'node',
+ performance: false
+}
diff --git a/src/test/browser/runner-browser.mjs b/src/test/browser/runner-browser.mjs
new file mode 100644
index 0000000000..e8bb84482c
--- /dev/null
+++ b/src/test/browser/runner-browser.mjs
@@ -0,0 +1,109 @@
+import { resolve } from 'node:path'
+import { Readable } from 'node:stream'
+import { fileURLToPath } from 'node:url'
+import { chromium, firefox, webkit } from 'playwright'
+import reporter from 'tap-mocha-reporter'
+import Parser from 'tap-parser'
+
+const validBrowsers = ['chrome', 'firefox', 'safari', 'edge']
+const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+
+function parseEnviroment() {
+ const headless = process.env.HEADLESS !== 'false'
+ const reporter = process.env.SKIP_REPORTER !== 'true'
+
+ let [browser, bundler] = process.argv.slice(2, 4)
+
+ if (!browser) {
+ browser = process.env.BROWSER
+ }
+
+ if (!bundler) {
+ bundler = process.env.BUNDLER
+ }
+
+ if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) {
+ console.error(`Usage: node runner-browser.mjs [${validBrowsers.join('|')}] [${validBundlers.join('|')}]`)
+ console.error('You can also use the BROWSER and BUNDLER environment variables.')
+ process.exit(1)
+ }
+
+ return { browser, bundler, headless, reporter }
+}
+
+function createBrowser({ browser: id, headless }) {
+ switch (id) {
+ case 'firefox':
+ return firefox.launch({ headless })
+ case 'safari':
+ return webkit.launch({ headless })
+ case 'edge':
+ return chromium.launch({ headless, channel: 'msedge' })
+ default:
+ return chromium.launch({ headless })
+ }
+}
+
+function setupTape(page, configuration) {
+ const output = new Readable({ read() {} })
+ const parser = new Parser({ strict: true })
+
+ output.pipe(parser)
+
+ if (configuration.reporter) {
+ output.pipe(reporter('spec'))
+ }
+
+ parser.on('line', (line) => {
+ if (line !== '# readable-stream-finished\n') {
+ if (line.startsWith('# not ok')) {
+ process.exitCode = 1
+ }
+
+ if (!configuration.reporter) {
+ console.log(line.replace(/\n$/, ''))
+ }
+
+ return
+ }
+
+ output.push(null)
+
+ if (configuration.headless) {
+ browser.close()
+ }
+ })
+
+ page.on('console', (msg) => {
+ if (msg.type() === 'error') {
+ console.error(`\x1b[31m\x1b[1mconsole.error:\x1b[0m ${msg.text()}\n`)
+ return
+ }
+
+ output.push(msg.text() + '\n')
+ })
+
+ // Firefox in headless mode is showing an error even if onerror caught it. Disable in that case
+ if (!configuration.headless || configuration.browser !== 'firefox') {
+ page.on('pageerror', (err) => {
+ console.log('\x1b[31m\x1b[1m--- The browser thrown an uncaught error ---\x1b[0m')
+ console.log(err.stack)
+
+ if (configuration.headless) {
+ console.log('\x1b[31m\x1b[1m--- Exiting with exit code 1 ---\x1b[0m')
+ process.exit(1)
+ } else {
+ process.exitCode = 1
+ }
+ })
+ }
+}
+
+const configuration = parseEnviroment()
+const browser = await createBrowser(configuration)
+const page = await browser.newPage()
+setupTape(page, configuration)
+
+// Execute the test suite
+const __dirname = fileURLToPath(new URL('.', import.meta.url))
+await page.goto(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/index.html`)}`)
diff --git a/src/test/browser/runner-node.mjs b/src/test/browser/runner-node.mjs
new file mode 100644
index 0000000000..840d19e2dc
--- /dev/null
+++ b/src/test/browser/runner-node.mjs
@@ -0,0 +1,77 @@
+import { resolve } from 'node:path'
+import { Duplex } from 'node:stream'
+import { fileURLToPath } from 'node:url'
+import reporter from 'tap-mocha-reporter'
+import Parser from 'tap-parser'
+
+const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+
+function parseEnviroment() {
+ const reporter = process.env.SKIP_REPORTER !== 'true'
+ const bundler = process.argv[2] || process.env.BUNDLER
+
+ if (!validBundlers.includes(bundler)) {
+ console.error(`Usage: node runner-node.mjs [${validBundlers.join('|')}]`)
+ console.error('You can also use the BUNDLER environment variable.')
+ process.exit(1)
+ }
+
+ return { bundler, reporter }
+}
+
+function setupTape(configuration) {
+ const output = new Duplex({ read() {}, write() {} })
+ const parser = new Parser({ strict: true })
+
+ globalThis.logger = function (message, ...args) {
+ if (typeof message !== 'string') {
+ console.log(message, ...args)
+ return
+ }
+
+ output.push(message + '\n')
+ }
+
+ output.pipe(parser)
+
+ if (configuration.reporter) {
+ output.pipe(reporter('spec'))
+ }
+
+ process.on('uncaughtException', (err) => {
+ if (global.onerror) {
+ global.onerror(err)
+ } else {
+ process.removeAllListeners('uncaughtException')
+ throw err
+ }
+ })
+
+ parser.on('line', (line) => {
+ if (line === '# readable-stream-finished\n') {
+ output.push(null)
+ output.end()
+ return
+ } else if (line.startsWith('# not ok')) {
+ process.exitCode = 1
+ }
+
+ if (!configuration.reporter) {
+ console.log(line.replace(/\n$/, ''))
+ }
+ })
+}
+
+async function main() {
+ const configuration = parseEnviroment()
+ setupTape(configuration)
+
+ // Execute the test suite
+ const __dirname = fileURLToPath(new URL('.', import.meta.url))
+ await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`)
+}
+
+main().catch((e) => {
+ console.error(e)
+ process.exit(1)
+})
diff --git a/src/test/browser/runner-prepare.mjs b/src/test/browser/runner-prepare.mjs
new file mode 100644
index 0000000000..76e38f8504
--- /dev/null
+++ b/src/test/browser/runner-prepare.mjs
@@ -0,0 +1,107 @@
+import { exec } from 'child_process'
+import { promises } from 'fs'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import util from '../../lib/ours/util.js'
+const { copyFile, mkdir, rmdir } = promises
+
+function highlightFile(file) {
+ return `\x1b[33m${file.replace(process.cwd() + '/', '')}\x1b[0m`
+}
+
+function info(message) {
+ console.log(`\x1b[34m[INFO]\x1b[0m ${message}`)
+}
+
+function error(message) {
+  console.log(`\x1b[31m[ERROR]\x1b[0m ${message}`)
+}
+
+async function run(command) {
+ info(`Executing \x1b[33m${command}\x1b[0m ...`)
+ const { promise, reject, resolve } = util.createDeferredPromise()
+
+ let hasOutput = false
+ function logOutput(chunk) {
+ if (!hasOutput) {
+ hasOutput = true
+ console.log('')
+ }
+
+ console.log(chunk.toString('utf-8').trim().replace(/^/gm, ' '))
+ }
+
+ try {
+ const process = exec(command, { stdio: 'pipe' }, (error) => {
+ if (error) {
+ return reject(error)
+ }
+
+ resolve(error)
+ })
+
+ process.stdout.on('data', logOutput)
+ process.stderr.on('data', logOutput)
+ await promise
+
+ if (hasOutput) {
+ console.log('')
+ }
+ } catch (e) {
+ if (hasOutput) {
+ console.log('')
+ }
+
+ error(`Command failed with status code ${e.code}.`)
+ process.exit(1)
+ }
+}
+
+async function main() {
+ const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+ const bundler = process.argv[2] || process.env.BUNDLER
+
+ if (!validBundlers.includes(bundler)) {
+ error(`Usage: node await runner-prepare.mjs [${validBundlers.join('|')}]`)
+ error('You can also use the BUNDLER environment variable.')
+ process.exit(1)
+ }
+
+ const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), `../../tmp/${bundler}`)
+ const sourceIndex = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../test/browser/fixtures/index.html')
+ const targetIndex = resolve(rootDir, 'index.html')
+
+ info(`Emptying directory ${highlightFile(rootDir)} ...`)
+ try {
+ await rmdir(rootDir, { recursive: true })
+ } catch (e) {
+ // No-op
+ }
+ await mkdir(rootDir, { recursive: true })
+
+ info(`Copying file ${highlightFile(sourceIndex)} to ${highlightFile(targetIndex)} ...`)
+ await copyFile(sourceIndex, targetIndex)
+
+ switch (bundler) {
+ case 'browserify':
+ await run('browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js')
+ await run('browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js')
+ break
+ case 'esbuild':
+ await run('node src/test/browser/fixtures/esbuild.browser.config.mjs')
+ await run('node src/test/browser/fixtures/esbuild.node.config.mjs')
+ break
+ case 'rollup':
+ await run('rollup -c test/browser/fixtures/rollup.browser.config.mjs')
+ await run('rollup -c test/browser/fixtures/rollup.node.config.mjs')
+ break
+ case 'webpack':
+ await run('webpack -c test/browser/fixtures/webpack.browser.config.mjs')
+ await run('webpack -c test/browser/fixtures/webpack.node.config.mjs')
+ }
+}
+
+main().catch((e) => {
+ error(e)
+ process.exit(1)
+})
diff --git a/src/test/browser/symbols.js b/src/test/browser/symbols.js
new file mode 100644
index 0000000000..8450b8f64c
--- /dev/null
+++ b/src/test/browser/symbols.js
@@ -0,0 +1,6 @@
+'use strict'
+
+module.exports = {
+ kReadableStreamSuiteName: Symbol('readable-stream.suiteName'),
+ kReadableStreamSuiteHasMultipleTests: Symbol('readable-stream.suiteHasMultipleTests')
+}
diff --git a/src/test/browser/test-browser.js b/src/test/browser/test-browser.js
new file mode 100644
index 0000000000..a0dbab5e5c
--- /dev/null
+++ b/src/test/browser/test-browser.js
@@ -0,0 +1,136 @@
+'use strict'
+
+// Prefer the logger injected by the Playwright harness when present;
+// fall back to console.log for manual runs.
+const logger = globalThis.logger || console.log
+const tape = require('tape')
+const { createDeferredPromise } = require('../../lib/ours/util')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+// Counters aggregated across every suite; runTests() polls them to know
+// when all asynchronous subtests have reported and to print the summary.
+let totalTests = 0
+let completed = 0
+let failed = 0
+
+async function test(rootName, fn) {
+ // Gather all tests in the file
+ const tests = {}
+
+ function addTests(name, fn) {
+ tests[`${rootName} - ${name}`] = fn
+ }
+
+ if (fn[kReadableStreamSuiteHasMultipleTests]) {
+ fn(addTests)
+ } else {
+ tests[rootName] = fn
+ }
+
+ // Execute each test in a separate harness and then output overall results
+ for (const [name, subtest] of Object.entries(tests)) {
+ const currentIndex = ++totalTests
+ const harness = tape.createHarness()
+ const { promise, resolve } = createDeferredPromise()
+
+ const messages = [`# Subtest: ${name}`]
+
+ harness.createStream().on('data', function (row) {
+ if (row.startsWith('TAP version') || row.match(new RegExp(`^# (?:${name})`))) {
+ return
+ }
+
+ messages.push(row.trim().replace(/^/gm, ' '))
+ })
+
+ harness.onFinish(() => {
+ const success = harness._exitCode === 0
+
+ messages.push(`${success ? 'ok' : 'not ok'} ${currentIndex} - ${name}`)
+ logger(messages.join('\n'))
+ completed++
+
+ if (!success) {
+ failed++
+ }
+
+ resolve()
+ })
+
+ harness(name, subtest)
+
+ await promise
+ }
+}
+
+/**
+ * Execute every suite serially, then emit the TAP trailer once all
+ * subtests have reported in.
+ *
+ * Completion is detected by polling the shared counters because subtests
+ * finish asynchronously through their harness callbacks, after the
+ * for-loop below has already awaited each suite.
+ */
+async function runTests(suites) {
+  // Setup an interval
+  const interval = setInterval(() => {
+    if (completed < totalTests) {
+      return
+    }
+
+    clearInterval(interval)
+
+    logger(`1..${totalTests}`)
+    logger(`# tests ${totalTests}`)
+    logger(`# pass ${completed - failed}`)
+    logger(`# fail ${failed}`)
+    logger(`# ${failed === 0 ? 'ok' : 'not ok'}`)
+
+    // This line is used by the playwright script to detect we're done
+    logger('# readable-stream-finished')
+  }, 100)
+
+  // Execute each test serially, to avoid side-effects errors when dealing with global error handling
+  for (const suite of suites) {
+    await test(suite[kReadableStreamSuiteName], suite)
+  }
+}
+
+// Important: Do not try to make the require dynamic because bundlers will not like it
+// (browserify/esbuild/rollup/webpack all need statically analyzable requires
+// to include these files in the bundle).
+runTests([
+  require('./test-stream-big-packet'),
+  require('./test-stream-big-push'),
+  require('./test-stream-duplex'),
+  require('./test-stream-end-paused'),
+  require('./test-stream-finished'),
+  require('./test-stream-ispaused'),
+  require('./test-stream-pipe-after-end'),
+  require('./test-stream-pipe-cleanup-pause'),
+  require('./test-stream-pipe-cleanup'),
+  require('./test-stream-pipe-error-handling'),
+  require('./test-stream-pipe-event'),
+  require('./test-stream-pipe-without-listenerCount'),
+  require('./test-stream-pipeline'),
+  require('./test-stream-push-order'),
+  require('./test-stream-push-strings'),
+  require('./test-stream-readable-constructor-set-methods'),
+  require('./test-stream-readable-event'),
+  require('./test-stream-sync-write'),
+  require('./test-stream-transform-constructor-set-methods'),
+  require('./test-stream-transform-objectmode-falsey-value'),
+  require('./test-stream-transform-split-objectmode'),
+  require('./test-stream-unshift-empty-chunk'),
+  require('./test-stream-unshift-read-race'),
+  require('./test-stream-writable-change-default-encoding'),
+  require('./test-stream-writable-constructor-set-methods'),
+  require('./test-stream-writable-decoded-encoding'),
+  require('./test-stream-writev'),
+  require('./test-stream2-base64-single-char-read-end'),
+  require('./test-stream2-compatibility'),
+  require('./test-stream2-large-read-stall'),
+  require('./test-stream2-objects'),
+  require('./test-stream2-pipe-error-handling'),
+  require('./test-stream2-pipe-error-once-listener'),
+  require('./test-stream2-push'),
+  require('./test-stream2-readable-empty-buffer-no-eof'),
+  require('./test-stream2-readable-from-list'),
+  require('./test-stream2-readable-legacy-drain'),
+  require('./test-stream2-readable-non-empty-end'),
+  require('./test-stream2-readable-wrap-empty'),
+  require('./test-stream2-readable-wrap'),
+  require('./test-stream2-set-encoding'),
+  require('./test-stream2-transform'),
+  require('./test-stream2-unpipe-drain'),
+  require('./test-stream2-writable'),
+  require('./test-stream3-pause-then-read')
+]).catch((e) => {
+  console.error(e)
+})
diff --git a/src/test/browser/test-stream-big-packet.js b/src/test/browser/test-stream-big-packet.js
new file mode 100644
index 0000000000..8859e4b441
--- /dev/null
+++ b/src/test/browser/test-stream-big-packet.js
@@ -0,0 +1,70 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: a tiny write piped in from a second source must
+// still reach the shared destination while a larger-than-highWaterMark
+// write from the first source remains buffered.
+module.exports = function (t) {
+  t.plan(3)
+
+  let passed = false
+
+  function PassThrough() {
+    Transform.call(this)
+  }
+  inherits(PassThrough, Transform)
+
+  PassThrough.prototype._transform = function (chunk, encoding, done) {
+    this.push(chunk)
+    done()
+  }
+
+  function TestStream() {
+    Transform.call(this)
+  }
+  inherits(TestStream, Transform)
+
+  TestStream.prototype._transform = function (chunk, encoding, done) {
+    if (!passed) {
+      // Char 'a' only exists in the last write
+      passed = indexOf(chunk.toString(), 'a') >= 0
+    }
+    if (passed) {
+      t.ok(passed)
+    }
+    done()
+  }
+
+  const s1 = new PassThrough()
+  const s2 = new PassThrough()
+  const s3 = new TestStream()
+
+  s1.pipe(s3)
+  // Don't let s2 auto close which may close s3
+  s2.pipe(s3, { end: false })
+
+  // We must write a buffer larger than highWaterMark
+  const big = Buffer.alloc(s1._writableState.highWaterMark + 1)
+  big.fill('x')
+
+  // Since big is larger than highWaterMark, it will be buffered internally.
+  t.notOk(s1.write(big))
+
+  // 'tiny' is small enough to pass through internal buffer.
+  t.ok(s2.write('tiny'))
+
+  // Write some small data in next IO loop, which will never be written to s3
+  // Because 'drain' event is not emitted from s1 and s1 is still paused
+  setImmediate(s1.write.bind(s1), 'later')
+
+  // Hand-rolled character search kept verbatim from the upstream test.
+  function indexOf(xs, x) {
+    for (let i = 0, l = xs.length; i < l; i++) {
+      if (xs[i] === x) {
+        return i
+      }
+    }
+    return -1
+  }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-big-packet'
diff --git a/src/test/browser/test-stream-big-push.js b/src/test/browser/test-stream-big-push.js
new file mode 100644
index 0000000000..08d8873f3a
--- /dev/null
+++ b/src/test/browser/test-stream-big-push.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: pushing more than highWaterMark makes push()
+// return false, and subsequent reads drain buffered data in the expected
+// order, ending after exactly two _read() calls.
+module.exports = function (t) {
+  t.plan(10)
+
+  const str = 'asdfasdfasdfasdfasdf'
+
+  const r = new Readable({
+    highWaterMark: 5,
+    encoding: 'utf8'
+  })
+
+  let reads = 0
+  let eofed = false
+  let ended = false
+
+  r._read = function (n) {
+    if (reads === 0) {
+      // Deliver the first chunk asynchronously (zero-delay timeout).
+      setTimeout(function () {
+        r.push(str)
+      })
+      reads++
+    } else if (reads === 1) {
+      const ret = r.push(str)
+      t.equal(ret, false)
+      reads++
+    } else {
+      t.notOk(eofed)
+      eofed = true
+      r.push(null)
+    }
+  }
+
+  r.on('end', function () {
+    ended = true
+  })
+
+  // push some data in to start.
+  // we've never gotten any read event at this point.
+  const ret = r.push(str)
+
+  // should be false. > hwm
+  t.notOk(ret)
+  let chunk = r.read()
+  t.equal(chunk, str)
+
+  chunk = r.read()
+  t.equal(chunk, null)
+
+  r.once('readable', function () {
+    // this time, we'll get *all* the remaining data, because
+    // it's been added synchronously, as the read WOULD take
+    // us below the hwm, and so it triggered a _read() again,
+    // which synchronously added more, which we then return.
+    chunk = r.read()
+    t.equal(chunk, str + str)
+
+    chunk = r.read()
+    t.equal(chunk, null)
+  })
+
+  // Second 'end' listener: verifies final state after the stream finishes.
+  r.on('end', function () {
+    t.ok(eofed)
+    t.ok(ended)
+    t.equal(reads, 2)
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-big-push'
diff --git a/src/test/browser/test-stream-duplex.js b/src/test/browser/test-stream-duplex.js
new file mode 100644
index 0000000000..1278591382
--- /dev/null
+++ b/src/test/browser/test-stream-duplex.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const { Duplex } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: a Duplex in objectMode applies objectMode to
+// both its readable and writable sides, and objects flow through intact.
+module.exports = function (t) {
+  t.plan(4)
+
+  const stream = new Duplex({ objectMode: true })
+
+  t.ok(stream._readableState.objectMode)
+  t.ok(stream._writableState.objectMode)
+
+  let written
+  let read
+
+  stream._write = function (obj, _, cb) {
+    written = obj
+    cb()
+  }
+
+  stream._read = function () {}
+
+  stream.on('data', function (obj) {
+    read = obj
+  })
+
+  stream.on('end', function () {
+    t.equal(read.val, 1)
+    t.equal(written.val, 2)
+  })
+
+  // end() feeds the writable side; push(null) ends the readable side so
+  // the 'end' event above can fire.
+  stream.push({ val: 1 })
+  stream.end({ val: 2 })
+  stream.push(null)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-duplex'
diff --git a/src/test/browser/test-stream-end-paused.js b/src/test/browser/test-stream-end-paused.js
new file mode 100644
index 0000000000..76a98da510
--- /dev/null
+++ b/src/test/browser/test-stream-end-paused.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: a paused stream must not emit 'data', and
+// 'end' only fires after resume().
+module.exports = function (t) {
+  t.plan(2)
+
+  const stream = new Readable()
+  let calledRead = false
+
+  stream._read = function () {
+    t.notOk(calledRead)
+    calledRead = true
+    this.push(null)
+  }
+
+  stream.on('data', function () {
+    throw new Error('should not ever get data')
+  })
+
+  stream.pause()
+
+  // Resume on a later tick (zero-delay timeout) so any erroneous 'data'
+  // emission while paused would have fired first.
+  setTimeout(function () {
+    stream.on('end', function () {
+      t.ok(calledRead)
+    })
+    stream.resume()
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-end-paused'
diff --git a/src/test/browser/test-stream-finished.js b/src/test/browser/test-stream-finished.js
new file mode 100644
index 0000000000..f9ddc907f9
--- /dev/null
+++ b/src/test/browser/test-stream-finished.js
@@ -0,0 +1,70 @@
+'use strict'
+
+const { Writable, Readable, Transform, finished } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+// Multi-test suite: stream.finished() fires without error for a Readable
+// that ends, a Writable that ends, and a Transform that both finishes and
+// ends.
+module.exports = function (test) {
+  test('readable finished', function (t) {
+    t.plan(1)
+
+    const rs = new Readable({
+      read: function read() {}
+    })
+
+    finished(rs, (err) => {
+      t.ifErr(err)
+    })
+
+    // EOF plus resume() so the stream actually flows to its end.
+    rs.push(null)
+    rs.resume()
+  })
+
+  test('writable finished', function (t) {
+    t.plan(1)
+
+    const ws = new Writable({
+      write: function write(data, enc, cb) {
+        cb()
+      }
+    })
+
+    finished(ws, (err) => {
+      t.ifErr(err)
+    })
+
+    ws.end()
+  })
+
+  test('transform finished', function (t) {
+    t.plan(3)
+
+    const tr = new Transform({
+      transform: function transform(data, enc, cb) {
+        cb()
+      }
+    })
+
+    let finish = false
+    let ended = false
+
+    tr.on('end', function () {
+      ended = true
+    })
+
+    tr.on('finish', function () {
+      finish = true
+    })
+
+    // finished() must only fire after BOTH sides of the Transform are done.
+    finished(tr, (err) => {
+      t.ifErr(err)
+      t.ok(finish)
+      t.ok(ended)
+    })
+
+    tr.end()
+    tr.resume()
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-finished'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-ispaused.js b/src/test/browser/test-stream-ispaused.js
new file mode 100644
index 0000000000..36e55d7c9e
--- /dev/null
+++ b/src/test/browser/test-stream-ispaused.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: isPaused() reflects explicit pause()/resume()
+// calls, not merely whether the stream is flowing.
+module.exports = function (t) {
+  t.plan(4)
+
+  const readable = new stream.Readable()
+
+  // _read is a noop, here.
+  readable._read = () => {}
+
+  // default state of a stream is not "paused"
+  t.notOk(readable.isPaused())
+
+  // make the stream start flowing...
+  readable.on('data', () => {})
+
+  // still not paused.
+  t.notOk(readable.isPaused())
+
+  readable.pause()
+  t.ok(readable.isPaused())
+  readable.resume()
+  t.notOk(readable.isPaused())
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-ispaused'
diff --git a/src/test/browser/test-stream-pipe-after-end.js b/src/test/browser/test-stream-pipe-after-end.js
new file mode 100644
index 0000000000..13aac69313
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-after-end.js
@@ -0,0 +1,69 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: piping a Readable that has already ended must
+// still finish the destination, and 'end' is deferred until read().
+module.exports = function (t) {
+  t.plan(4)
+
+  function TestReadable(opt) {
+    if (!(this instanceof TestReadable)) {
+      return new TestReadable(opt)
+    }
+    Readable.call(this, opt)
+    this._ended = false
+  }
+  inherits(TestReadable, Readable)
+
+  TestReadable.prototype._read = function (n) {
+    if (this._ended) {
+      this.emit('error', new Error('_read called twice'))
+    }
+    this._ended = true
+    this.push(null)
+  }
+
+  function TestWritable(opt) {
+    if (!(this instanceof TestWritable)) {
+      return new TestWritable(opt)
+    }
+    Writable.call(this, opt)
+    this._written = []
+  }
+  inherits(TestWritable, Writable)
+
+  TestWritable.prototype._write = function (chunk, encoding, cb) {
+    this._written.push(chunk)
+    cb()
+  }
+
+  // this one should not emit 'end' until we read() from it later.
+  const ender = new TestReadable()
+  let enderEnded = false
+
+  // what happens when you pipe() a Readable that's already ended?
+  const piper = new TestReadable()
+  // pushes EOF null, and length=0, so this will trigger 'end'
+  piper.read()
+
+  // Zero-delay timeout: let piper's 'end' processing settle first.
+  setTimeout(function () {
+    ender.on('end', function () {
+      enderEnded = true
+      t.ok(true, 'enderEnded')
+    })
+    t.notOk(enderEnded)
+
+    const c = ender.read()
+    t.equal(c, null)
+
+    const w = new TestWritable()
+    w.on('finish', function () {
+      t.ok(true, 'writableFinished')
+    })
+
+    piper.pipe(w)
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-after-end'
diff --git a/src/test/browser/test-stream-pipe-cleanup-pause.js b/src/test/browser/test-stream-pipe-cleanup-pause.js
new file mode 100644
index 0000000000..53078d3b7b
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-cleanup-pause.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: after unpipe()-ing a backed-up destination and
+// piping to a new one, data keeps flowing to the new destination.
+module.exports = function (t) {
+  t.plan(3)
+
+  const reader = new stream.Readable()
+  const writer1 = new stream.Writable()
+  const writer2 = new stream.Writable()
+
+  // 560000 is chosen here because it is larger than the (default) highWaterMark
+  // and will cause `.write()` to return false
+  // See: https://github.com/nodejs/node/issues/2323
+  const buffer = Buffer.alloc(560000)
+
+  reader._read = function () {}
+
+  writer1._write = function (chunk, encoding, cb) {
+    this.emit('chunk-received')
+    cb()
+  }
+
+  // Once writer1 has seen one chunk, switch the pipe over to writer2 and
+  // push three more oversized chunks across successive ticks.
+  writer1.on('chunk-received', function () {
+    reader.unpipe(writer1)
+    reader.pipe(writer2)
+    reader.push(buffer)
+
+    setImmediate(function () {
+      reader.push(buffer)
+
+      setImmediate(function () {
+        reader.push(buffer)
+      })
+    })
+  })
+
+  // Each chunk reaching writer2 counts toward the plan of 3.
+  writer2._write = function (chunk, encoding, cb) {
+    t.ok(true)
+    cb()
+  }
+
+  reader.pipe(writer1)
+  reader.push(buffer)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup-pause'
diff --git a/src/test/browser/test-stream-pipe-cleanup.js b/src/test/browser/test-stream-pipe-cleanup.js
new file mode 100644
index 0000000000..9dcf0dad90
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-cleanup.js
@@ -0,0 +1,117 @@
+'use strict'
+// This test asserts that Stream.prototype.pipe does not leave listeners
+// hanging on the source or dest.
+
+const inherits = require('inherits')
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+  t.plan(27)
+
+  // Legacy-stream pipe semantics differ on ancient Node; skip there.
+  // NOTE(review): an early return here would leave the plan of 27 unmet —
+  // presumably unreachable in the browsers this suite targets.
+  if (/^v0\.8\./.test(process.version)) {
+    return
+  }
+
+  // Minimal legacy writable: counts how many times end/destroy are called.
+  function Writable() {
+    this.writable = true
+    this.endCalls = 0
+    Stream.call(this)
+  }
+  inherits(Writable, Stream)
+
+  Writable.prototype.end = function () {
+    this.endCalls++
+  }
+
+  Writable.prototype.destroy = function () {
+    this.endCalls++
+  }
+
+  // Minimal legacy readable.
+  function Readable() {
+    this.readable = true
+    Stream.call(this)
+  }
+
+  inherits(Readable, Stream)
+
+  Readable.prototype._read = function () {}
+
+  // Minimal legacy duplex: a counting Writable that is also readable.
+  function Duplex() {
+    this.readable = true
+    Writable.call(this)
+  }
+
+  inherits(Duplex, Writable)
+
+  Duplex.prototype._read = function () {}
+
+  let i = 0
+  let r
+  let w = new Writable()
+  const limit = 100
+
+  // 'end' from each source must call dest.end() once and clean up listeners.
+  for (i = 0; i < limit; i++) {
+    r = new Readable()
+    r.pipe(w)
+    r.emit('end')
+  }
+  t.equal(0, r.listeners('end').length)
+  t.equal(limit, w.endCalls)
+
+  w.endCalls = 0
+
+  // Same for 'close' from the source.
+  for (i = 0; i < limit; i++) {
+    r = new Readable()
+    r.pipe(w)
+    r.emit('close')
+  }
+  t.equal(0, r.listeners('close').length)
+  t.equal(limit, w.endCalls)
+
+  w.endCalls = 0
+
+  r = new Readable()
+
+  // 'close' from each destination must also detach its listeners.
+  for (i = 0; i < limit; i++) {
+    w = new Writable()
+    r.pipe(w)
+    w.emit('close')
+  }
+  t.equal(0, w.listeners('close').length)
+
+  // Two chained pipelines through a Duplex: verify exact listener counts
+  // before and after each stage ends.
+  r = new Readable()
+  w = new Writable()
+  const d = new Duplex()
+  r.pipe(d) // pipeline A
+  d.pipe(w) // pipeline B
+  t.equal(r.listeners('end').length, 2) // A.onend, A.cleanup
+  t.equal(r.listeners('close').length, 2) // A.onclose, A.cleanup
+  t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup
+  t.equal(d.listeners('close').length, 3) // A.cleanup, B.onclose, B.cleanup
+  t.equal(w.listeners('end').length, 0)
+  t.equal(w.listeners('close').length, 1) // B.cleanup
+
+  r.emit('end')
+  t.equal(d.endCalls, 1)
+  t.equal(w.endCalls, 0)
+  t.equal(r.listeners('end').length, 0)
+  t.equal(r.listeners('close').length, 0)
+  t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup
+  t.equal(d.listeners('close').length, 2) // B.onclose, B.cleanup
+  t.equal(w.listeners('end').length, 0)
+  t.equal(w.listeners('close').length, 1) // B.cleanup
+
+  d.emit('end')
+  t.equal(d.endCalls, 1)
+  t.equal(w.endCalls, 1)
+  t.equal(r.listeners('end').length, 0)
+  t.equal(r.listeners('close').length, 0)
+  t.equal(d.listeners('end').length, 0)
+  t.equal(d.listeners('close').length, 0)
+  t.equal(w.listeners('end').length, 0)
+  t.equal(w.listeners('close').length, 0)
+  d.end()
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup'
diff --git a/src/test/browser/test-stream-pipe-error-handling.js b/src/test/browser/test-stream-pipe-error-handling.js
new file mode 100644
index 0000000000..7cbfbcabb4
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-error-handling.js
@@ -0,0 +1,111 @@
+'use strict'
+
+const { Readable, Writable, Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+// Multi-test suite around error propagation through pipe(): caught by a
+// listener, thrown without one, and the effect of removing listeners.
+module.exports = function (test) {
+  test('Error Listener Catches', function (t) {
+    t.plan(1)
+
+    const source = new Stream()
+    const dest = new Stream()
+
+    source._read = function () {}
+    source.pipe(dest)
+
+    let gotErr = null
+    source.on('error', function (err) {
+      gotErr = err
+    })
+
+    const err = new Error('This stream turned into bacon.')
+    source.emit('error', err)
+    t.strictEqual(gotErr, err)
+  })
+
+  test('Error WithoutListener Throws', function (t) {
+    t.plan(1)
+
+    const source = new Stream()
+    const dest = new Stream()
+
+    source._read = function () {}
+    source.pipe(dest)
+
+    const err = new Error('This stream turned into bacon.')
+
+    // With no 'error' listener the emit must throw synchronously.
+    let gotErr = null
+    try {
+      source.emit('error', err)
+    } catch (e) {
+      gotErr = e
+    }
+
+    t.strictEqual(gotErr, err)
+  })
+
+  test('Error With Removed Listener Throws', function (t) {
+    t.plan(2)
+
+    // NOTE(review): uses `global.onerror` to intercept the uncaught error —
+    // presumably shimmed to window.onerror by the bundler; verify per bundler.
+    const onerror = global.onerror
+
+    const r = new Readable()
+    const w = new Writable()
+    let removed = false
+    let caught = false
+
+    global.onerror = () => {
+      t.notOk(caught)
+      global.onerror = onerror
+      // Returning true suppresses default uncaught-error handling.
+      return true
+    }
+
+    r._read = function () {
+      setTimeout(function () {
+        t.ok(removed)
+        w.emit('error', new Error('fail'))
+      })
+    }
+
+    w.on('error', myOnError)
+    r.pipe(w)
+    w.removeListener('error', myOnError)
+    removed = true
+
+    function myOnError(er) {
+      caught = true
+    }
+  })
+
+  test('Error Listener Catches When Wrong Listener Is Removed', function (t) {
+    t.plan(2)
+
+    const r = new Readable()
+    const w = new Writable()
+    let removed = false
+    let caught = false
+
+    r._read = function () {
+      setTimeout(function () {
+        t.ok(removed)
+        w.emit('error', new Error('fail'))
+      })
+    }
+
+    w.on('error', myOnError)
+    w._write = function () {}
+
+    r.pipe(w)
+    // Removing some OTHER random listener should not do anything
+    w.removeListener('error', function () {})
+    removed = true
+
+    function myOnError(er) {
+      t.notOk(caught)
+      caught = true
+    }
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-error-handling'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-pipe-event.js b/src/test/browser/test-stream-pipe-event.js
new file mode 100644
index 0000000000..c03180c20d
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-event.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: pipe() synchronously emits 'pipe' on the
+// destination with the source as argument.
+module.exports = function (t) {
+  t.plan(1)
+
+  function Writable() {
+    this.writable = true
+    Stream.call(this)
+  }
+  inherits(Writable, Stream)
+
+  function Readable() {
+    this.readable = true
+    Stream.call(this)
+  }
+  inherits(Readable, Stream)
+
+  let passed = false
+
+  const w = new Writable()
+  w.on('pipe', function (src) {
+    passed = true
+  })
+
+  const r = new Readable()
+  r._read = function () {}
+  r.pipe(w)
+
+  // Asserted synchronously: 'pipe' must have fired during pipe() itself.
+  t.ok(passed)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-event'
diff --git a/src/test/browser/test-stream-pipe-without-listenerCount.js b/src/test/browser/test-stream-pipe-without-listenerCount.js
new file mode 100644
index 0000000000..1e8238cd8b
--- /dev/null
+++ b/src/test/browser/test-stream-pipe-without-listenerCount.js
@@ -0,0 +1,22 @@
+'use strict'
+
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported test: piping a stream whose listenerCount has been removed must
+// surface a TypeError when an error is emitted during pipe().
+module.exports = function (t) {
+  t.plan(1)
+
+  const r = new Stream({
+    read: function () {}
+  })
+  // Simulate an EventEmitter-like object without listenerCount support.
+  r.listenerCount = undefined
+
+  const w = new Stream()
+  w.on('pipe', function () {
+    r.emit('error', new Error('Readable Error'))
+  })
+
+  t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function')
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-without-listenerCount'
diff --git a/src/test/browser/test-stream-pipeline.js b/src/test/browser/test-stream-pipeline.js
new file mode 100644
index 0000000000..232f336eb6
--- /dev/null
+++ b/src/test/browser/test-stream-pipeline.js
@@ -0,0 +1,114 @@
+'use strict'
+
+const { Readable, Writable, pipeline } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+// Multi-test suite for stream.pipeline(): happy path, argument validation,
+// premature close, and explicit destroy with an error.
+module.exports = function (test) {
+  test('pipeline', function (t) {
+    t.plan(3)
+
+    let finished = false
+
+    const processed = []
+    const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]
+
+    const read = new Readable({
+      read: function read() {}
+    })
+
+    const write = new Writable({
+      write: function write(data, enc, cb) {
+        processed.push(data)
+        cb()
+      }
+    })
+
+    write.on('finish', function () {
+      finished = true
+    })
+
+    for (let i = 0; i < expected.length; i++) {
+      read.push(expected[i])
+    }
+
+    read.push(null)
+    pipeline(read, write, (err) => {
+      t.ifErr(err)
+      t.ok(finished)
+      t.deepEqual(processed, expected)
+    })
+  })
+
+  test('pipeline missing args', function (t) {
+    t.plan(3)
+
+    const _read = new Readable({
+      read: function read() {}
+    })
+
+    // Fewer than two streams, or no streams at all, must throw synchronously.
+    t.throws(function () {
+      pipeline(_read, function () {})
+    })
+
+    t.throws(function () {
+      pipeline(function () {})
+    })
+
+    t.throws(function () {
+      pipeline()
+    })
+  })
+
+  test('pipeline error', function (t) {
+    t.plan(1)
+
+    const _read2 = new Readable({
+      read: function read() {}
+    })
+
+    const _write = new Writable({
+      write: function write(data, enc, cb) {
+        cb()
+      }
+    })
+
+    _read2.push('data')
+
+    // Destroy the source before it ends: pipeline must report premature close.
+    setImmediate(function () {
+      return _read2.destroy()
+    })
+
+    pipeline(_read2, _write, (err) => {
+      t.equal(err.message, 'Premature close')
+    })
+  })
+
+  test('pipeline destroy', function (t) {
+    t.plan(2)
+
+    const _read3 = new Readable({
+      read: function read() {}
+    })
+
+    const _write2 = new Writable({
+      write: function write(data, enc, cb) {
+        cb()
+      }
+    })
+
+    _read3.push('data')
+
+    setImmediate(function () {
+      return _read3.destroy(new Error('kaboom'))
+    })
+
+    // pipeline() returns the last stream in the chain.
+    const dst = pipeline(_read3, _write2, (err) => {
+      t.equal(err.message, 'kaboom')
+    })
+
+    t.equal(dst, _write2)
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-pipeline'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-push-order.js b/src/test/browser/test-stream-push-order.js
new file mode 100644
index 0000000000..4afcf756bc
--- /dev/null
+++ b/src/test/browser/test-stream-push-order.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: chunks pushed in pairs from _read() are kept
+// in order in the internal buffer.
+module.exports = function (t) {
+  t.plan(1)
+
+  const s = new Readable({
+    highWaterMark: 20,
+    encoding: 'ascii'
+  })
+
+  const list = ['1', '2', '3', '4', '5', '6']
+
+  s._read = function (n) {
+    const one = list.shift()
+    if (!one) {
+      s.push(null)
+    } else {
+      const two = list.shift()
+      s.push(one)
+      s.push(two)
+    }
+  }
+
+  // read(0) triggers _read without consuming anything.
+  s.read(0)
+
+  // Inspected on a later tick; NOTE(review): peeks at the internal
+  // _readableState.buffer, so it is coupled to readable-stream internals.
+  setTimeout(function () {
+    t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6')
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-push-order'
diff --git a/src/test/browser/test-stream-push-strings.js b/src/test/browser/test-stream-push-strings.js
new file mode 100644
index 0000000000..bb254c5b39
--- /dev/null
+++ b/src/test/browser/test-stream-push-strings.js
@@ -0,0 +1,57 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: string chunks pushed synchronously, on
+// process.nextTick and via setTimeout are coalesced/delivered as expected.
+module.exports = function (t) {
+  t.plan(2)
+
+  function MyStream(options) {
+    Readable.call(this, options)
+    this._chunks = 3
+  }
+
+  inherits(MyStream, Readable)
+
+  // Counts down from 3: nextTick push, sync push, delayed push, then EOF.
+  MyStream.prototype._read = function (n) {
+    switch (this._chunks--) {
+      case 0:
+        return this.push(null)
+      case 1:
+        return setTimeout(
+          function () {
+            this.push('last chunk')
+          }.bind(this),
+          100
+        )
+      case 2:
+        return this.push('second to last chunk')
+      case 3:
+        return process.nextTick(
+          function () {
+            this.push('first chunk')
+          }.bind(this)
+        )
+      default:
+        throw new Error('?')
+    }
+  }
+  // First two chunks arrive close together and are read as one string.
+  const expect = ['first chunksecond to last chunk', 'last chunk']
+
+  const ms = new MyStream()
+  const results = []
+  ms.on('readable', function () {
+    let chunk
+    while ((chunk = ms.read()) !== null) {
+      results.push(chunk + '')
+    }
+  })
+
+  ms.on('end', function () {
+    t.equal(ms._chunks, -1)
+    t.deepEqual(results, expect)
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-push-strings'
diff --git a/src/test/browser/test-stream-readable-constructor-set-methods.js b/src/test/browser/test-stream-readable-constructor-set-methods.js
new file mode 100644
index 0000000000..6d4ff89359
--- /dev/null
+++ b/src/test/browser/test-stream-readable-constructor-set-methods.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: a `read` option passed to the Readable
+// constructor is installed as _read and gets invoked.
+module.exports = function (t) {
+  t.plan(2)
+
+  let _readCalled = false
+
+  function _read(n) {
+    _readCalled = true
+    this.push(null)
+  }
+
+  const r = new Readable({ read: _read })
+  r.resume()
+
+  // Checked on a later tick (zero-delay timeout) so _read has had a chance
+  // to run.
+  setTimeout(function () {
+    t.equal(r._read, _read)
+    t.ok(_readCalled)
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-readable-constructor-set-methods'
diff --git a/src/test/browser/test-stream-readable-event.js b/src/test/browser/test-stream-readable-event.js
new file mode 100644
index 0000000000..0c821409bf
--- /dev/null
+++ b/src/test/browser/test-stream-readable-event.js
@@ -0,0 +1,110 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+// Multi-test suite: attaching a 'readable' listener after data is already
+// buffered must (re-)emit 'readable', in three buffering states.
+module.exports = function (test) {
+  test('readable events - first', (t) => {
+    t.plan(3)
+
+    // First test, not reading when the readable is added.
+    // make sure that on('readable', ...) triggers a readable event.
+    const r = new Readable({
+      highWaterMark: 3
+    })
+
+    let _readCalled = false
+    r._read = function (n) {
+      _readCalled = true
+    }
+
+    // This triggers a 'readable' event, which is lost.
+    r.push(Buffer.from('blerg'))
+
+    let caughtReadable = false
+    setTimeout(function () {
+      // we're testing what we think we are
+      t.notOk(r._readableState.reading)
+      r.on('readable', function () {
+        caughtReadable = true
+        setTimeout(function () {
+          // we're testing what we think we are
+          t.notOk(_readCalled)
+
+          t.ok(caughtReadable)
+        })
+      })
+    })
+  })
+
+  test('readable events - second', (t) => {
+    t.plan(3)
+
+    // second test, make sure that readable is re-emitted if there's
+    // already a length, while it IS reading.
+
+    const r = new Readable({
+      highWaterMark: 3
+    })
+
+    let _readCalled = false
+    r._read = function (n) {
+      _readCalled = true
+    }
+
+    // This triggers a 'readable' event, which is lost.
+    r.push(Buffer.from('bl'))
+
+    let caughtReadable = false
+    setTimeout(function () {
+      // assert we're testing what we think we are
+      t.ok(r._readableState.reading)
+      r.on('readable', function () {
+        caughtReadable = true
+        setTimeout(function () {
+          // we're testing what we think we are
+          t.ok(_readCalled)
+
+          t.ok(caughtReadable)
+        })
+      })
+    })
+  })
+
+  test('readable events - third', (t) => {
+    t.plan(3)
+
+    // Third test, not reading when the stream has not passed
+    // the highWaterMark but *has* reached EOF.
+    const r = new Readable({
+      highWaterMark: 30
+    })
+
+    let _readCalled = false
+    r._read = function (n) {
+      _readCalled = true
+    }
+
+    // This triggers a 'readable' event, which is lost.
+    r.push(Buffer.from('blerg'))
+    r.push(null)
+
+    let caughtReadable = false
+    setTimeout(function () {
+      // assert we're testing what we think we are
+      t.notOk(r._readableState.reading)
+      r.on('readable', function () {
+        caughtReadable = true
+        setTimeout(function () {
+          // we're testing what we think we are
+          t.notOk(_readCalled)
+
+          t.ok(caughtReadable)
+        })
+      })
+    })
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-readable-event'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-sync-write.js b/src/test/browser/test-stream-sync-write.js
new file mode 100644
index 0000000000..dd3a1b2539
--- /dev/null
+++ b/src/test/browser/test-stream-sync-write.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+// Ported Node.js core test: 2000 synchronous writes forwarded through a
+// wrapping Writable all reach the inner Writable exactly once each.
+module.exports = function (t) {
+  t.plan(2)
+
+  let internalCalls = 0
+  let externalCalls = 0
+
+  const InternalStream = function () {
+    Writable.call(this)
+  }
+  inherits(InternalStream, Writable)
+
+  InternalStream.prototype._write = function (chunk, encoding, callback) {
+    internalCalls++
+    callback()
+  }
+
+  const internalStream = new InternalStream()
+
+  // Forwards every chunk to the wrapped writable, passing the callback
+  // through so backpressure is propagated.
+  const ExternalStream = function (writable) {
+    this._writable = writable
+    Writable.call(this)
+  }
+  inherits(ExternalStream, Writable)
+
+  ExternalStream.prototype._write = function (chunk, encoding, callback) {
+    externalCalls++
+    this._writable.write(chunk, encoding, callback)
+  }
+
+  const externalStream = new ExternalStream(internalStream)
+
+  for (let i = 0; i < 2000; i++) {
+    externalStream.write(i.toString())
+  }
+
+  externalStream.end(() => {
+    t.equal(internalCalls, 2000)
+    t.equal(externalCalls, 2000)
+  })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-sync-write'
diff --git a/src/test/browser/test-stream-transform-constructor-set-methods.js b/src/test/browser/test-stream-transform-constructor-set-methods.js
new file mode 100644
index 0000000000..c64df97dcb
--- /dev/null
+++ b/src/test/browser/test-stream-transform-constructor-set-methods.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(4)
+
+ let _transformCalled = false
+ function _transform(d, e, n) {
+ _transformCalled = true
+ n()
+ }
+
+ let _flushCalled = false
+ function _flush(n) {
+ _flushCalled = true
+ n()
+ }
+
+ const tr = new Transform({
+ transform: _transform,
+ flush: _flush
+ })
+
+ tr.end(Buffer.from('blerg'))
+ tr.resume()
+
+ tr.on('end', function () {
+ t.equal(tr._transform, _transform)
+ t.equal(tr._flush, _flush)
+ t.ok(_transformCalled)
+ t.ok(_flushCalled)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-constructor-set-methods'
diff --git a/src/test/browser/test-stream-transform-objectmode-falsey-value.js b/src/test/browser/test-stream-transform-objectmode-falsey-value.js
new file mode 100644
index 0000000000..69173cce31
--- /dev/null
+++ b/src/test/browser/test-stream-transform-objectmode-falsey-value.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const { PassThrough } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(13)
+
+ const src = new PassThrough({ objectMode: true })
+ const tx = new PassThrough({ objectMode: true })
+ const dest = new PassThrough({ objectMode: true })
+
+ const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+ const results = []
+ dest.on('end', function () {
+ t.deepEqual(results, expect)
+ })
+
+ dest.on('data', function (x) {
+ results.push(x)
+ })
+
+ src.pipe(tx).pipe(dest)
+
+ let i = -1
+ const int = setInterval(function () {
+ if (i > 10) {
+ src.end()
+ clearInterval(int)
+ } else {
+ t.ok(true)
+ src.write(i++)
+ }
+ }, 10)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-objectmode-falsey-value'
diff --git a/src/test/browser/test-stream-transform-split-objectmode.js b/src/test/browser/test-stream-transform-split-objectmode.js
new file mode 100644
index 0000000000..e50ac2c251
--- /dev/null
+++ b/src/test/browser/test-stream-transform-split-objectmode.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(10)
+
+ const parser = new Transform({ readableObjectMode: true })
+
+ t.ok(parser._readableState.objectMode, 'parser 1')
+ t.notOk(parser._writableState.objectMode, 'parser 2')
+ t.equals(parser._readableState.highWaterMark, 16, 'parser 3')
+ t.equals(parser._writableState.highWaterMark, 16 * 1024, 'parser 4')
+
+ parser._transform = function (chunk, enc, callback) {
+ callback(null, { val: chunk[0] })
+ }
+
+ let parsed
+
+ parser.on('data', function (obj) {
+ parsed = obj
+ })
+
+ parser.end(Buffer.from([42]))
+
+ parser.on('end', function () {
+ t.equals(parsed.val, 42, 'parser ended')
+ })
+
+ const serializer = new Transform({ writableObjectMode: true })
+
+ t.notOk(serializer._readableState.objectMode, 'serializer 1')
+ t.ok(serializer._writableState.objectMode, 'serializer 2')
+ t.equals(serializer._readableState.highWaterMark, 16 * 1024, 'serializer 3')
+ t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4')
+
+ serializer._transform = function (obj, _, callback) {
+ callback(null, Buffer.from([obj.val]))
+ }
+
+ let serialized
+
+ serializer.on('data', function (chunk) {
+ serialized = chunk
+ })
+
+ serializer.write({ val: 42 })
+
+ serializer.on('end', function () {
+ t.equals(serialized[0], 42, 'serializer ended')
+ })
+ setImmediate(function () {
+ serializer.end()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-transform-split-objectmode'
diff --git a/src/test/browser/test-stream-unshift-empty-chunk.js b/src/test/browser/test-stream-unshift-empty-chunk.js
new file mode 100644
index 0000000000..2ebbd20930
--- /dev/null
+++ b/src/test/browser/test-stream-unshift-empty-chunk.js
@@ -0,0 +1,64 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const r = new Readable()
+ let nChunks = 10
+ const chunk = Buffer.alloc(10)
+ chunk.fill('x')
+
+ r._read = function (n) {
+ setTimeout(function () {
+ r.push(--nChunks === 0 ? null : chunk)
+ })
+ }
+
+ let readAll = false
+ const seen = []
+ r.on('readable', function () {
+ let chunk
+ while ((chunk = r.read())) {
+ seen.push(chunk.toString())
+ // simulate only reading a certain amount of the data,
+ // and then putting the rest of the chunk back into the
+ // stream, like a parser might do. We just fill it with
+ // 'y' so that it's easy to see which bits were touched,
+ // and which were not.
+ const putBack = Buffer.alloc(readAll ? 0 : 5)
+ putBack.fill('y')
+ readAll = !readAll
+ r.unshift(putBack)
+ }
+ })
+
+ const expect = [
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy'
+ ]
+
+ r.on('end', function () {
+ t.deepEqual(seen, expect)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-unshift-empty-chunk'
diff --git a/src/test/browser/test-stream-unshift-read-race.js b/src/test/browser/test-stream-unshift-read-race.js
new file mode 100644
index 0000000000..a600fe1cd4
--- /dev/null
+++ b/src/test/browser/test-stream-unshift-read-race.js
@@ -0,0 +1,124 @@
+'use strict'
+
+// This test verifies that:
+// 1. unshift() does not cause colliding _read() calls.
+// 2. unshift() after the 'end' event is an error, but after the EOF
+// signalling null, it is ok, and just creates a new readable chunk.
+// 3. push() after the EOF signaling null is an error.
+// 4. _read() is not called after pushing the EOF null chunk.
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(139)
+
+ const hwm = 10
+ const r = stream.Readable({ highWaterMark: hwm })
+ const chunks = 10
+
+ const data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2))
+ for (let i = 0; i < data.length; i++) {
+ const c = 'asdf'.charCodeAt(i % 4)
+ data[i] = c
+ }
+
+ let pos = 0
+ let pushedNull = false
+ r._read = function (n) {
+ t.notOk(pushedNull, '_read after null push')
+
+ // every third chunk is fast
+ push(!(chunks % 3))
+
+ function push(fast) {
+ t.notOk(pushedNull, 'push() after null push')
+ const c = pos >= data.length ? null : data.slice(pos, pos + n)
+ pushedNull = c === null
+ if (fast) {
+ pos += n
+ r.push(c)
+ if (c === null) {
+ pushError()
+ }
+ } else {
+ setTimeout(function () {
+ pos += n
+ r.push(c)
+ if (c === null) {
+ pushError()
+ }
+ }, 1)
+ }
+ }
+ }
+
+ function pushError() {
+ r.unshift(Buffer.allocUnsafe(1))
+ w.end()
+
+ const onerror = global.onerror
+ global.onerror = () => {
+ t.ok(true)
+ global.onerror = onerror
+ return true
+ }
+
+ r.push(Buffer.allocUnsafe(1))
+ }
+
+ const w = stream.Writable()
+ const written = []
+ w._write = function (chunk, encoding, cb) {
+ written.push(chunk.toString())
+ cb()
+ }
+
+ r.on('end', t.fail)
+
+ r.on('readable', function () {
+ let chunk
+ while ((chunk = r.read(10)) !== null) {
+ w.write(chunk)
+ if (chunk.length > 4) {
+ r.unshift(Buffer.from('1234'))
+ }
+ }
+ })
+
+ w.on('finish', function () {
+ // each chunk should start with 1234, and then be asdfasdfasdf...
+ // The first got pulled out before the first unshift('1234'), so it's
+ // lacking that piece.
+ t.equal(written[0], 'asdfasdfas')
+ let asdf = 'd'
+
+ // console.error('0: %s', written[0]);
+ for (let i = 1; i < written.length; i++) {
+ // console.error('%s: %s', i.toString(32), written[i]);
+ t.equal(written[i].slice(0, 4), '1234')
+ for (let j = 4; j < written[i].length; j++) {
+ const c = written[i].charAt(j)
+ t.equal(c, asdf)
+ switch (asdf) {
+ case 'a':
+ asdf = 's'
+ break
+ case 's':
+ asdf = 'd'
+ break
+ case 'd':
+ asdf = 'f'
+ break
+ case 'f':
+ asdf = 'a'
+ break
+ }
+ }
+ }
+
+ t.equal(written.length, 18)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-unshift-read-race'
diff --git a/src/test/browser/test-stream-writable-change-default-encoding.js b/src/test/browser/test-stream-writable-change-default-encoding.js
new file mode 100644
index 0000000000..3cfa208e41
--- /dev/null
+++ b/src/test/browser/test-stream-writable-change-default-encoding.js
@@ -0,0 +1,74 @@
+'use strict'
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+inherits(MyWritable, stream.Writable)
+
+MyWritable.prototype._write = function (chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+}
+
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options)
+ this.fn = fn
+}
+
+module.exports = function (test) {
+ test('defaultEncodingIsUtf8', (t) => {
+ t.plan(1)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'utf8')
+ },
+ { decodeStrings: false }
+ )
+ m.write('foo')
+ m.end()
+ })
+
+ test('changeDefaultEncodingToAscii', (t) => {
+ t.plan(1)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'ascii')
+ },
+ { decodeStrings: false }
+ )
+ m.setDefaultEncoding('ascii')
+ m.write('bar')
+ m.end()
+ })
+
+ test('changeDefaultEncodingToInvalidValue', (t) => {
+ t.plan(1)
+
+ t.throws(function () {
+ const m = new MyWritable(function (isBuffer, type, enc) {}, { decodeStrings: false })
+ m.setDefaultEncoding({})
+ m.write('bar')
+ m.end()
+ }, TypeError)
+ })
+
+ test('checkVariableCaseEncoding', (t) => {
+ t.plan(1)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'ascii')
+ },
+ { decodeStrings: false }
+ )
+ m.setDefaultEncoding('AsCii')
+ m.write('bar')
+ m.end()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-change-default-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-writable-constructor-set-methods.js b/src/test/browser/test-stream-writable-constructor-set-methods.js
new file mode 100644
index 0000000000..43d935b815
--- /dev/null
+++ b/src/test/browser/test-stream-writable-constructor-set-methods.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const { Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(5)
+
+ let _writeCalled = false
+ function _write(d, e, n) {
+ _writeCalled = true
+ }
+
+ const w = new Writable({ write: _write })
+ w.end(Buffer.from('blerg'))
+
+ let _writevCalled = false
+ let dLength = 0
+ function _writev(d, n) {
+ dLength = d.length
+ _writevCalled = true
+ }
+
+ const w2 = new Writable({ writev: _writev })
+ w2.cork()
+
+ w2.write(Buffer.from('blerg'))
+ w2.write(Buffer.from('blerg'))
+ w2.end()
+
+ setImmediate(function () {
+ t.equal(w._write, _write)
+ t.ok(_writeCalled)
+ t.equal(w2._writev, _writev)
+ t.equal(dLength, 2)
+ t.ok(_writevCalled)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-constructor-set-methods'
diff --git a/src/test/browser/test-stream-writable-decoded-encoding.js b/src/test/browser/test-stream-writable-decoded-encoding.js
new file mode 100644
index 0000000000..628349c9e0
--- /dev/null
+++ b/src/test/browser/test-stream-writable-decoded-encoding.js
@@ -0,0 +1,54 @@
+'use strict'
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options)
+ this.fn = fn
+}
+
+inherits(MyWritable, stream.Writable)
+
+MyWritable.prototype._write = function (chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+}
+
+module.exports = function (test) {
+ test('decodeStringsTrue', (t) => {
+ t.plan(3)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.ok(isBuffer)
+ t.equal(type, 'object')
+ t.equal(enc, 'buffer')
+ // console.log('ok - decoded string is decoded');
+ },
+ { decodeStrings: true }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+ })
+
+ test('decodeStringsFalse', (t) => {
+ t.plan(3)
+
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.notOk(isBuffer)
+ t.equal(type, 'string')
+ t.equal(enc, 'utf8')
+ // console.log('ok - un-decoded string is not decoded');
+ },
+ { decodeStrings: false }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writable-decoded-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream-writev.js b/src/test/browser/test-stream-writev.js
new file mode 100644
index 0000000000..e072bc2388
--- /dev/null
+++ b/src/test/browser/test-stream-writev.js
@@ -0,0 +1,106 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+const queue = []
+for (let decode = 0; decode < 2; decode++) {
+ for (let uncork = 0; uncork < 2; uncork++) {
+ for (let multi = 0; multi < 2; multi++) {
+ queue.push([!!decode, !!uncork, !!multi])
+ }
+ }
+}
+
+function runTest(decode, uncork, multi) {
+ return function (t) {
+ t.plan(8)
+
+ // console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi);
+ let counter = 0
+ let expectCount = 0
+ function cnt(msg) {
+ expectCount++
+ const expect = expectCount
+ return function (er) {
+ if (er) {
+ throw er
+ }
+ counter++
+ t.equal(counter, expect)
+ }
+ }
+
+ const w = new stream.Writable({ decodeStrings: decode })
+ w._write = function (chunk, e, cb) {
+ t.ok(false, 'Should not call _write')
+ }
+
+ const expectChunks = decode
+ ? [
+ { encoding: 'buffer', chunk: [104, 101, 108, 108, 111, 44, 32] },
+ { encoding: 'buffer', chunk: [119, 111, 114, 108, 100] },
+ { encoding: 'buffer', chunk: [33] },
+ { encoding: 'buffer', chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] },
+ { encoding: 'buffer', chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] }
+ ]
+ : [
+ { encoding: 'ascii', chunk: 'hello, ' },
+ { encoding: 'utf8', chunk: 'world' },
+ { encoding: 'buffer', chunk: [33] },
+ { encoding: 'binary', chunk: '\nand then...' },
+ { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }
+ ]
+
+ let actualChunks
+ w._writev = function (chunks, cb) {
+ actualChunks = chunks.map(function (chunk) {
+ return {
+ encoding: chunk.encoding,
+ chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk
+ }
+ })
+ cb()
+ }
+
+ w.cork()
+ w.write('hello, ', 'ascii', cnt('hello'))
+ w.write('world', 'utf8', cnt('world'))
+
+ if (multi) {
+ w.cork()
+ }
+
+ w.write(Buffer.from('!'), 'buffer', cnt('!'))
+ w.write('\nand then...', 'binary', cnt('and then'))
+
+ if (multi) {
+ w.uncork()
+ }
+
+ w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'))
+
+ if (uncork) {
+ w.uncork()
+ }
+
+ w.end(cnt('end'))
+
+ w.on('finish', function () {
+ // make sure finish comes after all the write cb
+ cnt('finish')()
+ t.deepEqual(expectChunks, actualChunks)
+ })
+ }
+}
+
+module.exports = function (test) {
+ for (let i = 0; i < queue.length; i++) {
+ const tr = queue[i]
+
+ test('round ' + i, runTest(tr[0], tr[1], tr[2]))
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream-writev'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-base64-single-char-read-end.js b/src/test/browser/test-stream2-base64-single-char-read-end.js
new file mode 100644
index 0000000000..5b7c131d52
--- /dev/null
+++ b/src/test/browser/test-stream2-base64-single-char-read-end.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const src = new Readable({ encoding: 'base64' })
+ const dst = new Writable()
+ let hasRead = false
+ const accum = []
+
+ src._read = function (n) {
+ if (!hasRead) {
+ hasRead = true
+ process.nextTick(function () {
+ src.push(Buffer.from('1'))
+ src.push(null)
+ })
+ }
+ }
+
+ dst._write = function (chunk, enc, cb) {
+ accum.push(chunk)
+ cb()
+ }
+
+ src.on('end', function () {
+ t.equal(Buffer.concat(accum) + '', 'MQ==')
+ clearTimeout(timeout)
+ })
+
+ src.pipe(dst)
+
+ const timeout = setTimeout(function () {
+ t.fail('timed out waiting for _write')
+ }, 100)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-base64-single-char-read-end'
diff --git a/src/test/browser/test-stream2-compatibility.js b/src/test/browser/test-stream2-compatibility.js
new file mode 100644
index 0000000000..6709029562
--- /dev/null
+++ b/src/test/browser/test-stream2-compatibility.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ let ondataCalled = 0
+
+ function TestReader() {
+ Readable.apply(this)
+ this._buffer = Buffer.alloc(100)
+ this._buffer.fill('x')
+
+ this.on('data', function () {
+ ondataCalled++
+ })
+ }
+
+ inherits(TestReader, Readable)
+
+ TestReader.prototype._read = function (n) {
+ this.push(this._buffer)
+ this._buffer = Buffer.alloc(0)
+ }
+
+ setTimeout(function () {
+ t.equal(ondataCalled, 1)
+ })
+
+ new TestReader().read()
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-compatibility'
diff --git a/src/test/browser/test-stream2-large-read-stall.js b/src/test/browser/test-stream2-large-read-stall.js
new file mode 100644
index 0000000000..17bb7fb2b9
--- /dev/null
+++ b/src/test/browser/test-stream2-large-read-stall.js
@@ -0,0 +1,63 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ // If everything aligns so that you do a read(n) of exactly the
+ // remaining buffer, then make sure that 'end' still emits.
+
+ const READSIZE = 100
+ const PUSHSIZE = 20
+ const PUSHCOUNT = 1000
+ const HWM = 50
+
+ const r = new Readable({
+ highWaterMark: HWM
+ })
+ const rs = r._readableState
+
+ r._read = push
+
+ r.on('readable', function () {
+ false && console.error('>> readable')
+ let ret
+ do {
+ false && console.error(' > read(%d)', READSIZE)
+ ret = r.read(READSIZE)
+ false && console.error(' < %j (%d remain)', ret && ret.length, rs.length)
+ } while (ret && ret.length === READSIZE)
+
+ false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length)
+ })
+
+ r.on('end', function () {
+ t.equal(pushes, PUSHCOUNT + 1)
+
+ false && console.error('end')
+ })
+
+ let pushes = 0
+ function push() {
+ if (pushes > PUSHCOUNT) {
+ return
+ }
+
+ if (pushes++ === PUSHCOUNT) {
+ false && console.error(' push(EOF)')
+ return r.push(null)
+ }
+
+ false && console.error(' push #%d', pushes)
+ if (r.push(Buffer.alloc(PUSHSIZE))) {
+ setTimeout(push)
+ }
+ }
+
+ // start the flow
+ r.read(0)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-large-read-stall'
diff --git a/src/test/browser/test-stream2-objects.js b/src/test/browser/test-stream2-objects.js
new file mode 100644
index 0000000000..c939b07fe6
--- /dev/null
+++ b/src/test/browser/test-stream2-objects.js
@@ -0,0 +1,309 @@
+'use strict'
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
+ }
+}
+
+function toArray(callback) {
+ const stream = new Writable({ objectMode: true })
+ const list = []
+ stream.write = function (chunk) {
+ list.push(chunk)
+ }
+
+ stream.end = function () {
+ callback(list)
+ }
+
+ return stream
+}
+
+function fromArray(list) {
+ const r = new Readable({ objectMode: true })
+ r._read = noop
+ forEach(list, function (chunk) {
+ r.push(chunk)
+ })
+ r.push(null)
+
+ return r
+}
+
+function noop() {}
+
+module.exports = function (test) {
+ test('can read objects from stream', function (t) {
+ t.plan(3)
+
+ const r = fromArray([{ one: '1' }, { two: '2' }])
+
+ const v1 = r.read()
+ const v2 = r.read()
+ const v3 = r.read()
+
+ t.deepEqual(v1, { one: '1' })
+ t.deepEqual(v2, { two: '2' })
+ t.deepEqual(v3, null)
+ })
+
+ test('can pipe objects into stream', function (t) {
+ t.plan(1)
+
+ const r = fromArray([{ one: '1' }, { two: '2' }])
+
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [{ one: '1' }, { two: '2' }])
+ })
+ )
+ })
+
+ test('read(n) is ignored', function (t) {
+ t.plan(1)
+
+ const r = fromArray([{ one: '1' }, { two: '2' }])
+
+ const value = r.read(2)
+
+ t.deepEqual(value, { one: '1' })
+ })
+
+ test('can read objects from _read (sync)', function (t) {
+ t.plan(1)
+
+ const r = new Readable({ objectMode: true })
+ const list = [{ one: '1' }, { two: '2' }]
+ r._read = function (n) {
+ const item = list.shift()
+ r.push(item || null)
+ }
+
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [{ one: '1' }, { two: '2' }])
+ })
+ )
+ })
+
+ test('can read objects from _read (async)', function (t) {
+ t.plan(1)
+
+ const r = new Readable({ objectMode: true })
+ const list = [{ one: '1' }, { two: '2' }]
+ r._read = function (n) {
+ const item = list.shift()
+ process.nextTick(function () {
+ r.push(item || null)
+ })
+ }
+
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [{ one: '1' }, { two: '2' }])
+ })
+ )
+ })
+
+ test('can read strings as objects', function (t) {
+ t.plan(1)
+
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+ const list = ['one', 'two', 'three']
+ forEach(list, function (str) {
+ r.push(str)
+ })
+ r.push(null)
+
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, list)
+ })
+ )
+ })
+
+ test('read(0) for object streams', function (t) {
+ t.plan(1)
+
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+
+ r.push('foobar')
+ r.push(null)
+
+ r.read(0)
+
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, ['foobar'])
+ })
+ )
+ })
+
+ test('falsey values', function (t) {
+ t.plan(1)
+
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+
+ r.push(false)
+ r.push(0)
+ r.push('')
+ r.push(null)
+
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, [false, 0, ''])
+ })
+ )
+ })
+
+ test('high watermark _read', function (t) {
+ t.plan(5)
+
+ const r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ })
+ let calls = 0
+ const list = ['1', '2', '3', '4', '5', '6', '7', '8']
+
+ r._read = function (n) {
+ calls++
+ }
+
+ forEach(list, function (c) {
+ r.push(c)
+ })
+
+ const v = r.read()
+
+ t.equal(calls, 0)
+ t.equal(v, '1')
+
+ const v2 = r.read()
+ t.equal(v2, '2')
+
+ const v3 = r.read()
+ t.equal(v3, '3')
+
+ t.equal(calls, 1)
+ })
+
+ test('high watermark push', function (t) {
+ t.plan(6)
+
+ const r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ })
+ r._read = function (n) {}
+ for (let i = 0; i < 6; i++) {
+ const bool = r.push(i)
+ t.equal(bool, i !== 5)
+ }
+ })
+
+ test('can write objects to stream', function (t) {
+ t.plan(1)
+
+ const w = new Writable({ objectMode: true })
+
+ w._write = function (chunk, encoding, cb) {
+ t.deepEqual(chunk, { foo: 'bar' })
+ cb()
+ }
+
+ w.on('finish', function () {})
+
+ w.write({ foo: 'bar' })
+ w.end()
+ })
+
+ test('can write multiple objects to stream', function (t) {
+ t.plan(1)
+
+ const w = new Writable({ objectMode: true })
+ const list = []
+
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ cb()
+ }
+
+ w.on('finish', function () {
+ t.deepEqual(list, [0, 1, 2, 3, 4])
+ })
+
+ w.write(0)
+ w.write(1)
+ w.write(2)
+ w.write(3)
+ w.write(4)
+ w.end()
+ })
+
+ test('can write strings as objects', function (t) {
+ t.plan(1)
+
+ const w = new Writable({
+ objectMode: true
+ })
+ const list = []
+
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ process.nextTick(cb)
+ }
+
+ w.on('finish', function () {
+ t.deepEqual(list, ['0', '1', '2', '3', '4'])
+ })
+
+ w.write('0')
+ w.write('1')
+ w.write('2')
+ w.write('3')
+ w.write('4')
+ w.end()
+ })
+
+ test('buffers finish until cb is called', function (t) {
+ t.plan(2)
+
+ const w = new Writable({
+ objectMode: true
+ })
+ let called = false
+
+ w._write = function (chunk, encoding, cb) {
+ t.equal(chunk, 'foo')
+
+ process.nextTick(function () {
+ called = true
+ cb()
+ })
+ }
+
+ w.on('finish', function () {
+ t.equal(called, true)
+ })
+
+ w.write('foo')
+ w.end()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-objects'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-pipe-error-handling.js b/src/test/browser/test-stream2-pipe-error-handling.js
new file mode 100644
index 0000000000..e830b9b415
--- /dev/null
+++ b/src/test/browser/test-stream2-pipe-error-handling.js
@@ -0,0 +1,95 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) {
+ test('Error Listener Catches', function (t) {
+ t.plan(3)
+
+ let count = 1000
+
+ const source = new stream.Readable()
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.alloc(n))
+ }
+
+ let unpipedDest
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
+
+ const dest = new stream.Writable()
+ dest._write = function (chunk, encoding, cb) {
+ cb()
+ }
+
+ source.pipe(dest)
+
+ let gotErr = null
+ dest.on('error', function (err) {
+ gotErr = err
+ })
+
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+
+ const err = new Error('This stream turned into bacon.')
+ dest.emit('error', err)
+ t.strictEqual(gotErr, err)
+ t.strictEqual(unpipedSource, source)
+ t.strictEqual(unpipedDest, dest)
+ })
+
+ test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) {
+ t.plan(3)
+
+ let count = 1000
+
+ const source = new stream.Readable()
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.alloc(n))
+ }
+
+ let unpipedDest
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
+
+ const dest = new stream.Writable()
+ dest._write = function (chunk, encoding, cb) {
+ cb()
+ }
+
+ source.pipe(dest)
+
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+
+ const err = new Error('This stream turned into bacon.')
+ const onerror = global.onerror
+
+ dest.emit('error', err)
+
+ global.onerror = () => {
+ t.ok(true)
+ t.strictEqual(unpipedSource, source)
+ t.strictEqual(unpipedDest, dest)
+ global.onerror = onerror
+ return true
+ }
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-handling'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-pipe-error-once-listener.js b/src/test/browser/test-stream2-pipe-error-once-listener.js
new file mode 100644
index 0000000000..230da9ad42
--- /dev/null
+++ b/src/test/browser/test-stream2-pipe-error-once-listener.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(1)
+
+ const Read = function () {
+ stream.Readable.call(this)
+ }
+ inherits(Read, stream.Readable)
+
+ Read.prototype._read = function (size) {
+ this.push('x')
+ this.push(null)
+ }
+
+ const Write = function () {
+ stream.Writable.call(this)
+ }
+ inherits(Write, stream.Writable)
+
+ Write.prototype._write = function (buffer, encoding, cb) {
+ this.emit('error', new Error('boom'))
+ this.emit('alldone')
+ }
+
+ const read = new Read()
+ const write = new Write()
+
+ write.once('error', () => {})
+ write.once('alldone', function () {
+ t.ok(true)
+ })
+
+ read.pipe(write)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-once-listener'
diff --git a/src/test/browser/test-stream2-push.js b/src/test/browser/test-stream2-push.js
new file mode 100644
index 0000000000..ce2916aaa1
--- /dev/null
+++ b/src/test/browser/test-stream2-push.js
@@ -0,0 +1,119 @@
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(33)
+
+ const stream = new Readable({
+ highWaterMark: 16,
+ encoding: 'utf8'
+ })
+
+ const source = new EE()
+
+ stream._read = function () {
+ // console.error('stream._read');
+ readStart()
+ }
+
+ let ended = false
+ stream.on('end', function () {
+ ended = true
+ })
+
+ source.on('data', function (chunk) {
+ const ret = stream.push(chunk)
+ // console.error('data', stream._readableState.length);
+ if (!ret) {
+ readStop()
+ }
+ })
+
+ source.on('end', function () {
+ stream.push(null)
+ })
+
+ let reading = false
+
+ function readStart() {
+ // console.error('readStart');
+ reading = true
+ }
+
+ function readStop() {
+ // console.error('readStop');
+ reading = false
+ process.nextTick(function () {
+ const r = stream.read()
+ if (r !== null) {
+ writer.write(r)
+ }
+ })
+ }
+
+ const writer = new Writable({
+ decodeStrings: false
+ })
+
+ const written = []
+
+ const expectWritten = [
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg'
+ ]
+
+ writer._write = function (chunk, encoding, cb) {
+ // console.error('WRITE %s', chunk);
+ written.push(chunk)
+ process.nextTick(cb)
+ }
+
+ writer.on('finish', finish)
+
+ // now emit some chunks.
+
+ const chunk = 'asdfg'
+
+ let set = 0
+ readStart()
+ data()
+ function data() {
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.notOk(reading)
+ if (set++ < 5) {
+ setTimeout(data, 10)
+ } else {
+ end()
+ }
+ }
+
+ function finish() {
+ // console.error('finish');
+ t.deepEqual(written, expectWritten)
+ }
+
+ function end() {
+ source.emit('end')
+ t.notOk(reading)
+ writer.end(stream.read())
+ setTimeout(function () {
+ t.ok(ended)
+ })
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-push'
diff --git a/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js
new file mode 100644
index 0000000000..35e27a2f2f
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-empty-buffer-no-eof.js
@@ -0,0 +1,98 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) { // Suite: pushing zero-length chunks means "no data yet", not EOF
+ test('readable empty buffer no eof 1', function (t) {
+ t.plan(1)
+
+ const r = new Readable()
+
+ // should not end when we get a Buffer(0) or '' as the _read result
+ // that just means that there is *temporarily* no data, but to go
+ // ahead and try again later.
+ //
+ // note that this is very unusual. it only works for crypto streams
+ // because the other side of the stream will call read(0) to cycle
+ // data through openssl. that's why we set the timeouts to call
+ // r.read(0) again later, otherwise there is no more work being done
+ // and the process just exits.
+
+ const buf = Buffer.alloc(5)
+ buf.fill('x')
+ let reads = 5
+ r._read = function (n) { // each call exercises a different push shape (sync, nextTick, timeout, empty)
+ switch (reads--) {
+ case 0:
+ return r.push(null) // EOF
+ case 1:
+ return r.push(buf)
+ case 2:
+ setTimeout(r.read.bind(r, 0), 50)
+ return r.push(Buffer.alloc(0)) // Not-EOF!
+ case 3:
+ setTimeout(r.read.bind(r, 0), 50)
+ return process.nextTick(function () {
+ return r.push(Buffer.alloc(0))
+ })
+ case 4:
+ setTimeout(r.read.bind(r, 0), 50)
+ return setTimeout(function () {
+ return r.push(Buffer.alloc(0))
+ })
+ case 5:
+ return setTimeout(function () {
+ return r.push(buf)
+ })
+ default:
+ throw new Error('unreachable')
+ }
+ }
+
+ const results = []
+ function flow() { // drain every buffered chunk into `results` as a string
+ let chunk
+ while ((chunk = r.read()) !== null) {
+ results.push(chunk + '')
+ }
+ }
+ r.on('readable', flow)
+ r.on('end', function () {
+ results.push('EOF')
+ t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF']) // only the two real 5-byte pushes arrive; empties are dropped
+ })
+ flow()
+ })
+
+ test('readable empty buffer no eof 2', function (t) {
+ t.plan(1)
+
+ const r = new Readable({ encoding: 'base64' })
+ let reads = 5
+ r._read = function (n) { // push five single 'x' bytes, then EOF
+ if (!reads--) {
+ return r.push(null) // EOF
+ } else {
+ return r.push(Buffer.from('x'))
+ }
+ }
+
+ const results = []
+ function flow() {
+ let chunk
+ while ((chunk = r.read()) !== null) {
+ results.push(chunk + '')
+ }
+ }
+ r.on('readable', flow)
+ r.on('end', function () {
+ results.push('EOF')
+ t.deepEqual(results, ['eHh4', 'eHg=', 'EOF']) // base64 of 'xxx' then trailing 'xx' flushed at EOF
+ })
+ flow()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-empty-buffer-no-eof'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-readable-from-list.js b/src/test/browser/test-stream2-readable-from-list.js
new file mode 100644
index 0000000000..f71984e9c4
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-from-list.js
@@ -0,0 +1,70 @@
+'use strict'
+
+const { _fromList: fromList } = require('../../lib/_stream_readable')
+const BufferList = require('../../lib/internal/streams/buffer_list')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+function bufferListFromArray(arr) { // build the internal BufferList shape from a plain array of chunks
+ const bl = new BufferList()
+ for (let i = 0; i < arr.length; ++i) {
+ bl.push(arr[i])
+ }
+ return bl
+}
+
+module.exports = function (test) { // Suite: _fromList slicing semantics for Buffer and string (decoder) modes
+ test('buffers', function (t) {
+ t.plan(5)
+
+ let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')]
+ list = bufferListFromArray(list)
+
+ // read more than the first element.
+ let ret = fromList(6, { buffer: list, length: 16 }) // `length` mirrors the state's byte count before the read
+ t.equal(ret.toString(), 'foogba')
+
+ // read exactly the first element.
+ ret = fromList(2, { buffer: list, length: 10 })
+ t.equal(ret.toString(), 'rk')
+
+ // read less than the first element.
+ ret = fromList(2, { buffer: list, length: 8 })
+ t.equal(ret.toString(), 'ba')
+
+ // read more than we have.
+ ret = fromList(100, { buffer: list, length: 6 })
+ t.equal(ret.toString(), 'zykuel')
+
+ // all consumed.
+ t.same(list, new BufferList())
+ })
+
+ test('strings', function (t) {
+ t.plan(5)
+
+ let list = ['foog', 'bark', 'bazy', 'kuel']
+ list = bufferListFromArray(list)
+
+ // read more than the first element.
+ let ret = fromList(6, { buffer: list, length: 16, decoder: true }) // truthy decoder => string concatenation path
+ t.equal(ret, 'foogba')
+
+ // read exactly the first element.
+ ret = fromList(2, { buffer: list, length: 10, decoder: true })
+ t.equal(ret, 'rk')
+
+ // read less than the first element.
+ ret = fromList(2, { buffer: list, length: 8, decoder: true })
+ t.equal(ret, 'ba')
+
+ // read more than we have.
+ ret = fromList(100, { buffer: list, length: 6, decoder: true })
+ t.equal(ret, 'zykuel')
+
+ // all consumed.
+ t.same(list, new BufferList())
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-from-list'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-readable-legacy-drain.js b/src/test/browser/test-stream2-readable-legacy-drain.js
new file mode 100644
index 0000000000..8cd09c2fc6
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-legacy-drain.js
@@ -0,0 +1,47 @@
+'use strict'
+
+const { Stream, Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) { // Test: piping into an old-style stream honors write()===false + 'drain'
+ t.plan(3)
+
+ const r = new Readable()
+ const N = 256
+ let reads = 0
+ r._read = function (n) { // 255 one-byte chunks, then EOF on the 256th call
+ return r.push(++reads === N ? null : Buffer.alloc(1))
+ }
+
+ r.on('end', function () {
+ t.ok(true, 'rended')
+ })
+
+ const w = new Stream() // legacy (streams1) writable: plain Stream with writable flag
+ w.writable = true
+ let writes = 0
+ let buffered = 0
+ w.write = function (c) { // always report backpressure; drain is emitted on next tick
+ writes += c.length
+ buffered += c.length
+ process.nextTick(drain)
+ return false
+ }
+
+ function drain() {
+ if (buffered > 3) {
+ t.ok(false, 'too much buffer') // fixed typo: was 'to much buffer'
+ }
+ buffered = 0
+ w.emit('drain')
+ }
+
+ w.end = function () {
+ t.equal(writes, 255)
+ t.ok(true, 'wended')
+ }
+
+ r.pipe(w)
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-legacy-drain'
diff --git a/src/test/browser/test-stream2-readable-non-empty-end.js b/src/test/browser/test-stream2-readable-non-empty-end.js
new file mode 100644
index 0000000000..f9e2983142
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-non-empty-end.js
@@ -0,0 +1,60 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) { // Test: 'end' must not fire while one unread byte remains buffered
+ t.plan(4)
+
+ let len = 0
+ const chunks = new Array(10)
+ for (let i = 1; i <= 10; i++) { // chunks of sizes 1..10; len = 55 total bytes
+ chunks[i - 1] = Buffer.alloc(i)
+ len += i
+ }
+
+ const test = new Readable()
+ let n = 0
+ test._read = function (size) { // feed one chunk per call asynchronously; undefined => EOF
+ const chunk = chunks[n++]
+ setTimeout(function () {
+ test.push(chunk === undefined ? null : chunk)
+ })
+ }
+
+ test.on('end', thrower) // 'end' before next() runs is a failure
+ function thrower() {
+ throw new Error('this should not happen!')
+ }
+
+ let bytesread = 0
+ test.on('readable', function () {
+ const b = len - bytesread - 1 // deliberately leave exactly one byte unread
+ const res = test.read(b)
+ if (res) {
+ bytesread += res.length
+ // console.error('br=%d len=%d', bytesread, len);
+ setTimeout(next)
+ }
+ test.read(0) // keep the read loop cycling
+ })
+ test.read(0)
+
+ function next() {
+ // now let's make 'end' happen
+ test.removeListener('end', thrower)
+
+ test.on('end', function () {
+ t.ok(true, 'end emitted')
+ })
+
+ // one to get the last byte
+ let r = test.read()
+ t.ok(r)
+ t.equal(r.length, 1)
+ r = test.read()
+ t.equal(r, null)
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-non-empty-end'
diff --git a/src/test/browser/test-stream2-readable-wrap-empty.js b/src/test/browser/test-stream2-readable-wrap-empty.js
new file mode 100644
index 0000000000..7779ac91af
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-wrap-empty.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+const Readable = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) { // Test: wrap() of an old stream that emits 'end' with no data still ends
+ t.plan(1)
+
+ const oldStream = new EE() // bare emitter standing in for a streams1 source
+ oldStream.pause = function () {}
+ oldStream.resume = function () {}
+
+ const newStream = new Readable().wrap(oldStream)
+
+ newStream
+ .on('readable', function () {}) // attach a reader so the stream flows
+ .on('end', function () {
+ t.ok(true, 'ended')
+ })
+
+ oldStream.emit('end')
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap-empty'
diff --git a/src/test/browser/test-stream2-readable-wrap.js b/src/test/browser/test-stream2-readable-wrap.js
new file mode 100644
index 0000000000..0dff5fb8f3
--- /dev/null
+++ b/src/test/browser/test-stream2-readable-wrap.js
@@ -0,0 +1,99 @@
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+let run = 0
+
+module.exports = function (test) { // Suite: wrap() bridges a streams1 emitter, preserving order and backpressure
+ function runTest(highWaterMark, objectMode, produce) { // `produce` yields one upstream chunk per call
+ test('run #' + ++run, (t) => {
+ t.plan(4)
+
+ const old = new EE()
+ const r = new Readable({ highWaterMark, objectMode })
+ t.equal(r, r.wrap(old)) // wrap() is chainable, returning the wrapper
+
+ let ended = false
+ r.on('end', function () {
+ ended = true
+ })
+
+ old.pause = function () { // wrapper calls this to apply backpressure upstream
+ // console.error('old.pause()');
+ old.emit('pause')
+ flowing = false
+ }
+
+ old.resume = function () {
+ // console.error('old.resume()');
+ old.emit('resume')
+ flow()
+ }
+
+ let flowing
+ let chunks = 10
+ let oldEnded = false
+ const expected = []
+ function flow() { // emit data until paused or the 10 chunks are exhausted
+ flowing = true
+ // eslint-disable-next-line no-unmodified-loop-condition
+ while (flowing && chunks-- > 0) {
+ const item = produce()
+ expected.push(item)
+ // console.log('old.emit', chunks, flowing);
+ old.emit('data', item)
+ // console.log('after emit', chunks, flowing);
+ }
+ if (chunks <= 0) {
+ oldEnded = true
+ // console.log('old end', chunks, flowing);
+ old.emit('end')
+ }
+ }
+
+ const w = new Writable({ highWaterMark: highWaterMark * 2, objectMode })
+ const written = []
+ w._write = function (chunk, encoding, cb) { // record then complete asynchronously
+ // console.log('_write', chunk);
+ written.push(chunk)
+ setTimeout(cb)
+ }
+
+ w.on('finish', function () {
+ performAsserts()
+ })
+
+ r.pipe(w)
+
+ flow()
+
+ function performAsserts() { // everything emitted upstream must arrive downstream, in order
+ t.ok(ended)
+ t.ok(oldEnded)
+ t.deepEqual(written, expected)
+ }
+ })
+ }
+
+ runTest(100, false, function () {
+ return Buffer.alloc(100)
+ })
+
+ runTest(10, false, function () {
+ return Buffer.from('xxxxxxxxxx')
+ })
+
+ runTest(1, true, function () {
+ return { foo: 'bar' }
+ })
+
+ const objectChunks = [5, 'a', false, 0, '', 'xyz', { x: 4 }, 7, [], 555] // falsy values included on purpose
+ runTest(1, true, function () {
+ return objectChunks.shift()
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-set-encoding.js b/src/test/browser/test-stream2-set-encoding.js
new file mode 100644
index 0000000000..3e092e2408
--- /dev/null
+++ b/src/test/browser/test-stream2-set-encoding.js
@@ -0,0 +1,340 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+inherits(TestReader, Readable)
+
+function TestReader(n, opts) { // Readable that yields n (default 100) bytes of 'a' via _read
+ Readable.call(this, opts)
+
+ this.pos = 0 // bytes produced so far
+ this.len = n || 100 // total bytes to produce
+}
+
+TestReader.prototype._read = function (n) { // async producer: fill up to n 'a' bytes, or push EOF when exhausted
+ setTimeout(
+ function () {
+ if (this.pos >= this.len) {
+ // double push(null) to test eos handling
+ this.push(null)
+ return this.push(null)
+ }
+
+ n = Math.min(n, this.len - this.pos)
+ if (n <= 0) {
+ // double push(null) to test eos handling
+ this.push(null)
+ return this.push(null)
+ }
+
+ this.pos += n
+ const ret = Buffer.alloc(n)
+ ret.fill('a')
+
+ // console.log('this.push(ret)', ret);
+
+ return this.push(ret)
+ }.bind(this),
+ 1
+ )
+}
+
+module.exports = function (test) { // Suite: setEncoding()/encoding option decode chunks across read() boundaries
+ test('setEncoding utf8', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ tr.setEncoding('utf8')
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('setEncoding hex', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ tr.setEncoding('hex') // hex doubles the length: 100 bytes -> 200 chars
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('setEncoding hex with read(13)', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [ // 13 does not divide 200, so reads straddle chunk boundaries
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+
+ tr.on('readable', function flow() {
+ // console.log('readable once');
+ let chunk
+ while ((chunk = tr.read(13)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ // console.log('END');
+ t.same(out, expect)
+ })
+ })
+
+ test('setEncoding base64', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ tr.setEncoding('base64')
+ const out = []
+ const expect = [ // final chunk carries base64 padding
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('encoding: utf8', function (t) { // same expectations via the constructor option
+ t.plan(1)
+
+ const tr = new TestReader(100, { encoding: 'utf8' })
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('encoding: hex', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100, { encoding: 'hex' })
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('encoding: hex with read(13)', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100, { encoding: 'hex' })
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(13)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('encoding: base64', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100, { encoding: 'base64' })
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+
+ test('chainable', function (t) {
+ t.plan(1)
+
+ const tr = new TestReader(100)
+ t.equal(tr.setEncoding('utf8'), tr) // setEncoding returns the stream itself
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-set-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-transform.js b/src/test/browser/test-stream2-transform.js
new file mode 100644
index 0000000000..e0168307bc
--- /dev/null
+++ b/src/test/browser/test-stream2-transform.js
@@ -0,0 +1,489 @@
+'use strict'
+
+const { PassThrough, Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+module.exports = function (test) { // Suite: Transform/PassThrough buffering, async transforms, and event timing
+ test('writable side consumption', function (t) {
+ t.plan(3)
+
+ const tx = new Transform({
+ highWaterMark: 10
+ })
+
+ let transformed = 0
+ tx._transform = function (chunk, encoding, cb) {
+ transformed += chunk.length
+ tx.push(chunk)
+ cb()
+ }
+
+ for (let i = 1; i <= 10; i++) {
+ tx.write(Buffer.alloc(i))
+ }
+ tx.end()
+
+ t.equal(tx._readableState.length, 10)
+ t.equal(transformed, 10)
+ t.same(
+ tx._writableState.getBuffer().map(function (c) {
+ return c.chunk.length
+ }),
+ [5, 6, 7, 8, 9, 10] // chunks past the high-water mark stay queued on the writable side
+ )
+ })
+
+ test('passthrough', function (t) {
+ t.plan(4)
+
+ const pt = new PassThrough()
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+
+ t.equal(pt.read(5).toString(), 'foogb') // reads re-slice across the original write boundaries
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5).toString(), 'l')
+ })
+
+ test('object passthrough', function (t) {
+ t.plan(7)
+
+ const pt = new PassThrough({ objectMode: true })
+
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({ a: 'b' })
+ pt.end()
+
+ t.equal(pt.read(), 1)
+ t.equal(pt.read(), true)
+ t.equal(pt.read(), false)
+ t.equal(pt.read(), 0)
+ t.equal(pt.read(), 'foo')
+ t.equal(pt.read(), '')
+ t.same(pt.read(), { a: 'b' })
+ })
+
+ test('simple transform', function (t) {
+ t.plan(4)
+
+ const pt = new Transform()
+ pt._transform = function (c, e, cb) { // replace every byte with 'x', preserving lengths
+ const ret = Buffer.alloc(c.length)
+ ret.fill('x')
+ pt.push(ret)
+ cb()
+ }
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'x')
+ })
+
+ test('simple object transform', function (t) {
+ t.plan(7)
+
+ const pt = new Transform({ objectMode: true })
+ pt._transform = function (c, e, cb) {
+ pt.push(JSON.stringify(c))
+ cb()
+ }
+
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({ a: 'b' })
+ pt.end()
+
+ t.equal(pt.read(), '1')
+ t.equal(pt.read(), 'true')
+ t.equal(pt.read(), 'false')
+ t.equal(pt.read(), '0')
+ t.equal(pt.read(), '"foo"')
+ t.equal(pt.read(), '""')
+ t.equal(pt.read(), '{"a":"b"}')
+ })
+
+ test('async passthrough', function (t) {
+ t.plan(4)
+
+ const pt = new Transform()
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5).toString(), 'l')
+ })
+ })
+
+ test('asymmetric transform (expand)', function (t) { // fixed typo: was 'assymetric'
+ t.plan(7)
+
+ const pt = new Transform()
+
+ // emit each chunk 2 times.
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }, 10)
+ }
+
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'foogf')
+ t.equal(pt.read(5).toString(), 'oogba')
+ t.equal(pt.read(5).toString(), 'rkbar')
+ t.equal(pt.read(5).toString(), 'kbazy')
+ t.equal(pt.read(5).toString(), 'bazyk')
+ t.equal(pt.read(5).toString(), 'uelku')
+ t.equal(pt.read(5).toString(), 'el')
+ })
+ })
+
+ test('asymmetric transform (compress)', function (t) { // fixed typo: was 'assymetric'
+ t.plan(3)
+
+ const pt = new Transform()
+
+ // each output is the first char of 3 consecutive chunks,
+ // or whatever's left.
+ pt.state = ''
+
+ pt._transform = function (chunk, encoding, cb) {
+ if (!chunk) {
+ chunk = ''
+ }
+ const s = chunk.toString()
+ setTimeout(
+ function () {
+ this.state += s.charAt(0)
+ if (this.state.length === 3) {
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ }
+ cb()
+ }.bind(this),
+ 10
+ )
+ }
+
+ pt._flush = function (cb) {
+ // just output whatever we have.
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ cb()
+ }
+
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.end()
+
+ // 'abcdeabcdeabcd'
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'abcde')
+ t.equal(pt.read(5).toString(), 'abcde')
+ t.equal(pt.read(5).toString(), 'abcd')
+ })
+ })
+
+ // this tests for a stall when data is written to a full stream
+ // that has empty transforms.
+ test('complex transform', function (t) {
+ t.plan(2)
+
+ let count = 0
+ let saved = null
+ const pt = new Transform({ highWaterMark: 3 })
+ pt._transform = function (c, e, cb) { // hold back the second chunk, release it with the next one
+ if (count++ === 1) {
+ saved = c
+ } else {
+ if (saved) {
+ pt.push(saved)
+ saved = null
+ }
+ pt.push(c)
+ }
+
+ cb()
+ }
+
+ pt.once('readable', function () {
+ process.nextTick(function () {
+ pt.write(Buffer.from('d'))
+ pt.write(Buffer.from('ef'), function () {
+ pt.end()
+ })
+ t.equal(pt.read().toString(), 'abcdef')
+ t.equal(pt.read(), null)
+ })
+ })
+
+ pt.write(Buffer.from('abc'))
+ })
+
+ test('passthrough event emission', function (t) {
+ t.plan(11)
+
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ // console.error('>>> emit readable %d', emits);
+ emits++
+ })
+
+ pt.write(Buffer.from('foog'))
+
+ // console.error('need emit 0');
+ pt.write(Buffer.from('bark'))
+
+ setTimeout(() => {
+ // console.error('should have emitted readable now 1 === %d', emits)
+ t.equal(emits, 1)
+
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5) + '', 'null')
+
+ // console.error('need emit 1');
+
+ pt.write(Buffer.from('bazy'))
+ // console.error('should have emitted, but not again');
+ pt.write(Buffer.from('kuel'))
+
+ // console.error('should have emitted readable now 2 === %d', emits);
+ setTimeout(() => {
+ t.equal(emits, 2)
+
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 2');
+
+ pt.end()
+
+ setTimeout(() => {
+ t.equal(emits, 3)
+
+ t.equal(pt.read(5).toString(), 'l')
+ t.equal(pt.read(5), null)
+
+ // console.error('should not have emitted again');
+ t.equal(emits, 3)
+ })
+ })
+ })
+ })
+
+ test('passthrough event emission reordered', function (t) {
+ t.plan(10)
+
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ // console.error('emit readable', emits);
+ emits++
+ })
+
+ pt.write(Buffer.from('foog'))
+ // console.error('need emit 0');
+ pt.write(Buffer.from('bark'))
+
+ setTimeout(() => {
+ // console.error('should have emitted readable now 1 === %d', emits);
+ t.equal(emits, 1)
+
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 1');
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'arkba')
+
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 2');
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5), null)
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'l')
+ t.equal(pt.read(5), null)
+ t.equal(emits, 4)
+ })
+ pt.end()
+ })
+ pt.write(Buffer.from('kuel'))
+ })
+
+ pt.write(Buffer.from('bazy'))
+ })
+ })
+
+ test('passthrough facaded', function (t) {
+ t.plan(1)
+
+ // console.error('passthrough facaded');
+ const pt = new PassThrough()
+ const datas = []
+ pt.on('data', function (chunk) {
+ datas.push(chunk.toString())
+ })
+
+ pt.on('end', function () {
+ t.same(datas, ['foog', 'bark', 'bazy', 'kuel'])
+ })
+
+ pt.write(Buffer.from('foog'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bark'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bazy'))
+ setTimeout(function () {
+ pt.write(Buffer.from('kuel'))
+ setTimeout(function () {
+ pt.end()
+ }, 10)
+ }, 10)
+ }, 10)
+ }, 10)
+ })
+
+ test('object transform (json parse)', function (t) {
+ t.plan(5)
+
+ // console.error('json parse stream');
+ const jp = new Transform({ objectMode: true })
+ jp._transform = function (data, encoding, cb) {
+ try {
+ jp.push(JSON.parse(data))
+ cb()
+ } catch (er) {
+ cb(er)
+ }
+ }
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }]
+
+ let ended = false
+ jp.on('end', function () {
+ ended = true
+ })
+
+ forEach(objects, function (obj) {
+ jp.write(JSON.stringify(obj))
+ const res = jp.read()
+ t.same(res, obj)
+ })
+
+ jp.end()
+ // read one more time to get the 'end' event
+ jp.read()
+
+ process.nextTick(function () {
+ t.ok(ended)
+ })
+ })
+
+ test('object transform (json stringify)', function (t) {
+ t.plan(5)
+
+ // console.error('json parse stream');
+ const js = new Transform({ objectMode: true })
+ js._transform = function (data, encoding, cb) {
+ try {
+ js.push(JSON.stringify(data))
+ cb()
+ } catch (er) {
+ cb(er)
+ }
+ }
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ const objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }]
+
+ let ended = false
+ js.on('end', function () {
+ ended = true
+ })
+
+ forEach(objects, function (obj) {
+ js.write(obj)
+ const res = js.read()
+ t.equal(res, JSON.stringify(obj))
+ })
+
+ js.end()
+ // read one more time to get the 'end' event
+ js.read()
+
+ process.nextTick(function () {
+ t.ok(ended)
+ })
+ })
+
+ function forEach(xs, f) { // minimal Array#forEach stand-in used by the JSON tests
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
+ }
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-transform'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream2-unpipe-drain.js b/src/test/browser/test-stream2-unpipe-drain.js
new file mode 100644
index 0000000000..7d3192eec1
--- /dev/null
+++ b/src/test/browser/test-stream2-unpipe-drain.js
@@ -0,0 +1,65 @@
+'use strict'
+
+const crypto = require('crypto')
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) { // Test: unpiping one of two sources while the shared dest never drains
+ try {
+ crypto.randomBytes(9)
+ } catch (_) {
+ t.plan(1)
+ t.ok(true, 'does not support random, skipping') // fixed typo: was 'does not suport random'
+ return
+ }
+
+ t.plan(2)
+
+ function TestWriter() {
+ stream.Writable.call(this)
+ }
+ inherits(TestWriter, stream.Writable)
+
+ TestWriter.prototype._write = function (buffer, encoding, callback) {
+ // console.log('write called');
+ // super slow write stream (callback never called)
+ }
+
+ const dest = new TestWriter()
+
+ function TestReader(id) { // Readable that counts _read calls in `reads`
+ stream.Readable.call(this)
+ this.reads = 0
+ }
+ inherits(TestReader, stream.Readable)
+
+ TestReader.prototype._read = function (size) {
+ this.reads += 1
+ this.push(crypto.randomBytes(size))
+ }
+
+ const src1 = new TestReader()
+ const src2 = new TestReader()
+
+ src1.pipe(dest)
+
+ src1.once('readable', function () {
+ process.nextTick(function () {
+ src2.pipe(dest)
+
+ src2.once('readable', function () {
+ process.nextTick(function () {
+ src1.unpipe(dest)
+ })
+ })
+ })
+ })
+
+ dest.on('unpipe', function () { // blocked dest => src1 read twice, src2 only once
+ t.equal(src1.reads, 2)
+ t.equal(src2.reads, 1)
+ })
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-unpipe-drain'
diff --git a/src/test/browser/test-stream2-writable.js b/src/test/browser/test-stream2-writable.js
new file mode 100644
index 0000000000..20d4e9dc5e
--- /dev/null
+++ b/src/test/browser/test-stream2-writable.js
@@ -0,0 +1,445 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Duplex, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+
+inherits(TestWriter, Writable)
+
+function TestWriter() {
+ Writable.apply(this, arguments)
+ this.buffer = []
+ this.written = 0
+}
+
+TestWriter.prototype._write = function (chunk, encoding, cb) {
+ // simulate a small unpredictable latency
+ setTimeout(
+ function () {
+ this.buffer.push(chunk.toString())
+ this.written += chunk.length
+ cb()
+ }.bind(this),
+ Math.floor(Math.random() * 10)
+ )
+}
+inherits(Processstdout, Writable)
+
+function Processstdout() {
+ Writable.apply(this, arguments)
+ this.buffer = []
+ this.written = 0
+}
+
+Processstdout.prototype._write = function (chunk, encoding, cb) {
+ // console.log(chunk.toString());
+ cb()
+}
+const chunks = new Array(50)
+for (let i = 0; i < chunks.length; i++) {
+ chunks[i] = new Array(i + 1).join('x')
+}
+
+if (!process.stdout) {
+ process.stdout = new Processstdout()
+}
+
+module.exports = function (test) {
+ test('write fast', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ })
+
+ forEach(chunks, function (chunk) {
+ // screw backpressure. Just buffer it all up.
+ tw.write(chunk)
+ })
+ tw.end()
+ })
+
+ test('write slow', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ })
+
+ let i = 0
+ ;(function W() {
+ tw.write(chunks[i++])
+ if (i < chunks.length) {
+ setTimeout(W, 10)
+ } else {
+ tw.end()
+ }
+ })()
+ })
+
+ test('write backpressure', function (t) {
+ t.plan(19)
+
+ const tw = new TestWriter({
+ highWaterMark: 50
+ })
+
+ let drains = 0
+
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ t.equal(drains, 17)
+ })
+
+ tw.on('drain', function () {
+ drains++
+ })
+
+ let i = 0
+ ;(function W() {
+ let ret
+ do {
+ ret = tw.write(chunks[i++])
+ } while (ret !== false && i < chunks.length)
+
+ if (i < chunks.length) {
+ t.ok(tw._writableState.length >= 50)
+ tw.once('drain', W)
+ } else {
+ tw.end()
+ }
+ })()
+ })
+
+ test('write bufferize', function (t) {
+ t.plan(50)
+
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+
+ tw.on('finish', function () {
+ forEach(chunks, function (chunk, i) {
+ const actual = Buffer.from(tw.buffer[i])
+ chunk = Buffer.from(chunk)
+
+ // Some combination of encoding and length result in the last byte replaced by two extra null bytes
+ if (actual[actual.length - 1] === 0) {
+ chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])])
+ }
+
+ // In some cases instead there is one byte less
+ if (actual.length === chunk.length - 1) {
+ chunk = chunk.slice(0, chunk.length - 1)
+ }
+
+ t.same(actual, chunk, 'got the expected chunks ' + i)
+ })
+ })
+
+ forEach(chunks, function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+ tw.end()
+ })
+
+ test('write no bufferize', function (t) {
+ t.plan(100)
+
+ const tw = new TestWriter({
+ highWaterMark: 100,
+ decodeStrings: false
+ })
+
+ tw._write = function (chunk, encoding, cb) {
+ t.equals(typeof chunk, 'string')
+ chunk = Buffer.from(chunk, encoding)
+ return TestWriter.prototype._write.call(this, chunk, encoding, cb)
+ }
+
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+
+ tw.on('finish', function () {
+ forEach(chunks, function (chunk, i) {
+ const actual = Buffer.from(tw.buffer[i])
+ chunk = Buffer.from(chunk)
+
+ // Some combination of encoding and length result in the last byte replaced by two extra null bytes
+ if (actual[actual.length - 1] === 0) {
+ chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])])
+ }
+
+ // In some cases instead there is one byte less
+ if (actual.length === chunk.length - 1) {
+ chunk = chunk.slice(0, chunk.length - 1)
+ }
+
+ t.same(actual, chunk, 'got the expected chunks ' + i)
+ })
+ })
+
+ forEach(chunks, function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+ tw.end()
+ })
+
+ test('write callbacks', function (t) {
+ t.plan(2)
+
+ const callbacks = chunks
+ .map(function (chunk, i) {
+ return [
+ i,
+ function (er) {
+ callbacks._called[i] = chunk
+ }
+ ]
+ })
+ .reduce(function (set, x) {
+ set['callback-' + x[0]] = x[1]
+ return set
+ }, {})
+ callbacks._called = []
+
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+
+ tw.on('finish', function () {
+ process.nextTick(function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ t.same(callbacks._called, chunks, 'called all callbacks')
+ })
+ })
+
+ forEach(chunks, function (chunk, i) {
+ tw.write(chunk, callbacks['callback-' + i])
+ })
+ tw.end()
+ })
+
+ test('end callback', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ tw.end(() => {
+ t.ok(true)
+ })
+ })
+
+ test('end callback with chunk', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ tw.end(Buffer.from('hello world'), () => {
+ t.ok(true)
+ })
+ })
+
+ test('end callback with chunk and encoding', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ tw.end('hello world', 'ascii', () => {
+ t.ok(true)
+ })
+ })
+
+ test('end callback after .write() call', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ tw.write(Buffer.from('hello world'))
+ tw.end(() => {
+ t.ok(true)
+ })
+ })
+
+ test('end callback called after write callback', function (t) {
+ t.plan(1)
+
+ const tw = new TestWriter()
+ let writeCalledback = false
+ tw.write(Buffer.from('hello world'), function () {
+ writeCalledback = true
+ })
+ tw.end(function () {
+ t.equal(writeCalledback, true)
+ })
+ })
+
+ test('encoding should be ignored for buffers', function (t) {
+ t.plan(1)
+
+ const tw = new Writable()
+ const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'
+ tw._write = function (chunk, encoding, cb) {
+ t.equal(chunk.toString('hex'), hex)
+ }
+ const buf = Buffer.from(hex, 'hex')
+ tw.write(buf, 'binary')
+ })
+
+ test('writables are not pipable', function (t) {
+ t.plan(1)
+
+ const w = new Writable({ autoDestroy: false })
+ w._write = function () {}
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ })
+ w.pipe(process.stdout)
+ t.ok(gotError)
+ })
+
+ test('duplexes are pipable', function (t) {
+ t.plan(1)
+
+ const d = new Duplex()
+ d._read = function () {}
+ d._write = function () {}
+ let gotError = false
+ d.on('error', function (er) {
+ gotError = true
+ })
+ d.pipe(process.stdout)
+ t.notOk(gotError)
+ })
+
+ test('end(chunk) two times is an error', function (t) {
+ t.plan(2)
+
+ const w = new Writable()
+ w._write = function () {}
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ t.equal(er.message, 'write after end')
+ })
+ w.end('this is the end')
+ w.end('and so is this')
+ process.nextTick(function () {
+ t.ok(gotError)
+ })
+ })
+
+ test('dont end while writing', function (t) {
+ t.plan(2)
+
+ const w = new Writable()
+ let wrote = false
+ w._write = function (chunk, e, cb) {
+ t.notOk(this.writing)
+ wrote = true
+ this.writing = true
+ setTimeout(function () {
+ this.writing = false
+ cb()
+ })
+ }
+ w.on('finish', function () {
+ t.ok(wrote)
+ })
+ w.write(Buffer.alloc(0))
+ w.end()
+ })
+
+ test('finish does not come before write cb', function (t) {
+ t.plan(1)
+
+ const w = new Writable()
+ let writeCb = false
+ w._write = function (chunk, e, cb) {
+ setTimeout(function () {
+ writeCb = true
+ cb()
+ }, 10)
+ }
+ w.on('finish', function () {
+ t.ok(writeCb)
+ })
+ w.write(Buffer.alloc(0))
+ w.end()
+ })
+
+ test('finish does not come before sync _write cb', function (t) {
+ t.plan(1)
+
+ const w = new Writable()
+ let writeCb = false
+ w._write = function (chunk, e, cb) {
+ cb()
+ }
+ w.on('finish', function () {
+ t.ok(writeCb)
+ })
+ w.write(Buffer.alloc(0), function (er) {
+ writeCb = true
+ })
+ w.end()
+ })
+
+ test('finish is emitted if last chunk is empty', function (t) {
+ t.plan(1)
+
+ const w = new Writable()
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
+ w.on('finish', () => {
+ t.ok(true)
+ })
+
+ w.write(Buffer.alloc(1))
+ w.end(Buffer.alloc(0))
+ })
+
+ function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
+ }
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream2-writable'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/src/test/browser/test-stream3-pause-then-read.js b/src/test/browser/test-stream3-pause-then-read.js
new file mode 100644
index 0000000000..6b4399f656
--- /dev/null
+++ b/src/test/browser/test-stream3-pause-then-read.js
@@ -0,0 +1,149 @@
+'use strict'
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+
+module.exports = function (t) {
+ t.plan(7)
+
+ const totalChunks = 100
+ const chunkSize = 99
+ const expectTotalData = totalChunks * chunkSize
+ let expectEndingData = expectTotalData
+
+ const r = new Readable({ highWaterMark: 1000 })
+ let chunks = totalChunks
+ r._read = function (n) {
+ if (!(chunks % 2)) {
+ setImmediate(push)
+ } else if (!(chunks % 3)) {
+ process.nextTick(push)
+ } else {
+ push()
+ }
+ }
+
+ let totalPushed = 0
+ function push() {
+ const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null
+ if (chunk) {
+ totalPushed += chunk.length
+ chunk.fill('x')
+ }
+ r.push(chunk)
+ }
+
+ read100()
+
+ // first we read 100 bytes
+ function read100() {
+ readn(100, onData)
+ }
+
+ function readn(n, then) {
+ // console.error('read %d', n);
+ expectEndingData -= n
+ ;(function read() {
+ const c = r.read(n)
+ if (!c) {
+ r.once('readable', read)
+ } else {
+ t.equal(c.length, n)
+ t.notOk(r._readableState.flowing)
+ then()
+ }
+ })()
+ }
+
+ // then we listen to some data events
+ function onData() {
+ expectEndingData -= 100
+ // console.error('onData');
+ let seen = 0
+ r.on('data', function od(c) {
+ seen += c.length
+ if (seen >= 100) {
+ // seen enough
+ r.removeListener('data', od)
+ r.pause()
+ if (seen > 100) {
+ // oh no, seen too much!
+ // put the extra back.
+ const diff = seen - 100
+ r.unshift(c.slice(c.length - diff))
+ // console.error('seen too much', seen, diff)
+ }
+
+ // Nothing should be lost in between
+ setImmediate(pipeLittle)
+ }
+ })
+ }
+
+ // Just pipe 200 bytes, then unshift the extra and unpipe
+ function pipeLittle() {
+ expectEndingData -= 200
+ // console.error('pipe a little');
+ const w = new Writable()
+ let written = 0
+ w.on('finish', function () {
+ t.equal(written, 200)
+ setImmediate(read1234)
+ })
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ if (written >= 200) {
+ r.unpipe(w)
+ w.end()
+ cb()
+ if (written > 200) {
+ const diff = written - 200
+ written -= diff
+ r.unshift(chunk.slice(chunk.length - diff))
+ }
+ } else {
+ setImmediate(cb)
+ }
+ }
+ r.pipe(w)
+ }
+
+ // now read 1234 more bytes
+ function read1234() {
+ readn(1234, resumePause)
+ }
+
+ function resumePause() {
+ // console.error('resumePause');
+ // don't read anything, just resume and re-pause a whole bunch
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ setImmediate(pipe)
+ }
+
+ function pipe() {
+ // console.error('pipe the rest');
+ const w = new Writable()
+ let written = 0
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ cb()
+ }
+ w.on('finish', function () {
+ // console.error('written', written, totalPushed);
+ t.equal(written, expectEndingData)
+ t.equal(totalPushed, expectTotalData)
+ })
+ r.pipe(w)
+ }
+}
+
+module.exports[kReadableStreamSuiteName] = 'stream3-pause-then-read'
diff --git a/src/test/ours/test-errors.js b/src/test/ours/test-errors.js
new file mode 100644
index 0000000000..a300f17075
--- /dev/null
+++ b/src/test/ours/test-errors.js
@@ -0,0 +1,132 @@
+'use strict'
+
+const t = require('tap')
+const { codes: errors } = require('../../lib/ours/errors')
+
+function checkError(err, Base, name, code, message) {
+ t.ok(err instanceof Base)
+ t.equal(err.name, name)
+ t.equal(err.code, code)
+ t.equal(err.message, message)
+}
+
+// Update these numbers based on the number of checkError calls below multiplied by the assertions within checkError
+t.plan(17 * 4)
+
+checkError(
+ new errors.ERR_INVALID_ARG_VALUE('name', 0),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_VALUE',
+ "The argument 'name' is invalid. Received 0"
+)
+
+checkError(
+ new errors.ERR_INVALID_ARG_VALUE('name', undefined),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_VALUE',
+ "The argument 'name' is invalid. Received undefined"
+)
+
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 0),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ 'The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received type number (0)'
+)
+
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('first argument', 'not string', 'foo'),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ "The first argument must be not string. Received type string ('foo')"
+)
+
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('obj.prop', 'string', undefined),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ 'The "obj.prop" property must be of type string. Received undefined'
+)
+
+checkError(
+ new errors.ERR_STREAM_PUSH_AFTER_EOF(),
+ Error,
+ 'Error',
+ 'ERR_STREAM_PUSH_AFTER_EOF',
+ 'stream.push() after EOF'
+)
+
+checkError(
+ new errors.ERR_METHOD_NOT_IMPLEMENTED('_read()'),
+ Error,
+ 'Error',
+ 'ERR_METHOD_NOT_IMPLEMENTED',
+ 'The _read() method is not implemented'
+)
+
+checkError(
+ new errors.ERR_METHOD_NOT_IMPLEMENTED('_write()'),
+ Error,
+ 'Error',
+ 'ERR_METHOD_NOT_IMPLEMENTED',
+ 'The _write() method is not implemented'
+)
+
+checkError(new errors.ERR_STREAM_PREMATURE_CLOSE(), Error, 'Error', 'ERR_STREAM_PREMATURE_CLOSE', 'Premature close')
+
+checkError(
+ new errors.ERR_STREAM_DESTROYED('pipe'),
+ Error,
+ 'Error',
+ 'ERR_STREAM_DESTROYED',
+ 'Cannot call pipe after a stream was destroyed'
+)
+
+checkError(
+ new errors.ERR_STREAM_DESTROYED('write'),
+ Error,
+ 'Error',
+ 'ERR_STREAM_DESTROYED',
+ 'Cannot call write after a stream was destroyed'
+)
+
+checkError(
+ new errors.ERR_MULTIPLE_CALLBACK(),
+ Error,
+ 'Error',
+ 'ERR_MULTIPLE_CALLBACK',
+ 'Callback called multiple times'
+)
+
+checkError(new errors.ERR_STREAM_CANNOT_PIPE(), Error, 'Error', 'ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable')
+
+checkError(new errors.ERR_STREAM_WRITE_AFTER_END(), Error, 'Error', 'ERR_STREAM_WRITE_AFTER_END', 'write after end')
+
+checkError(
+ new errors.ERR_STREAM_NULL_VALUES(),
+ TypeError,
+ 'TypeError',
+ 'ERR_STREAM_NULL_VALUES',
+ 'May not write null values to stream'
+)
+
+checkError(
+ new errors.ERR_UNKNOWN_ENCODING('foo'),
+ TypeError,
+ 'TypeError',
+ 'ERR_UNKNOWN_ENCODING',
+ 'Unknown encoding: foo'
+)
+
+checkError(
+ new errors.ERR_STREAM_UNSHIFT_AFTER_END_EVENT(),
+ Error,
+ 'Error',
+ 'ERR_STREAM_UNSHIFT_AFTER_END_EVENT',
+ 'stream.unshift() after end event'
+)
diff --git a/src/test/ours/test-fake-timers.js b/src/test/ours/test-fake-timers.js
new file mode 100644
index 0000000000..be95e071a7
--- /dev/null
+++ b/src/test/ours/test-fake-timers.js
@@ -0,0 +1,40 @@
+'use strict'
+
+require('../common')
+const t = require('tap')
+const util = require('util')
+const fakeTimers = require('@sinonjs/fake-timers')
+const Transform = require('../../lib/ours/index').Transform
+
+t.plan(1)
+
+function MyTransform() {
+ Transform.call(this)
+}
+
+util.inherits(MyTransform, Transform)
+
+const clock = fakeTimers.install({ toFake: ['setImmediate', 'nextTick'] })
+let stream2DataCalled = false
+
+const stream = new MyTransform()
+stream.on('data', function () {
+ stream.on('end', function () {
+ const stream2 = new MyTransform()
+ stream2.on('data', function () {
+ stream2.on('end', function () {
+ stream2DataCalled = true
+ })
+ setImmediate(function () {
+ stream2.end()
+ })
+ })
+ stream2.emit('data')
+ })
+ stream.end()
+})
+stream.emit('data')
+
+clock.runAll()
+clock.uninstall()
+t.ok(stream2DataCalled)
diff --git a/src/test/ours/test-stream-sync-write.js b/src/test/ours/test-stream-sync-write.js
new file mode 100644
index 0000000000..a12085a238
--- /dev/null
+++ b/src/test/ours/test-stream-sync-write.js
@@ -0,0 +1,44 @@
+'use strict'
+
+require('../common')
+const t = require('tap')
+const util = require('util')
+const stream = require('../../lib/ours/index')
+const WritableStream = stream.Writable
+
+t.plan(1)
+
+const InternalStream = function () {
+ WritableStream.call(this)
+}
+util.inherits(InternalStream, WritableStream)
+
+let invocations = 0
+InternalStream.prototype._write = function (chunk, encoding, callback) {
+ callback()
+}
+
+const internalStream = new InternalStream()
+
+const ExternalStream = function (writable) {
+ this._writable = writable
+ WritableStream.call(this)
+}
+util.inherits(ExternalStream, WritableStream)
+
+ExternalStream.prototype._write = function (chunk, encoding, callback) {
+ this._writable.write(chunk, encoding, callback)
+}
+
+const externalStream = new ExternalStream(internalStream)
+
+for (let i = 0; i < 2000; i++) {
+ externalStream.write(i.toString(), () => {
+ invocations++
+ })
+}
+
+externalStream.end()
+externalStream.on('finish', () => {
+ t.equal(invocations, 2000)
+})
diff --git a/src/util.js b/src/util.js
new file mode 100644
index 0000000000..8fbc8b3470
--- /dev/null
+++ b/src/util.js
@@ -0,0 +1,144 @@
+'use strict'
+
+const bufferModule = require('buffer')
+const { format, inspect } = require('./util/inspect')
+const {
+ codes: { ERR_INVALID_ARG_TYPE }
+} = require('./errors')
+const { kResistStopPropagation, AggregateError, SymbolDispose } = require('./primordials')
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+
+const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor
+const Blob = globalThis.Blob || bufferModule.Blob
+/* eslint-disable indent */
+const isBlob =
+ typeof Blob !== 'undefined'
+ ? function isBlob(b) {
+ // eslint-disable-next-line indent
+ return b instanceof Blob
+ }
+ : function isBlob(b) {
+ return false
+ }
+/* eslint-enable indent */
+
+const validateAbortSignal = (signal, name) => {
+ if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) {
+ throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
+ }
+}
+const validateFunction = (value, name) => {
+ if (typeof value !== 'function') {
+ throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
+ }
+}
+
+module.exports = {
+ AggregateError,
+ kEmptyObject: Object.freeze({}),
+ once(callback) {
+ let called = false
+ return function (...args) {
+ if (called) {
+ return
+ }
+
+ called = true
+ callback.apply(this, args)
+ }
+ },
+ createDeferredPromise: function () {
+ let resolve
+ let reject
+
+ // eslint-disable-next-line promise/param-names
+ const promise = new Promise((res, rej) => {
+ resolve = res
+ reject = rej
+ })
+ return {
+ promise,
+ resolve,
+ reject
+ }
+ },
+ promisify(fn) {
+ return new Promise((resolve, reject) => {
+ fn((err, ...args) => {
+ if (err) {
+ return reject(err)
+ }
+
+ return resolve(...args)
+ })
+ })
+ },
+ debuglog() {
+ return function () {}
+ },
+ format,
+ inspect,
+ types: {
+ isAsyncFunction(fn) {
+ return fn instanceof AsyncFunction
+ },
+
+ isArrayBufferView(arr) {
+ return ArrayBuffer.isView(arr)
+ }
+ },
+ isBlob,
+ deprecate(fn, message) {
+ return fn
+ },
+ addAbortListener:
+ require('events').addAbortListener ||
+ function addAbortListener(signal, listener) {
+ if (signal === undefined) {
+ throw new ERR_INVALID_ARG_TYPE('signal', 'AbortSignal', signal)
+ }
+ validateAbortSignal(signal, 'signal')
+ validateFunction(listener, 'listener')
+
+ let removeEventListener
+ if (signal.aborted) {
+ queueMicrotask(() => listener())
+ } else {
+ signal.addEventListener('abort', listener, { __proto__: null, once: true, [kResistStopPropagation]: true })
+ removeEventListener = () => {
+ signal.removeEventListener('abort', listener)
+ }
+ }
+ return {
+ __proto__: null,
+ [SymbolDispose]() {
+ removeEventListener?.()
+ }
+ }
+ },
+ AbortSignalAny:
+ AbortSignal.any ||
+ function AbortSignalAny(signals) {
+ // Fast path if there is only one signal.
+ if (signals.length === 1) {
+ return signals[0]
+ }
+ const ac = new AbortController()
+ const abort = () => ac.abort()
+ signals.forEach((signal) => {
+ validateAbortSignal(signal, 'signals')
+ signal.addEventListener('abort', abort, { once: true })
+ })
+ ac.signal.addEventListener(
+ 'abort',
+ () => {
+ signals.forEach((signal) => signal.removeEventListener('abort', abort))
+ },
+ { once: true }
+ )
+ return ac.signal
+ }
+}
+
+module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom')
diff --git a/src/util/inspect.js b/src/util/inspect.js
new file mode 100644
index 0000000000..b6e0b39478
--- /dev/null
+++ b/src/util/inspect.js
@@ -0,0 +1,59 @@
+'use strict'
+
+/*
+ This file is a reduced and adapted version of the main lib/internal/util/inspect.js file defined at
+
+ https://github.com/nodejs/node/blob/main/lib/internal/util/inspect.js
+
+ Don't try to replace with the original file and keep it up to date with the upstream file.
+*/
+
+module.exports = {
+ format(format, ...args) {
+ // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args
+ return format.replace(/%([sdifj])/g, function (...[_unused, type]) {
+ const replacement = args.shift()
+
+ if (type === 'f') {
+ return replacement.toFixed(6)
+ } else if (type === 'j') {
+ return JSON.stringify(replacement)
+ } else if (type === 's' && typeof replacement === 'object') {
+ const ctor = replacement.constructor !== Object ? replacement.constructor.name : ''
+ return `${ctor} {}`.trim()
+ } else {
+ return replacement.toString()
+ }
+ })
+ },
+ inspect(value) {
+ // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options
+ switch (typeof value) {
+ case 'string':
+ if (value.includes("'")) {
+ if (!value.includes('"')) {
+ return `"${value}"`
+ } else if (!value.includes('`') && !value.includes('${')) {
+ return `\`${value}\``
+ }
+ }
+
+ return `'${value}'`
+ case 'number':
+ if (isNaN(value)) {
+ return 'NaN'
+ } else if (Object.is(value, -0)) {
+ return String(value)
+ }
+
+ return value
+ case 'bigint':
+ return `${String(value)}n`
+ case 'boolean':
+ case 'undefined':
+ return String(value)
+ case 'object':
+ return '{}'
+ }
+ }
+}
diff --git a/tap.yml b/tap.yml
new file mode 100644
index 0000000000..8de8ebe6d5
--- /dev/null
+++ b/tap.yml
@@ -0,0 +1,6 @@
+---
+bail: false
+coverage: false
+node-arg:
+ - --expose-internals
+ - --no-warnings
diff --git a/test/basic.js b/test/basic.js
deleted file mode 100644
index 4b81cda25c..0000000000
--- a/test/basic.js
+++ /dev/null
@@ -1,265 +0,0 @@
-var tap = require('tap');
-var R = require('../readable');
-
-var util = require('util');
-var EE = require('events').EventEmitter;
-
-function TestReader(n) {
- R.apply(this);
- this._buffer = new Buffer(n || 100);
- this._buffer.fill('x');
- this._pos = 0;
- this._bufs = 10;
-}
-
-util.inherits(TestReader, R);
-
-TestReader.prototype.read = function(n) {
- var max = this._buffer.length - this._pos;
- n = n || max;
- n = Math.max(n, 0);
- var toRead = Math.min(n, max);
- if (toRead === 0) {
- // simulate the read buffer filling up with some more bytes some time
- // in the future.
- setTimeout(function() {
- this._pos = 0;
- this._bufs -= 1;
- if (this._bufs <= 0) {
- // read them all!
- if (!this.ended) {
- this.emit('end');
- this.ended = true;
- }
- } else {
- this.emit('readable');
- }
- }.bind(this), 10);
- return null;
- }
-
- var ret = this._buffer.slice(this._pos, this._pos + toRead);
- this._pos += toRead;
- return ret;
-};
-
-/////
-
-function TestWriter() {
- EE.apply(this);
- this.received = [];
- this.flush = false;
-}
-
-util.inherits(TestWriter, EE);
-
-TestWriter.prototype.write = function(c) {
- this.received.push(c.toString());
- this.emit('write', c);
- return true;
-
- // flip back and forth between immediate acceptance and not.
- this.flush = !this.flush;
- if (!this.flush) setTimeout(this.emit.bind(this, 'drain'), 10);
- return this.flush;
-};
-
-TestWriter.prototype.end = function(c) {
- if (c) this.write(c);
- this.emit('end', this.received);
-};
-
-////////
-
-tap.test('a most basic test', function(t) {
- var r = new TestReader(20);
-
- var reads = [];
- var expect = [ 'x',
- 'xx',
- 'xxx',
- 'xxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxxxxx',
- 'xxxxxxxxx',
- 'xxx',
- 'xxxxxxxxxxxx',
- 'xxxxxxxx',
- 'xxxxxxxxxxxxxxx',
- 'xxxxx',
- 'xxxxxxxxxxxxxxxxxx',
- 'xx',
- 'xxxxxxxxxxxxxxxxxxxx',
- 'xxxxxxxxxxxxxxxxxxxx',
- 'xxxxxxxxxxxxxxxxxxxx',
- 'xxxxxxxxxxxxxxxxxxxx',
- 'xxxxxxxxxxxxxxxxxxxx' ];
-
- r.on('end', function() {
- t.same(reads, expect);
- t.end();
- });
-
- var readSize = 1;
- function flow() {
- var res;
- while (null !== (res = r.read(readSize++))) {
- reads.push(res.toString());
- }
- r.once('readable', flow);
- }
-
- flow();
-});
-
-tap.test('pipe', function(t) {
- var r = new TestReader(5);
-
- var expect = [ 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx' ]
-
- var w = new TestWriter;
- var flush = true;
- w.on('end', function(received) {
- t.same(received, expect);
- t.end();
- });
-
- r.pipe(w);
-});
-
-
-
-[1,2,3,4,5,6,7,8,9].forEach(function(SPLIT) {
- tap.test('unpipe', function(t) {
- var r = new TestReader(5);
-
- // unpipe after 3 writes, then write to another stream instead.
- var expect = [ 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx' ];
- expect = [ expect.slice(0, SPLIT), expect.slice(SPLIT) ];
-
- var w = [ new TestWriter(), new TestWriter() ];
-
- var writes = SPLIT;
- w[0].on('write', function() {
- if (--writes === 0) {
- r.unpipe();
- w[0].end();
- r.pipe(w[1]);
- }
- });
-
- var ended = 0;
-
- w[0].on('end', function(results) {
- ended++;
- t.same(results, expect[0]);
- });
-
- w[1].on('end', function(results) {
- ended++;
- t.equal(ended, 2);
- t.same(results, expect[1]);
- t.end();
- });
-
- r.pipe(w[0]);
- });
-});
-
-
-// both writers should get the same exact data.
-tap.test('multipipe', function(t) {
- var r = new TestReader(5);
- var w = [ new TestWriter, new TestWriter ];
-
- var expect = [ 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx' ];
-
- var c = 2;
- w[0].on('end', function(received) {
- t.same(received, expect, 'first');
- if (--c === 0) t.end();
- });
- w[1].on('end', function(received) {
- t.same(received, expect, 'second');
- if (--c === 0) t.end();
- });
-
- r.pipe(w[0]);
- r.pipe(w[1]);
-});
-
-
-[1,2,3,4,5,6,7,8,9].forEach(function(SPLIT) {
- tap.test('multi-unpipe', function(t) {
- var r = new TestReader(5);
-
- // unpipe after 3 writes, then write to another stream instead.
- var expect = [ 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx',
- 'xxxxx' ];
- expect = [ expect.slice(0, SPLIT), expect.slice(SPLIT) ];
-
- var w = [ new TestWriter(), new TestWriter(), new TestWriter() ];
-
- var writes = SPLIT;
- w[0].on('write', function() {
- if (--writes === 0) {
- r.unpipe();
- w[0].end();
- r.pipe(w[1]);
- }
- });
-
- var ended = 0;
-
- w[0].on('end', function(results) {
- ended++;
- t.same(results, expect[0]);
- });
-
- w[1].on('end', function(results) {
- ended++;
- t.equal(ended, 2);
- t.same(results, expect[1]);
- t.end();
- });
-
- r.pipe(w[0]);
- r.pipe(w[2]);
- });
-});
diff --git a/test/browser/fixtures/esbuild-browsers-shims.mjs b/test/browser/fixtures/esbuild-browsers-shims.mjs
new file mode 100644
index 0000000000..cd789d3e51
--- /dev/null
+++ b/test/browser/fixtures/esbuild-browsers-shims.mjs
@@ -0,0 +1,7 @@
+import * as processModule from 'process'
+
+export const process = processModule
+
+export function setImmediate(fn, ...args) {
+ setTimeout(() => fn(...args), 1)
+}
diff --git a/test/browser/fixtures/esbuild.browser.config.mjs b/test/browser/fixtures/esbuild.browser.config.mjs
new file mode 100644
index 0000000000..6dd371dd9a
--- /dev/null
+++ b/test/browser/fixtures/esbuild.browser.config.mjs
@@ -0,0 +1,23 @@
+import { build } from 'esbuild'
+import alias from 'esbuild-plugin-alias'
+import { createRequire } from 'module'
+
+const require = createRequire(import.meta.url)
+
+build({
+ entryPoints: ['test/browser/test-browser.js'],
+ outfile: 'tmp/esbuild/suite.browser.js',
+ bundle: true,
+ platform: 'browser',
+ plugins: [
+ alias({
+ crypto: require.resolve('crypto-browserify'),
+ path: require.resolve('path-browserify'),
+ stream: require.resolve('stream-browserify')
+ })
+ ],
+ define: {
+ global: 'globalThis'
+ },
+ inject: ['test/browser/fixtures/esbuild-browsers-shims.mjs']
+}).catch(() => process.exit(1))
diff --git a/test/browser/fixtures/esbuild.node.config.mjs b/test/browser/fixtures/esbuild.node.config.mjs
new file mode 100644
index 0000000000..21f70ad284
--- /dev/null
+++ b/test/browser/fixtures/esbuild.node.config.mjs
@@ -0,0 +1,8 @@
+import { build } from 'esbuild'
+
+build({
+ entryPoints: ['test/browser/test-browser.js'],
+ outfile: 'tmp/esbuild/suite.node.js',
+ bundle: true,
+ platform: 'node'
+}).catch(() => process.exit(1))
diff --git a/test/browser/fixtures/index.html b/test/browser/fixtures/index.html
new file mode 100644
index 0000000000..16b329e8e6
--- /dev/null
+++ b/test/browser/fixtures/index.html
@@ -0,0 +1,72 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/browser/fixtures/prepare.sh b/test/browser/fixtures/prepare.sh
new file mode 100644
index 0000000000..56380d61f4
--- /dev/null
+++ b/test/browser/fixtures/prepare.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -x -e
+
+[ "$BUNDLER" == "" ] && BUNDLER=$1
+
+if [ "$BUNDLER" != "" ]; then
+ rm -rf tmp/$BUNDLER
+ mkdir -p tmp/$BUNDLER
+ cp test/browser/fixtures/index.html tmp/$BUNDLER
+fi
+
+case $BUNDLER in
+ browserify)
+ browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js
+ browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js
+ ;;
+ esbuild)
+ node src/test/browser/fixtures/esbuild.browser.config.mjs
+ node src/test/browser/fixtures/esbuild.node.config.mjs
+ ;;
+ rollup)
+ rollup -c test/browser/fixtures/rollup.browser.config.mjs
+ rollup -c test/browser/fixtures/rollup.node.config.mjs
+ ;;
+ webpack)
+ webpack -c test/browser/fixtures/webpack.browser.config.mjs
+ webpack -c test/browser/fixtures/webpack.node.config.mjs
+ ;;
+ *)
+ echo "Please set the environment variable BUNDLER to browserify, esbuild, rollup or webpack."
+ exit 1
+ ;;
+esac
\ No newline at end of file
diff --git a/test/browser/fixtures/rollup.browser.config.mjs b/test/browser/fixtures/rollup.browser.config.mjs
new file mode 100644
index 0000000000..4a3080c354
--- /dev/null
+++ b/test/browser/fixtures/rollup.browser.config.mjs
@@ -0,0 +1,26 @@
+import commonjs from '@rollup/plugin-commonjs'
+import inject from '@rollup/plugin-inject'
+import nodeResolve from '@rollup/plugin-node-resolve'
+import { resolve } from 'path'
+import nodePolyfill from 'rollup-plugin-polyfill-node'
+
+export default {
+ input: ['test/browser/test-browser.js'],
+ output: {
+ intro: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }',
+ file: 'tmp/rollup/suite.browser.js',
+ format: 'iife',
+ name: 'readableStreamTestSuite'
+ },
+ plugins: [
+ commonjs(),
+ nodePolyfill(),
+ inject({
+ process: resolve('node_modules/process/browser.js')
+ }),
+ nodeResolve({
+ browser: true,
+ preferBuiltins: false
+ })
+ ]
+}
diff --git a/test/browser/fixtures/rollup.node.config.mjs b/test/browser/fixtures/rollup.node.config.mjs
new file mode 100644
index 0000000000..7eac856bce
--- /dev/null
+++ b/test/browser/fixtures/rollup.node.config.mjs
@@ -0,0 +1,19 @@
+import commonjs from '@rollup/plugin-commonjs'
+import nodeResolve from '@rollup/plugin-node-resolve'
+
+export default {
+ input: ['test/browser/test-browser.js'],
+ output: {
+ file: 'tmp/rollup/suite.node.js',
+ format: 'cjs',
+ name: 'readableStreamTestSuite',
+ exports: 'auto'
+ },
+ plugins: [
+ commonjs(),
+ nodeResolve({
+ browser: false,
+ preferBuiltins: true
+ })
+ ]
+}
diff --git a/test/browser/fixtures/webpack.browser.config.mjs b/test/browser/fixtures/webpack.browser.config.mjs
new file mode 100644
index 0000000000..a2d889e0da
--- /dev/null
+++ b/test/browser/fixtures/webpack.browser.config.mjs
@@ -0,0 +1,35 @@
+import { createRequire } from 'module'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import webpack from 'webpack'
+
+const require = createRequire(import.meta.url)
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../')
+
+export default {
+ entry: './test/browser/test-browser.js',
+ output: {
+ filename: 'suite.browser.js',
+ path: resolve(rootDir, 'tmp/webpack')
+ },
+ mode: 'production',
+ target: 'web',
+ performance: false,
+ plugins: [
+ new webpack.BannerPlugin({
+ banner: 'function setImmediate(fn, ...args) { setTimeout(() => fn(...args), 1) }',
+ raw: true
+ }),
+ new webpack.ProvidePlugin({
+ process: require.resolve('process')
+ })
+ ],
+ resolve: {
+ aliasFields: ['browser'],
+ fallback: {
+ crypto: require.resolve('crypto-browserify'),
+ path: require.resolve('path-browserify'),
+ stream: require.resolve('stream-browserify')
+ }
+ }
+}
diff --git a/test/browser/fixtures/webpack.node.config.mjs b/test/browser/fixtures/webpack.node.config.mjs
new file mode 100644
index 0000000000..3b20bdef47
--- /dev/null
+++ b/test/browser/fixtures/webpack.node.config.mjs
@@ -0,0 +1,15 @@
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+
+const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../../')
+
+export default {
+ entry: './test/browser/test-browser.js',
+ output: {
+ filename: 'suite.node.js',
+ path: resolve(rootDir, 'tmp/webpack')
+ },
+ mode: 'production',
+ target: 'node',
+ performance: false
+}
diff --git a/test/browser/runner-browser.mjs b/test/browser/runner-browser.mjs
new file mode 100644
index 0000000000..e8bb84482c
--- /dev/null
+++ b/test/browser/runner-browser.mjs
@@ -0,0 +1,109 @@
+import { resolve } from 'node:path'
+import { Readable } from 'node:stream'
+import { fileURLToPath } from 'node:url'
+import { chromium, firefox, webkit } from 'playwright'
+import reporter from 'tap-mocha-reporter'
+import Parser from 'tap-parser'
+
+const validBrowsers = ['chrome', 'firefox', 'safari', 'edge']
+const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+
+function parseEnviroment() {
+ const headless = process.env.HEADLESS !== 'false'
+ const reporter = process.env.SKIP_REPORTER !== 'true'
+
+ let [browser, bundler] = process.argv.slice(2, 4)
+
+ if (!browser) {
+ browser = process.env.BROWSER
+ }
+
+ if (!bundler) {
+ bundler = process.env.BUNDLER
+ }
+
+ if (!validBrowsers.includes(browser) || !validBundlers.includes(bundler)) {
+ console.error(`Usage: node runner-browser.mjs [${validBrowsers.join('|')}] [${validBundlers.join('|')}]`)
+ console.error('You can also use the BROWSER and BUNDLER environment variables.')
+ process.exit(1)
+ }
+
+ return { browser, bundler, headless, reporter }
+}
+
+function createBrowser({ browser: id, headless }) {
+ switch (id) {
+ case 'firefox':
+ return firefox.launch({ headless })
+ case 'safari':
+ return webkit.launch({ headless })
+ case 'edge':
+ return chromium.launch({ headless, channel: 'msedge' })
+ default:
+ return chromium.launch({ headless })
+ }
+}
+
+function setupTape(page, configuration) {
+ const output = new Readable({ read() {} })
+ const parser = new Parser({ strict: true })
+
+ output.pipe(parser)
+
+ if (configuration.reporter) {
+ output.pipe(reporter('spec'))
+ }
+
+ parser.on('line', (line) => {
+ if (line !== '# readable-stream-finished\n') {
+ if (line.startsWith('# not ok')) {
+ process.exitCode = 1
+ }
+
+ if (!configuration.reporter) {
+ console.log(line.replace(/\n$/, ''))
+ }
+
+ return
+ }
+
+ output.push(null)
+
+ if (configuration.headless) {
+ browser.close()
+ }
+ })
+
+ page.on('console', (msg) => {
+ if (msg.type() === 'error') {
+ console.error(`\x1b[31m\x1b[1mconsole.error:\x1b[0m ${msg.text()}\n`)
+ return
+ }
+
+ output.push(msg.text() + '\n')
+ })
+
+ // Firefox in headless mode is showing an error even if onerror caught it. Disable in that case
+ if (!configuration.headless || configuration.browser !== 'firefox') {
+ page.on('pageerror', (err) => {
+ console.log('\x1b[31m\x1b[1m--- The browser threw an uncaught error ---\x1b[0m')
+ console.log(err.stack)
+
+ if (configuration.headless) {
+ console.log('\x1b[31m\x1b[1m--- Exiting with exit code 1 ---\x1b[0m')
+ process.exit(1)
+ } else {
+ process.exitCode = 1
+ }
+ })
+ }
+}
+
+const configuration = parseEnviroment()
+const browser = await createBrowser(configuration)
+const page = await browser.newPage()
+setupTape(page, configuration)
+
+// Execute the test suite
+const __dirname = fileURLToPath(new URL('.', import.meta.url))
+await page.goto(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/index.html`)}`)
diff --git a/test/browser/runner-node.mjs b/test/browser/runner-node.mjs
new file mode 100644
index 0000000000..840d19e2dc
--- /dev/null
+++ b/test/browser/runner-node.mjs
@@ -0,0 +1,77 @@
+import { resolve } from 'node:path'
+import { Duplex } from 'node:stream'
+import { fileURLToPath } from 'node:url'
+import reporter from 'tap-mocha-reporter'
+import Parser from 'tap-parser'
+
+const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+
+function parseEnviroment() {
+ const reporter = process.env.SKIP_REPORTER !== 'true'
+ const bundler = process.argv[2] || process.env.BUNDLER
+
+ if (!validBundlers.includes(bundler)) {
+ console.error(`Usage: node runner-node.mjs [${validBundlers.join('|')}]`)
+ console.error('You can also use the BUNDLER environment variable.')
+ process.exit(1)
+ }
+
+ return { bundler, reporter }
+}
+
+function setupTape(configuration) {
+ const output = new Duplex({ read() {}, write() {} })
+ const parser = new Parser({ strict: true })
+
+ globalThis.logger = function (message, ...args) {
+ if (typeof message !== 'string') {
+ console.log(message, ...args)
+ return
+ }
+
+ output.push(message + '\n')
+ }
+
+ output.pipe(parser)
+
+ if (configuration.reporter) {
+ output.pipe(reporter('spec'))
+ }
+
+ process.on('uncaughtException', (err) => {
+ if (global.onerror) {
+ global.onerror(err)
+ } else {
+ process.removeAllListeners('uncaughtException')
+ throw err
+ }
+ })
+
+ parser.on('line', (line) => {
+ if (line === '# readable-stream-finished\n') {
+ output.push(null)
+ output.end()
+ return
+ } else if (line.startsWith('# not ok')) {
+ process.exitCode = 1
+ }
+
+ if (!configuration.reporter) {
+ console.log(line.replace(/\n$/, ''))
+ }
+ })
+}
+
+async function main() {
+ const configuration = parseEnviroment()
+ setupTape(configuration)
+
+ // Execute the test suite
+ const __dirname = fileURLToPath(new URL('.', import.meta.url))
+ await import(`file://${resolve(__dirname, `../../tmp/${configuration.bundler}/suite.node.js`)}`)
+}
+
+main().catch((e) => {
+ console.error(e)
+ process.exit(1)
+})
diff --git a/test/browser/runner-prepare.mjs b/test/browser/runner-prepare.mjs
new file mode 100644
index 0000000000..76e38f8504
--- /dev/null
+++ b/test/browser/runner-prepare.mjs
@@ -0,0 +1,107 @@
+import { exec } from 'child_process'
+import { promises } from 'fs'
+import { resolve } from 'path'
+import { fileURLToPath } from 'url'
+import util from '../../lib/ours/util.js'
+const { copyFile, mkdir, rmdir } = promises
+
+function highlightFile(file) {
+ return `\x1b[33m${file.replace(process.cwd() + '/', '')}\x1b[0m`
+}
+
+function info(message) {
+ console.log(`\x1b[34m[INFO]\x1b[0m ${message}`)
+}
+
+function error(message) {
+ console.log(`\x1b[31m[ERROR]\x1b[0m ${message}`)
+}
+
+async function run(command) {
+ info(`Executing \x1b[33m${command}\x1b[0m ...`)
+ const { promise, reject, resolve } = util.createDeferredPromise()
+
+ let hasOutput = false
+ function logOutput(chunk) {
+ if (!hasOutput) {
+ hasOutput = true
+ console.log('')
+ }
+
+ console.log(chunk.toString('utf-8').trim().replace(/^/gm, ' '))
+ }
+
+ try {
+ const process = exec(command, { stdio: 'pipe' }, (error) => {
+ if (error) {
+ return reject(error)
+ }
+
+ resolve(error)
+ })
+
+ process.stdout.on('data', logOutput)
+ process.stderr.on('data', logOutput)
+ await promise
+
+ if (hasOutput) {
+ console.log('')
+ }
+ } catch (e) {
+ if (hasOutput) {
+ console.log('')
+ }
+
+ error(`Command failed with status code ${e.code}.`)
+ process.exit(1)
+ }
+}
+
+async function main() {
+ const validBundlers = ['browserify', 'esbuild', 'rollup', 'webpack']
+ const bundler = process.argv[2] || process.env.BUNDLER
+
+ if (!validBundlers.includes(bundler)) {
+ error(`Usage: node runner-prepare.mjs [${validBundlers.join('|')}]`)
+ error('You can also use the BUNDLER environment variable.')
+ process.exit(1)
+ }
+
+ const rootDir = resolve(fileURLToPath(new URL('.', import.meta.url)), `../../tmp/${bundler}`)
+ const sourceIndex = resolve(fileURLToPath(new URL('.', import.meta.url)), '../../test/browser/fixtures/index.html')
+ const targetIndex = resolve(rootDir, 'index.html')
+
+ info(`Emptying directory ${highlightFile(rootDir)} ...`)
+ try {
+ await rmdir(rootDir, { recursive: true })
+ } catch (e) {
+ // No-op
+ }
+ await mkdir(rootDir, { recursive: true })
+
+ info(`Copying file ${highlightFile(sourceIndex)} to ${highlightFile(targetIndex)} ...`)
+ await copyFile(sourceIndex, targetIndex)
+
+ switch (bundler) {
+ case 'browserify':
+ await run('browserify test/browser/test-browser.js -o tmp/browserify/suite.browser.js')
+ await run('browserify test/browser/test-browser.js --node -o tmp/browserify/suite.node.js')
+ break
+ case 'esbuild':
+ await run('node src/test/browser/fixtures/esbuild.browser.config.mjs')
+ await run('node src/test/browser/fixtures/esbuild.node.config.mjs')
+ break
+ case 'rollup':
+ await run('rollup -c test/browser/fixtures/rollup.browser.config.mjs')
+ await run('rollup -c test/browser/fixtures/rollup.node.config.mjs')
+ break
+ case 'webpack':
+ await run('webpack -c test/browser/fixtures/webpack.browser.config.mjs')
+ await run('webpack -c test/browser/fixtures/webpack.node.config.mjs')
+ }
+}
+
+main().catch((e) => {
+ error(e)
+ process.exit(1)
+})
diff --git a/test/browser/symbols.js b/test/browser/symbols.js
new file mode 100644
index 0000000000..8450b8f64c
--- /dev/null
+++ b/test/browser/symbols.js
@@ -0,0 +1,6 @@
+'use strict'
+
+module.exports = {
+ kReadableStreamSuiteName: Symbol('readable-stream.suiteName'),
+ kReadableStreamSuiteHasMultipleTests: Symbol('readable-stream.suiteHasMultipleTests')
+}
diff --git a/test/browser/test-browser.js b/test/browser/test-browser.js
new file mode 100644
index 0000000000..bbf8ce69ee
--- /dev/null
+++ b/test/browser/test-browser.js
@@ -0,0 +1,120 @@
+'use strict'
+
+const logger = globalThis.logger || console.log
+const tape = require('tape')
+const { createDeferredPromise } = require('../../lib/ours/util')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+let totalTests = 0
+let completed = 0
+let failed = 0
+async function test(rootName, fn) {
+ // Gather all tests in the file
+ const tests = {}
+ function addTests(name, fn) {
+ tests[`${rootName} - ${name}`] = fn
+ }
+ if (fn[kReadableStreamSuiteHasMultipleTests]) {
+ fn(addTests)
+ } else {
+ tests[rootName] = fn
+ }
+
+ // Execute each test in a separate harness and then output overall results
+ for (const [name, subtest] of Object.entries(tests)) {
+ const currentIndex = ++totalTests
+ const harness = tape.createHarness()
+ const { promise, resolve } = createDeferredPromise()
+ const messages = [`# Subtest: ${name}`]
+ harness.createStream().on('data', function (row) {
+ if (row.startsWith('TAP version') || row.match(new RegExp(`^# (?:${name})`))) {
+ return
+ }
+ messages.push(row.trim().replace(/^/gm, ' '))
+ })
+ harness.onFinish(() => {
+ const success = harness._exitCode === 0
+ messages.push(`${success ? 'ok' : 'not ok'} ${currentIndex} - ${name}`)
+ logger(messages.join('\n'))
+ completed++
+ if (!success) {
+ failed++
+ }
+ resolve()
+ })
+ harness(name, subtest)
+ await promise
+ }
+}
+async function runTests(suites) {
+ // Setup an interval
+ const interval = setInterval(() => {
+ if (completed < totalTests) {
+ return
+ }
+ clearInterval(interval)
+ logger(`1..${totalTests}`)
+ logger(`# tests ${totalTests}`)
+ logger(`# pass ${completed - failed}`)
+ logger(`# fail ${failed}`)
+ logger(`# ${failed === 0 ? 'ok' : 'not ok'}`)
+
+ // This line is used by the playwright script to detect we're done
+ logger('# readable-stream-finished')
+ }, 100)
+
+ // Execute each test serially, to avoid side-effects errors when dealing with global error handling
+ for (const suite of suites) {
+ await test(suite[kReadableStreamSuiteName], suite)
+ }
+}
+
+// Important: Do not try to make the require dynamic because bundlers will not like it
+runTests([
+ require('./test-stream-big-packet'),
+ require('./test-stream-big-push'),
+ require('./test-stream-duplex'),
+ require('./test-stream-end-paused'),
+ require('./test-stream-finished'),
+ require('./test-stream-ispaused'),
+ require('./test-stream-pipe-after-end'),
+ require('./test-stream-pipe-cleanup-pause'),
+ require('./test-stream-pipe-cleanup'),
+ require('./test-stream-pipe-error-handling'),
+ require('./test-stream-pipe-event'),
+ require('./test-stream-pipe-without-listenerCount'),
+ require('./test-stream-pipeline'),
+ require('./test-stream-push-order'),
+ require('./test-stream-push-strings'),
+ require('./test-stream-readable-constructor-set-methods'),
+ require('./test-stream-readable-event'),
+ require('./test-stream-sync-write'),
+ require('./test-stream-transform-constructor-set-methods'),
+ require('./test-stream-transform-objectmode-falsey-value'),
+ require('./test-stream-transform-split-objectmode'),
+ require('./test-stream-unshift-empty-chunk'),
+ require('./test-stream-unshift-read-race'),
+ require('./test-stream-writable-change-default-encoding'),
+ require('./test-stream-writable-constructor-set-methods'),
+ require('./test-stream-writable-decoded-encoding'),
+ require('./test-stream-writev'),
+ require('./test-stream2-base64-single-char-read-end'),
+ require('./test-stream2-compatibility'),
+ require('./test-stream2-large-read-stall'),
+ require('./test-stream2-objects'),
+ require('./test-stream2-pipe-error-handling'),
+ require('./test-stream2-pipe-error-once-listener'),
+ require('./test-stream2-push'),
+ require('./test-stream2-readable-empty-buffer-no-eof'),
+ require('./test-stream2-readable-from-list'),
+ require('./test-stream2-readable-legacy-drain'),
+ require('./test-stream2-readable-non-empty-end'),
+ require('./test-stream2-readable-wrap-empty'),
+ require('./test-stream2-readable-wrap'),
+ require('./test-stream2-set-encoding'),
+ require('./test-stream2-transform'),
+ require('./test-stream2-unpipe-drain'),
+ require('./test-stream2-writable'),
+ require('./test-stream3-pause-then-read')
+]).catch((e) => {
+ console.error(e)
+})
diff --git a/test/browser/test-stream-big-packet.js b/test/browser/test-stream-big-packet.js
new file mode 100644
index 0000000000..58cd37cb9d
--- /dev/null
+++ b/test/browser/test-stream-big-packet.js
@@ -0,0 +1,67 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const inherits = require('inherits')
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(3)
+ let passed = false
+ function PassThrough() {
+ Transform.call(this)
+ }
+ inherits(PassThrough, Transform)
+ PassThrough.prototype._transform = function (chunk, encoding, done) {
+ this.push(chunk)
+ done()
+ }
+ function TestStream() {
+ Transform.call(this)
+ }
+ inherits(TestStream, Transform)
+ TestStream.prototype._transform = function (chunk, encoding, done) {
+ if (!passed) {
+ // Char 'a' only exists in the last write
+ passed = indexOf(chunk.toString(), 'a') >= 0
+ }
+ if (passed) {
+ t.ok(passed)
+ }
+ done()
+ }
+ const s1 = new PassThrough()
+ const s2 = new PassThrough()
+ const s3 = new TestStream()
+ s1.pipe(s3)
+ // Don't let s2 auto close which may close s3
+ s2.pipe(s3, {
+ end: false
+ })
+
+ // We must write a buffer larger than highWaterMark
+ const big = Buffer.alloc(s1._writableState.highWaterMark + 1)
+ big.fill('x')
+
+ // Since big is larger than highWaterMark, it will be buffered internally.
+ t.notOk(s1.write(big))
+
+ // 'tiny' is small enough to pass through internal buffer.
+ t.ok(s2.write('tiny'))
+
+ // Write some small data in next IO loop, which will never be written to s3
+ // Because 'drain' event is not emitted from s1 and s1 is still paused
+ setImmediate(s1.write.bind(s1), 'later')
+ function indexOf(xs, x) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) {
+ return i
+ }
+ }
+ return -1
+ }
+}
+module.exports[kReadableStreamSuiteName] = 'stream-big-packet'
diff --git a/test/browser/test-stream-big-push.js b/test/browser/test-stream-big-push.js
new file mode 100644
index 0000000000..03abceac32
--- /dev/null
+++ b/test/browser/test-stream-big-push.js
@@ -0,0 +1,61 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(10)
+ const str = 'asdfasdfasdfasdfasdf'
+ const r = new Readable({
+ highWaterMark: 5,
+ encoding: 'utf8'
+ })
+ let reads = 0
+ let eofed = false
+ let ended = false
+ r._read = function (n) {
+ if (reads === 0) {
+ setTimeout(function () {
+ r.push(str)
+ })
+ reads++
+ } else if (reads === 1) {
+ const ret = r.push(str)
+ t.equal(ret, false)
+ reads++
+ } else {
+ t.notOk(eofed)
+ eofed = true
+ r.push(null)
+ }
+ }
+ r.on('end', function () {
+ ended = true
+ })
+
+ // push some data in to start.
+ // we've never gotten any read event at this point.
+ const ret = r.push(str)
+
+ // should be false. > hwm
+ t.notOk(ret)
+ let chunk = r.read()
+ t.equal(chunk, str)
+ chunk = r.read()
+ t.equal(chunk, null)
+ r.once('readable', function () {
+ // this time, we'll get *all* the remaining data, because
+ // it's been added synchronously, as the read WOULD take
+ // us below the hwm, and so it triggered a _read() again,
+ // which synchronously added more, which we then return.
+ chunk = r.read()
+ t.equal(chunk, str + str)
+ chunk = r.read()
+ t.equal(chunk, null)
+ })
+ r.on('end', function () {
+ t.ok(eofed)
+ t.ok(ended)
+ t.equal(reads, 2)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-big-push'
diff --git a/test/browser/test-stream-duplex.js b/test/browser/test-stream-duplex.js
new file mode 100644
index 0000000000..e594458c81
--- /dev/null
+++ b/test/browser/test-stream-duplex.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const { Duplex } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(4)
+ const stream = new Duplex({
+ objectMode: true
+ })
+ t.ok(stream._readableState.objectMode)
+ t.ok(stream._writableState.objectMode)
+ let written
+ let read
+ stream._write = function (obj, _, cb) {
+ written = obj
+ cb()
+ }
+ stream._read = function () {}
+ stream.on('data', function (obj) {
+ read = obj
+ })
+ stream.on('end', function () {
+ t.equal(read.val, 1)
+ t.equal(written.val, 2)
+ })
+ stream.push({
+ val: 1
+ })
+ stream.end({
+ val: 2
+ })
+ stream.push(null)
+}
+module.exports[kReadableStreamSuiteName] = 'stream-duplex'
diff --git a/test/browser/test-stream-end-paused.js b/test/browser/test-stream-end-paused.js
new file mode 100644
index 0000000000..9aa637e310
--- /dev/null
+++ b/test/browser/test-stream-end-paused.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(2)
+ const stream = new Readable()
+ let calledRead = false
+ stream._read = function () {
+ t.notOk(calledRead)
+ calledRead = true
+ this.push(null)
+ }
+ stream.on('data', function () {
+ throw new Error('should not ever get data')
+ })
+ stream.pause()
+ setTimeout(function () {
+ stream.on('end', function () {
+ t.ok(calledRead)
+ })
+ stream.resume()
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-end-paused'
diff --git a/test/browser/test-stream-finished.js b/test/browser/test-stream-finished.js
new file mode 100644
index 0000000000..541e8ea628
--- /dev/null
+++ b/test/browser/test-stream-finished.js
@@ -0,0 +1,54 @@
+'use strict'
+
+const { Writable, Readable, Transform, finished } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+module.exports = function (test) {
+ test('readable finished', function (t) {
+ t.plan(1)
+ const rs = new Readable({
+ read: function read() {}
+ })
+ finished(rs, (err) => {
+ t.ifErr(err)
+ })
+ rs.push(null)
+ rs.resume()
+ })
+ test('writable finished', function (t) {
+ t.plan(1)
+ const ws = new Writable({
+ write: function write(data, enc, cb) {
+ cb()
+ }
+ })
+ finished(ws, (err) => {
+ t.ifErr(err)
+ })
+ ws.end()
+ })
+ test('transform finished', function (t) {
+ t.plan(3)
+ const tr = new Transform({
+ transform: function transform(data, enc, cb) {
+ cb()
+ }
+ })
+ let finish = false
+ let ended = false
+ tr.on('end', function () {
+ ended = true
+ })
+ tr.on('finish', function () {
+ finish = true
+ })
+ finished(tr, (err) => {
+ t.ifErr(err)
+ t.ok(finish)
+ t.ok(ended)
+ })
+ tr.end()
+ tr.resume()
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-finished'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-ispaused.js b/test/browser/test-stream-ispaused.js
new file mode 100644
index 0000000000..7786760730
--- /dev/null
+++ b/test/browser/test-stream-ispaused.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(4)
+ const readable = new stream.Readable()
+
+ // _read is a noop, here.
+ readable._read = () => {}
+
+ // default state of a stream is not "paused"
+ t.notOk(readable.isPaused())
+
+ // make the stream start flowing...
+ readable.on('data', () => {})
+
+ // still not paused.
+ t.notOk(readable.isPaused())
+ readable.pause()
+ t.ok(readable.isPaused())
+ readable.resume()
+ t.notOk(readable.isPaused())
+}
+module.exports[kReadableStreamSuiteName] = 'stream-ispaused'
diff --git a/test/browser/test-stream-pipe-after-end.js b/test/browser/test-stream-pipe-after-end.js
new file mode 100644
index 0000000000..f7fa5cfd6f
--- /dev/null
+++ b/test/browser/test-stream-pipe-after-end.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(4)
+ function TestReadable(opt) {
+ if (!(this instanceof TestReadable)) {
+ return new TestReadable(opt)
+ }
+ Readable.call(this, opt)
+ this._ended = false
+ }
+ inherits(TestReadable, Readable)
+ TestReadable.prototype._read = function (n) {
+ if (this._ended) {
+ this.emit('error', new Error('_read called twice'))
+ }
+ this._ended = true
+ this.push(null)
+ }
+ function TestWritable(opt) {
+ if (!(this instanceof TestWritable)) {
+ return new TestWritable(opt)
+ }
+ Writable.call(this, opt)
+ this._written = []
+ }
+ inherits(TestWritable, Writable)
+ TestWritable.prototype._write = function (chunk, encoding, cb) {
+ this._written.push(chunk)
+ cb()
+ }
+
+ // this one should not emit 'end' until we read() from it later.
+ const ender = new TestReadable()
+ let enderEnded = false
+
+ // what happens when you pipe() a Readable that's already ended?
+ const piper = new TestReadable()
+ // pushes EOF null, and length=0, so this will trigger 'end'
+ piper.read()
+ setTimeout(function () {
+ ender.on('end', function () {
+ enderEnded = true
+ t.ok(true, 'enderEnded')
+ })
+ t.notOk(enderEnded)
+ const c = ender.read()
+ t.equal(c, null)
+ const w = new TestWritable()
+ w.on('finish', function () {
+ t.ok(true, 'writableFinished')
+ })
+ piper.pipe(w)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-after-end'
diff --git a/test/browser/test-stream-pipe-cleanup-pause.js b/test/browser/test-stream-pipe-cleanup-pause.js
new file mode 100644
index 0000000000..dd0ea0ba97
--- /dev/null
+++ b/test/browser/test-stream-pipe-cleanup-pause.js
@@ -0,0 +1,43 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(3)
+ const reader = new stream.Readable()
+ const writer1 = new stream.Writable()
+ const writer2 = new stream.Writable()
+
+ // 560000 is chosen here because it is larger than the (default) highWaterMark
+ // and will cause `.write()` to return false
+ // See: https://github.com/nodejs/node/issues/2323
+ const buffer = Buffer.alloc(560000)
+ reader._read = function () {}
+ writer1._write = function (chunk, encoding, cb) {
+ this.emit('chunk-received')
+ cb()
+ }
+ writer1.on('chunk-received', function () {
+ reader.unpipe(writer1)
+ reader.pipe(writer2)
+ reader.push(buffer)
+ setImmediate(function () {
+ reader.push(buffer)
+ setImmediate(function () {
+ reader.push(buffer)
+ })
+ })
+ })
+ writer2._write = function (chunk, encoding, cb) {
+ t.ok(true)
+ cb()
+ }
+ reader.pipe(writer1)
+ reader.push(buffer)
+}
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup-pause'
diff --git a/test/browser/test-stream-pipe-cleanup.js b/test/browser/test-stream-pipe-cleanup.js
new file mode 100644
index 0000000000..ce204334f7
--- /dev/null
+++ b/test/browser/test-stream-pipe-cleanup.js
@@ -0,0 +1,97 @@
+'use strict'
+
+// This test asserts that Stream.prototype.pipe does not leave listeners
+// hanging on the source or dest.
+const inherits = require('inherits')
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(27)
+ if (/^v0\.8\./.test(process.version)) {
+ return
+ }
+ function Writable() {
+ this.writable = true
+ this.endCalls = 0
+ Stream.call(this)
+ }
+ inherits(Writable, Stream)
+ Writable.prototype.end = function () {
+ this.endCalls++
+ }
+ Writable.prototype.destroy = function () {
+ this.endCalls++
+ }
+ function Readable() {
+ this.readable = true
+ Stream.call(this)
+ }
+ inherits(Readable, Stream)
+ Readable.prototype._read = function () {}
+ function Duplex() {
+ this.readable = true
+ Writable.call(this)
+ }
+ inherits(Duplex, Writable)
+ Duplex.prototype._read = function () {}
+ let i = 0
+ let r
+ let w = new Writable()
+ const limit = 100
+ for (i = 0; i < limit; i++) {
+ r = new Readable()
+ r.pipe(w)
+ r.emit('end')
+ }
+ t.equal(0, r.listeners('end').length)
+ t.equal(limit, w.endCalls)
+ w.endCalls = 0
+ for (i = 0; i < limit; i++) {
+ r = new Readable()
+ r.pipe(w)
+ r.emit('close')
+ }
+ t.equal(0, r.listeners('close').length)
+ t.equal(limit, w.endCalls)
+ w.endCalls = 0
+ r = new Readable()
+ for (i = 0; i < limit; i++) {
+ w = new Writable()
+ r.pipe(w)
+ w.emit('close')
+ }
+ t.equal(0, w.listeners('close').length)
+ r = new Readable()
+ w = new Writable()
+ const d = new Duplex()
+ r.pipe(d) // pipeline A
+ d.pipe(w) // pipeline B
+ t.equal(r.listeners('end').length, 2) // A.onend, A.cleanup
+ t.equal(r.listeners('close').length, 2) // A.onclose, A.cleanup
+ t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup
+ t.equal(d.listeners('close').length, 3) // A.cleanup, B.onclose, B.cleanup
+ t.equal(w.listeners('end').length, 0)
+ t.equal(w.listeners('close').length, 1) // B.cleanup
+
+ r.emit('end')
+ t.equal(d.endCalls, 1)
+ t.equal(w.endCalls, 0)
+ t.equal(r.listeners('end').length, 0)
+ t.equal(r.listeners('close').length, 0)
+ t.equal(d.listeners('end').length, 2) // B.onend, B.cleanup
+ t.equal(d.listeners('close').length, 2) // B.onclose, B.cleanup
+ t.equal(w.listeners('end').length, 0)
+ t.equal(w.listeners('close').length, 1) // B.cleanup
+
+ d.emit('end')
+ t.equal(d.endCalls, 1)
+ t.equal(w.endCalls, 1)
+ t.equal(r.listeners('end').length, 0)
+ t.equal(r.listeners('close').length, 0)
+ t.equal(d.listeners('end').length, 0)
+ t.equal(d.listeners('close').length, 0)
+ t.equal(w.listeners('end').length, 0)
+ t.equal(w.listeners('close').length, 0)
+ d.end()
+}
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-cleanup'
diff --git a/test/browser/test-stream-pipe-error-handling.js b/test/browser/test-stream-pipe-error-handling.js
new file mode 100644
index 0000000000..553712cb3f
--- /dev/null
+++ b/test/browser/test-stream-pipe-error-handling.js
@@ -0,0 +1,86 @@
+'use strict'
+
+const { Readable, Writable, Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+module.exports = function (test) {
+ test('Error Listener Catches', function (t) {
+ t.plan(1)
+ const source = new Stream()
+ const dest = new Stream()
+ source._read = function () {}
+ source.pipe(dest)
+ let gotErr = null
+ source.on('error', function (err) {
+ gotErr = err
+ })
+ const err = new Error('This stream turned into bacon.')
+ source.emit('error', err)
+ t.strictEqual(gotErr, err)
+ })
+ test('Error WithoutListener Throws', function (t) {
+ t.plan(1)
+ const source = new Stream()
+ const dest = new Stream()
+ source._read = function () {}
+ source.pipe(dest)
+ const err = new Error('This stream turned into bacon.')
+ let gotErr = null
+ try {
+ source.emit('error', err)
+ } catch (e) {
+ gotErr = e
+ }
+ t.strictEqual(gotErr, err)
+ })
+ test('Error With Removed Listener Throws', function (t) {
+ t.plan(2)
+ const onerror = global.onerror
+ const r = new Readable()
+ const w = new Writable()
+ let removed = false
+ let caught = false
+ global.onerror = () => {
+ t.notOk(caught)
+ global.onerror = onerror
+ return true
+ }
+ r._read = function () {
+ setTimeout(function () {
+ t.ok(removed)
+ w.emit('error', new Error('fail'))
+ })
+ }
+ w.on('error', myOnError)
+ r.pipe(w)
+ w.removeListener('error', myOnError)
+ removed = true
+ function myOnError(er) {
+ caught = true
+ }
+ })
+ test('Error Listener Catches When Wrong Listener Is Removed', function (t) {
+ t.plan(2)
+ const r = new Readable()
+ const w = new Writable()
+ let removed = false
+ let caught = false
+ r._read = function () {
+ setTimeout(function () {
+ t.ok(removed)
+ w.emit('error', new Error('fail'))
+ })
+ }
+ w.on('error', myOnError)
+ w._write = function () {}
+ r.pipe(w)
+ // Removing some OTHER random listener should not do anything
+ w.removeListener('error', function () {})
+ removed = true
+ function myOnError(er) {
+ t.notOk(caught)
+ caught = true
+ }
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-error-handling'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-pipe-event.js b/test/browser/test-stream-pipe-event.js
new file mode 100644
index 0000000000..573093754f
--- /dev/null
+++ b/test/browser/test-stream-pipe-event.js
@@ -0,0 +1,28 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(1)
+ function Writable() {
+ this.writable = true
+ Stream.call(this)
+ }
+ inherits(Writable, Stream)
+ function Readable() {
+ this.readable = true
+ Stream.call(this)
+ }
+ inherits(Readable, Stream)
+ let passed = false
+ const w = new Writable()
+ w.on('pipe', function (src) {
+ passed = true
+ })
+ const r = new Readable()
+ r._read = function () {}
+ r.pipe(w)
+ t.ok(passed)
+}
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-event'
diff --git a/test/browser/test-stream-pipe-without-listenerCount.js b/test/browser/test-stream-pipe-without-listenerCount.js
new file mode 100644
index 0000000000..3b498a09b0
--- /dev/null
+++ b/test/browser/test-stream-pipe-without-listenerCount.js
@@ -0,0 +1,17 @@
+'use strict'
+
+const { Stream } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(1)
+ const r = new Stream({
+ read: function () {}
+ })
+ r.listenerCount = undefined
+ const w = new Stream()
+ w.on('pipe', function () {
+ r.emit('error', new Error('Readable Error'))
+ })
+ t.throws(() => r.pipe(w), 'TypeError: this.listenerCount is not a function')
+}
+module.exports[kReadableStreamSuiteName] = 'stream-pipe-without-listenerCount'
diff --git a/test/browser/test-stream-pipeline.js b/test/browser/test-stream-pipeline.js
new file mode 100644
index 0000000000..949135cd2d
--- /dev/null
+++ b/test/browser/test-stream-pipeline.js
@@ -0,0 +1,92 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Readable, Writable, pipeline } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+module.exports = function (test) {
+ test('pipeline', function (t) {
+ t.plan(3)
+ let finished = false
+ const processed = []
+ const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]
+ const read = new Readable({
+ read: function read() {}
+ })
+ const write = new Writable({
+ write: function write(data, enc, cb) {
+ processed.push(data)
+ cb()
+ }
+ })
+ write.on('finish', function () {
+ finished = true
+ })
+ for (let i = 0; i < expected.length; i++) {
+ read.push(expected[i])
+ }
+ read.push(null)
+ pipeline(read, write, (err) => {
+ t.ifErr(err)
+ t.ok(finished)
+ t.deepEqual(processed, expected)
+ })
+ })
+ test('pipeline missing args', function (t) {
+ t.plan(3)
+ const _read = new Readable({
+ read: function read() {}
+ })
+ t.throws(function () {
+ pipeline(_read, function () {})
+ })
+ t.throws(function () {
+ pipeline(function () {})
+ })
+ t.throws(function () {
+ pipeline()
+ })
+ })
+ test('pipeline error', function (t) {
+ t.plan(1)
+ const _read2 = new Readable({
+ read: function read() {}
+ })
+ const _write = new Writable({
+ write: function write(data, enc, cb) {
+ cb()
+ }
+ })
+ _read2.push('data')
+ setImmediate(function () {
+ return _read2.destroy()
+ })
+ pipeline(_read2, _write, (err) => {
+ t.equal(err.message, 'Premature close')
+ })
+ })
+ test('pipeline destroy', function (t) {
+ t.plan(2)
+ const _read3 = new Readable({
+ read: function read() {}
+ })
+ const _write2 = new Writable({
+ write: function write(data, enc, cb) {
+ cb()
+ }
+ })
+ _read3.push('data')
+ setImmediate(function () {
+ return _read3.destroy(new Error('kaboom'))
+ })
+ const dst = pipeline(_read3, _write2, (err) => {
+ t.equal(err.message, 'kaboom')
+ })
+ t.equal(dst, _write2)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-pipeline'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-push-order.js b/test/browser/test-stream-push-order.js
new file mode 100644
index 0000000000..ad821efac4
--- /dev/null
+++ b/test/browser/test-stream-push-order.js
@@ -0,0 +1,27 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(1)
+ const s = new Readable({
+ highWaterMark: 20,
+ encoding: 'ascii'
+ })
+ const list = ['1', '2', '3', '4', '5', '6']
+ s._read = function (n) {
+ const one = list.shift()
+ if (!one) {
+ s.push(null)
+ } else {
+ const two = list.shift()
+ s.push(one)
+ s.push(two)
+ }
+ }
+ s.read(0)
+ setTimeout(function () {
+ t.equals(s._readableState.buffer.join(','), '1,2,3,4,5,6')
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-push-order'
diff --git a/test/browser/test-stream-push-strings.js b/test/browser/test-stream-push-strings.js
new file mode 100644
index 0000000000..4c6a8aa836
--- /dev/null
+++ b/test/browser/test-stream-push-strings.js
@@ -0,0 +1,50 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(2)
+ function MyStream(options) {
+ Readable.call(this, options)
+ this._chunks = 3
+ }
+ inherits(MyStream, Readable)
+ MyStream.prototype._read = function (n) {
+ switch (this._chunks--) {
+ case 0:
+ return this.push(null)
+ case 1:
+ return setTimeout(
+ function () {
+ this.push('last chunk')
+ }.bind(this),
+ 100
+ )
+ case 2:
+ return this.push('second to last chunk')
+ case 3:
+ return process.nextTick(
+ function () {
+ this.push('first chunk')
+ }.bind(this)
+ )
+ default:
+ throw new Error('?')
+ }
+ }
+ const expect = ['first chunksecond to last chunk', 'last chunk']
+ const ms = new MyStream()
+ const results = []
+ ms.on('readable', function () {
+ let chunk
+ while ((chunk = ms.read()) !== null) {
+ results.push(chunk + '')
+ }
+ })
+ ms.on('end', function () {
+ t.equal(ms._chunks, -1)
+ t.deepEqual(results, expect)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-push-strings'
diff --git a/test/browser/test-stream-readable-constructor-set-methods.js b/test/browser/test-stream-readable-constructor-set-methods.js
new file mode 100644
index 0000000000..b9b0ec0e59
--- /dev/null
+++ b/test/browser/test-stream-readable-constructor-set-methods.js
@@ -0,0 +1,21 @@
+'use strict'
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(2)
+ let _readCalled = false
+ function _read(n) {
+ _readCalled = true
+ this.push(null)
+ }
+ const r = new Readable({
+ read: _read
+ })
+ r.resume()
+ setTimeout(function () {
+ t.equal(r._read, _read)
+ t.ok(_readCalled)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-readable-constructor-set-methods'
diff --git a/test/browser/test-stream-readable-event.js b/test/browser/test-stream-readable-event.js
new file mode 100644
index 0000000000..d6da267c9e
--- /dev/null
+++ b/test/browser/test-stream-readable-event.js
@@ -0,0 +1,102 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+module.exports = function (test) {
+ test('readable events - first', (t) => {
+ t.plan(3)
+
+ // First test, not reading when the readable is added.
+ // make sure that on('readable', ...) triggers a readable event.
+ const r = new Readable({
+ highWaterMark: 3
+ })
+ let _readCalled = false
+ r._read = function (n) {
+ _readCalled = true
+ }
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('blerg'))
+ let caughtReadable = false
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(_readCalled)
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
+ test('readable events - second', (t) => {
+ t.plan(3)
+
+ // second test, make sure that readable is re-emitted if there's
+ // already a length, while it IS reading.
+
+ const r = new Readable({
+ highWaterMark: 3
+ })
+ let _readCalled = false
+ r._read = function (n) {
+ _readCalled = true
+ }
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('bl'))
+ let caughtReadable = false
+ setTimeout(function () {
+ // assert we're testing what we think we are
+ t.ok(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.ok(_readCalled)
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
+ test('readable events - third', (t) => {
+ t.plan(3)
+
+ // Third test, not reading when the stream has not passed
+ // the highWaterMark but *has* reached EOF.
+ const r = new Readable({
+ highWaterMark: 30
+ })
+ let _readCalled = false
+ r._read = function (n) {
+ _readCalled = true
+ }
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('blerg'))
+ r.push(null)
+ let caughtReadable = false
+ setTimeout(function () {
+ // assert we're testing what we think we are
+ t.notOk(r._readableState.reading)
+ r.on('readable', function () {
+ caughtReadable = true
+ setTimeout(function () {
+ // we're testing what we think we are
+ t.notOk(_readCalled)
+ t.ok(caughtReadable)
+ })
+ })
+ })
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-readable-event'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-sync-write.js b/test/browser/test-stream-sync-write.js
new file mode 100644
index 0000000000..50b0615bb6
--- /dev/null
+++ b/test/browser/test-stream-sync-write.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const inherits = require('inherits')
+const { Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(2)
+ let internalCalls = 0
+ let externalCalls = 0
+ const InternalStream = function () {
+ Writable.call(this)
+ }
+ inherits(InternalStream, Writable)
+ InternalStream.prototype._write = function (chunk, encoding, callback) {
+ internalCalls++
+ callback()
+ }
+ const internalStream = new InternalStream()
+ const ExternalStream = function (writable) {
+ this._writable = writable
+ Writable.call(this)
+ }
+ inherits(ExternalStream, Writable)
+ ExternalStream.prototype._write = function (chunk, encoding, callback) {
+ externalCalls++
+ this._writable.write(chunk, encoding, callback)
+ }
+ const externalStream = new ExternalStream(internalStream)
+ for (let i = 0; i < 2000; i++) {
+ externalStream.write(i.toString())
+ }
+ externalStream.end(() => {
+ t.equal(internalCalls, 2000)
+ t.equal(externalCalls, 2000)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-sync-write'
diff --git a/test/browser/test-stream-transform-constructor-set-methods.js b/test/browser/test-stream-transform-constructor-set-methods.js
new file mode 100644
index 0000000000..13edd24e9a
--- /dev/null
+++ b/test/browser/test-stream-transform-constructor-set-methods.js
@@ -0,0 +1,35 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(4)
+ let _transformCalled = false
+ function _transform(d, e, n) {
+ _transformCalled = true
+ n()
+ }
+ let _flushCalled = false
+ function _flush(n) {
+ _flushCalled = true
+ n()
+ }
+ const tr = new Transform({
+ transform: _transform,
+ flush: _flush
+ })
+ tr.end(Buffer.from('blerg'))
+ tr.resume()
+ tr.on('end', function () {
+ t.equal(tr._transform, _transform)
+ t.equal(tr._flush, _flush)
+ t.ok(_transformCalled)
+ t.ok(_flushCalled)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-transform-constructor-set-methods'
diff --git a/test/browser/test-stream-transform-objectmode-falsey-value.js b/test/browser/test-stream-transform-objectmode-falsey-value.js
new file mode 100644
index 0000000000..1240dfa9b9
--- /dev/null
+++ b/test/browser/test-stream-transform-objectmode-falsey-value.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const { PassThrough } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(13)
+ const src = new PassThrough({
+ objectMode: true
+ })
+ const tx = new PassThrough({
+ objectMode: true
+ })
+ const dest = new PassThrough({
+ objectMode: true
+ })
+ const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+ const results = []
+ dest.on('end', function () {
+ t.deepEqual(results, expect)
+ })
+ dest.on('data', function (x) {
+ results.push(x)
+ })
+ src.pipe(tx).pipe(dest)
+ let i = -1
+ const int = setInterval(function () {
+ if (i > 10) {
+ src.end()
+ clearInterval(int)
+ } else {
+ t.ok(true)
+ src.write(i++)
+ }
+ }, 10)
+}
+module.exports[kReadableStreamSuiteName] = 'stream-transform-objectmode-falsey-value'
diff --git a/test/browser/test-stream-transform-split-objectmode.js b/test/browser/test-stream-transform-split-objectmode.js
new file mode 100644
index 0000000000..aacd363384
--- /dev/null
+++ b/test/browser/test-stream-transform-split-objectmode.js
@@ -0,0 +1,56 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(10)
+ const parser = new Transform({
+ readableObjectMode: true
+ })
+ t.ok(parser._readableState.objectMode, 'parser 1')
+ t.notOk(parser._writableState.objectMode, 'parser 2')
+ t.equals(parser._readableState.highWaterMark, 16, 'parser 3')
+ t.equals(parser._writableState.highWaterMark, 16 * 1024, 'parser 4')
+ parser._transform = function (chunk, enc, callback) {
+ callback(null, {
+ val: chunk[0]
+ })
+ }
+ let parsed
+ parser.on('data', function (obj) {
+ parsed = obj
+ })
+ parser.end(Buffer.from([42]))
+ parser.on('end', function () {
+ t.equals(parsed.val, 42, 'parser ended')
+ })
+ const serializer = new Transform({
+ writableObjectMode: true
+ })
+ t.notOk(serializer._readableState.objectMode, 'serializer 1')
+ t.ok(serializer._writableState.objectMode, 'serializer 2')
+ t.equals(serializer._readableState.highWaterMark, 16 * 1024, 'serializer 3')
+ t.equals(serializer._writableState.highWaterMark, 16, 'serializer 4')
+ serializer._transform = function (obj, _, callback) {
+ callback(null, Buffer.from([obj.val]))
+ }
+ let serialized
+ serializer.on('data', function (chunk) {
+ serialized = chunk
+ })
+ serializer.write({
+ val: 42
+ })
+ serializer.on('end', function () {
+ t.equals(serialized[0], 42, 'searlizer ended')
+ })
+ setImmediate(function () {
+ serializer.end()
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-transform-split-objectmode'
diff --git a/test/browser/test-stream-unshift-empty-chunk.js b/test/browser/test-stream-unshift-empty-chunk.js
new file mode 100644
index 0000000000..d707c3940c
--- /dev/null
+++ b/test/browser/test-stream-unshift-empty-chunk.js
@@ -0,0 +1,62 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(1)
+ const r = new Readable()
+ let nChunks = 10
+ const chunk = Buffer.alloc(10)
+ chunk.fill('x')
+ r._read = function (n) {
+ setTimeout(function () {
+ r.push(--nChunks === 0 ? null : chunk)
+ })
+ }
+ let readAll = false
+ const seen = []
+ r.on('readable', function () {
+ let chunk
+ while ((chunk = r.read())) {
+ seen.push(chunk.toString())
+ // simulate only reading a certain amount of the data,
+ // and then putting the rest of the chunk back into the
+ // stream, like a parser might do. We just fill it with
+ // 'y' so that it's easy to see which bits were touched,
+ // and which were not.
+ const putBack = Buffer.alloc(readAll ? 0 : 5)
+ putBack.fill('y')
+ readAll = !readAll
+ r.unshift(putBack)
+ }
+ })
+ const expect = [
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy'
+ ]
+ r.on('end', function () {
+ t.deepEqual(seen, expect)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-unshift-empty-chunk'
diff --git a/test/browser/test-stream-unshift-read-race.js b/test/browser/test-stream-unshift-read-race.js
new file mode 100644
index 0000000000..6cc3466326
--- /dev/null
+++ b/test/browser/test-stream-unshift-read-race.js
@@ -0,0 +1,117 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+// This test verifies that:
+// 1. unshift() does not cause colliding _read() calls.
+// 2. unshift() after the 'end' event is an error, but after the EOF
+// signalling null, it is ok, and just creates a new readable chunk.
+// 3. push() after the EOF signaling null is an error.
+// 4. _read() is not called after pushing the EOF null chunk.
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(139)
+ const hwm = 10
+ const r = stream.Readable({
+ highWaterMark: hwm
+ })
+ const chunks = 10
+ const data = Buffer.alloc(chunks * hwm + Math.ceil(hwm / 2))
+ for (let i = 0; i < data.length; i++) {
+ const c = 'asdf'.charCodeAt(i % 4)
+ data[i] = c
+ }
+ let pos = 0
+ let pushedNull = false
+ r._read = function (n) {
+ t.notOk(pushedNull, '_read after null push')
+
+ // every third chunk is fast
+ push(!(chunks % 3))
+ function push(fast) {
+ t.notOk(pushedNull, 'push() after null push')
+ const c = pos >= data.length ? null : data.slice(pos, pos + n)
+ pushedNull = c === null
+ if (fast) {
+ pos += n
+ r.push(c)
+ if (c === null) {
+ pushError()
+ }
+ } else {
+ setTimeout(function () {
+ pos += n
+ r.push(c)
+ if (c === null) {
+ pushError()
+ }
+ }, 1)
+ }
+ }
+ }
+ function pushError() {
+ r.unshift(Buffer.allocUnsafe(1))
+ w.end()
+ const onerror = global.onerror
+ global.onerror = () => {
+ t.ok(true)
+ global.onerror = onerror
+ return true
+ }
+ r.push(Buffer.allocUnsafe(1))
+ }
+ const w = stream.Writable()
+ const written = []
+ w._write = function (chunk, encoding, cb) {
+ written.push(chunk.toString())
+ cb()
+ }
+ r.on('end', t.fail)
+ r.on('readable', function () {
+ let chunk
+ while ((chunk = r.read(10)) !== null) {
+ w.write(chunk)
+ if (chunk.length > 4) {
+ r.unshift(Buffer.from('1234'))
+ }
+ }
+ })
+ w.on('finish', function () {
+  w.on('finish', function () {
+    // each chunk should start with 1234, and then be asdfasdfasdf...
+ // The first got pulled out before the first unshift('1234'), so it's
+ // lacking that piece.
+ t.equal(written[0], 'asdfasdfas')
+ let asdf = 'd'
+
+ // console.error('0: %s', written[0]);
+ for (let i = 1; i < written.length; i++) {
+ // console.error('%s: %s', i.toString(32), written[i]);
+ t.equal(written[i].slice(0, 4), '1234')
+ for (let j = 4; j < written[i].length; j++) {
+ const c = written[i].charAt(j)
+ t.equal(c, asdf)
+ switch (asdf) {
+ case 'a':
+ asdf = 's'
+ break
+ case 's':
+ asdf = 'd'
+ break
+ case 'd':
+ asdf = 'f'
+ break
+ case 'f':
+ asdf = 'a'
+ break
+ }
+ }
+ }
+ t.equal(written.length, 18)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-unshift-read-race'
diff --git a/test/browser/test-stream-writable-change-default-encoding.js b/test/browser/test-stream-writable-change-default-encoding.js
new file mode 100644
index 0000000000..615c60e46c
--- /dev/null
+++ b/test/browser/test-stream-writable-change-default-encoding.js
@@ -0,0 +1,75 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+inherits(MyWritable, stream.Writable)
+MyWritable.prototype._write = function (chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+}
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options)
+ this.fn = fn
+}
+module.exports = function (test) {
+ test('defaultCondingIsUtf8', (t) => {
+ t.plan(1)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'utf8')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.write('foo')
+ m.end()
+ })
+ test('changeDefaultEncodingToAscii', (t) => {
+ t.plan(1)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'ascii')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.setDefaultEncoding('ascii')
+ m.write('bar')
+ m.end()
+ })
+ test('changeDefaultEncodingToInvalidValue', (t) => {
+ t.plan(1)
+ t.throws(function () {
+ const m = new MyWritable(function (isBuffer, type, enc) {}, {
+ decodeStrings: false
+ })
+ m.setDefaultEncoding({})
+ m.write('bar')
+ m.end()
+ }, TypeError)
+ })
+ test('checkVairableCaseEncoding', (t) => {
+ t.plan(1)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.equal(enc, 'ascii')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.setDefaultEncoding('AsCii')
+ m.write('bar')
+ m.end()
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-writable-change-default-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-writable-constructor-set-methods.js b/test/browser/test-stream-writable-constructor-set-methods.js
new file mode 100644
index 0000000000..520d41dd33
--- /dev/null
+++ b/test/browser/test-stream-writable-constructor-set-methods.js
@@ -0,0 +1,41 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(5)
+ let _writeCalled = false
+ function _write(d, e, n) {
+ _writeCalled = true
+ }
+ const w = new Writable({
+ write: _write
+ })
+ w.end(Buffer.from('blerg'))
+ let _writevCalled = false
+ let dLength = 0
+ function _writev(d, n) {
+ dLength = d.length
+ _writevCalled = true
+ }
+ const w2 = new Writable({
+ writev: _writev
+ })
+ w2.cork()
+ w2.write(Buffer.from('blerg'))
+ w2.write(Buffer.from('blerg'))
+ w2.end()
+ setImmediate(function () {
+ t.equal(w._write, _write)
+ t.ok(_writeCalled)
+ t.equal(w2._writev, _writev)
+ t.equal(dLength, 2)
+ t.ok(_writevCalled)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-writable-constructor-set-methods'
diff --git a/test/browser/test-stream-writable-decoded-encoding.js b/test/browser/test-stream-writable-decoded-encoding.js
new file mode 100644
index 0000000000..44654a7f5c
--- /dev/null
+++ b/test/browser/test-stream-writable-decoded-encoding.js
@@ -0,0 +1,55 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+function MyWritable(fn, options) {
+ stream.Writable.call(this, options)
+ this.fn = fn
+}
+inherits(MyWritable, stream.Writable)
+MyWritable.prototype._write = function (chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+}
+module.exports = function (test) {
+ test('decodeStringsTrue', (t) => {
+ t.plan(3)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.ok(isBuffer)
+ t.equal(type, 'object')
+ t.equal(enc, 'buffer')
+ // console.log('ok - decoded string is decoded');
+ },
+ {
+ decodeStrings: true
+ }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+ })
+ test('decodeStringsFalse', (t) => {
+ t.plan(3)
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ t.notOk(isBuffer)
+ t.equal(type, 'string')
+ t.equal(enc, 'utf8')
+ // console.log('ok - un-decoded string is not decoded');
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream-writable-decoded-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream-writev.js b/test/browser/test-stream-writev.js
new file mode 100644
index 0000000000..df9bab6f4b
--- /dev/null
+++ b/test/browser/test-stream-writev.js
@@ -0,0 +1,127 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+const queue = []
+for (let decode = 0; decode < 2; decode++) {
+ for (let uncork = 0; uncork < 2; uncork++) {
+ for (let multi = 0; multi < 2; multi++) {
+ queue.push([!!decode, !!uncork, !!multi])
+ }
+ }
+}
+function runTest(decode, uncork, multi) {
+ return function (t) {
+ t.plan(8)
+
+ // console.log('# decode=%j uncork=%j multi=%j', decode, uncork, multi);
+ let counter = 0
+ let expectCount = 0
+ function cnt(msg) {
+ expectCount++
+ const expect = expectCount
+ return function (er) {
+ if (er) {
+ throw er
+ }
+ counter++
+ t.equal(counter, expect)
+ }
+ }
+ const w = new stream.Writable({
+ decodeStrings: decode
+ })
+ w._write = function (chunk, e, cb) {
+ t.ok(false, 'Should not call _write')
+ }
+ const expectChunks = decode
+ ? [
+ {
+ encoding: 'buffer',
+ chunk: [104, 101, 108, 108, 111, 44, 32]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [119, 111, 114, 108, 100]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [33]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]
+ }
+ ]
+ : [
+ {
+ encoding: 'ascii',
+ chunk: 'hello, '
+ },
+ {
+ encoding: 'utf8',
+ chunk: 'world'
+ },
+ {
+ encoding: 'buffer',
+ chunk: [33]
+ },
+ {
+ encoding: 'binary',
+ chunk: '\nand then...'
+ },
+ {
+ encoding: 'hex',
+ chunk: 'facebea7deadbeefdecafbad'
+ }
+ ]
+ let actualChunks
+ w._writev = function (chunks, cb) {
+ actualChunks = chunks.map(function (chunk) {
+ return {
+ encoding: chunk.encoding,
+ chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk
+ }
+ })
+ cb()
+ }
+ w.cork()
+ w.write('hello, ', 'ascii', cnt('hello'))
+ w.write('world', 'utf8', cnt('world'))
+ if (multi) {
+ w.cork()
+ }
+ w.write(Buffer.from('!'), 'buffer', cnt('!'))
+ w.write('\nand then...', 'binary', cnt('and then'))
+ if (multi) {
+ w.uncork()
+ }
+ w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'))
+ if (uncork) {
+ w.uncork()
+ }
+ w.end(cnt('end'))
+ w.on('finish', function () {
+ // make sure finish comes after all the write cb
+ cnt('finish')()
+ t.deepEqual(expectChunks, actualChunks)
+ })
+ }
+}
+module.exports = function (test) {
+ for (let i = 0; i < queue.length; i++) {
+ const tr = queue[i]
+ test('round ' + i, runTest(tr[0], tr[1], tr[2]))
+ }
+}
+module.exports[kReadableStreamSuiteName] = 'stream-writev'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-base64-single-char-read-end.js b/test/browser/test-stream2-base64-single-char-read-end.js
new file mode 100644
index 0000000000..c0534c9037
--- /dev/null
+++ b/test/browser/test-stream2-base64-single-char-read-end.js
@@ -0,0 +1,40 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(1)
+ const src = new Readable({
+ encoding: 'base64'
+ })
+ const dst = new Writable()
+ let hasRead = false
+ const accum = []
+ src._read = function (n) {
+ if (!hasRead) {
+ hasRead = true
+ process.nextTick(function () {
+ src.push(Buffer.from('1'))
+ src.push(null)
+ })
+ }
+ }
+ dst._write = function (chunk, enc, cb) {
+ accum.push(chunk)
+ cb()
+ }
+ src.on('end', function () {
+ t.equal(Buffer.concat(accum) + '', 'MQ==')
+ clearTimeout(timeout)
+ })
+ src.pipe(dst)
+ const timeout = setTimeout(function () {
+ t.fail('timed out waiting for _write')
+ }, 100)
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-base64-single-char-read-end'
diff --git a/test/browser/test-stream2-compatibility.js b/test/browser/test-stream2-compatibility.js
new file mode 100644
index 0000000000..c41d6912fd
--- /dev/null
+++ b/test/browser/test-stream2-compatibility.js
@@ -0,0 +1,32 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(1)
+ let ondataCalled = 0
+ function TestReader() {
+ Readable.apply(this)
+ this._buffer = Buffer.alloc(100)
+ this._buffer.fill('x')
+ this.on('data', function () {
+ ondataCalled++
+ })
+ }
+ inherits(TestReader, Readable)
+ TestReader.prototype._read = function (n) {
+ this.push(this._buffer)
+ this._buffer = Buffer.alloc(0)
+ }
+ setTimeout(function () {
+ t.equal(ondataCalled, 1)
+ })
+ new TestReader().read()
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-compatibility'
diff --git a/test/browser/test-stream2-large-read-stall.js b/test/browser/test-stream2-large-read-stall.js
new file mode 100644
index 0000000000..ffcd738817
--- /dev/null
+++ b/test/browser/test-stream2-large-read-stall.js
@@ -0,0 +1,57 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(1)
+
+ // If everything aligns so that you do a read(n) of exactly the
+ // remaining buffer, then make sure that 'end' still emits.
+
+ const READSIZE = 100
+ const PUSHSIZE = 20
+ const PUSHCOUNT = 1000
+ const HWM = 50
+ const r = new Readable({
+ highWaterMark: HWM
+ })
+ const rs = r._readableState
+ r._read = push
+ r.on('readable', function () {
+ false && console.error('>> readable')
+ let ret
+ do {
+ false && console.error(' > read(%d)', READSIZE)
+ ret = r.read(READSIZE)
+ false && console.error(' < %j (%d remain)', ret && ret.length, rs.length)
+ } while (ret && ret.length === READSIZE)
+ false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length)
+ })
+ r.on('end', function () {
+ t.equal(pushes, PUSHCOUNT + 1)
+ false && console.error('end')
+ })
+ let pushes = 0
+ function push() {
+ if (pushes > PUSHCOUNT) {
+ return
+ }
+ if (pushes++ === PUSHCOUNT) {
+ false && console.error(' push(EOF)')
+ return r.push(null)
+ }
+ false && console.error(' push #%d', pushes)
+ if (r.push(Buffer.alloc(PUSHSIZE))) {
+ setTimeout(push)
+ }
+ }
+
+ // start the flow
+ r.read(0)
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-large-read-stall'
diff --git a/test/browser/test-stream2-objects.js b/test/browser/test-stream2-objects.js
new file mode 100644
index 0000000000..07be218fb1
--- /dev/null
+++ b/test/browser/test-stream2-objects.js
@@ -0,0 +1,318 @@
+'use strict'
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
function forEach(xs, f) {
  // Invoke f(element, index) for every element of the array xs, in order.
  let index = 0
  for (const item of xs) {
    f(item, index)
    index += 1
  }
}
function toArray(callback) {
  // Build a sink stream that collects every written chunk into an array
  // and invokes callback(list) once when the producer calls end().
  const stream = new Writable({
    objectMode: true
  })
  const list = []
  // Override write/end directly (bypassing the Writable machinery) so no
  // buffering, callbacks, or 'finish' sequencing interferes with plain
  // collection.
  stream.write = function (chunk) {
    list.push(chunk)
  }
  stream.end = function () {
    callback(list)
  }
  return stream
}
function fromArray(list) {
  // Build an object-mode Readable pre-loaded with every item in list,
  // followed by EOF (push(null)).
  const r = new Readable({
    objectMode: true
  })
  // Data is pushed eagerly below, so _read has nothing to do.
  r._read = noop
  forEach(list, function (chunk) {
    r.push(chunk)
  })
  r.push(null)
  return r
}
// Shared no-op used as a placeholder _read implementation.
function noop() {}
+module.exports = function (test) {
+ test('can read objects from stream', function (t) {
+ t.plan(3)
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ const v1 = r.read()
+ const v2 = r.read()
+ const v3 = r.read()
+ t.deepEqual(v1, {
+ one: '1'
+ })
+ t.deepEqual(v2, {
+ two: '2'
+ })
+ t.deepEqual(v3, null)
+ })
+ test('can pipe objects into stream', function (t) {
+ t.plan(1)
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ })
+ test('read(n) is ignored', function (t) {
+ t.plan(1)
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ const value = r.read(2)
+ t.deepEqual(value, {
+ one: '1'
+ })
+ })
+ test('can read objects from _read (sync)', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ objectMode: true
+ })
+ const list = [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ]
+ r._read = function (n) {
+ const item = list.shift()
+ r.push(item || null)
+ }
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ })
+ test('can read objects from _read (async)', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ objectMode: true
+ })
+ const list = [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ]
+ r._read = function (n) {
+ const item = list.shift()
+ process.nextTick(function () {
+ r.push(item || null)
+ })
+ }
+ r.pipe(
+ toArray(function (list) {
+ t.deepEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ })
+ test('can read strings as objects', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+ const list = ['one', 'two', 'three']
+ forEach(list, function (str) {
+ r.push(str)
+ })
+ r.push(null)
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, list)
+ })
+ )
+ })
+ test('read(0) for object streams', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+ r.push('foobar')
+ r.push(null)
+ r.read(0)
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, ['foobar'])
+ })
+ )
+ })
+ test('falsey values', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = noop
+ r.push(false)
+ r.push(0)
+ r.push('')
+ r.push(null)
+ r.pipe(
+ toArray(function (array) {
+ t.deepEqual(array, [false, 0, ''])
+ })
+ )
+ })
+ test('high watermark _read', function (t) {
+ t.plan(5)
+ const r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ })
+ let calls = 0
+ const list = ['1', '2', '3', '4', '5', '6', '7', '8']
+ r._read = function (n) {
+ calls++
+ }
+ forEach(list, function (c) {
+ r.push(c)
+ })
+ const v = r.read()
+ t.equal(calls, 0)
+ t.equal(v, '1')
+ const v2 = r.read()
+ t.equal(v2, '2')
+ const v3 = r.read()
+ t.equal(v3, '3')
+ t.equal(calls, 1)
+ })
+ test('high watermark push', function (t) {
+ t.plan(6)
+ const r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ })
+ r._read = function (n) {}
+ for (let i = 0; i < 6; i++) {
+ const bool = r.push(i)
+ t.equal(bool, i !== 5)
+ }
+ })
+ test('can write objects to stream', function (t) {
+ t.plan(1)
+ const w = new Writable({
+ objectMode: true
+ })
+ w._write = function (chunk, encoding, cb) {
+ t.deepEqual(chunk, {
+ foo: 'bar'
+ })
+ cb()
+ }
+ w.on('finish', function () {})
+ w.write({
+ foo: 'bar'
+ })
+ w.end()
+ })
+ test('can write multiple objects to stream', function (t) {
+ t.plan(1)
+ const w = new Writable({
+ objectMode: true
+ })
+ const list = []
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ cb()
+ }
+ w.on('finish', function () {
+ t.deepEqual(list, [0, 1, 2, 3, 4])
+ })
+ w.write(0)
+ w.write(1)
+ w.write(2)
+ w.write(3)
+ w.write(4)
+ w.end()
+ })
+ test('can write strings as objects', function (t) {
+ t.plan(1)
+ const w = new Writable({
+ objectMode: true
+ })
+ const list = []
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ process.nextTick(cb)
+ }
+ w.on('finish', function () {
+ t.deepEqual(list, ['0', '1', '2', '3', '4'])
+ })
+ w.write('0')
+ w.write('1')
+ w.write('2')
+ w.write('3')
+ w.write('4')
+ w.end()
+ })
+ test('buffers finish until cb is called', function (t) {
+ t.plan(2)
+ const w = new Writable({
+ objectMode: true
+ })
+ let called = false
+ w._write = function (chunk, encoding, cb) {
+ t.equal(chunk, 'foo')
+ process.nextTick(function () {
+ called = true
+ cb()
+ })
+ }
+ w.on('finish', function () {
+ t.equal(called, true)
+ })
+ w.write('foo')
+ w.end()
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-objects'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-pipe-error-handling.js b/test/browser/test-stream2-pipe-error-handling.js
new file mode 100644
index 0000000000..df0afb614b
--- /dev/null
+++ b/test/browser/test-stream2-pipe-error-handling.js
@@ -0,0 +1,80 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
module.exports = function (test) {
  test('Error Listener Catches', function (t) {
    t.plan(3)
    // Source produces up to 1000 bytes on demand.
    let count = 1000
    const source = new stream.Readable()
    source._read = function (n) {
      n = Math.min(count, n)
      count -= n
      source.push(Buffer.alloc(n))
    }
    // Record which destination unpipe() is invoked with during teardown.
    let unpipedDest
    source.unpipe = function (dest) {
      unpipedDest = dest
      stream.Readable.prototype.unpipe.call(this, dest)
    }
    const dest = new stream.Writable()
    dest._write = function (chunk, encoding, cb) {
      cb()
    }
    source.pipe(dest)
    let gotErr = null
    dest.on('error', function (err) {
      gotErr = err
    })
    let unpipedSource
    dest.on('unpipe', function (src) {
      unpipedSource = src
    })
    const err = new Error('This stream turned into bacon.')
    dest.emit('error', err)
    // With an 'error' listener attached, the error is delivered to it and
    // the pipe is torn down on both ends.
    t.strictEqual(gotErr, err)
    t.strictEqual(unpipedSource, source)
    t.strictEqual(unpipedDest, dest)
  })
  test('Error Without Listener Throws', function testErrorWithoutListenerThrows(t) {
    t.plan(3)
    let count = 1000
    const source = new stream.Readable()
    source._read = function (n) {
      n = Math.min(count, n)
      count -= n
      source.push(Buffer.alloc(n))
    }
    let unpipedDest
    source.unpipe = function (dest) {
      unpipedDest = dest
      stream.Readable.prototype.unpipe.call(this, dest)
    }
    const dest = new stream.Writable()
    dest._write = function (chunk, encoding, cb) {
      cb()
    }
    source.pipe(dest)
    let unpipedSource
    dest.on('unpipe', function (src) {
      unpipedSource = src
    })
    const err = new Error('This stream turned into bacon.')
    const onerror = global.onerror
    dest.emit('error', err)
    // NOTE(review): global.onerror is installed *after* the emit above; the
    // assertions can only run if the unhandled 'error' is re-raised
    // asynchronously via the browser's uncaught-error path — confirm against
    // the browser test harness.
    global.onerror = () => {
      t.ok(true)
      t.strictEqual(unpipedSource, source)
      t.strictEqual(unpipedDest, dest)
      // Restore whatever handler was there before; returning true marks the
      // error as handled so the harness does not fail the run.
      global.onerror = onerror
      return true
    }
  })
}
+module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-handling'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-pipe-error-once-listener.js b/test/browser/test-stream2-pipe-error-once-listener.js
new file mode 100644
index 0000000000..a9c8c6706d
--- /dev/null
+++ b/test/browser/test-stream2-pipe-error-once-listener.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
module.exports = function (t) {
  t.plan(1)
  // A writable whose _write emits 'error' (with a listener attached) must
  // still deliver events emitted afterwards ('alldone').
  const Read = function () {
    stream.Readable.call(this)
  }
  inherits(Read, stream.Readable)
  Read.prototype._read = function (size) {
    // One chunk, then EOF.
    this.push('x')
    this.push(null)
  }
  const Write = function () {
    stream.Writable.call(this)
  }
  inherits(Write, stream.Writable)
  Write.prototype._write = function (buffer, encoding, cb) {
    // Never calls cb(): the write errors out instead, then signals done.
    this.emit('error', new Error('boom'))
    this.emit('alldone')
  }
  const read = new Read()
  const write = new Write()
  // A once-listener swallows the error so it does not throw.
  write.once('error', () => {})
  write.once('alldone', function () {
    t.ok(true)
  })
  read.pipe(write)
}
+module.exports[kReadableStreamSuiteName] = 'stream2-pipe-error-once-listener'
diff --git a/test/browser/test-stream2-push.js b/test/browser/test-stream2-push.js
new file mode 100644
index 0000000000..e2b4b0e0df
--- /dev/null
+++ b/test/browser/test-stream2-push.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(33)
+ const stream = new Readable({
+ highWaterMark: 16,
+ encoding: 'utf8'
+ })
+ const source = new EE()
+ stream._read = function () {
+ // console.error('stream._read');
+ readStart()
+ }
+ let ended = false
+ stream.on('end', function () {
+ ended = true
+ })
+ source.on('data', function (chunk) {
+ const ret = stream.push(chunk)
+ // console.error('data', stream._readableState.length);
+ if (!ret) {
+ readStop()
+ }
+ })
+ source.on('end', function () {
+ stream.push(null)
+ })
+ let reading = false
+ function readStart() {
+ // console.error('readStart');
+ reading = true
+ }
+ function readStop() {
+ // console.error('readStop');
+ reading = false
+ process.nextTick(function () {
+ const r = stream.read()
+ if (r !== null) {
+ writer.write(r)
+ }
+ })
+ }
+ const writer = new Writable({
+ decodeStrings: false
+ })
+ const written = []
+ const expectWritten = [
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg'
+ ]
+ writer._write = function (chunk, encoding, cb) {
+ // console.error('WRITE %s', chunk);
+ written.push(chunk)
+ process.nextTick(cb)
+ }
+ writer.on('finish', finish)
+
+ // now emit some chunks.
+
+ const chunk = 'asdfg'
+ let set = 0
+ readStart()
+ data()
+ function data() {
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.ok(reading)
+ source.emit('data', chunk)
+ t.notOk(reading)
+ if (set++ < 5) {
+ setTimeout(data, 10)
+ } else {
+ end()
+ }
+ }
+ function finish() {
+ // console.error('finish');
+ t.deepEqual(written, expectWritten)
+ }
+ function end() {
+ source.emit('end')
+ t.notOk(reading)
+ writer.end(stream.read())
+ setTimeout(function () {
+ t.ok(ended)
+ })
+ }
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-push'
diff --git a/test/browser/test-stream2-readable-empty-buffer-no-eof.js b/test/browser/test-stream2-readable-empty-buffer-no-eof.js
new file mode 100644
index 0000000000..576aeb7dae
--- /dev/null
+++ b/test/browser/test-stream2-readable-empty-buffer-no-eof.js
@@ -0,0 +1,100 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+module.exports = function (test) {
+ test('readable empty buffer no eof 1', function (t) {
+ t.plan(1)
+ const r = new Readable()
+
+ // should not end when we get a Buffer(0) or '' as the _read result
+ // that just means that there is *temporarily* no data, but to go
+ // ahead and try again later.
+ //
+ // note that this is very unusual. it only works for crypto streams
+ // because the other side of the stream will call read(0) to cycle
+ // data through openssl. that's why we set the timeouts to call
+ // r.read(0) again later, otherwise there is no more work being done
+ // and the process just exits.
+
+ const buf = Buffer.alloc(5)
+ buf.fill('x')
+ let reads = 5
+ r._read = function (n) {
+ switch (reads--) {
+ case 0:
+ return r.push(null)
+ // EOF
+ case 1:
+ return r.push(buf)
+ case 2:
+ setTimeout(r.read.bind(r, 0), 50)
+ return r.push(Buffer.alloc(0))
+ // Not-EOF!
+ case 3:
+ setTimeout(r.read.bind(r, 0), 50)
+ return process.nextTick(function () {
+ return r.push(Buffer.alloc(0))
+ })
+ case 4:
+ setTimeout(r.read.bind(r, 0), 50)
+ return setTimeout(function () {
+ return r.push(Buffer.alloc(0))
+ })
+ case 5:
+ return setTimeout(function () {
+ return r.push(buf)
+ })
+ default:
+ throw new Error('unreachable')
+ }
+ }
+ const results = []
+ function flow() {
+ let chunk
+ while ((chunk = r.read()) !== null) {
+ results.push(chunk + '')
+ }
+ }
+ r.on('readable', flow)
+ r.on('end', function () {
+ results.push('EOF')
+ t.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF'])
+ })
+ flow()
+ })
+ test('readable empty buffer no eof 2', function (t) {
+ t.plan(1)
+ const r = new Readable({
+ encoding: 'base64'
+ })
+ let reads = 5
+ r._read = function (n) {
+ if (!reads--) {
+ return r.push(null) // EOF
+ } else {
+ return r.push(Buffer.from('x'))
+ }
+ }
+ const results = []
+ function flow() {
+ let chunk
+ while ((chunk = r.read()) !== null) {
+ results.push(chunk + '')
+ }
+ }
+ r.on('readable', flow)
+ r.on('end', function () {
+ results.push('EOF')
+ t.deepEqual(results, ['eHh4', 'eHg=', 'EOF'])
+ })
+ flow()
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-empty-buffer-no-eof'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-readable-from-list.js b/test/browser/test-stream2-readable-from-list.js
new file mode 100644
index 0000000000..9d3ad502c0
--- /dev/null
+++ b/test/browser/test-stream2-readable-from-list.js
@@ -0,0 +1,97 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { _fromList: fromList } = require('../../lib/_stream_readable')
+const BufferList = require('../../lib/internal/streams/buffer_list')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
function bufferListFromArray(arr) {
  // Copy the items of a plain array into a fresh BufferList, preserving
  // their order.
  const bl = new BufferList()
  for (const item of arr) {
    bl.push(item)
  }
  return bl
}
module.exports = function (test) {
  // Exercises the internal _fromList helper directly against a fake
  // readable-state object ({ buffer, length[, decoder] }).
  test('buffers', function (t) {
    t.plan(5)
    let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')]
    list = bufferListFromArray(list)

    // read more than the first element.
    let ret = fromList(6, {
      buffer: list,
      length: 16
    })
    t.equal(ret.toString(), 'foogba')

    // read exactly the first element.
    ret = fromList(2, {
      buffer: list,
      length: 10
    })
    t.equal(ret.toString(), 'rk')

    // read less than the first element.
    ret = fromList(2, {
      buffer: list,
      length: 8
    })
    t.equal(ret.toString(), 'ba')

    // read more than we have.
    ret = fromList(100, {
      buffer: list,
      length: 6
    })
    t.equal(ret.toString(), 'zykuel')

    // all consumed.
    t.same(list, new BufferList())
  })
  test('strings', function (t) {
    t.plan(5)
    let list = ['foog', 'bark', 'bazy', 'kuel']
    list = bufferListFromArray(list)

    // read more than the first element.
    // decoder: true makes _fromList treat the buffered items as strings.
    let ret = fromList(6, {
      buffer: list,
      length: 16,
      decoder: true
    })
    t.equal(ret, 'foogba')

    // read exactly the first element.
    ret = fromList(2, {
      buffer: list,
      length: 10,
      decoder: true
    })
    t.equal(ret, 'rk')

    // read less than the first element.
    ret = fromList(2, {
      buffer: list,
      length: 8,
      decoder: true
    })
    t.equal(ret, 'ba')

    // read more than we have.
    ret = fromList(100, {
      buffer: list,
      length: 6,
      decoder: true
    })
    t.equal(ret, 'zykuel')

    // all consumed.
    t.same(list, new BufferList())
  })
}
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-from-list'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-readable-legacy-drain.js b/test/browser/test-stream2-readable-legacy-drain.js
new file mode 100644
index 0000000000..8aa8670549
--- /dev/null
+++ b/test/browser/test-stream2-readable-legacy-drain.js
@@ -0,0 +1,44 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Stream, Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(3)
+ const r = new Readable()
+ const N = 256
+ let reads = 0
+ r._read = function (n) {
+ return r.push(++reads === N ? null : Buffer.alloc(1))
+ }
+ r.on('end', function () {
+ t.ok(true, 'rended')
+ })
+ const w = new Stream()
+ w.writable = true
+ let writes = 0
+ let buffered = 0
+ w.write = function (c) {
+ writes += c.length
+ buffered += c.length
+ process.nextTick(drain)
+ return false
+ }
+ function drain() {
+ if (buffered > 3) {
+ t.ok(false, 'to much buffer')
+ }
+ buffered = 0
+ w.emit('drain')
+ }
+ w.end = function () {
+ t.equal(writes, 255)
+ t.ok(true, 'wended')
+ }
+ r.pipe(w)
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-legacy-drain'
diff --git a/test/browser/test-stream2-readable-non-empty-end.js b/test/browser/test-stream2-readable-non-empty-end.js
new file mode 100644
index 0000000000..b8e23851df
--- /dev/null
+++ b/test/browser/test-stream2-readable-non-empty-end.js
@@ -0,0 +1,57 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
module.exports = function (t) {
  t.plan(4)
  // Build ten chunks of sizes 1..10 (len = 55 bytes in total).
  let len = 0
  const chunks = new Array(10)
  for (let i = 1; i <= 10; i++) {
    chunks[i - 1] = Buffer.alloc(i)
    len += i
  }
  const test = new Readable()
  let n = 0
  test._read = function (size) {
    const chunk = chunks[n++]
    setTimeout(function () {
      // Past the last chunk, signal EOF.
      test.push(chunk === undefined ? null : chunk)
    })
  }
  // 'end' must not fire while one byte is still buffered.
  test.on('end', thrower)
  function thrower() {
    throw new Error('this should not happen!')
  }
  let bytesread = 0
  test.on('readable', function () {
    // Ask for one byte less than what remains, so exactly one byte is
    // left buffered once the source is exhausted.
    const b = len - bytesread - 1
    const res = test.read(b)
    if (res) {
      bytesread += res.length
      setTimeout(next)
    }
    // read(0) keeps the internal read cycle going without consuming data.
    test.read(0)
  })
  test.read(0)
  function next() {
    // now let's make 'end' happen
    test.removeListener('end', thrower)
    test.on('end', function () {
      t.ok(true, 'end emitted')
    })

    // one to get the last byte
    let r = test.read()
    t.ok(r)
    t.equal(r.length, 1)
    r = test.read()
    t.equal(r, null)
  }
}
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-non-empty-end'
diff --git a/test/browser/test-stream2-readable-wrap-empty.js b/test/browser/test-stream2-readable-wrap-empty.js
new file mode 100644
index 0000000000..2dd97b0cbd
--- /dev/null
+++ b/test/browser/test-stream2-readable-wrap-empty.js
@@ -0,0 +1,19 @@
+'use strict'
+
+const { EventEmitter: EE } = require('events')
+const Readable = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
module.exports = function (t) {
  t.plan(1)
  // Wrapping an old-style stream that emits 'end' without ever emitting
  // 'data' must still end the wrapping Readable.
  const oldStream = new EE()
  // Minimal stream1 surface: wrap() calls pause()/resume() on the source.
  oldStream.pause = function () {}
  oldStream.resume = function () {}
  const newStream = new Readable().wrap(oldStream)
  newStream
    .on('readable', function () {})
    .on('end', function () {
      t.ok(true, 'ended')
    })
  oldStream.emit('end')
}
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap-empty'
diff --git a/test/browser/test-stream2-readable-wrap.js b/test/browser/test-stream2-readable-wrap.js
new file mode 100644
index 0000000000..89098a180d
--- /dev/null
+++ b/test/browser/test-stream2-readable-wrap.js
@@ -0,0 +1,108 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { EventEmitter: EE } = require('events')
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+let run = 0
+module.exports = function (test) {
+ function runTest(highWaterMark, objectMode, produce) {
+ test('run #' + ++run, (t) => {
+ t.plan(4)
+ const old = new EE()
+ const r = new Readable({
+ highWaterMark,
+ objectMode
+ })
+ t.equal(r, r.wrap(old))
+ let ended = false
+ r.on('end', function () {
+ ended = true
+ })
+ old.pause = function () {
+ // console.error('old.pause()');
+ old.emit('pause')
+ flowing = false
+ }
+ old.resume = function () {
+ // console.error('old.resume()');
+ old.emit('resume')
+ flow()
+ }
+ let flowing
+ let chunks = 10
+ let oldEnded = false
+ const expected = []
+ function flow() {
+ flowing = true
+ // eslint-disable-next-line no-unmodified-loop-condition
+ while (flowing && chunks-- > 0) {
+ const item = produce()
+ expected.push(item)
+ // console.log('old.emit', chunks, flowing);
+ old.emit('data', item)
+ // console.log('after emit', chunks, flowing);
+ }
+ if (chunks <= 0) {
+ oldEnded = true
+ // console.log('old end', chunks, flowing);
+ old.emit('end')
+ }
+ }
+ const w = new Writable({
+ highWaterMark: highWaterMark * 2,
+ objectMode
+ })
+ const written = []
+ w._write = function (chunk, encoding, cb) {
+ // console.log('_write', chunk);
+ written.push(chunk)
+ setTimeout(cb)
+ }
+ w.on('finish', function () {
+ performAsserts()
+ })
+ r.pipe(w)
+ flow()
+ function performAsserts() {
+ t.ok(ended)
+ t.ok(oldEnded)
+ t.deepEqual(written, expected)
+ }
+ })
+ }
+ runTest(100, false, function () {
+ return Buffer.alloc(100)
+ })
+ runTest(10, false, function () {
+ return Buffer.from('xxxxxxxxxx')
+ })
+ runTest(1, true, function () {
+ return {
+ foo: 'bar'
+ }
+ })
+ const objectChunks = [
+ 5,
+ 'a',
+ false,
+ 0,
+ '',
+ 'xyz',
+ {
+ x: 4
+ },
+ 7,
+ [],
+ 555
+ ]
+ runTest(1, true, function () {
+ return objectChunks.shift()
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-readable-wrap'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-set-encoding.js b/test/browser/test-stream2-set-encoding.js
new file mode 100644
index 0000000000..4af1c989f8
--- /dev/null
+++ b/test/browser/test-stream2-set-encoding.js
@@ -0,0 +1,312 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const inherits = require('inherits')
+const { Readable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+inherits(TestReader, Readable)
// Readable that asynchronously produces n bytes (default 100) of 'a'.
function TestReader(n, opts) {
  Readable.call(this, opts)
  this.pos = 0
  this.len = n || 100
}
TestReader.prototype._read = function (n) {
  // Defer each push so reads are always satisfied asynchronously.
  setTimeout(
    function () {
      if (this.pos >= this.len) {
        // double push(null) to test eos handling
        this.push(null)
        return this.push(null)
      }
      // Clamp the request to what remains.
      n = Math.min(n, this.len - this.pos)
      if (n <= 0) {
        // double push(null) to test eos handling
        this.push(null)
        return this.push(null)
      }
      this.pos += n
      const ret = Buffer.alloc(n)
      ret.fill('a')
      // Deliver a chunk of n 'a' bytes.
      return this.push(ret)
    }.bind(this),
    1
  )
}
+module.exports = function (test) {
+ test('setEncoding utf8', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ tr.setEncoding('utf8')
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('setEncoding hex', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('setEncoding hex with read(13)', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+ tr.on('readable', function flow() {
+ // console.log('readable once');
+ let chunk
+ while ((chunk = tr.read(13)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ // console.log('END');
+ t.same(out, expect)
+ })
+ })
+ test('setEncoding base64', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ tr.setEncoding('base64')
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('encoding: utf8', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100, {
+ encoding: 'utf8'
+ })
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('encoding: hex', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100, {
+ encoding: 'hex'
+ })
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('encoding: hex with read(13)', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100, {
+ encoding: 'hex'
+ })
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(13)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('encoding: base64', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100, {
+ encoding: 'base64'
+ })
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while ((chunk = tr.read(10)) !== null) {
+ out.push(chunk)
+ }
+ })
+ tr.on('end', function () {
+ t.same(out, expect)
+ })
+ })
+ test('chainable', function (t) {
+ t.plan(1)
+ const tr = new TestReader(100)
+ t.equal(tr.setEncoding('utf8'), tr)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-set-encoding'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-transform.js b/test/browser/test-stream2-transform.js
new file mode 100644
index 0000000000..27a95c98db
--- /dev/null
+++ b/test/browser/test-stream2-transform.js
@@ -0,0 +1,476 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { PassThrough, Transform } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+module.exports = function (test) {
+ test('writable side consumption', function (t) {
+ t.plan(3)
+ const tx = new Transform({
+ highWaterMark: 10
+ })
+ let transformed = 0
+ tx._transform = function (chunk, encoding, cb) {
+ transformed += chunk.length
+ tx.push(chunk)
+ cb()
+ }
+ for (let i = 1; i <= 10; i++) {
+ tx.write(Buffer.alloc(i))
+ }
+ tx.end()
+ t.equal(tx._readableState.length, 10)
+ t.equal(transformed, 10)
+ t.same(
+ tx._writableState.getBuffer().map(function (c) {
+ return c.chunk.length
+ }),
+ [5, 6, 7, 8, 9, 10]
+ )
+ })
+ test('passthrough', function (t) {
+ t.plan(4)
+ const pt = new PassThrough()
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5).toString(), 'l')
+ })
+ test('object passthrough', function (t) {
+ t.plan(7)
+ const pt = new PassThrough({
+ objectMode: true
+ })
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({
+ a: 'b'
+ })
+ pt.end()
+ t.equal(pt.read(), 1)
+ t.equal(pt.read(), true)
+ t.equal(pt.read(), false)
+ t.equal(pt.read(), 0)
+ t.equal(pt.read(), 'foo')
+ t.equal(pt.read(), '')
+ t.same(pt.read(), {
+ a: 'b'
+ })
+ })
+ test('simple transform', function (t) {
+ t.plan(4)
+ const pt = new Transform()
+ pt._transform = function (c, e, cb) {
+ const ret = Buffer.alloc(c.length)
+ ret.fill('x')
+ pt.push(ret)
+ cb()
+ }
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'xxxxx')
+ t.equal(pt.read(5).toString(), 'x')
+ })
+ test('simple object transform', function (t) {
+ t.plan(7)
+ const pt = new Transform({
+ objectMode: true
+ })
+ pt._transform = function (c, e, cb) {
+ pt.push(JSON.stringify(c))
+ cb()
+ }
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({
+ a: 'b'
+ })
+ pt.end()
+ t.equal(pt.read(), '1')
+ t.equal(pt.read(), 'true')
+ t.equal(pt.read(), 'false')
+ t.equal(pt.read(), '0')
+ t.equal(pt.read(), '"foo"')
+ t.equal(pt.read(), '""')
+ t.equal(pt.read(), '{"a":"b"}')
+ })
+ test('async passthrough', function (t) {
+ t.plan(4)
+ const pt = new Transform()
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5).toString(), 'l')
+ })
+ })
+ test('asymmetric transform (expand)', function (t) {
+ t.plan(7)
+ const pt = new Transform()
+
+ // emit each chunk 2 times.
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }, 10)
+ }
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'foogf')
+ t.equal(pt.read(5).toString(), 'oogba')
+ t.equal(pt.read(5).toString(), 'rkbar')
+ t.equal(pt.read(5).toString(), 'kbazy')
+ t.equal(pt.read(5).toString(), 'bazyk')
+ t.equal(pt.read(5).toString(), 'uelku')
+ t.equal(pt.read(5).toString(), 'el')
+ })
+ })
+ test('asymmetric transform (compress)', function (t) {
+ t.plan(3)
+ const pt = new Transform()
+
+ // each output is the first char of 3 consecutive chunks,
+ // or whatever's left.
+ pt.state = ''
+ pt._transform = function (chunk, encoding, cb) {
+ if (!chunk) {
+ chunk = ''
+ }
+ const s = chunk.toString()
+ setTimeout(
+ function () {
+ this.state += s.charAt(0)
+ if (this.state.length === 3) {
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ }
+ cb()
+ }.bind(this),
+ 10
+ )
+ }
+ pt._flush = function (cb) {
+ // just output whatever we have.
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ cb()
+ }
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.end()
+
+ // 'abcdeabcdeabcd'
+ pt.on('finish', function () {
+ t.equal(pt.read(5).toString(), 'abcde')
+ t.equal(pt.read(5).toString(), 'abcde')
+ t.equal(pt.read(5).toString(), 'abcd')
+ })
+ })
+
+ // this tests for a stall when data is written to a full stream
+ // that has empty transforms.
+ test('complex transform', function (t) {
+ t.plan(2)
+ let count = 0
+ let saved = null
+ const pt = new Transform({
+ highWaterMark: 3
+ })
+ pt._transform = function (c, e, cb) {
+ if (count++ === 1) {
+ saved = c
+ } else {
+ if (saved) {
+ pt.push(saved)
+ saved = null
+ }
+ pt.push(c)
+ }
+ cb()
+ }
+ pt.once('readable', function () {
+ process.nextTick(function () {
+ pt.write(Buffer.from('d'))
+ pt.write(Buffer.from('ef'), function () {
+ pt.end()
+ })
+ t.equal(pt.read().toString(), 'abcdef')
+ t.equal(pt.read(), null)
+ })
+ })
+ pt.write(Buffer.from('abc'))
+ })
+ test('passthrough event emission', function (t) {
+ t.plan(11)
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ // console.error('>>> emit readable %d', emits);
+ emits++
+ })
+ pt.write(Buffer.from('foog'))
+
+ // console.error('need emit 0');
+ pt.write(Buffer.from('bark'))
+ setTimeout(() => {
+ // console.error('should have emitted readable now 1 === %d', emits)
+ t.equal(emits, 1)
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5) + '', 'null')
+
+ // console.error('need emit 1');
+
+ pt.write(Buffer.from('bazy'))
+ // console.error('should have emitted, but not again');
+ pt.write(Buffer.from('kuel'))
+
+ // console.error('should have emitted readable now 2 === %d', emits);
+ setTimeout(() => {
+ t.equal(emits, 2)
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 2');
+
+ pt.end()
+ setTimeout(() => {
+ t.equal(emits, 3)
+ t.equal(pt.read(5).toString(), 'l')
+ t.equal(pt.read(5), null)
+
+ // console.error('should not have emitted again');
+ t.equal(emits, 3)
+ })
+ })
+ })
+ })
+ test('passthrough event emission reordered', function (t) {
+ t.plan(10)
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ // console.error('emit readable', emits);
+ emits++
+ })
+ pt.write(Buffer.from('foog'))
+ // console.error('need emit 0');
+ pt.write(Buffer.from('bark'))
+ setTimeout(() => {
+ // console.error('should have emitted readable now 1 === %d', emits);
+ t.equal(emits, 1)
+ t.equal(pt.read(5).toString(), 'foogb')
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 1');
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'arkba')
+ t.equal(pt.read(5), null)
+
+ // console.error('need emit 2');
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'zykue')
+ t.equal(pt.read(5), null)
+ pt.once('readable', function () {
+ t.equal(pt.read(5).toString(), 'l')
+ t.equal(pt.read(5), null)
+ t.equal(emits, 4)
+ })
+ pt.end()
+ })
+ pt.write(Buffer.from('kuel'))
+ })
+ pt.write(Buffer.from('bazy'))
+ })
+ })
+ test('passthrough facaded', function (t) {
+ t.plan(1)
+
+ // console.error('passthrough facaded');
+ const pt = new PassThrough()
+ const datas = []
+ pt.on('data', function (chunk) {
+ datas.push(chunk.toString())
+ })
+ pt.on('end', function () {
+ t.same(datas, ['foog', 'bark', 'bazy', 'kuel'])
+ })
+ pt.write(Buffer.from('foog'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bark'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bazy'))
+ setTimeout(function () {
+ pt.write(Buffer.from('kuel'))
+ setTimeout(function () {
+ pt.end()
+ }, 10)
+ }, 10)
+ }, 10)
+ }, 10)
+ })
+ test('object transform (json parse)', function (t) {
+ t.plan(5)
+
+ // console.error('json parse stream');
+ const jp = new Transform({
+ objectMode: true
+ })
+ jp._transform = function (data, encoding, cb) {
+ try {
+ jp.push(JSON.parse(data))
+ cb()
+ } catch (er) {
+ cb(er)
+ }
+ }
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ const objects = [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string',
+ {
+ nested: {
+ things: [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string'
+ ]
+ }
+ }
+ ]
+ let ended = false
+ jp.on('end', function () {
+ ended = true
+ })
+ forEach(objects, function (obj) {
+ jp.write(JSON.stringify(obj))
+ const res = jp.read()
+ t.same(res, obj)
+ })
+ jp.end()
+ // read one more time to get the 'end' event
+ jp.read()
+ process.nextTick(function () {
+ t.ok(ended)
+ })
+ })
+ test('object transform (json stringify)', function (t) {
+ t.plan(5)
+
+ // console.error('json parse stream');
+ const js = new Transform({
+ objectMode: true
+ })
+ js._transform = function (data, encoding, cb) {
+ try {
+ js.push(JSON.stringify(data))
+ cb()
+ } catch (er) {
+ cb(er)
+ }
+ }
+
+ // anything except null/undefined is fine.
+ // those are "magic" in the stream API, because they signal EOF.
+ const objects = [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string',
+ {
+ nested: {
+ things: [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string'
+ ]
+ }
+ }
+ ]
+ let ended = false
+ js.on('end', function () {
+ ended = true
+ })
+ forEach(objects, function (obj) {
+ js.write(obj)
+ const res = js.read()
+ t.equal(res, JSON.stringify(obj))
+ })
+ js.end()
+ // read one more time to get the 'end' event
+ js.read()
+ process.nextTick(function () {
+ t.ok(ended)
+ })
+ })
+ function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
+ }
+ }
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-transform'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream2-unpipe-drain.js b/test/browser/test-stream2-unpipe-drain.js
new file mode 100644
index 0000000000..5b20ee530e
--- /dev/null
+++ b/test/browser/test-stream2-unpipe-drain.js
@@ -0,0 +1,52 @@
+'use strict'
+
+const crypto = require('crypto')
+const inherits = require('inherits')
+const stream = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ try {
+ crypto.randomBytes(9)
+ } catch (_) {
+ t.plan(1)
+ t.ok(true, 'does not support random, skipping')
+ return
+ }
+ t.plan(2)
+ function TestWriter() {
+ stream.Writable.call(this)
+ }
+ inherits(TestWriter, stream.Writable)
+ TestWriter.prototype._write = function (buffer, encoding, callback) {
+ // console.log('write called');
+ // super slow write stream (callback never called)
+ }
+ const dest = new TestWriter()
+ function TestReader(id) {
+ stream.Readable.call(this)
+ this.reads = 0
+ }
+ inherits(TestReader, stream.Readable)
+ TestReader.prototype._read = function (size) {
+ this.reads += 1
+ this.push(crypto.randomBytes(size))
+ }
+ const src1 = new TestReader()
+ const src2 = new TestReader()
+ src1.pipe(dest)
+ src1.once('readable', function () {
+ process.nextTick(function () {
+ src2.pipe(dest)
+ src2.once('readable', function () {
+ process.nextTick(function () {
+ src1.unpipe(dest)
+ })
+ })
+ })
+ })
+ dest.on('unpipe', function () {
+ t.equal(src1.reads, 2)
+ t.equal(src2.reads, 1)
+ })
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-unpipe-drain'
diff --git a/test/browser/test-stream2-writable.js b/test/browser/test-stream2-writable.js
new file mode 100644
index 0000000000..4a917f26e4
--- /dev/null
+++ b/test/browser/test-stream2-writable.js
@@ -0,0 +1,384 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const inherits = require('inherits')
+const { Duplex, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName, kReadableStreamSuiteHasMultipleTests } = require('./symbols')
+inherits(TestWriter, Writable)
+function TestWriter() {
+ Writable.apply(this, arguments)
+ this.buffer = []
+ this.written = 0
+}
+TestWriter.prototype._write = function (chunk, encoding, cb) {
+ // simulate a small unpredictable latency
+ setTimeout(
+ function () {
+ this.buffer.push(chunk.toString())
+ this.written += chunk.length
+ cb()
+ }.bind(this),
+ Math.floor(Math.random() * 10)
+ )
+}
+inherits(Processstdout, Writable)
+function Processstdout() {
+ Writable.apply(this, arguments)
+ this.buffer = []
+ this.written = 0
+}
+Processstdout.prototype._write = function (chunk, encoding, cb) {
+ // console.log(chunk.toString());
+ cb()
+}
+const chunks = new Array(50)
+for (let i = 0; i < chunks.length; i++) {
+ chunks[i] = new Array(i + 1).join('x')
+}
+if (!process.stdout) {
+ process.stdout = new Processstdout()
+}
+module.exports = function (test) {
+ test('write fast', function (t) {
+ t.plan(1)
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ })
+ forEach(chunks, function (chunk) {
+ // screw backpressure. Just buffer it all up.
+ tw.write(chunk)
+ })
+ tw.end()
+ })
+ test('write slow', function (t) {
+ t.plan(1)
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ })
+ let i = 0
+ ;(function W() {
+ tw.write(chunks[i++])
+ if (i < chunks.length) {
+ setTimeout(W, 10)
+ } else {
+ tw.end()
+ }
+ })()
+ })
+ test('write backpressure', function (t) {
+ t.plan(19)
+ const tw = new TestWriter({
+ highWaterMark: 50
+ })
+ let drains = 0
+ tw.on('finish', function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ t.equal(drains, 17)
+ })
+ tw.on('drain', function () {
+ drains++
+ })
+ let i = 0
+ ;(function W() {
+ let ret
+ do {
+ ret = tw.write(chunks[i++])
+ } while (ret !== false && i < chunks.length)
+ if (i < chunks.length) {
+ t.ok(tw._writableState.length >= 50)
+ tw.once('drain', W)
+ } else {
+ tw.end()
+ }
+ })()
+ })
+ test('write bufferize', function (t) {
+ t.plan(50)
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+ tw.on('finish', function () {
+ forEach(chunks, function (chunk, i) {
+ const actual = Buffer.from(tw.buffer[i])
+ chunk = Buffer.from(chunk)
+
+ // Some combination of encoding and length results in the last byte replaced by two extra null bytes
+ if (actual[actual.length - 1] === 0) {
+ chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])])
+ }
+
+ // In some cases instead there is one byte less
+ if (actual.length === chunk.length - 1) {
+ chunk = chunk.slice(0, chunk.length - 1)
+ }
+ t.same(actual, chunk, 'got the expected chunks ' + i)
+ })
+ })
+ forEach(chunks, function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+ tw.end()
+ })
+ test('write no bufferize', function (t) {
+ t.plan(100)
+ const tw = new TestWriter({
+ highWaterMark: 100,
+ decodeStrings: false
+ })
+ tw._write = function (chunk, encoding, cb) {
+ t.equals(typeof chunk, 'string')
+ chunk = Buffer.from(chunk, encoding)
+ return TestWriter.prototype._write.call(this, chunk, encoding, cb)
+ }
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+ tw.on('finish', function () {
+ forEach(chunks, function (chunk, i) {
+ const actual = Buffer.from(tw.buffer[i])
+ chunk = Buffer.from(chunk)
+
+ // Some combination of encoding and length results in the last byte replaced by two extra null bytes
+ if (actual[actual.length - 1] === 0) {
+ chunk = Buffer.concat([chunk.slice(0, chunk.length - 1), Buffer.from([0, 0])])
+ }
+
+ // In some cases instead there is one byte less
+ if (actual.length === chunk.length - 1) {
+ chunk = chunk.slice(0, chunk.length - 1)
+ }
+ t.same(actual, chunk, 'got the expected chunks ' + i)
+ })
+ })
+ forEach(chunks, function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+ tw.end()
+ })
+ test('write callbacks', function (t) {
+ t.plan(2)
+ const callbacks = chunks
+ .map(function (chunk, i) {
+ return [
+ i,
+ function (er) {
+ callbacks._called[i] = chunk
+ }
+ ]
+ })
+ .reduce(function (set, x) {
+ set['callback-' + x[0]] = x[1]
+ return set
+ }, {})
+ callbacks._called = []
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ tw.on('finish', function () {
+ process.nextTick(function () {
+ t.same(tw.buffer, chunks, 'got chunks in the right order')
+ t.same(callbacks._called, chunks, 'called all callbacks')
+ })
+ })
+ forEach(chunks, function (chunk, i) {
+ tw.write(chunk, callbacks['callback-' + i])
+ })
+ tw.end()
+ })
+ test('end callback', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ tw.end(() => {
+ t.ok(true)
+ })
+ })
+ test('end callback with chunk', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ tw.end(Buffer.from('hello world'), () => {
+ t.ok(true)
+ })
+ })
+ test('end callback with chunk and encoding', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ tw.end('hello world', 'ascii', () => {
+ t.ok(true)
+ })
+ })
+ test('end callback after .write() call', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ tw.write(Buffer.from('hello world'))
+ tw.end(() => {
+ t.ok(true)
+ })
+ })
+ test('end callback called after write callback', function (t) {
+ t.plan(1)
+ const tw = new TestWriter()
+ let writeCalledback = false
+ tw.write(Buffer.from('hello world'), function () {
+ writeCalledback = true
+ })
+ tw.end(function () {
+ t.equal(writeCalledback, true)
+ })
+ })
+ test('encoding should be ignored for buffers', function (t) {
+ t.plan(1)
+ const tw = new Writable()
+ const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'
+ tw._write = function (chunk, encoding, cb) {
+ t.equal(chunk.toString('hex'), hex)
+ }
+ const buf = Buffer.from(hex, 'hex')
+ tw.write(buf, 'binary')
+ })
+ test('writables are not pipable', function (t) {
+ t.plan(1)
+ const w = new Writable({
+ autoDestroy: false
+ })
+ w._write = function () {}
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ })
+ w.pipe(process.stdout)
+ t.ok(gotError)
+ })
+ test('duplexes are pipable', function (t) {
+ t.plan(1)
+ const d = new Duplex()
+ d._read = function () {}
+ d._write = function () {}
+ let gotError = false
+ d.on('error', function (er) {
+ gotError = true
+ })
+ d.pipe(process.stdout)
+ t.notOk(gotError)
+ })
+ test('end(chunk) two times is an error', function (t) {
+ t.plan(2)
+ const w = new Writable()
+ w._write = function () {}
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ t.equal(er.message, 'write after end')
+ })
+ w.end('this is the end')
+ w.end('and so is this')
+ process.nextTick(function () {
+ t.ok(gotError)
+ })
+ })
+ test('dont end while writing', function (t) {
+ t.plan(2)
+ const w = new Writable()
+ let wrote = false
+ w._write = function (chunk, e, cb) {
+ t.notOk(this.writing)
+ wrote = true
+ this.writing = true
+ setTimeout(function () {
+ this.writing = false
+ cb()
+ })
+ }
+ w.on('finish', function () {
+ t.ok(wrote)
+ })
+ w.write(Buffer.alloc(0))
+ w.end()
+ })
+ test('finish does not come before write cb', function (t) {
+ t.plan(1)
+ const w = new Writable()
+ let writeCb = false
+ w._write = function (chunk, e, cb) {
+ setTimeout(function () {
+ writeCb = true
+ cb()
+ }, 10)
+ }
+ w.on('finish', function () {
+ t.ok(writeCb)
+ })
+ w.write(Buffer.alloc(0))
+ w.end()
+ })
+ test('finish does not come before sync _write cb', function (t) {
+ t.plan(1)
+ const w = new Writable()
+ let writeCb = false
+ w._write = function (chunk, e, cb) {
+ cb()
+ }
+ w.on('finish', function () {
+ t.ok(writeCb)
+ })
+ w.write(Buffer.alloc(0), function (er) {
+ writeCb = true
+ })
+ w.end()
+ })
+ test('finish is emitted if last chunk is empty', function (t) {
+ t.plan(1)
+ const w = new Writable()
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
+ w.on('finish', () => {
+ t.ok(true)
+ })
+ w.write(Buffer.alloc(1))
+ w.end(Buffer.alloc(0))
+ })
+ function forEach(xs, f) {
+ for (let i = 0, l = xs.length; i < l; i++) {
+ f(xs[i], i)
+ }
+ }
+}
+module.exports[kReadableStreamSuiteName] = 'stream2-writable'
+module.exports[kReadableStreamSuiteHasMultipleTests] = true
diff --git a/test/browser/test-stream3-pause-then-read.js b/test/browser/test-stream3-pause-then-read.js
new file mode 100644
index 0000000000..cced00a5c0
--- /dev/null
+++ b/test/browser/test-stream3-pause-then-read.js
@@ -0,0 +1,147 @@
+'use strict'
+
+/* replacement start */
+const { Buffer } = require('buffer')
+
+/* replacement end */
+
+const { Readable, Writable } = require('../../lib/ours/index')
+const { kReadableStreamSuiteName } = require('./symbols')
+module.exports = function (t) {
+ t.plan(7)
+ const totalChunks = 100
+ const chunkSize = 99
+ const expectTotalData = totalChunks * chunkSize
+ let expectEndingData = expectTotalData
+ const r = new Readable({
+ highWaterMark: 1000
+ })
+ let chunks = totalChunks
+ r._read = function (n) {
+ if (!(chunks % 2)) {
+ setImmediate(push)
+ } else if (!(chunks % 3)) {
+ process.nextTick(push)
+ } else {
+ push()
+ }
+ }
+ let totalPushed = 0
+ function push() {
+ const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize) : null
+ if (chunk) {
+ totalPushed += chunk.length
+ chunk.fill('x')
+ }
+ r.push(chunk)
+ }
+ read100()
+
+ // first we read 100 bytes
+ function read100() {
+ readn(100, onData)
+ }
+ function readn(n, then) {
+ // console.error('read %d', n);
+ expectEndingData -= n
+ ;(function read() {
+ const c = r.read(n)
+ if (!c) {
+ r.once('readable', read)
+ } else {
+ t.equal(c.length, n)
+ t.notOk(r._readableState.flowing)
+ then()
+ }
+ })()
+ }
+
+ // then we listen to some data events
+ function onData() {
+ expectEndingData -= 100
+ // console.error('onData');
+ let seen = 0
+ r.on('data', function od(c) {
+ seen += c.length
+ if (seen >= 100) {
+ // seen enough
+ r.removeListener('data', od)
+ r.pause()
+ if (seen > 100) {
+ // oh no, seen too much!
+ // put the extra back.
+ const diff = seen - 100
+ r.unshift(c.slice(c.length - diff))
+ // console.error('seen too much', seen, diff)
+ }
+
+ // Nothing should be lost in between
+ setImmediate(pipeLittle)
+ }
+ })
+ }
+
+ // Just pipe 200 bytes, then unshift the extra and unpipe
+ function pipeLittle() {
+ expectEndingData -= 200
+ // console.error('pipe a little');
+ const w = new Writable()
+ let written = 0
+ w.on('finish', function () {
+ t.equal(written, 200)
+ setImmediate(read1234)
+ })
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ if (written >= 200) {
+ r.unpipe(w)
+ w.end()
+ cb()
+ if (written > 200) {
+ const diff = written - 200
+ written -= diff
+ r.unshift(chunk.slice(chunk.length - diff))
+ }
+ } else {
+ setImmediate(cb)
+ }
+ }
+ r.pipe(w)
+ }
+
+ // now read 1234 more bytes
+ function read1234() {
+ readn(1234, resumePause)
+ }
+ function resumePause() {
+ // console.error('resumePause');
+ // don't read anything, just resume and re-pause a whole bunch
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ setImmediate(pipe)
+ }
+ function pipe() {
+ // console.error('pipe the rest');
+ const w = new Writable()
+ let written = 0
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ cb()
+ }
+ w.on('finish', function () {
+ // console.error('written', written, totalPushed);
+ t.equal(written, expectEndingData)
+ t.equal(totalPushed, expectTotalData)
+ })
+ r.pipe(w)
+ }
+}
+module.exports[kReadableStreamSuiteName] = 'stream3-pause-then-read'
diff --git a/test/common/fixtures.js b/test/common/fixtures.js
new file mode 100644
index 0000000000..d69561bc75
--- /dev/null
+++ b/test/common/fixtures.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const path = require('path')
+const fs = require('fs')
+const { pathToFileURL } = require('url')
+const fixturesDir = path.join(__dirname, '..', 'fixtures')
+function fixturesPath(...args) {
+ return path.join(fixturesDir, ...args)
+}
+function fixturesFileURL(...args) {
+ return pathToFileURL(fixturesPath(...args))
+}
+function readFixtureSync(args, enc) {
+ if (Array.isArray(args)) return fs.readFileSync(fixturesPath(...args), enc)
+ return fs.readFileSync(fixturesPath(args), enc)
+}
+function readFixtureKey(name, enc) {
+ return fs.readFileSync(fixturesPath('keys', name), enc)
+}
+function readFixtureKeys(enc, ...names) {
+ return names.map((name) => readFixtureKey(name, enc))
+}
+module.exports = {
+ fixturesDir,
+ path: fixturesPath,
+ fileURL: fixturesFileURL,
+ readSync: readFixtureSync,
+ readKey: readFixtureKey,
+ readKeys: readFixtureKeys
+}
diff --git a/test/common/fixtures.mjs b/test/common/fixtures.mjs
new file mode 100644
index 0000000000..372fabf88d
--- /dev/null
+++ b/test/common/fixtures.mjs
@@ -0,0 +1,5 @@
+import fixtures from './fixtures.js'
+
+const { fixturesDir, path, fileURL, readSync, readKey } = fixtures
+
+export { fixturesDir, path, fileURL, readSync, readKey }
diff --git a/test/common/index.js b/test/common/index.js
new file mode 100644
index 0000000000..3e9b4c3ea6
--- /dev/null
+++ b/test/common/index.js
@@ -0,0 +1,963 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+/* eslint-disable node-core/crypto-check */
+'use strict'
+
+const process = global.process // Some tests tamper with the process global.
+
+const assert = require('assert')
+const { exec, execSync, spawn, spawnSync } = require('child_process')
+const fs = require('fs')
+// Do not require 'os' until needed so that test-os-checked-function can
+// monkey patch it. If 'os' is required here, that test will fail.
+const path = require('path')
+const { inspect } = require('util')
+const { isMainThread } = require('worker_threads')
+const tmpdir = require('./tmpdir')
+const bits = ['arm64', 'loong64', 'mips', 'mipsel', 'ppc64', 'riscv64', 's390x', 'x64'].includes(process.arch) ? 64 : 32
+const hasIntl = !!process.config.variables.v8_enable_i18n_support
+const { atob, btoa } = require('buffer')
+
+// Some tests assume a umask of 0o022 so set that up front. Tests that need a
+// different umask will set it themselves.
+//
+// Workers can read, but not set the umask, so check that this is the main
+// thread.
+if (isMainThread) process.umask(0o022)
+const noop = () => {}
+const hasCrypto = Boolean(process.versions.openssl) && !process.env.NODE_SKIP_CRYPTO
+const hasOpenSSL3 = hasCrypto && require('crypto').constants.OPENSSL_VERSION_NUMBER >= 0x30000000
+const hasOpenSSL31 = hasCrypto && require('crypto').constants.OPENSSL_VERSION_NUMBER >= 0x30100000
+const hasQuic = hasCrypto && !!process.config.variables.openssl_quic
+function parseTestFlags(filename = process.argv[1]) {
+ // The copyright notice is relatively big and the flags could come afterwards.
+ const bytesToRead = 1500
+ const buffer = Buffer.allocUnsafe(bytesToRead)
+ const fd = fs.openSync(filename, 'r')
+ const bytesRead = fs.readSync(fd, buffer, 0, bytesToRead)
+ fs.closeSync(fd)
+ const source = buffer.toString('utf8', 0, bytesRead)
+ const flagStart = source.search(/\/\/ Flags:\s+--/) + 10
+ if (flagStart === 9) {
+ return []
+ }
+ let flagEnd = source.indexOf('\n', flagStart)
+ // Normalize different EOL.
+ if (source[flagEnd - 1] === '\r') {
+ flagEnd--
+ }
+ return source.substring(flagStart, flagEnd).split(/\s+/).filter(Boolean)
+}
+
+// Check for flags. Skip this for workers (both, the `cluster` module and
+// `worker_threads`) and child processes.
+// If the binary was built without-ssl then the crypto flags are
+// invalid (bad option). The test itself should handle this case.
+if (
+ process.argv.length === 2 &&
+ !process.env.NODE_SKIP_FLAG_CHECK &&
+ isMainThread &&
+ hasCrypto &&
+ require('cluster').isPrimary &&
+ fs.existsSync(process.argv[1])
+) {
+ const flags = parseTestFlags()
+ for (const flag of flags) {
+ if (
+ !process.execArgv.includes(flag) &&
+ // If the binary is build without `intl` the inspect option is
+ // invalid. The test itself should handle this case.
+ (process.features.inspector || !flag.startsWith('--inspect'))
+ ) {
+ console.log(
+ 'NOTE: The test started as a child_process using these flags:',
+ inspect(flags),
+ 'Use NODE_SKIP_FLAG_CHECK to run the test with the original flags.'
+ )
+ const args = [...flags, ...process.execArgv, ...process.argv.slice(1)]
+ const options = {
+ encoding: 'utf8',
+ stdio: 'inherit'
+ }
+ const result = spawnSync(process.execPath, args, options)
+ if (result.signal) {
+ process.kill(0, result.signal)
+ } else {
+ process.exit(result.status)
+ }
+ }
+ }
+}
+const isWindows = process.platform === 'win32'
+const isSunOS = process.platform === 'sunos'
+const isFreeBSD = process.platform === 'freebsd'
+const isOpenBSD = process.platform === 'openbsd'
+const isLinux = process.platform === 'linux'
+const isOSX = process.platform === 'darwin'
+const isAsan = process.env.ASAN !== undefined
+const isPi = (() => {
+ try {
+ var _exec
+ // Normal Raspberry Pi detection is to find the `Raspberry Pi` string in
+ // the contents of `/sys/firmware/devicetree/base/model` but that doesn't
+ // work inside a container. Match the chipset model number instead.
+ const cpuinfo = fs.readFileSync('/proc/cpuinfo', {
+ encoding: 'utf8'
+ })
+ const ok =
+ ((_exec = /^Hardware\s*:\s*(.*)$/im.exec(cpuinfo)) === null || _exec === undefined ? undefined : _exec[1]) ===
+ 'BCM2835'
+ ;/^/.test('') // Clear RegExp.$_, some tests expect it to be empty.
+ return ok
+ } catch {
+ return false
+ }
+})()
+const isDumbTerminal = process.env.TERM === 'dumb'
+const buildType = process.config.target_defaults ? process.config.target_defaults.default_configuration : 'Release'
+
+// If env var is set then enable async_hook hooks for all tests.
+if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) {
+ const destroydIdsList = {}
+ const destroyListList = {}
+ const initHandles = {}
+ const { internalBinding } = require('internal/test/binding')
+ const async_wrap = internalBinding('async_wrap')
+ process.on('exit', () => {
+ // Iterate through handles to make sure nothing crashes
+ for (const k in initHandles) inspect(initHandles[k])
+ })
+ const _queueDestroyAsyncId = async_wrap.queueDestroyAsyncId
+ async_wrap.queueDestroyAsyncId = function queueDestroyAsyncId(id) {
+ if (destroyListList[id] !== undefined) {
+ process._rawDebug(destroyListList[id])
+ process._rawDebug()
+ throw new Error(`same id added to destroy list twice (${id})`)
+ }
+ destroyListList[id] = inspect(new Error())
+ _queueDestroyAsyncId(id)
+ }
+ require('async_hooks')
+ .createHook({
+ init(id, ty, tr, resource) {
+ if (initHandles[id]) {
+ process._rawDebug(`Is same resource: ${resource === initHandles[id].resource}`)
+ process._rawDebug(`Previous stack:\n${initHandles[id].stack}\n`)
+ throw new Error(`init called twice for same id (${id})`)
+ }
+ initHandles[id] = {
+ resource,
+ stack: inspect(new Error()).substr(6)
+ }
+ },
+ before() {},
+ after() {},
+ destroy(id) {
+ if (destroydIdsList[id] !== undefined) {
+ process._rawDebug(destroydIdsList[id])
+ process._rawDebug()
+ throw new Error(`destroy called for same id (${id})`)
+ }
+ destroydIdsList[id] = inspect(new Error())
+ }
+ })
+ .enable()
+}
+let opensslCli = null
+let inFreeBSDJail = null
+let localhostIPv4 = null
+const localIPv6Hosts = isLinux
+ ? [
+ // Debian/Ubuntu
+ 'ip6-localhost',
+ 'ip6-loopback',
+ // SUSE
+ 'ipv6-localhost',
+ 'ipv6-loopback',
+ // Typically universal
+ 'localhost'
+ ]
+ : ['localhost']
+const PIPE = (() => {
+ const localRelative = path.relative(process.cwd(), `${tmpdir.path}/`)
+ const pipePrefix = isWindows ? '\\\\.\\pipe\\' : localRelative
+ const pipeName = `node-test.${process.pid}.sock`
+ return path.join(pipePrefix, pipeName)
+})()
+
+// Check that when running a test with
+// `$node --abort-on-uncaught-exception $file child`
+// the process aborts.
+function childShouldThrowAndAbort() {
+ let testCmd = ''
+ if (!isWindows) {
+ // Do not create core files, as it can take a lot of disk space on
+ // continuous testing and developers' machines
+ testCmd += 'ulimit -c 0 && '
+ }
+ testCmd += `"${process.argv[0]}" --abort-on-uncaught-exception `
+ testCmd += `"${process.argv[1]}" child`
+ const child = exec(testCmd)
+ child.on('exit', function onExit(exitCode, signal) {
+ const errMsg =
+ 'Test should have aborted ' + `but instead exited with exit code ${exitCode}` + ` and signal ${signal}`
+ assert(nodeProcessAborted(exitCode, signal), errMsg)
+ })
+}
+function createZeroFilledFile(filename) {
+ const fd = fs.openSync(filename, 'w')
+ fs.ftruncateSync(fd, 10 * 1024 * 1024)
+ fs.closeSync(fd)
+}
+const pwdCommand = isWindows ? ['cmd.exe', ['/d', '/c', 'cd']] : ['pwd', []]
+function platformTimeout(ms) {
+ const multipliers =
+ typeof ms === 'bigint'
+ ? {
+ two: 2n,
+ four: 4n,
+ seven: 7n
+ }
+ : {
+ two: 2,
+ four: 4,
+ seven: 7
+ }
+ if (process.features.debug) ms = multipliers.two * ms
+ if (common.isAIX || common.isIBMi) return multipliers.two * ms // Default localhost speed is slower on AIX
+
+ if (isPi) return multipliers.two * ms // Raspberry Pi devices
+
+ return ms
+}
+let knownGlobals = [
+ typeof AggregateError !== 'undefined' ? AggregateError : require('../../lib/ours/util').AggregateError,
+ typeof AbortController !== 'undefined' ? AbortController : require('abort-controller').AbortController,
+ typeof AbortSignal !== 'undefined' ? AbortSignal : require('abort-controller').AbortSignal,
+ typeof EventTarget !== 'undefined' ? EventTarget : require('event-target-shim').EventTarget,
+ typeof navigator !== 'undefined' ? navigator : {},
+ atob,
+ btoa,
+ clearImmediate,
+ clearInterval,
+ clearTimeout,
+ global,
+ setImmediate,
+ setInterval,
+ setTimeout,
+ queueMicrotask
+]
+
+// TODO(@jasnell): This check can be temporary. AbortController is
+// not currently supported in either Node.js 12 or 10, making it
+// difficult to run tests comparatively on those versions. Once
+// all supported versions have AbortController as a global, this
+// check can be removed and AbortController can be added to the
+// knownGlobals list above.
+if (global.AbortController) knownGlobals.push(global.AbortController)
+if (global.gc) {
+ knownGlobals.push(global.gc)
+}
+if (global.Performance) {
+ knownGlobals.push(global.Performance)
+}
+if (global.performance) {
+ knownGlobals.push(global.performance)
+}
+if (global.PerformanceMark) {
+ knownGlobals.push(global.PerformanceMark)
+}
+if (global.PerformanceMeasure) {
+ knownGlobals.push(global.PerformanceMeasure)
+}
+
+// TODO(@ethan-arrowood): Similar to previous checks, this can be temporary
+// until v16.x is EOL. Once all supported versions have structuredClone we
+// can add this to the list above instead.
+if (global.structuredClone) {
+ knownGlobals.push(global.structuredClone)
+}
+if (global.fetch) {
+ knownGlobals.push(fetch)
+}
+if (hasCrypto && global.crypto) {
+ knownGlobals.push(global.crypto)
+ knownGlobals.push(global.Crypto)
+ knownGlobals.push(global.CryptoKey)
+ knownGlobals.push(global.SubtleCrypto)
+}
+if (global.CustomEvent) {
+ knownGlobals.push(global.CustomEvent)
+}
+if (global.ReadableStream) {
+ knownGlobals.push(
+ global.ReadableStream,
+ global.ReadableStreamDefaultReader,
+ global.ReadableStreamBYOBReader,
+ global.ReadableStreamBYOBRequest,
+ global.ReadableByteStreamController,
+ global.ReadableStreamDefaultController,
+ global.TransformStream,
+ global.TransformStreamDefaultController,
+ global.WritableStream,
+ global.WritableStreamDefaultWriter,
+ global.WritableStreamDefaultController,
+ global.ByteLengthQueuingStrategy,
+ global.CountQueuingStrategy,
+ global.TextEncoderStream,
+ global.TextDecoderStream,
+ global.CompressionStream,
+ global.DecompressionStream
+ )
+}
+function allowGlobals(...allowlist) {
+ knownGlobals = knownGlobals.concat(allowlist)
+}
+if (process.env.NODE_TEST_KNOWN_GLOBALS !== '0') {
+ if (process.env.NODE_TEST_KNOWN_GLOBALS) {
+ const knownFromEnv = process.env.NODE_TEST_KNOWN_GLOBALS.split(',')
+ allowGlobals(...knownFromEnv)
+ }
+ function leakedGlobals() {
+ const leaked = []
+ for (const val in global) {
+ if (!knownGlobals.includes(global[val])) {
+ leaked.push(val)
+ }
+ }
+ return leaked
+ }
+ process.on('exit', function () {
+ const leaked = leakedGlobals()
+ if (leaked.length > 0) {
+ assert.fail(`Unexpected global(s) found: ${leaked.join(', ')}`)
+ }
+ })
+}
+const mustCallChecks = []
+function runCallChecks(exitCode) {
+ if (exitCode !== 0) return
+ const failed = mustCallChecks.filter(function (context) {
+ if ('minimum' in context) {
+ context.messageSegment = `at least ${context.minimum}`
+ return context.actual < context.minimum
+ }
+ context.messageSegment = `exactly ${context.exact}`
+ return context.actual !== context.exact
+ })
+ failed.forEach(function (context) {
+ console.log(
+ 'Mismatched %s function calls. Expected %s, actual %d.',
+ context.name,
+ context.messageSegment,
+ context.actual
+ )
+ console.log(context.stack.split('\n').slice(2).join('\n'))
+ })
+ if (failed.length) process.exit(1)
+}
+function mustCall(fn, exact) {
+ return _mustCallInner(fn, exact, 'exact')
+}
+function mustSucceed(fn, exact) {
+ return mustCall(function (err, ...args) {
+ assert.ifError(err)
+ if (typeof fn === 'function') return fn.apply(this, args)
+ }, exact)
+}
+function mustCallAtLeast(fn, minimum) {
+ return _mustCallInner(fn, minimum, 'minimum')
+}
+function _mustCallInner(fn, criteria = 1, field) {
+ if (process._exiting) throw new Error('Cannot use common.mustCall*() in process exit handler')
+ if (typeof fn === 'number') {
+ criteria = fn
+ fn = noop
+ } else if (fn === undefined) {
+ fn = noop
+ }
+ if (typeof criteria !== 'number') throw new TypeError(`Invalid ${field} value: ${criteria}`)
+ const context = {
+ [field]: criteria,
+ actual: 0,
+ stack: inspect(new Error()),
+ name: fn.name || ''
+ }
+
+ // Add the exit listener only once to avoid listener leak warnings
+ if (mustCallChecks.length === 0) process.on('exit', runCallChecks)
+ mustCallChecks.push(context)
+ const _return = function () {
+ // eslint-disable-line func-style
+ context.actual++
+ return fn.apply(this, arguments)
+ }
+ // Function instances have own properties that may be relevant.
+ // Let's replicate those properties to the returned function.
+ // Refs: https://tc39.es/ecma262/#sec-function-instances
+ Object.defineProperties(_return, {
+ name: {
+ value: fn.name,
+ writable: false,
+ enumerable: false,
+ configurable: true
+ },
+ length: {
+ value: fn.length,
+ writable: false,
+ enumerable: false,
+ configurable: true
+ }
+ })
+ return _return
+}
+function hasMultiLocalhost() {
+ const { internalBinding } = require('internal/test/binding')
+ const { TCP, constants: TCPConstants } = internalBinding('tcp_wrap')
+ const t = new TCP(TCPConstants.SOCKET)
+ const ret = t.bind('127.0.0.2', 0)
+ t.close()
+ return ret === 0
+}
+function skipIfEslintMissing() {
+ if (!fs.existsSync(path.join(__dirname, '..', '..', 'tools', 'node_modules', 'eslint'))) {
+ skip('missing ESLint')
+ }
+}
+function canCreateSymLink() {
+ // On Windows, creating symlinks requires admin privileges.
+ // We'll only try to run symlink test if we have enough privileges.
+ // On other platforms, creating symlinks shouldn't need admin privileges
+ if (isWindows) {
+ // whoami.exe needs to be the one from System32
+ // If unix tools are in the path, they can shadow the one we want,
+ // so use the full path while executing whoami
+ const whoamiPath = path.join(process.env.SystemRoot, 'System32', 'whoami.exe')
+ try {
+ const output = execSync(`${whoamiPath} /priv`, {
+ timeout: 1000
+ })
+ return output.includes('SeCreateSymbolicLinkPrivilege')
+ } catch {
+ return false
+ }
+ }
+ // On non-Windows platforms, this always returns `true`
+ return true
+}
+function getCallSite(top) {
+ const originalStackFormatter = Error.prepareStackTrace
+ Error.prepareStackTrace = (err, stack) => `${stack[0].getFileName()}:${stack[0].getLineNumber()}`
+ const err = new Error()
+ Error.captureStackTrace(err, top)
+ // With the V8 Error API, the stack is not formatted until it is accessed
+ err.stack // eslint-disable-line no-unused-expressions
+ Error.prepareStackTrace = originalStackFormatter
+ return err.stack
+}
+function mustNotCall(msg) {
+ const callSite = getCallSite(mustNotCall)
+ return function mustNotCall(...args) {
+ const argsInfo = args.length > 0 ? `\ncalled with arguments: ${args.map((arg) => inspect(arg)).join(', ')}` : ''
+ assert.fail(`${msg || 'function should not have been called'} at ${callSite}` + argsInfo)
+ }
+}
+const _mustNotMutateObjectDeepProxies = new WeakMap()
+function mustNotMutateObjectDeep(original) {
+ // Return primitives and functions directly. Primitives are immutable, and
+ // proxied functions are impossible to compare against originals, e.g. with
+ // `assert.deepEqual()`.
+ if (original === null || typeof original !== 'object') {
+ return original
+ }
+ const cachedProxy = _mustNotMutateObjectDeepProxies.get(original)
+ if (cachedProxy) {
+ return cachedProxy
+ }
+ const _mustNotMutateObjectDeepHandler = {
+ __proto__: null,
+ defineProperty(target, property, descriptor) {
+ assert.fail(`Expected no side effects, got ${inspect(property)} ` + 'defined')
+ },
+ deleteProperty(target, property) {
+ assert.fail(`Expected no side effects, got ${inspect(property)} ` + 'deleted')
+ },
+ get(target, prop, receiver) {
+ return mustNotMutateObjectDeep(Reflect.get(target, prop, receiver))
+ },
+ preventExtensions(target) {
+ assert.fail('Expected no side effects, got extensions prevented on ' + inspect(target))
+ },
+ set(target, property, value, receiver) {
+ assert.fail(`Expected no side effects, got ${inspect(value)} ` + `assigned to ${inspect(property)}`)
+ },
+ setPrototypeOf(target, prototype) {
+ assert.fail(`Expected no side effects, got set prototype to ${prototype}`)
+ }
+ }
+ const proxy = new Proxy(original, _mustNotMutateObjectDeepHandler)
+ _mustNotMutateObjectDeepProxies.set(original, proxy)
+ return proxy
+}
+function printSkipMessage(msg) {
+ console.log(`1..0 # Skipped: ${msg}`)
+}
+function skip(msg) {
+ printSkipMessage(msg)
+ process.exit(0)
+}
+
+// Returns true if the exit code "exitCode" and/or signal name "signal"
+// represent the exit code and/or signal name of a node process that aborted,
+// false otherwise.
+function nodeProcessAborted(exitCode, signal) {
+ // Depending on the compiler used, node will exit with either
+ // exit code 132 (SIGILL), 133 (SIGTRAP) or 134 (SIGABRT).
+ let expectedExitCodes = [132, 133, 134]
+
+ // On platforms using KSH as the default shell (like SmartOS),
+ // when a process aborts, KSH exits with an exit code that is
+ // greater than 256, and thus the exit code emitted with the 'exit'
+ // event is null and the signal is set to either SIGILL, SIGTRAP,
+ // or SIGABRT (depending on the compiler).
+ const expectedSignals = ['SIGILL', 'SIGTRAP', 'SIGABRT']
+
+ // On Windows, 'aborts' are of 2 types, depending on the context:
+ // (i) Exception breakpoint, if --abort-on-uncaught-exception is on
+ // which corresponds to exit code 2147483651 (0x80000003)
+ // (ii) Otherwise, _exit(134) which is called in place of abort() due to
+ // raising SIGABRT exiting with ambiguous exit code '3' by default
+ if (isWindows) expectedExitCodes = [0x80000003, 134]
+
+ // When using --abort-on-uncaught-exception, V8 will use
+ // base::OS::Abort to terminate the process.
+ // Depending on the compiler used, the shell or other aspects of
+ // the platform used to build the node binary, this will actually
+ // make V8 exit by aborting or by raising a signal. In any case,
+ // one of them (exit code or signal) needs to be set to one of
+ // the expected exit codes or signals.
+ if (signal !== null) {
+ return expectedSignals.includes(signal)
+ }
+ return expectedExitCodes.includes(exitCode)
+}
+function isAlive(pid) {
+ try {
+ process.kill(pid, 'SIGCONT')
+ return true
+ } catch {
+ return false
+ }
+}
+function _expectWarning(name, expected, code) {
+ if (typeof expected === 'string') {
+ expected = [[expected, code]]
+ } else if (!Array.isArray(expected)) {
+ expected = Object.entries(expected).map(([a, b]) => [b, a])
+ } else if (!Array.isArray(expected[0])) {
+ expected = [[expected[0], expected[1]]]
+ }
+ // Deprecation codes are mandatory, everything else is not.
+ if (name === 'DeprecationWarning') {
+ expected.forEach(([_, code]) => assert(code, expected))
+ }
+ return mustCall((warning) => {
+ const expectedProperties = expected.shift()
+ if (!expectedProperties) {
+ assert.fail(`Unexpected extra warning received: ${warning}`)
+ }
+ const [message, code] = expectedProperties
+ assert.strictEqual(warning.name, name)
+ if (typeof message === 'string') {
+ assert.strictEqual(warning.message, message)
+ } else {
+ assert.match(warning.message, message)
+ }
+ assert.strictEqual(warning.code, code)
+ }, expected.length)
+}
+let catchWarning
+
+// Accepts a warning name and description or array of descriptions or a map of
+// warning names to description(s) ensures a warning is generated for each
+// name/description pair.
+// The expected messages have to be unique per `expectWarning()` call.
+function expectWarning(nameOrMap, expected, code) {
+ if (catchWarning === undefined) {
+ catchWarning = {}
+ process.on('warning', (warning) => {
+ if (!catchWarning[warning.name]) {
+ throw new TypeError(`"${warning.name}" was triggered without being expected.\n` + inspect(warning))
+ }
+ catchWarning[warning.name](warning)
+ })
+ }
+ if (typeof nameOrMap === 'string') {
+ catchWarning[nameOrMap] = _expectWarning(nameOrMap, expected, code)
+ } else {
+ Object.keys(nameOrMap).forEach((name) => {
+ catchWarning[name] = _expectWarning(name, nameOrMap[name])
+ })
+ }
+}
+
+// Useful for testing expected internal/error objects
+function expectsError(validator, exact) {
+ return mustCall((...args) => {
+ if (args.length !== 1) {
+ // Do not use `assert.strictEqual()` to prevent `inspect` from
+ // always being called.
+ assert.fail(`Expected one argument, got ${inspect(args)}`)
+ }
+ const error = args.pop()
+ const descriptor = Object.getOwnPropertyDescriptor(error, 'message')
+ // The error message should be non-enumerable
+ assert.strictEqual(descriptor.enumerable, false)
+ assert.throws(() => {
+ throw error
+ }, validator)
+ return true
+ }, exact)
+}
+function skipIfInspectorDisabled() {
+ if (!process.features.inspector) {
+ skip('V8 inspector is disabled')
+ }
+}
+function skipIf32Bits() {
+ if (bits < 64) {
+ skip('The tested feature is not available in 32bit builds')
+ }
+}
+function skipIfWorker() {
+ if (!isMainThread) {
+ skip('This test only works on a main thread')
+ }
+}
+function getArrayBufferViews(buf) {
+ const { buffer, byteOffset, byteLength } = buf
+ const out = []
+ const arrayBufferViews = [
+ Int8Array,
+ Uint8Array,
+ Uint8ClampedArray,
+ Int16Array,
+ Uint16Array,
+ Int32Array,
+ Uint32Array,
+ Float32Array,
+ Float64Array,
+ BigInt64Array,
+ BigUint64Array,
+ DataView
+ ]
+ for (const type of arrayBufferViews) {
+ const { BYTES_PER_ELEMENT = 1 } = type
+ if (byteLength % BYTES_PER_ELEMENT === 0) {
+ out.push(new type(buffer, byteOffset, byteLength / BYTES_PER_ELEMENT))
+ }
+ }
+ return out
+}
+function getBufferSources(buf) {
+ return [...getArrayBufferViews(buf), new Uint8Array(buf).buffer]
+}
+function getTTYfd() {
+ // Do our best to grab a tty fd.
+ const tty = require('tty')
+ // Don't attempt fd 0 as it is not writable on Windows.
+ // Ref: ef2861961c3d9e9ed6972e1e84d969683b25cf95
+ const ttyFd = [1, 2, 4, 5].find(tty.isatty)
+ if (ttyFd === undefined) {
+ try {
+ return fs.openSync('/dev/tty')
+ } catch {
+ // There aren't any tty fd's available to use.
+ return -1
+ }
+ }
+ return ttyFd
+}
+function runWithInvalidFD(func) {
+ let fd = 1 << 30
+ // Get first known bad file descriptor. 1 << 30 is usually unlikely to
+ // be an valid one.
+ try {
+ while (fs.fstatSync(fd--) && fd > 0);
+ } catch {
+ return func(fd)
+ }
+ printSkipMessage('Could not generate an invalid fd')
+}
+
+// A helper function to simplify checking for ERR_INVALID_ARG_TYPE output.
+function invalidArgTypeHelper(input) {
+ if (input == null) {
+ return ` Received ${input}`
+ }
+ if (typeof input === 'function' && input.name) {
+ return ` Received function ${input.name}`
+ }
+ if (typeof input === 'object') {
+ var _input$constructor
+ if (
+ (_input$constructor = input.constructor) !== null &&
+ _input$constructor !== undefined &&
+ _input$constructor.name
+ ) {
+ return ` Received an instance of ${input.constructor.name}`
+ }
+ return ` Received ${inspect(input, {
+ depth: -1
+ })}`
+ }
+ let inspected = inspect(input, {
+ colors: false
+ })
+ if (inspected.length > 28) {
+ inspected = `${inspected.slice(0, 25)}...`
+ }
+ return ` Received type ${typeof input} (${inspected})`
+}
+function skipIfDumbTerminal() {
+ if (isDumbTerminal) {
+ skip('skipping - dumb terminal')
+ }
+}
+function gcUntil(name, condition) {
+ if (typeof name === 'function') {
+ condition = name
+ name = undefined
+ }
+ return new Promise((resolve, reject) => {
+ let count = 0
+ function gcAndCheck() {
+ setImmediate(() => {
+ count++
+ global.gc()
+ if (condition()) {
+ resolve()
+ } else if (count < 10) {
+ gcAndCheck()
+ } else {
+ reject(name === undefined ? undefined : 'Test ' + name + ' failed')
+ }
+ })
+ }
+ gcAndCheck()
+ })
+}
+function requireNoPackageJSONAbove(dir = __dirname) {
+ let possiblePackage = path.join(dir, '..', 'package.json')
+ let lastPackage = null
+ while (possiblePackage !== lastPackage) {
+ if (fs.existsSync(possiblePackage)) {
+ assert.fail(
+ "This test shouldn't load properties from a package.json above " +
+ `its file location. Found package.json at ${possiblePackage}.`
+ )
+ }
+ lastPackage = possiblePackage
+ possiblePackage = path.join(possiblePackage, '..', '..', 'package.json')
+ }
+}
+function spawnPromisified(...args) {
+ let stderr = ''
+ let stdout = ''
+ const child = spawn(...args)
+ child.stderr.setEncoding('utf8')
+ child.stderr.on('data', (data) => {
+ stderr += data
+ })
+ child.stdout.setEncoding('utf8')
+ child.stdout.on('data', (data) => {
+ stdout += data
+ })
+ return new Promise((resolve, reject) => {
+ child.on('close', (code, signal) => {
+ resolve({
+ code,
+ signal,
+ stderr,
+ stdout
+ })
+ })
+ child.on('error', (code, signal) => {
+ reject({
+ code,
+ signal,
+ stderr,
+ stdout
+ })
+ })
+ })
+}
+const common = {
+ allowGlobals,
+ buildType,
+ canCreateSymLink,
+ childShouldThrowAndAbort,
+ createZeroFilledFile,
+ expectsError,
+ expectWarning,
+ gcUntil,
+ getArrayBufferViews,
+ getBufferSources,
+ getCallSite,
+ getTTYfd,
+ hasIntl,
+ hasCrypto,
+ hasOpenSSL3,
+ hasOpenSSL31,
+ hasQuic,
+ hasMultiLocalhost,
+ invalidArgTypeHelper,
+ isAlive,
+ isAsan,
+ isDumbTerminal,
+ isFreeBSD,
+ isLinux,
+ isMainThread,
+ isOpenBSD,
+ isOSX,
+ isPi,
+ isSunOS,
+ isWindows,
+ localIPv6Hosts,
+ mustCall,
+ mustCallAtLeast,
+ mustNotCall,
+ mustNotMutateObjectDeep,
+ mustSucceed,
+ nodeProcessAborted,
+ PIPE,
+ parseTestFlags,
+ platformTimeout,
+ printSkipMessage,
+ pwdCommand,
+ requireNoPackageJSONAbove,
+ runWithInvalidFD,
+ skip,
+ skipIf32Bits,
+ skipIfDumbTerminal,
+ skipIfEslintMissing,
+ skipIfInspectorDisabled,
+ skipIfWorker,
+ spawnPromisified,
+ get enoughTestMem() {
+ return require('os').totalmem() > 0x70000000 /* 1.75 Gb */
+ },
+ get hasFipsCrypto() {
+ return hasCrypto && require('crypto').getFips()
+ },
+ get hasIPv6() {
+ const iFaces = require('os').networkInterfaces()
+ let re
+ if (isWindows) {
+ re = /Loopback Pseudo-Interface/
+ } else if (this.isIBMi) {
+ re = /\*LOOPBACK/
+ } else {
+ re = /lo/
+ }
+ return Object.keys(iFaces).some((name) => {
+ return re.test(name) && iFaces[name].some(({ family }) => family === 'IPv6')
+ })
+ },
+ get inFreeBSDJail() {
+ if (inFreeBSDJail !== null) return inFreeBSDJail
+ if (isFreeBSD && execSync('sysctl -n security.jail.jailed').toString() === '1\n') {
+ inFreeBSDJail = true
+ } else {
+ inFreeBSDJail = false
+ }
+ return inFreeBSDJail
+ },
+ // On IBMi, process.platform and os.platform() both return 'aix',
+ // when built with Python versions earlier than 3.9.
+ // It is not enough to differentiate between IBMi and real AIX system.
+ get isAIX() {
+ return require('os').type() === 'AIX'
+ },
+ get isIBMi() {
+ return require('os').type() === 'OS400'
+ },
+ get isLinuxPPCBE() {
+ return process.platform === 'linux' && process.arch === 'ppc64' && require('os').endianness() === 'BE'
+ },
+ get localhostIPv4() {
+ if (localhostIPv4 !== null) return localhostIPv4
+ if (this.inFreeBSDJail) {
+ // Jailed network interfaces are a bit special - since we need to jump
+ // through loops, as well as this being an exception case, assume the
+ // user will provide this instead.
+ if (process.env.LOCALHOST) {
+ localhostIPv4 = process.env.LOCALHOST
+ } else {
+ console.error(
+ "Looks like we're in a FreeBSD Jail. " +
+ 'Please provide your default interface address ' +
+ 'as LOCALHOST or expect some tests to fail.'
+ )
+ }
+ }
+ if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1'
+ return localhostIPv4
+ },
+ // opensslCli defined lazily to reduce overhead of spawnSync
+ get opensslCli() {
+ if (opensslCli !== null) return opensslCli
+ if (process.config.variables.node_shared_openssl) {
+ // Use external command
+ opensslCli = 'openssl'
+ } else {
+ // Use command built from sources included in Node.js repository
+ opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli')
+ }
+ if (isWindows) opensslCli += '.exe'
+ const opensslCmd = spawnSync(opensslCli, ['version'])
+ if (opensslCmd.status !== 0 || opensslCmd.error !== undefined) {
+ // OpenSSL command cannot be executed
+ opensslCli = false
+ }
+ return opensslCli
+ },
+ get PORT() {
+ if (+process.env.TEST_PARALLEL) {
+ throw new Error('common.PORT cannot be used in a parallelized test')
+ }
+ return +process.env.NODE_COMMON_PORT || 12346
+ },
+ /**
+ * Returns the EOL character used by this Git checkout.
+ */
+ get checkoutEOL() {
+ return fs.readFileSync(__filename).includes('\r\n') ? '\r\n' : '\n'
+ }
+}
+const validProperties = new Set(Object.keys(common))
+module.exports = new Proxy(common, {
+ get(obj, prop) {
+ if (!validProperties.has(prop)) throw new Error(`Using invalid common property: '${prop}'`)
+ return obj[prop]
+ }
+})
diff --git a/test/common/index.mjs b/test/common/index.mjs
new file mode 100644
index 0000000000..0cfd4bedf3
--- /dev/null
+++ b/test/common/index.mjs
@@ -0,0 +1,108 @@
+import { createRequire } from 'module'
+
+const require = createRequire(import.meta.url)
+const common = require('./index.js')
+
+const {
+ allowGlobals,
+ buildType,
+ canCreateSymLink,
+ checkoutEOL,
+ childShouldThrowAndAbort,
+ createZeroFilledFile,
+ enoughTestMem,
+ expectsError,
+ expectWarning,
+ getArrayBufferViews,
+ getBufferSources,
+ getCallSite,
+ getTTYfd,
+ hasCrypto,
+ hasIPv6,
+ hasMultiLocalhost,
+ isAIX,
+ isAlive,
+ isDumbTerminal,
+ isFreeBSD,
+ isIBMi,
+ isLinux,
+ isLinuxPPCBE,
+ isMainThread,
+ isOpenBSD,
+ isOSX,
+ isSunOS,
+ isWindows,
+ localIPv6Hosts,
+ mustCall,
+ mustCallAtLeast,
+ mustNotCall,
+ mustNotMutateObjectDeep,
+ mustSucceed,
+ nodeProcessAborted,
+ opensslCli,
+ parseTestFlags,
+ PIPE,
+ platformTimeout,
+ printSkipMessage,
+ runWithInvalidFD,
+ skip,
+ skipIf32Bits,
+ skipIfDumbTerminal,
+ skipIfEslintMissing,
+ skipIfInspectorDisabled,
+ spawnPromisified
+} = common
+
+const getPort = () => common.PORT
+
+export {
+ allowGlobals,
+ buildType,
+ canCreateSymLink,
+ checkoutEOL,
+ childShouldThrowAndAbort,
+ createRequire,
+ createZeroFilledFile,
+ enoughTestMem,
+ expectsError,
+ expectWarning,
+ getArrayBufferViews,
+ getBufferSources,
+ getCallSite,
+ getPort,
+ getTTYfd,
+ hasCrypto,
+ hasIPv6,
+ hasMultiLocalhost,
+ isAIX,
+ isAlive,
+ isDumbTerminal,
+ isFreeBSD,
+ isIBMi,
+ isLinux,
+ isLinuxPPCBE,
+ isMainThread,
+ isOpenBSD,
+ isOSX,
+ isSunOS,
+ isWindows,
+ localIPv6Hosts,
+ mustCall,
+ mustCallAtLeast,
+ mustNotCall,
+ mustNotMutateObjectDeep,
+ mustSucceed,
+ nodeProcessAborted,
+ opensslCli,
+ parseTestFlags,
+ PIPE,
+ platformTimeout,
+ printSkipMessage,
+ runWithInvalidFD,
+ skip,
+ skipIf32Bits,
+ skipIfDumbTerminal,
+ skipIfEslintMissing,
+ skipIfInspectorDisabled,
+ spawnPromisified
+}
diff --git a/test/common/tmpdir.js b/test/common/tmpdir.js
new file mode 100644
index 0000000000..95931ec384
--- /dev/null
+++ b/test/common/tmpdir.js
@@ -0,0 +1,67 @@
+'use strict'
+
+const fs = require('fs')
+const path = require('path')
+const { pathToFileURL } = require('url')
+const { isMainThread } = require('worker_threads')
+function rmSync(pathname) {
+ fs.rmSync(pathname, {
+ maxRetries: 3,
+ recursive: true,
+ force: true
+ })
+}
+const testRoot = process.env.NODE_TEST_DIR ? fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..')
+
+// Using a `.` prefixed name, which is the convention for "hidden" on POSIX,
+// gets tools to ignore it by default or by simple rules, especially eslint.
+const tmpdirName = '.tmp.' + (process.env.TEST_SERIAL_ID || process.env.TEST_THREAD_ID || '0')
+const tmpPath = path.join(testRoot, tmpdirName)
+let firstRefresh = true
+function refresh() {
+ rmSync(tmpPath)
+ fs.mkdirSync(tmpPath)
+ if (firstRefresh) {
+ firstRefresh = false
+ // Clean only when a test uses refresh. This allows for child processes to
+ // use the tmpdir and only the parent will clean on exit.
+ process.on('exit', onexit)
+ }
+}
+function onexit() {
+ // Change directory to avoid possible EBUSY
+ if (isMainThread) process.chdir(testRoot)
+ try {
+ rmSync(tmpPath)
+ } catch (e) {
+ console.error("Can't clean tmpdir:", tmpPath)
+ const files = fs.readdirSync(tmpPath)
+ console.error('Files blocking:', files)
+ if (files.some((f) => f.startsWith('.nfs'))) {
+ // Warn about NFS "silly rename"
+ console.error('Note: ".nfs*" might be files that were open and ' + 'unlinked but not closed.')
+ console.error('See http://nfs.sourceforge.net/#faq_d2 for details.')
+ }
+ console.error()
+ throw e
+ }
+}
+function resolve(...paths) {
+ return path.resolve(tmpPath, ...paths)
+}
+function hasEnoughSpace(size) {
+ const { bavail, bsize } = fs.statfsSync(tmpPath)
+ return bavail >= Math.ceil(size / bsize)
+}
+function fileURL(...paths) {
+ // When called without arguments, add explicit trailing slash
+ const fullPath = path.resolve(tmpPath + path.sep, ...paths)
+ return pathToFileURL(fullPath)
+}
+module.exports = {
+ fileURL,
+ hasEnoughSpace,
+ path: tmpPath,
+ refresh,
+ resolve
+}
diff --git a/test/fixtures/elipses.txt b/test/fixtures/elipses.txt
new file mode 100644
index 0000000000..6105600505
--- /dev/null
+++ b/test/fixtures/elipses.txt
@@ -0,0 +1 @@
+…………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
……………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………………
…
\ No newline at end of file
diff --git a/test/fixtures/empty-with-bom.txt b/test/fixtures/empty-with-bom.txt
new file mode 100644
index 0000000000..5f282702bb
--- /dev/null
+++ b/test/fixtures/empty-with-bom.txt
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/test/fixtures/empty.txt b/test/fixtures/empty.txt
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/test/fixtures/file-to-read-with-bom.txt b/test/fixtures/file-to-read-with-bom.txt
new file mode 100644
index 0000000000..d46c8708d9
--- /dev/null
+++ b/test/fixtures/file-to-read-with-bom.txt
@@ -0,0 +1,3 @@
+abc
+def
+ghi
diff --git a/test/fixtures/file-to-read-without-bom.txt b/test/fixtures/file-to-read-without-bom.txt
new file mode 100644
index 0000000000..8edb37e36d
--- /dev/null
+++ b/test/fixtures/file-to-read-without-bom.txt
@@ -0,0 +1,3 @@
+abc
+def
+ghi
diff --git a/test/fixtures/outside.txt b/test/fixtures/outside.txt
new file mode 100644
index 0000000000..044c4b9614
--- /dev/null
+++ b/test/fixtures/outside.txt
@@ -0,0 +1,2 @@
+this file is part of the WASI tests. it exists outside of the sandbox, and
+should be inaccessible from the WASI tests.
diff --git a/test/fixtures/readfile_pipe_test.txt b/test/fixtures/readfile_pipe_test.txt
new file mode 100644
index 0000000000..65975655dc
--- /dev/null
+++ b/test/fixtures/readfile_pipe_test.txt
@@ -0,0 +1,5 @@
+xxxx xxxx xxxx xxxx
+xxxx xxxx xxxx xxxx
+xxxx xxxx xxxx xxxx
+xxxx xxxx xxxx xxxx
+xxxx xxxx xxxx xxxx
diff --git a/test/fixtures/tls-session-ticket.txt b/test/fixtures/tls-session-ticket.txt
new file mode 100644
index 0000000000..bc0f6b58e1
--- /dev/null
+++ b/test/fixtures/tls-session-ticket.txt
@@ -0,0 +1,23 @@
+-----BEGIN SSL SESSION PARAMETERS-----
+MIID2wIBAQICAwEEAgA1BCAMjLe+70uBSPGvybkTnPVUMwdbdtVbkMIXf8L5M8Kl
+VAQwog+Afs00cnYUcgD1BQewJyxX1e561oRuDTpy7BHABC1hC7hxTaul+pwv+cBx
+8D72oQYCBFFQF3OiBAICASyjggNhMIIDXTCCAkWgAwIBAgIJAMUSOvlaeyQHMA0G
+CSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRl
+MSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTAxMTE2MDkz
+MjQ5WhcNMTMxMTE1MDkzMjQ5WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29t
+ZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+LXZOjcQCJq3+ZKUFabj71oo/ex
+/XsBcFqtBThjjTw9CVEVwfPQQp4XwtPiB204vnYXwQ1/R2NdTQqCZu47l79LssL/
+u2a5Y9+0NEU3nQA5qdt+1FAE0c5oexPimXOrR3GWfKz7PmZ2O0117IeCUUXPG5U8
+umhDe/4mDF4ZNJiKc404WthquTqgS7rLQZHhZ6D0EnGnOkzlmxJMYPNHSOY1/6iv
+dNUUcC87awNEA3lgfhy25IyBK3QJc+aYKNTbt70Lery3bu2wWLFGtmNiGlQTS4Js
+xImRsECTI727ObS7/FWAQsqW+COL0Sa5BuMFrFIpjPrEe0ih7vRRbdmXRwIDAQAB
+o1AwTjAdBgNVHQ4EFgQUDnV4d6mDtOnluLoCjkUHTX/n4agwHwYDVR0jBBgwFoAU
+DnV4d6mDtOnluLoCjkUHTX/n4agwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUF
+AAOCAQEAFwV4MQfTo+qMv9JMiynoIEiqfOz4RgtmBqRnXUffcjS2dhc7/z+FPZnM
+79Kej8eLHoVfxCyWRHFlzm93vEdvwxOCrD13EDOi08OOZfxWyIlCa6Bg8cMAKqQz
+d2OvQOWqlRWBTThBJIhWflU33izXQn5GdmYqhfpc+9ZHHGhvXNydtRQkdxVK2dZN
+zLBvBlLlRmtoClU7xm3A+/5dddePAQHEPtyFlUw49VYtZ3ru6KqPms7MKvcRhYLs
+y9rwSfuuniMlx4d0bDR7TOkw0QQSA0N8MGQRQpzl4mw4jLzyM5d5QtuGBh2P6hPG
+a0YQxtI3RPT/p6ENzzBiAKXiSfzox6QCBAClAwIBEg==
+-----END SSL SESSION PARAMETERS-----
diff --git a/test/fixtures/tz-version.txt b/test/fixtures/tz-version.txt
new file mode 100644
index 0000000000..7daa77e00d
--- /dev/null
+++ b/test/fixtures/tz-version.txt
@@ -0,0 +1 @@
+2023c
diff --git a/test/fixtures/x.txt b/test/fixtures/x.txt
new file mode 100644
index 0000000000..cd470e6190
--- /dev/null
+++ b/test/fixtures/x.txt
@@ -0,0 +1 @@
+xyz
diff --git a/test/from-list.js b/test/from-list.js
deleted file mode 100644
index 651d981cbc..0000000000
--- a/test/from-list.js
+++ /dev/null
@@ -1,32 +0,0 @@
-var test = require('tap').test;
-var fromList = require('../from-list.js');
-
-test('with length', function(t) {
- // have a length
- var len = 16;
- var list = [ new Buffer('foog'),
- new Buffer('bark'),
- new Buffer('bazy'),
- new Buffer('kuel') ];
-
- // read more than the first element.
- var ret = fromList(6, list, 16);
- t.equal(ret.toString(), 'foogba');
-
- // read exactly the first element.
- ret = fromList(2, list, 10);
- t.equal(ret.toString(), 'rk');
-
- // read less than the first element.
- ret = fromList(2, list, 8);
- t.equal(ret.toString(), 'ba');
-
- // read more than we have.
- ret = fromList(100, list, 6);
- t.equal(ret.toString(), 'zykuel');
-
- // all consumed.
- t.same(list, []);
-
- t.end();
-});
diff --git a/test/fs.js b/test/fs.js
deleted file mode 100644
index 08b2b136d0..0000000000
--- a/test/fs.js
+++ /dev/null
@@ -1,51 +0,0 @@
-var test = require('tap').test
-var FSReadable = require('../fs.js');
-var fs = require('fs');
-
-var path = require('path');
-var file = path.resolve(__dirname, 'fixtures', 'x1024.txt');
-
-var size = fs.statSync(file).size;
-
-// expect to see chunks no more than 10 bytes each.
-var expectLengths = [];
-for (var i = size; i > 0; i -= 10) {
- expectLengths.push(Math.min(i, 10));
-}
-
-var util = require('util');
-var Stream = require('stream');
-
-util.inherits(TestWriter, Stream);
-
-function TestWriter() {
- Stream.apply(this);
- this.buffer = [];
- this.length = 0;
-}
-
-TestWriter.prototype.write = function(c) {
- this.buffer.push(c.toString());
- this.length += c.length;
- return true;
-};
-
-TestWriter.prototype.end = function(c) {
- if (c) this.buffer.push(c.toString());
- this.emit('results', this.buffer);
-}
-
-test('fs test', function(t) {
- var r = new FSReadable(file, { bufferSize: 10 });
- var w = new TestWriter();
-
- w.on('results', function(res) {
- console.error(res, w.length);
- t.equal(w.length, size);
- var l = 0;
- t.same(res.map(function (c) { return c.length; }), expectLengths);
- t.end();
- });
-
- r.pipe(w);
-});
diff --git a/test/ours/test-errors.js b/test/ours/test-errors.js
new file mode 100644
index 0000000000..4e0f898fc4
--- /dev/null
+++ b/test/ours/test-errors.js
@@ -0,0 +1,114 @@
+'use strict'
+
+const t = require('tap')
+const { codes: errors } = require('../../lib/ours/errors')
+function checkError(err, Base, name, code, message) {
+ t.ok(err instanceof Base)
+ t.equal(err.name, name)
+ t.equal(err.code, code)
+ t.equal(err.message, message)
+}
+
+// Update this numbers based on the number of checkError below multiplied by the assertions within checkError
+t.plan(17 * 4)
+checkError(
+ new errors.ERR_INVALID_ARG_VALUE('name', 0),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_VALUE',
+ "The argument 'name' is invalid. Received 0"
+)
+checkError(
+ new errors.ERR_INVALID_ARG_VALUE('name', undefined),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_VALUE',
+ "The argument 'name' is invalid. Received undefined"
+)
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 0),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ 'The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. Received type number (0)'
+)
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('first argument', 'not string', 'foo'),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ "The first argument must be not string. Received type string ('foo')"
+)
+checkError(
+ new errors.ERR_INVALID_ARG_TYPE('obj.prop', 'string', undefined),
+ TypeError,
+ 'TypeError',
+ 'ERR_INVALID_ARG_TYPE',
+ 'The "obj.prop" property must be of type string. Received undefined'
+)
+checkError(
+ new errors.ERR_STREAM_PUSH_AFTER_EOF(),
+ Error,
+ 'Error',
+ 'ERR_STREAM_PUSH_AFTER_EOF',
+ 'stream.push() after EOF'
+)
+checkError(
+ new errors.ERR_METHOD_NOT_IMPLEMENTED('_read()'),
+ Error,
+ 'Error',
+ 'ERR_METHOD_NOT_IMPLEMENTED',
+ 'The _read() method is not implemented'
+)
+checkError(
+ new errors.ERR_METHOD_NOT_IMPLEMENTED('_write()'),
+ Error,
+ 'Error',
+ 'ERR_METHOD_NOT_IMPLEMENTED',
+ 'The _write() method is not implemented'
+)
+checkError(new errors.ERR_STREAM_PREMATURE_CLOSE(), Error, 'Error', 'ERR_STREAM_PREMATURE_CLOSE', 'Premature close')
+checkError(
+ new errors.ERR_STREAM_DESTROYED('pipe'),
+ Error,
+ 'Error',
+ 'ERR_STREAM_DESTROYED',
+ 'Cannot call pipe after a stream was destroyed'
+)
+checkError(
+ new errors.ERR_STREAM_DESTROYED('write'),
+ Error,
+ 'Error',
+ 'ERR_STREAM_DESTROYED',
+ 'Cannot call write after a stream was destroyed'
+)
+checkError(
+ new errors.ERR_MULTIPLE_CALLBACK(),
+ Error,
+ 'Error',
+ 'ERR_MULTIPLE_CALLBACK',
+ 'Callback called multiple times'
+)
+checkError(new errors.ERR_STREAM_CANNOT_PIPE(), Error, 'Error', 'ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable')
+checkError(new errors.ERR_STREAM_WRITE_AFTER_END(), Error, 'Error', 'ERR_STREAM_WRITE_AFTER_END', 'write after end')
+checkError(
+ new errors.ERR_STREAM_NULL_VALUES(),
+ TypeError,
+ 'TypeError',
+ 'ERR_STREAM_NULL_VALUES',
+ 'May not write null values to stream'
+)
+checkError(
+ new errors.ERR_UNKNOWN_ENCODING('foo'),
+ TypeError,
+ 'TypeError',
+ 'ERR_UNKNOWN_ENCODING',
+ 'Unknown encoding: foo'
+)
+checkError(
+ new errors.ERR_STREAM_UNSHIFT_AFTER_END_EVENT(),
+ Error,
+ 'Error',
+ 'ERR_STREAM_UNSHIFT_AFTER_END_EVENT',
+ 'stream.unshift() after end event'
+)
diff --git a/test/ours/test-fake-timers.js b/test/ours/test-fake-timers.js
new file mode 100644
index 0000000000..98871f304e
--- /dev/null
+++ b/test/ours/test-fake-timers.js
@@ -0,0 +1,36 @@
+'use strict'
+
+require('../common')
+const t = require('tap')
+const util = require('util')
+const fakeTimers = require('@sinonjs/fake-timers')
+const Transform = require('../../lib/ours/index').Transform
+t.plan(1)
+function MyTransform() {
+ Transform.call(this)
+}
+util.inherits(MyTransform, Transform)
+const clock = fakeTimers.install({
+ toFake: ['setImmediate', 'nextTick']
+})
+let stream2DataCalled = false
+const stream = new MyTransform()
+stream.on('data', function () {
+ stream.on('end', function () {
+ const stream2 = new MyTransform()
+ stream2.on('data', function () {
+ stream2.on('end', function () {
+ stream2DataCalled = true
+ })
+ setImmediate(function () {
+ stream2.end()
+ })
+ })
+ stream2.emit('data')
+ })
+ stream.end()
+})
+stream.emit('data')
+clock.runAll()
+clock.uninstall()
+t.ok(stream2DataCalled)
diff --git a/test/ours/test-stream-sync-write.js b/test/ours/test-stream-sync-write.js
new file mode 100644
index 0000000000..ed6d01ee64
--- /dev/null
+++ b/test/ours/test-stream-sync-write.js
@@ -0,0 +1,35 @@
+'use strict'
+
+require('../common')
+const t = require('tap')
+const util = require('util')
+const stream = require('../../lib/ours/index')
+const WritableStream = stream.Writable
+t.plan(1)
+const InternalStream = function () {
+ WritableStream.call(this)
+}
+util.inherits(InternalStream, WritableStream)
+let invocations = 0
+InternalStream.prototype._write = function (chunk, encoding, callback) {
+ callback()
+}
+const internalStream = new InternalStream()
+const ExternalStream = function (writable) {
+ this._writable = writable
+ WritableStream.call(this)
+}
+util.inherits(ExternalStream, WritableStream)
+ExternalStream.prototype._write = function (chunk, encoding, callback) {
+ this._writable.write(chunk, encoding, callback)
+}
+const externalStream = new ExternalStream(internalStream)
+for (let i = 0; i < 2000; i++) {
+ externalStream.write(i.toString(), () => {
+ invocations++
+ })
+}
+externalStream.end()
+externalStream.on('finish', () => {
+ t.equal(invocations, 2000)
+})
diff --git a/test/parallel/test-readable-from-iterator-closing.js b/test/parallel/test-readable-from-iterator-closing.js
new file mode 100644
index 0000000000..fee1a5a71a
--- /dev/null
+++ b/test/parallel/test-readable-from-iterator-closing.js
@@ -0,0 +1,192 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const { mustCall, mustNotCall } = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const { strictEqual } = require('assert')
+async function asyncSupport() {
+ const finallyMustCall = mustCall()
+ const bodyMustCall = mustCall()
+ async function* infiniteGenerate() {
+ try {
+ while (true) yield 'a'
+ } finally {
+ finallyMustCall()
+ }
+ }
+ const stream = Readable.from(infiniteGenerate())
+ for await (const chunk of stream) {
+ bodyMustCall()
+ strictEqual(chunk, 'a')
+ break
+ }
+}
+async function syncSupport() {
+ const finallyMustCall = mustCall()
+ const bodyMustCall = mustCall()
+ function* infiniteGenerate() {
+ try {
+ while (true) yield 'a'
+ } finally {
+ finallyMustCall()
+ }
+ }
+ const stream = Readable.from(infiniteGenerate())
+ for await (const chunk of stream) {
+ bodyMustCall()
+ strictEqual(chunk, 'a')
+ break
+ }
+}
+async function syncPromiseSupport() {
+ const returnMustBeAwaited = mustCall()
+ const bodyMustCall = mustCall()
+ function* infiniteGenerate() {
+ try {
+ while (true) yield Promise.resolve('a')
+ } finally {
+ // eslint-disable-next-line no-unsafe-finally
+ return {
+ then(cb) {
+ returnMustBeAwaited()
+ cb()
+ }
+ }
+ }
+ }
+ const stream = Readable.from(infiniteGenerate())
+ for await (const chunk of stream) {
+ bodyMustCall()
+ strictEqual(chunk, 'a')
+ break
+ }
+}
+async function syncRejectedSupport() {
+ const returnMustBeAwaited = mustCall()
+ const bodyMustNotCall = mustNotCall()
+ const catchMustCall = mustCall()
+ const secondNextMustNotCall = mustNotCall()
+ function* generate() {
+ try {
+ yield Promise.reject('a')
+ secondNextMustNotCall()
+ } finally {
+ // eslint-disable-next-line no-unsafe-finally
+ return {
+ then(cb) {
+ returnMustBeAwaited()
+ cb()
+ }
+ }
+ }
+ }
+ const stream = Readable.from(generate())
+ try {
+ for await (const chunk of stream) {
+ bodyMustNotCall(chunk)
+ }
+ } catch {
+ catchMustCall()
+ }
+}
+async function noReturnAfterThrow() {
+ const returnMustNotCall = mustNotCall()
+ const bodyMustNotCall = mustNotCall()
+ const catchMustCall = mustCall()
+ const nextMustCall = mustCall()
+ const stream = Readable.from({
+ [Symbol.asyncIterator]() {
+ return this
+ },
+ async next() {
+ nextMustCall()
+ throw new Error('a')
+ },
+ async return() {
+ returnMustNotCall()
+ return {
+ done: true
+ }
+ }
+ })
+ try {
+ for await (const chunk of stream) {
+ bodyMustNotCall(chunk)
+ }
+ } catch {
+ catchMustCall()
+ }
+}
+async function closeStreamWhileNextIsPending() {
+ const finallyMustCall = mustCall()
+ const dataMustCall = mustCall()
+ let resolveDestroy
+ const destroyed = new Promise((resolve) => {
+ resolveDestroy = mustCall(resolve)
+ })
+ let resolveYielded
+ const yielded = new Promise((resolve) => {
+ resolveYielded = mustCall(resolve)
+ })
+ async function* infiniteGenerate() {
+ try {
+ while (true) {
+ yield 'a'
+ resolveYielded()
+ await destroyed
+ }
+ } finally {
+ finallyMustCall()
+ }
+ }
+ const stream = Readable.from(infiniteGenerate())
+ stream.on('data', (data) => {
+ dataMustCall()
+ strictEqual(data, 'a')
+ })
+ yielded.then(() => {
+ stream.destroy()
+ resolveDestroy()
+ })
+}
+async function closeAfterNullYielded() {
+ const finallyMustCall = mustCall()
+ const dataMustCall = mustCall(3)
+ function* generate() {
+ try {
+ yield 'a'
+ yield 'a'
+ yield 'a'
+ } finally {
+ finallyMustCall()
+ }
+ }
+ const stream = Readable.from(generate())
+ stream.on('data', (chunk) => {
+ dataMustCall()
+ strictEqual(chunk, 'a')
+ })
+}
+Promise.all([
+ asyncSupport(),
+ syncSupport(),
+ syncPromiseSupport(),
+ syncRejectedSupport(),
+ noReturnAfterThrow(),
+ closeStreamWhileNextIsPending(),
+ closeAfterNullYielded()
+]).then(mustCall())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-readable-from.js b/test/parallel/test-readable-from.js
new file mode 100644
index 0000000000..886a7634c8
--- /dev/null
+++ b/test/parallel/test-readable-from.js
@@ -0,0 +1,190 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const { mustCall } = require('../common')
+const { once } = require('events')
+const { Readable } = require('../../lib/ours/index')
+const { strictEqual, throws } = require('assert')
+const common = require('../common')
+{
+ throws(() => {
+ Readable.from(null)
+ }, /ERR_INVALID_ARG_TYPE/)
+}
+async function toReadableBasicSupport() {
+ async function* generate() {
+ yield 'a'
+ yield 'b'
+ yield 'c'
+ }
+ const stream = Readable.from(generate())
+ const expected = ['a', 'b', 'c']
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
+}
+async function toReadableSyncIterator() {
+ function* generate() {
+ yield 'a'
+ yield 'b'
+ yield 'c'
+ }
+ const stream = Readable.from(generate())
+ const expected = ['a', 'b', 'c']
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
+}
+async function toReadablePromises() {
+ const promises = [Promise.resolve('a'), Promise.resolve('b'), Promise.resolve('c')]
+ const stream = Readable.from(promises)
+ const expected = ['a', 'b', 'c']
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
+}
+async function toReadableString() {
+ const stream = Readable.from('abc')
+ const expected = ['abc']
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
+}
+async function toReadableBuffer() {
+ const stream = Readable.from(Buffer.from('abc'))
+ const expected = ['abc']
+ for await (const chunk of stream) {
+ strictEqual(chunk.toString(), expected.shift())
+ }
+}
+async function toReadableOnData() {
+ async function* generate() {
+ yield 'a'
+ yield 'b'
+ yield 'c'
+ }
+ const stream = Readable.from(generate())
+ let iterations = 0
+ const expected = ['a', 'b', 'c']
+ stream.on('data', (chunk) => {
+ iterations++
+ strictEqual(chunk, expected.shift())
+ })
+ await once(stream, 'end')
+ strictEqual(iterations, 3)
+}
+async function toReadableOnDataNonObject() {
+ async function* generate() {
+ yield 'a'
+ yield 'b'
+ yield 'c'
+ }
+ const stream = Readable.from(generate(), {
+ objectMode: false
+ })
+ let iterations = 0
+ const expected = ['a', 'b', 'c']
+ stream.on('data', (chunk) => {
+ iterations++
+ strictEqual(chunk instanceof Buffer, true)
+ strictEqual(chunk.toString(), expected.shift())
+ })
+ await once(stream, 'end')
+ strictEqual(iterations, 3)
+}
+async function destroysTheStreamWhenThrowing() {
+ async function* generate() {
+ // eslint-disable-line require-yield
+ throw new Error('kaboom')
+ }
+ const stream = Readable.from(generate())
+ stream.read()
+ const [err] = await once(stream, 'error')
+ strictEqual(err.message, 'kaboom')
+ strictEqual(stream.destroyed, true)
+}
+async function asTransformStream() {
+ async function* generate(stream) {
+ for await (const chunk of stream) {
+ yield chunk.toUpperCase()
+ }
+ }
+ const source = new Readable({
+ objectMode: true,
+ read() {
+ this.push('a')
+ this.push('b')
+ this.push('c')
+ this.push(null)
+ }
+ })
+ const stream = Readable.from(generate(source))
+ const expected = ['A', 'B', 'C']
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
+}
+async function endWithError() {
+ async function* generate() {
+ yield 1
+ yield 2
+ yield Promise.reject('Boum')
+ }
+ const stream = Readable.from(generate())
+ const expected = [1, 2]
+ try {
+ for await (const chunk of stream) {
+ strictEqual(chunk, expected.shift())
+ }
+ throw new Error()
+ } catch (err) {
+ strictEqual(expected.length, 0)
+ strictEqual(err, 'Boum')
+ }
+}
+async function destroyingStreamWithErrorThrowsInGenerator() {
+ const validateError = common.mustCall((e) => {
+ strictEqual(e, 'Boum')
+ })
+ async function* generate() {
+ try {
+ yield 1
+ yield 2
+ yield 3
+ throw new Error()
+ } catch (e) {
+ validateError(e)
+ }
+ }
+ const stream = Readable.from(generate())
+ stream.read()
+ stream.once('error', common.mustCall())
+ stream.destroy('Boum')
+}
+Promise.all([
+ toReadableBasicSupport(),
+ toReadableSyncIterator(),
+ toReadablePromises(),
+ toReadableString(),
+ toReadableBuffer(),
+ toReadableOnData(),
+ toReadableOnDataNonObject(),
+ destroysTheStreamWhenThrowing(),
+ asTransformStream(),
+ endWithError(),
+ destroyingStreamWithErrorThrowsInGenerator()
+]).then(mustCall())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-readable-large-hwm.js b/test/parallel/test-readable-large-hwm.js
new file mode 100644
index 0000000000..23c30b894a
--- /dev/null
+++ b/test/parallel/test-readable-large-hwm.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+
+// Make sure that readable completes
+// even when reading a larger buffer.
+const bufferSize = 10 * 1024 * 1024
+let n = 0
+const r = new Readable({
+ read() {
+ // Try to fill readable buffer piece by piece.
+ r.push(Buffer.alloc(bufferSize / 10))
+ if (n++ > 10) {
+ r.push(null)
+ }
+ }
+})
+r.on('readable', () => {
+ while (true) {
+ const ret = r.read(bufferSize)
+ if (ret === null) break
+ }
+})
+r.on('end', common.mustCall())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-readable-single-end.js b/test/parallel/test-readable-single-end.js
new file mode 100644
index 0000000000..7827276dd6
--- /dev/null
+++ b/test/parallel/test-readable-single-end.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+
+// This test ensures that there will not be an additional empty 'readable'
+// event when stream has ended (only 1 event signalling about end)
+
+const r = new Readable({
+ read: () => {}
+})
+r.push(null)
+r.on('readable', common.mustCall())
+r.on('end', common.mustCall())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-add-abort-signal.js b/test/parallel/test-stream-add-abort-signal.js
new file mode 100644
index 0000000000..c72fc45ca7
--- /dev/null
+++ b/test/parallel/test-stream-add-abort-signal.js
@@ -0,0 +1,49 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+// Flags: --expose-internals
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { addAbortSignal, Readable } = require('../../lib/ours/index')
+const { addAbortSignalNoValidate } = require('../../lib/internal/streams/add-abort-signal')
+{
+ assert.throws(() => {
+ addAbortSignal('INVALID_SIGNAL')
+ }, /ERR_INVALID_ARG_TYPE/)
+ const ac = new AbortController()
+ assert.throws(() => {
+ addAbortSignal(ac.signal, 'INVALID_STREAM')
+ }, /ERR_INVALID_ARG_TYPE/)
+}
+{
+ const r = new Readable({
+ read: () => {}
+ })
+ assert.deepStrictEqual(r, addAbortSignalNoValidate('INVALID_SIGNAL', r))
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-aliases-legacy.js b/test/parallel/test-stream-aliases-legacy.js
new file mode 100644
index 0000000000..a3c5d96b5c
--- /dev/null
+++ b/test/parallel/test-stream-aliases-legacy.js
@@ -0,0 +1,28 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+
+// Verify that all individual aliases are left in place.
+
+assert.strictEqual(stream.Readable, require('../../lib/_stream_readable'))
+assert.strictEqual(stream.Writable, require('../../lib/_stream_writable'))
+assert.strictEqual(stream.Duplex, require('../../lib/_stream_duplex'))
+assert.strictEqual(stream.Transform, require('../../lib/_stream_transform'))
+assert.strictEqual(stream.PassThrough, require('../../lib/_stream_passthrough'))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-asIndexedPairs.mjs b/test/parallel/test-stream-asIndexedPairs.mjs
new file mode 100644
index 0000000000..35919114a9
--- /dev/null
+++ b/test/parallel/test-stream-asIndexedPairs.mjs
@@ -0,0 +1,82 @@
+import '../common/index.mjs'
+import { Readable } from '../../lib/ours/index.js'
+import { deepStrictEqual, rejects, throws } from 'assert'
+import tap from 'tap'
+
+{
+ // asIndexedPairs with a synchronous stream
+ const pairs = await Readable.from([1, 2, 3]).asIndexedPairs().toArray()
+ deepStrictEqual(pairs, [
+ [0, 1],
+ [1, 2],
+ [2, 3]
+ ])
+ const empty = await Readable.from([]).asIndexedPairs().toArray()
+ deepStrictEqual(empty, [])
+}
+
+{
+ // asIndexedPairs works on asynchronous streams
+ const asyncFrom = (...args) => Readable.from(...args).map(async (x) => x)
+ const pairs = await asyncFrom([1, 2, 3]).asIndexedPairs().toArray()
+ deepStrictEqual(pairs, [
+ [0, 1],
+ [1, 2],
+ [2, 3]
+ ])
+ const empty = await asyncFrom([]).asIndexedPairs().toArray()
+ deepStrictEqual(empty, [])
+}
+
+{
+ // Does not enumerate an infinite stream
+ const infinite = () =>
+ Readable.from(
+ (async function* () {
+ while (true) yield 1
+ })()
+ )
+ const pairs = await infinite().asIndexedPairs().take(3).toArray()
+ deepStrictEqual(pairs, [
+ [0, 1],
+ [1, 1],
+ [2, 1]
+ ])
+ const empty = await infinite().asIndexedPairs().take(0).toArray()
+ deepStrictEqual(empty, [])
+}
+
+{
+ // AbortSignal
+ await rejects(
+ async () => {
+ const ac = new AbortController()
+ const { signal } = ac
+ const p = Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray()
+ ac.abort()
+ await p
+ },
+ { name: 'AbortError' }
+ )
+
+ await rejects(async () => {
+ const signal = AbortSignal.abort()
+ await Readable.from([1, 2, 3]).asIndexedPairs({ signal }).toArray()
+ }, /AbortError/)
+}
+
+{
+ // Error cases
+ throws(() => Readable.from([1]).asIndexedPairs(1), /ERR_INVALID_ARG_TYPE/)
+ throws(() => Readable.from([1]).asIndexedPairs({ signal: true }), /ERR_INVALID_ARG_TYPE/)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-auto-destroy.js b/test/parallel/test-stream-auto-destroy.js
new file mode 100644
index 0000000000..4163e84ba9
--- /dev/null
+++ b/test/parallel/test-stream-auto-destroy.js
@@ -0,0 +1,128 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const r = new stream.Readable({
+ autoDestroy: true,
+ read() {
+ this.push('hello')
+ this.push('world')
+ this.push(null)
+ },
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ let ended = false
+ r.resume()
+ r.on(
+ 'end',
+ common.mustCall(() => {
+ ended = true
+ })
+ )
+ r.on(
+ 'close',
+ common.mustCall(() => {
+ assert(ended)
+ })
+ )
+}
+{
+ const w = new stream.Writable({
+ autoDestroy: true,
+ write(data, enc, cb) {
+ cb(null)
+ },
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ let finished = false
+ w.write('hello')
+ w.write('world')
+ w.end()
+ w.on(
+ 'finish',
+ common.mustCall(() => {
+ finished = true
+ })
+ )
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ assert(finished)
+ })
+ )
+}
+{
+ const t = new stream.Transform({
+ autoDestroy: true,
+ transform(data, enc, cb) {
+ cb(null, data)
+ },
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ let ended = false
+ let finished = false
+ t.write('hello')
+ t.write('world')
+ t.end()
+ t.resume()
+ t.on(
+ 'end',
+ common.mustCall(() => {
+ ended = true
+ })
+ )
+ t.on(
+ 'finish',
+ common.mustCall(() => {
+ finished = true
+ })
+ )
+ t.on(
+ 'close',
+ common.mustCall(() => {
+ assert(ended)
+ assert(finished)
+ })
+ )
+}
+{
+ const r = new stream.Readable({
+ read() {
+ r2.emit('error', new Error('fail'))
+ }
+ })
+ const r2 = new stream.Readable({
+ autoDestroy: true,
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ r.pipe(r2)
+}
+{
+ const r = new stream.Readable({
+ read() {
+ w.emit('error', new Error('fail'))
+ }
+ })
+ const w = new stream.Writable({
+ autoDestroy: true,
+ destroy: common.mustCall((err, cb) => cb())
+ })
+ r.pipe(w)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js
new file mode 100644
index 0000000000..ef8d5a93e5
--- /dev/null
+++ b/test/parallel/test-stream-await-drain-writers-in-synchronously-recursion-write.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { PassThrough } = require('../../lib/ours/index')
+const encode = new PassThrough({
+ highWaterMark: 1
+})
+const decode = new PassThrough({
+ highWaterMark: 1
+})
+const send = common.mustCall((buf) => {
+ encode.write(buf)
+}, 4)
+let i = 0
+const onData = common.mustCall(() => {
+ if (++i === 2) {
+ send(Buffer.from([0x3]))
+ send(Buffer.from([0x4]))
+ }
+}, 4)
+encode.pipe(decode).on('data', onData)
+send(Buffer.from([0x1]))
+send(Buffer.from([0x2]))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-backpressure.js b/test/parallel/test-stream-backpressure.js
new file mode 100644
index 0000000000..3f73d84b9e
--- /dev/null
+++ b/test/parallel/test-stream-backpressure.js
@@ -0,0 +1,49 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+let pushes = 0
+const total = 65500 + 40 * 1024
+const rs = new stream.Readable({
+ read: common.mustCall(function () {
+ if (pushes++ === 10) {
+ this.push(null)
+ return
+ }
+ const length = this._readableState.length
+
+ // We are at most doing two full runs of _reads
+ // before stopping, because Readable is greedy
+ // to keep its buffer full
+ assert(length <= total)
+ this.push(Buffer.alloc(65500))
+ for (let i = 0; i < 40; i++) {
+ this.push(Buffer.alloc(1024))
+ }
+
+ // We will be over highWaterMark at this point
+ // but a new call to _read is scheduled anyway.
+ }, 11)
+})
+const ws = stream.Writable({
+ write: common.mustCall(function (data, enc, cb) {
+ setImmediate(cb)
+ }, 41 * 10)
+})
+rs.pipe(ws)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-base-prototype-accessors-enumerability.js b/test/parallel/test-stream-base-prototype-accessors-enumerability.js
new file mode 100644
index 0000000000..e593d33030
--- /dev/null
+++ b/test/parallel/test-stream-base-prototype-accessors-enumerability.js
@@ -0,0 +1,36 @@
+// Flags: --expose-internals
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+
+// This tests that the prototype accessors added by StreamBase::AddMethods
+// are not enumerable. They could be enumerated when inspecting the prototype
+// with util.inspect or the inspector protocol.
+
+const assert = require('assert')
+
+// Or anything that calls StreamBase::AddMethods when setting up its prototype
+const internalBinding = process.binding
+const TTY = internalBinding('tty_wrap').TTY
+{
+ const ttyIsEnumerable = Object.prototype.propertyIsEnumerable.bind(TTY)
+ assert.strictEqual(ttyIsEnumerable('bytesRead'), false)
+ assert.strictEqual(ttyIsEnumerable('fd'), false)
+ assert.strictEqual(ttyIsEnumerable('_externalStream'), false)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-base-typechecking.js b/test/parallel/test-stream-base-typechecking.js
new file mode 100644
index 0000000000..070e7563fb
--- /dev/null
+++ b/test/parallel/test-stream-base-typechecking.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const net = require('net')
+const server = net.createServer().listen(
+ 0,
+ common.mustCall(() => {
+ const client = net.connect(
+ server.address().port,
+ common.mustCall(() => {
+ assert.throws(
+ () => {
+ client.write('broken', 'buffer')
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_INVALID_ARG_TYPE',
+ message: 'Second argument must be a buffer'
+ }
+ )
+ client.destroy()
+ server.close()
+ })
+ )
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-big-packet.js b/test/parallel/test-stream-big-packet.js
new file mode 100644
index 0000000000..9a7a2b92a8
--- /dev/null
+++ b/test/parallel/test-stream-big-packet.js
@@ -0,0 +1,80 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+let passed = false
+class TestStream extends stream.Transform {
+ _transform(chunk, encoding, done) {
+ if (!passed) {
+ // Char 'a' only exists in the last write
+ passed = chunk.toString().includes('a')
+ }
+ done()
+ }
+}
+const s1 = new stream.Transform({
+ transform(chunk, encoding, cb) {
+ process.nextTick(cb, null, chunk)
+ }
+})
+const s2 = new stream.PassThrough()
+const s3 = new TestStream()
+s1.pipe(s3)
+// Don't let s2 auto close which may close s3
+s2.pipe(s3, {
+ end: false
+})
+
+// We must write a buffer larger than highWaterMark
+const big = Buffer.alloc(s1.writableHighWaterMark + 1, 'x')
+
+// Since big is larger than highWaterMark, it will be buffered internally.
+assert(!s1.write(big))
+// 'tiny' is small enough to pass through internal buffer.
+assert(s2.write('tiny'))
+
+// Write some small data in next IO loop, which will never be written to s3
+// Because 'drain' event is not emitted from s1 and s1 is still paused
+setImmediate(s1.write.bind(s1), 'later')
+
+// Assert after two IO loops when all operations have been done.
+process.on('exit', function () {
+ assert(passed, 'Large buffer is not handled properly by Writable Stream')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js
new file mode 100644
index 0000000000..c4171023dc
--- /dev/null
+++ b/test/parallel/test-stream-big-push.js
@@ -0,0 +1,83 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const str = 'asdfasdfasdfasdfasdf'
+const r = new stream.Readable({
+ highWaterMark: 5,
+ encoding: 'utf8'
+})
+let reads = 0
+function _read() {
+ if (reads === 0) {
+ setTimeout(() => {
+ r.push(str)
+ }, 1)
+ reads++
+ } else if (reads === 1) {
+ const ret = r.push(str)
+ assert.strictEqual(ret, false)
+ reads++
+ } else {
+ r.push(null)
+ }
+}
+r._read = common.mustCall(_read, 3)
+r.on('end', common.mustCall())
+
+// Push some data in to start.
+// We've never gotten any read event at this point.
+const ret = r.push(str)
+// Should be false. > hwm
+assert(!ret)
+let chunk = r.read()
+assert.strictEqual(chunk, str)
+chunk = r.read()
+assert.strictEqual(chunk, null)
+r.once('readable', () => {
+ // This time, we'll get *all* the remaining data, because
+ // it's been added synchronously, as the read WOULD take
+ // us below the hwm, and so it triggered a _read() again,
+ // which synchronously added more, which we then return.
+ chunk = r.read()
+ assert.strictEqual(chunk, str + str)
+ chunk = r.read()
+ assert.strictEqual(chunk, null)
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-buffer-list.js b/test/parallel/test-stream-buffer-list.js
new file mode 100644
index 0000000000..f134cca0b8
--- /dev/null
+++ b/test/parallel/test-stream-buffer-list.js
@@ -0,0 +1,87 @@
+// Flags: --expose-internals
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const BufferList = require('../../lib/internal/streams/buffer_list')
+
+// Test empty buffer list.
+const emptyList = new BufferList()
+emptyList.shift()
+assert.deepStrictEqual(emptyList, new BufferList())
+assert.strictEqual(emptyList.join(','), '')
+assert.deepStrictEqual(emptyList.concat(0), Buffer.alloc(0))
+const buf = Buffer.from('foo')
+function testIterator(list, count) {
+ // test iterator
+ let len = 0
+ // eslint-disable-next-line no-unused-vars
+ for (const x of list) {
+ len++
+ }
+ assert.strictEqual(len, count)
+}
+
+// Test buffer list with one element.
+const list = new BufferList()
+testIterator(list, 0)
+list.push(buf)
+testIterator(list, 1)
+for (const x of list) {
+ assert.strictEqual(x, buf)
+}
+const copy = list.concat(3)
+testIterator(copy, 3)
+assert.notStrictEqual(copy, buf)
+assert.deepStrictEqual(copy, buf)
+assert.strictEqual(list.join(','), 'foo')
+const shifted = list.shift()
+testIterator(list, 0)
+assert.strictEqual(shifted, buf)
+assert.deepStrictEqual(list, new BufferList())
+{
+ const list = new BufferList()
+ list.push('foo')
+ list.push('bar')
+ list.push('foo')
+ list.push('bar')
+ assert.strictEqual(list.consume(6, true), 'foobar')
+ assert.strictEqual(list.consume(6, true), 'foobar')
+}
+{
+ const list = new BufferList()
+ list.push('foo')
+ list.push('bar')
+ assert.strictEqual(list.consume(5, true), 'fooba')
+}
+{
+ const list = new BufferList()
+ list.push(buf)
+ list.push(buf)
+ list.push(buf)
+ list.push(buf)
+ assert.strictEqual(list.consume(6).toString(), 'foofoo')
+ assert.strictEqual(list.consume(6).toString(), 'foofoo')
+}
+{
+ const list = new BufferList()
+ list.push(buf)
+ list.push(buf)
+ assert.strictEqual(list.consume(5).toString(), 'foofo')
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-catch-rejections.js b/test/parallel/test-stream-catch-rejections.js
new file mode 100644
index 0000000000..b4f9efc699
--- /dev/null
+++ b/test/parallel/test-stream-catch-rejections.js
@@ -0,0 +1,65 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const r = new stream.Readable({
+ captureRejections: true,
+ read() {}
+ })
+ r.push('hello')
+ r.push('world')
+ const err = new Error('kaboom')
+ r.on(
+ 'error',
+ common.mustCall((_err) => {
+ assert.strictEqual(err, _err)
+ assert.strictEqual(r.destroyed, true)
+ })
+ )
+ r.on('data', async () => {
+ throw err
+ })
+}
+{
+ const w = new stream.Writable({
+ captureRejections: true,
+ highWaterMark: 1,
+ write(chunk, enc, cb) {
+ process.nextTick(cb)
+ }
+ })
+ const err = new Error('kaboom')
+ w.write('hello', () => {
+ w.write('world')
+ })
+ w.on(
+ 'error',
+ common.mustCall((_err) => {
+ assert.strictEqual(err, _err)
+ assert.strictEqual(w.destroyed, true)
+ })
+ )
+ w.on(
+ 'drain',
+ common.mustCall(async () => {
+ throw err
+ }, 2)
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-compose-operator.js b/test/parallel/test-stream-compose-operator.js
new file mode 100644
index 0000000000..45cac28d65
--- /dev/null
+++ b/test/parallel/test-stream-compose-operator.js
@@ -0,0 +1,134 @@
+'use strict'
+
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable, Transform } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ // with async generator
+ const stream = Readable.from(['a', 'b', 'c', 'd']).compose(async function* (stream) {
+ let str = ''
+ for await (const chunk of stream) {
+ str += chunk
+ if (str.length === 2) {
+ yield str
+ str = ''
+ }
+ }
+ })
+ const result = ['ab', 'cd']
+ ;(async () => {
+ for await (const item of stream) {
+ assert.strictEqual(item, result.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // With Transformer
+ const stream = Readable.from(['a', 'b', 'c', 'd']).compose(
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk)
+ }, 4)
+ })
+ )
+ const result = ['a', 'b', 'c', 'd']
+ ;(async () => {
+ for await (const item of stream) {
+ assert.strictEqual(item, result.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Throwing an error during `compose` (before waiting for data)
+ const stream = Readable.from([1, 2, 3, 4, 5]).compose(async function* (stream) {
+ // eslint-disable-line require-yield
+
+ throw new Error('boom')
+ })
+ assert
+ .rejects(async () => {
+ for await (const item of stream) {
+ assert.fail('should not reach here, got ' + item)
+ }
+ }, /boom/)
+ .then(common.mustCall())
+}
+{
+ // Throwing an error during `compose` (when waiting for data)
+ const stream = Readable.from([1, 2, 3, 4, 5]).compose(async function* (stream) {
+ for await (const chunk of stream) {
+ if (chunk === 3) {
+ throw new Error('boom')
+ }
+ yield chunk
+ }
+ })
+ assert.rejects(stream.toArray(), /boom/).then(common.mustCall())
+}
+{
+ // Throwing an error during `compose` (after finishing all readable data)
+ const stream = Readable.from([1, 2, 3, 4, 5]).compose(async function* (stream) {
+ // eslint-disable-line require-yield
+
+ // eslint-disable-next-line no-unused-vars,no-empty
+ for await (const chunk of stream) {
+ }
+ throw new Error('boom')
+ })
+ assert.rejects(stream.toArray(), /boom/).then(common.mustCall())
+}
+{
+ // AbortSignal
+ const ac = new AbortController()
+ const stream = Readable.from([1, 2, 3, 4, 5]).compose(
+ async function* (source) {
+ // Should not reach here
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ {
+ signal: ac.signal
+ }
+ )
+ ac.abort()
+ assert
+ .rejects(
+ async () => {
+ for await (const item of stream) {
+ assert.fail('should not reach here, got ' + item)
+ }
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+}
+{
+ assert.throws(() => Readable.from(['a']).compose(Readable.from(['b'])), {
+ code: 'ERR_INVALID_ARG_VALUE'
+ })
+}
+{
+ assert.throws(() => Readable.from(['a']).compose(), {
+ code: 'ERR_INVALID_ARG_TYPE'
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-compose.js b/test/parallel/test-stream-compose.js
new file mode 100644
index 0000000000..cbd9e96bf0
--- /dev/null
+++ b/test/parallel/test-stream-compose.js
@@ -0,0 +1,567 @@
+// Flags: --expose-internals
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable, Transform, Writable, finished, PassThrough } = require('../../lib/ours/index')
+const compose = require('../../lib/internal/streams/compose')
+const assert = require('assert')
+{
+ let res = ''
+ compose(
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk + chunk)
+ })
+ }),
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ })
+ )
+ .end('asd')
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASDASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk + chunk
+ }
+ },
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk.toString().toUpperCase()
+ }
+ }
+ )
+ .end('asd')
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASDASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(async function* (source) {
+ for await (const chunk of source) {
+ yield chunk + chunk
+ }
+ })
+ .end('asd')
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'asdasd')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ Readable.from(['asd']),
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ })
+ )
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ (async function* () {
+ yield 'asd'
+ })(),
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ })
+ )
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res += buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ }),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ new Writable({
+ write: common.mustCall((chunk, encoding, callback) => {
+ res += chunk
+ callback(null)
+ })
+ })
+ )
+ .end('asd')
+ .on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASD')
+ })
+ )
+}
+{
+ let res = ''
+ compose(
+ new Transform({
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk.toString().toUpperCase())
+ })
+ }),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ res += chunk
+ }
+ }
+ )
+ .end('asd')
+ .on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'ASD')
+ })
+ )
+}
+{
+ let res
+ compose(
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, {
+ chunk
+ })
+ })
+ }),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, {
+ chunk
+ })
+ })
+ })
+ )
+ .end(true)
+ .on(
+ 'data',
+ common.mustCall((buf) => {
+ res = buf
+ })
+ )
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res.chunk.chunk, true)
+ })
+ )
+}
+{
+ const _err = new Error('asd')
+ compose(
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(_err)
+ })
+ }),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield chunk
+ }
+ },
+ new Transform({
+ objectMode: true,
+ transform: common.mustNotCall((chunk, encoding, callback) => {
+ callback(null, {
+ chunk
+ })
+ })
+ })
+ )
+ .end(true)
+ .on('data', common.mustNotCall())
+ .on('end', common.mustNotCall())
+ .on('error', (err) => {
+ assert.strictEqual(err, _err)
+ })
+}
+{
+ const _err = new Error('asd')
+ compose(
+ new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk, encoding, callback) => {
+ callback(null, chunk)
+ })
+ }),
+ async function* (source) {
+ // eslint-disable-line require-yield
+ let tmp = ''
+ for await (const chunk of source) {
+ tmp += chunk
+ throw _err
+ }
+ return tmp
+ },
+ new Transform({
+ objectMode: true,
+ transform: common.mustNotCall((chunk, encoding, callback) => {
+ callback(null, {
+ chunk
+ })
+ })
+ })
+ )
+ .end(true)
+ .on('data', common.mustNotCall())
+ .on('end', common.mustNotCall())
+ .on('error', (err) => {
+ assert.strictEqual(err, _err)
+ })
+}
+{
+ let buf = ''
+
+ // Convert into readable Duplex.
+ const s1 = compose(
+ (async function* () {
+ yield 'Hello'
+ yield 'World'
+ })(),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ }
+ )
+ assert.strictEqual(s1.writable, false)
+ assert.strictEqual(s1.readable, false)
+ finished(
+ s1.resume(),
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+ let buf = ''
+ // Convert into transform duplex.
+ const s2 = compose(async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ })
+ s2.end('helloworld')
+ s2.resume()
+ s2.on('data', (chunk) => {
+ buf += chunk
+ })
+ finished(
+ s2.resume(),
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+ let buf = ''
+
+ // Convert into readable Duplex.
+ const s1 = compose(
+ (async function* () {
+ yield 'Hello'
+ yield 'World'
+ })()
+ )
+
+ // Convert into transform duplex.
+ const s2 = compose(async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ })
+
+ // Convert into writable duplex.
+ const s3 = compose(async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ })
+ const s4 = compose(s1, s2, s3)
+ finished(
+ s4,
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+ let buf = ''
+
+ // Convert into readable Duplex.
+ const s1 = compose(
+ (async function* () {
+ yield 'Hello'
+ yield 'World'
+ })(),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ }
+ )
+ finished(
+ s1,
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+ assert.throws(() => compose(), {
+ code: 'ERR_MISSING_ARGS'
+ })
+}
+{
+ assert.throws(() => compose(new Writable(), new PassThrough()), {
+ code: 'ERR_INVALID_ARG_VALUE'
+ })
+}
+{
+ assert.throws(
+ () =>
+ compose(
+ new PassThrough(),
+ new Readable({
+ read() {}
+ }),
+ new PassThrough()
+ ),
+ {
+ code: 'ERR_INVALID_ARG_VALUE'
+ }
+ )
+}
+{
+ let buf = ''
+
+ // Convert into readable Duplex.
+ const s1 = compose(
+ (async function* () {
+ yield 'Hello'
+ yield 'World'
+ })(),
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ return buf
+ }
+ )
+ finished(
+ s1,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_INVALID_RETURN_VALUE')
+ })
+ )
+}
+{
+ let buf = ''
+
+ // Convert into readable Duplex.
+ const s1 = compose(
+ 'HelloWorld',
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ async function (source) {
+ for await (const chunk of source) {
+ buf += chunk
+ }
+ }
+ )
+ finished(
+ s1,
+ common.mustCall((err) => {
+ assert(!err)
+ assert.strictEqual(buf, 'HELLOWORLD')
+ })
+ )
+}
+{
+  // The new stream should use the writable side of the first stream and the readable side of the last stream
+ // #46829
+ ;(async () => {
+ const newStream = compose(
+ new PassThrough({
+ // reading FROM you in object mode or not
+ readableObjectMode: false,
+ // writing TO you in object mode or not
+ writableObjectMode: false
+ }),
+ new Transform({
+ // reading FROM you in object mode or not
+ readableObjectMode: true,
+ // writing TO you in object mode or not
+ writableObjectMode: false,
+ transform: (chunk, encoding, callback) => {
+ callback(null, {
+ value: chunk.toString()
+ })
+ }
+ })
+ )
+ assert.strictEqual(newStream.writableObjectMode, false)
+ assert.strictEqual(newStream.readableObjectMode, true)
+ newStream.write('Steve Rogers')
+ newStream.write('On your left')
+ newStream.end()
+ assert.deepStrictEqual(await newStream.toArray(), [
+ {
+ value: 'Steve Rogers'
+ },
+ {
+ value: 'On your left'
+ }
+ ])
+ })().then(common.mustCall())
+}
+{
+  // The new stream should use the writable side of the first stream and the readable side of the last stream
+ // #46829
+ ;(async () => {
+ const newStream = compose(
+ new PassThrough({
+ // reading FROM you in object mode or not
+ readableObjectMode: true,
+ // writing TO you in object mode or not
+ writableObjectMode: true
+ }),
+ new Transform({
+ // reading FROM you in object mode or not
+ readableObjectMode: false,
+ // writing TO you in object mode or not
+ writableObjectMode: true,
+ transform: (chunk, encoding, callback) => {
+ callback(null, chunk.value)
+ }
+ })
+ )
+ assert.strictEqual(newStream.writableObjectMode, true)
+ assert.strictEqual(newStream.readableObjectMode, false)
+ newStream.write({
+ value: 'Steve Rogers'
+ })
+ newStream.write({
+ value: 'On your left'
+ })
+ newStream.end()
+ assert.deepStrictEqual(await newStream.toArray(), [Buffer.from('Steve RogersOn your left')])
+ })().then(common.mustCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-construct.js b/test/parallel/test-stream-construct.js
new file mode 100644
index 0000000000..0a46a5a0b6
--- /dev/null
+++ b/test/parallel/test-stream-construct.js
@@ -0,0 +1,336 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable, Readable, Duplex } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ // Multiple callback.
+ new Writable({
+ construct: common.mustCall((callback) => {
+ callback()
+ callback()
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_MULTIPLE_CALLBACK'
+ })
+ )
+}
+{
+ // Multiple callback.
+ new Readable({
+ construct: common.mustCall((callback) => {
+ callback()
+ callback()
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_MULTIPLE_CALLBACK'
+ })
+ )
+}
+{
+ // Synchronous error.
+
+ new Writable({
+ construct: common.mustCall((callback) => {
+ callback(new Error('test'))
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'test'
+ })
+ )
+}
+{
+ // Synchronous error.
+
+ new Readable({
+ construct: common.mustCall((callback) => {
+ callback(new Error('test'))
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'test'
+ })
+ )
+}
+{
+ // Asynchronous error.
+
+ new Writable({
+ construct: common.mustCall((callback) => {
+ process.nextTick(callback, new Error('test'))
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'test'
+ })
+ )
+}
+{
+ // Asynchronous error.
+
+ new Readable({
+ construct: common.mustCall((callback) => {
+ process.nextTick(callback, new Error('test'))
+ })
+ }).on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'test'
+ })
+ )
+}
+function testDestroy(factory) {
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.destroy()
+ }
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.destroy(null, () => {
+ assert.strictEqual(constructed, true)
+ })
+ }
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.destroy()
+ }
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'kaboom')
+ })
+ )
+ s.destroy(new Error('kaboom'), (err) => {
+ assert.strictEqual(err.message, 'kaboom')
+ assert.strictEqual(constructed, true)
+ })
+ }
+ {
+ let constructed = false
+ const s = factory({
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ })
+ })
+ s.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ s.destroy(new Error())
+ }
+}
+testDestroy(
+ (opts) =>
+ new Readable({
+ read: common.mustNotCall(),
+ ...opts
+ })
+)
+testDestroy(
+ (opts) =>
+ new Writable({
+ write: common.mustNotCall(),
+ final: common.mustNotCall(),
+ ...opts
+ })
+)
+{
+ let constructed = false
+ const r = new Readable({
+ autoDestroy: true,
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ }),
+ read: common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ r.push(null)
+ })
+ })
+ r.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ r.on('data', common.mustNotCall())
+}
+{
+ let constructed = false
+ const w = new Writable({
+ autoDestroy: true,
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ }),
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(constructed, true)
+ process.nextTick(cb)
+ }),
+ final: common.mustCall((cb) => {
+ assert.strictEqual(constructed, true)
+ process.nextTick(cb)
+ })
+ })
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ w.end('data')
+}
+{
+ let constructed = false
+ const w = new Writable({
+ autoDestroy: true,
+ construct: common.mustCall((cb) => {
+ constructed = true
+ process.nextTick(cb)
+ }),
+ write: common.mustNotCall(),
+ final: common.mustCall((cb) => {
+ assert.strictEqual(constructed, true)
+ process.nextTick(cb)
+ })
+ })
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+ w.end()
+}
+{
+ new Duplex({
+ construct: common.mustCall()
+ })
+}
+{
+ // https://github.com/nodejs/node/issues/34448
+
+ let constructed = false
+ const d = new Duplex({
+ readable: false,
+ construct: common.mustCall((callback) => {
+ setImmediate(
+ common.mustCall(() => {
+ constructed = true
+ callback()
+ })
+ )
+ }),
+ write(chunk, encoding, callback) {
+ callback()
+ },
+ read() {
+ this.push(null)
+ }
+ })
+ d.resume()
+ d.end('foo')
+ d.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(constructed, true)
+ })
+ )
+}
+{
+ // Construct should not cause stream to read.
+ new Readable({
+ construct: common.mustCall((callback) => {
+ callback()
+ }),
+ read: common.mustNotCall()
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-decoder-objectmode.js b/test/parallel/test-stream-decoder-objectmode.js
new file mode 100644
index 0000000000..e282caeedf
--- /dev/null
+++ b/test/parallel/test-stream-decoder-objectmode.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+const readable = new stream.Readable({
+ read: () => {},
+ encoding: 'utf16le',
+ objectMode: true
+})
+readable.push(Buffer.from('abc', 'utf16le'))
+readable.push(Buffer.from('def', 'utf16le'))
+readable.push(null)
+
+// Without object mode, these would be concatenated into a single chunk.
+assert.strictEqual(readable.read(), 'abc')
+assert.strictEqual(readable.read(), 'def')
+assert.strictEqual(readable.read(), null)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-destroy-event-order.js b/test/parallel/test-stream-destroy-event-order.js
new file mode 100644
index 0000000000..a52a13ed27
--- /dev/null
+++ b/test/parallel/test-stream-destroy-event-order.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+const rs = new Readable({
+ read() {}
+})
+let closed = false
+let errored = false
+rs.on(
+ 'close',
+ common.mustCall(() => {
+ closed = true
+ assert(errored)
+ })
+)
+rs.on(
+ 'error',
+ common.mustCall((err) => {
+ errored = true
+ assert(!closed)
+ })
+)
+rs.destroy(new Error('kaboom'))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-drop-take.js b/test/parallel/test-stream-drop-take.js
new file mode 100644
index 0000000000..7d6f0a01e6
--- /dev/null
+++ b/test/parallel/test-stream-drop-take.js
@@ -0,0 +1,169 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const { deepStrictEqual, rejects, throws, strictEqual } = require('assert')
+const { from } = Readable
+const fromAsync = (...args) => from(...args).map(async (x) => x)
+const naturals = () =>
+ from(
+ (async function* () {
+ let i = 1
+ while (true) {
+ yield i++
+ }
+ })()
+ )
+{
+ // Synchronous streams
+ ;(async () => {
+ deepStrictEqual(await from([1, 2, 3]).drop(2).toArray(), [3])
+ deepStrictEqual(await from([1, 2, 3]).take(1).toArray(), [1])
+ deepStrictEqual(await from([]).drop(2).toArray(), [])
+ deepStrictEqual(await from([]).take(1).toArray(), [])
+ deepStrictEqual(await from([1, 2, 3]).drop(1).take(1).toArray(), [2])
+ deepStrictEqual(await from([1, 2]).drop(0).toArray(), [1, 2])
+ deepStrictEqual(await from([1, 2]).take(0).toArray(), [])
+ })().then(common.mustCall())
+ // Asynchronous streams
+ ;(async () => {
+ deepStrictEqual(await fromAsync([1, 2, 3]).drop(2).toArray(), [3])
+ deepStrictEqual(await fromAsync([1, 2, 3]).take(1).toArray(), [1])
+ deepStrictEqual(await fromAsync([]).drop(2).toArray(), [])
+ deepStrictEqual(await fromAsync([]).take(1).toArray(), [])
+ deepStrictEqual(await fromAsync([1, 2, 3]).drop(1).take(1).toArray(), [2])
+ deepStrictEqual(await fromAsync([1, 2]).drop(0).toArray(), [1, 2])
+ deepStrictEqual(await fromAsync([1, 2]).take(0).toArray(), [])
+ })().then(common.mustCall())
+ // Infinite streams
+ // Asynchronous streams
+ ;(async () => {
+ deepStrictEqual(await naturals().take(1).toArray(), [1])
+ deepStrictEqual(await naturals().drop(1).take(1).toArray(), [2])
+ const next10 = [11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
+ deepStrictEqual(await naturals().drop(10).take(10).toArray(), next10)
+ deepStrictEqual(await naturals().take(5).take(1).toArray(), [1])
+ })().then(common.mustCall())
+}
+
+// Don't wait for next item in the original stream when already consumed the requested take amount
+{
+ let reached = false
+ let resolve
+ const promise = new Promise((res) => (resolve = res))
+ const stream = from(
+ (async function* () {
+ yield 1
+ await promise
+ reached = true
+ yield 2
+ })()
+ )
+ stream
+ .take(1)
+ .toArray()
+ .then(
+ common.mustCall(() => {
+ strictEqual(reached, false)
+ })
+ )
+ .finally(() => resolve())
+}
+{
+ // Coercion
+ ;(async () => {
+ // The spec made me do this ^^
+ deepStrictEqual(await naturals().take('cat').toArray(), [])
+ deepStrictEqual(await naturals().take('2').toArray(), [1, 2])
+ deepStrictEqual(await naturals().take(true).toArray(), [1])
+ })().then(common.mustCall())
+}
+{
+ // Support for AbortSignal
+ const ac = new AbortController()
+ rejects(
+ Readable.from([1, 2, 3])
+ .take(1, {
+ signal: ac.signal
+ })
+ .toArray(),
+ {
+ name: 'AbortError'
+ }
+ ).then(common.mustCall())
+ rejects(
+ Readable.from([1, 2, 3])
+ .drop(1, {
+ signal: ac.signal
+ })
+ .toArray(),
+ {
+ name: 'AbortError'
+ }
+ ).then(common.mustCall())
+ ac.abort()
+}
+{
+ // Support for AbortSignal, already aborted
+ const signal = AbortSignal.abort()
+ rejects(
+ Readable.from([1, 2, 3])
+ .take(1, {
+ signal
+ })
+ .toArray(),
+ {
+ name: 'AbortError'
+ }
+ ).then(common.mustCall())
+}
+{
+ // Error cases
+ const invalidArgs = [-1, -Infinity, -40]
+ for (const example of invalidArgs) {
+ throws(() => from([]).take(example).toArray(), /ERR_OUT_OF_RANGE/)
+ }
+ throws(() => Readable.from([1]).drop(1, 1), /ERR_INVALID_ARG_TYPE/)
+ throws(
+ () =>
+ Readable.from([1]).drop(1, {
+ signal: true
+ }),
+ /ERR_INVALID_ARG_TYPE/
+ )
+ throws(() => Readable.from([1]).take(1, 1), /ERR_INVALID_ARG_TYPE/)
+ throws(
+ () =>
+ Readable.from([1]).take(1, {
+ signal: true
+ }),
+ /ERR_INVALID_ARG_TYPE/
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-destroy.js b/test/parallel/test-stream-duplex-destroy.js
new file mode 100644
index 0000000000..950686ccb7
--- /dev/null
+++ b/test/parallel/test-stream-duplex-destroy.js
@@ -0,0 +1,281 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Duplex } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex.resume()
+ duplex.on('end', common.mustNotCall())
+ duplex.on('finish', common.mustNotCall())
+ duplex.on('close', common.mustCall())
+ duplex.destroy()
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex.resume()
+ const expected = new Error('kaboom')
+ duplex.on('end', common.mustNotCall())
+ duplex.on('finish', common.mustNotCall())
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ duplex.destroy(expected)
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb(err)
+ })
+ const expected = new Error('kaboom')
+ duplex.on('finish', common.mustNotCall('no finish event'))
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ duplex.destroy(expected)
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const expected = new Error('kaboom')
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {},
+ destroy: common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb()
+ })
+ })
+ duplex.resume()
+ duplex.on('end', common.mustNotCall('no end event'))
+ duplex.on('finish', common.mustNotCall('no finish event'))
+
+ // Error is swallowed by the custom _destroy
+ duplex.on('error', common.mustNotCall('no error event'))
+ duplex.on('close', common.mustCall())
+ duplex.destroy(expected)
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb()
+ })
+ duplex.destroy()
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex.resume()
+ duplex._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ process.nextTick(() => {
+ this.push(null)
+ this.end()
+ cb()
+ })
+ })
+ const fail = common.mustNotCall('no finish or end event')
+ duplex.on('finish', fail)
+ duplex.on('end', fail)
+ duplex.destroy()
+ duplex.removeListener('end', fail)
+ duplex.removeListener('finish', fail)
+ duplex.on('end', common.mustNotCall())
+ duplex.on('finish', common.mustNotCall())
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ const expected = new Error('kaboom')
+ duplex._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb(expected)
+ })
+ duplex.on('finish', common.mustNotCall('no finish event'))
+ duplex.on('end', common.mustNotCall('no end event'))
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ duplex.destroy()
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {},
+ allowHalfOpen: true
+ })
+ duplex.resume()
+ duplex.on('finish', common.mustNotCall())
+ duplex.on('end', common.mustNotCall())
+ duplex.destroy()
+ assert.strictEqual(duplex.destroyed, true)
+}
+{
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex.destroyed = true
+ assert.strictEqual(duplex.destroyed, true)
+
+ // The internal destroy() mechanism should not be triggered
+ duplex.on('finish', common.mustNotCall())
+ duplex.on('end', common.mustNotCall())
+ duplex.destroy()
+}
+{
+ function MyDuplex() {
+ assert.strictEqual(this.destroyed, false)
+ this.destroyed = false
+ Duplex.call(this)
+ }
+ Object.setPrototypeOf(MyDuplex.prototype, Duplex.prototype)
+ Object.setPrototypeOf(MyDuplex, Duplex)
+ new MyDuplex()
+}
+{
+ const duplex = new Duplex({
+ writable: false,
+ autoDestroy: true,
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex.push(null)
+ duplex.resume()
+ duplex.on('close', common.mustCall())
+}
+{
+ const duplex = new Duplex({
+ readable: false,
+ autoDestroy: true,
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex.end()
+ duplex.on('close', common.mustCall())
+}
+{
+ const duplex = new Duplex({
+ allowHalfOpen: false,
+ autoDestroy: true,
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {}
+ })
+ duplex.push(null)
+ duplex.resume()
+ const orgEnd = duplex.end
+ duplex.end = common.mustNotCall()
+ duplex.on('end', () => {
+ // Ensure end() is called in next tick to allow
+ // any pending writes to be invoked first.
+ process.nextTick(() => {
+ duplex.end = common.mustCall(orgEnd)
+ })
+ })
+ duplex.on('close', common.mustCall())
+}
+{
+ // Check abort signal
+ const controller = new AbortController()
+ const { signal } = controller
+ const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ read() {},
+ signal
+ })
+ let count = 0
+ duplex.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(count++, 0) // Ensure not called twice
+ assert.strictEqual(e.name, 'AbortError')
+ })
+ )
+ duplex.on('close', common.mustCall())
+ controller.abort()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-end.js b/test/parallel/test-stream-duplex-end.js
new file mode 100644
index 0000000000..39c2a04585
--- /dev/null
+++ b/test/parallel/test-stream-duplex-end.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const Duplex = require('../../lib/ours/index').Duplex
+{
+ const stream = new Duplex({
+ read() {}
+ })
+ assert.strictEqual(stream.allowHalfOpen, true)
+ stream.on('finish', common.mustNotCall())
+ assert.strictEqual(stream.listenerCount('end'), 0)
+ stream.resume()
+ stream.push(null)
+}
+{
+ const stream = new Duplex({
+ read() {},
+ allowHalfOpen: false
+ })
+ assert.strictEqual(stream.allowHalfOpen, false)
+ stream.on('finish', common.mustCall())
+ assert.strictEqual(stream.listenerCount('end'), 0)
+ stream.resume()
+ stream.push(null)
+}
+{
+ const stream = new Duplex({
+ read() {},
+ allowHalfOpen: false
+ })
+ assert.strictEqual(stream.allowHalfOpen, false)
+ stream._writableState.ended = true
+ stream.on('finish', common.mustNotCall())
+ assert.strictEqual(stream.listenerCount('end'), 0)
+ stream.resume()
+ stream.push(null)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-from.js b/test/parallel/test-stream-duplex-from.js
new file mode 100644
index 0000000000..d198586fbc
--- /dev/null
+++ b/test/parallel/test-stream-duplex-from.js
@@ -0,0 +1,490 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Duplex, Readable, Writable, pipeline, PassThrough } = require('../../lib/ours/index')
+function makeATestReadableStream(value) {
+ return Readable.from([value])
+}
+function makeATestWritableStream(writeFunc) {
+ return new Writable({
+ write(chunk, enc, cb) {
+ writeFunc(chunk)
+ cb()
+ }
+ })
+}
+const Blob = globalThis.Blob || require('buffer').Blob
+{
+ const d = Duplex.from({
+ readable: new Readable({
+ read() {
+ this.push('asd')
+ this.push(null)
+ }
+ })
+ })
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, false)
+ d.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(d.read().toString(), 'asd')
+ })
+ )
+ d.once(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+}
+{
+ const d = Duplex.from(
+ new Readable({
+ read() {
+ this.push('asd')
+ this.push(null)
+ }
+ })
+ )
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, false)
+ d.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(d.read().toString(), 'asd')
+ })
+ )
+ d.once(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from(
+ new Writable({
+ write(chunk, encoding, callback) {
+ ret += chunk
+ callback()
+ }
+ })
+ )
+ assert.strictEqual(d.readable, false)
+ assert.strictEqual(d.writable, true)
+ d.end('asd')
+ d.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(d.writable, false)
+ assert.strictEqual(ret, 'asd')
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from({
+ writable: new Writable({
+ write(chunk, encoding, callback) {
+ ret += chunk
+ callback()
+ }
+ })
+ })
+ assert.strictEqual(d.readable, false)
+ assert.strictEqual(d.writable, true)
+ d.end('asd')
+ d.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(d.writable, false)
+ assert.strictEqual(ret, 'asd')
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from({
+ readable: new Readable({
+ read() {
+ this.push('asd')
+ this.push(null)
+ }
+ }),
+ writable: new Writable({
+ write(chunk, encoding, callback) {
+ ret += chunk
+ callback()
+ }
+ })
+ })
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, true)
+ d.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(d.read().toString(), 'asd')
+ })
+ )
+ d.once(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+ d.end('asd')
+ d.once(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(d.writable, false)
+ assert.strictEqual(ret, 'asd')
+ })
+ )
+}
+{
+ const d = Duplex.from(Promise.resolve('asd'))
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, false)
+ d.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(d.read().toString(), 'asd')
+ })
+ )
+ d.once(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+}
+{
+ // https://github.com/nodejs/node/issues/40497
+ pipeline(
+ ['abc\ndef\nghi'],
+ Duplex.from(async function* (source) {
+ let rest = ''
+ for await (const chunk of source) {
+ const lines = (rest + chunk.toString()).split('\n')
+ rest = lines.pop()
+ for (const line of lines) {
+ yield line
+ }
+ }
+ yield rest
+ }),
+    // eslint-disable-next-line require-yield
+    async function* (source) {
+ let ret = ''
+ for await (const x of source) {
+ ret += x
+ }
+ assert.strictEqual(ret, 'abcdefghi')
+ },
+ common.mustSucceed()
+ )
+}
+
+// Ensure that isDuplexNodeStream was called
+{
+ const duplex = new Duplex()
+ assert.strictEqual(Duplex.from(duplex), duplex)
+}
+
+// Ensure that Duplex.from works for blobs
+if (typeof Blob !== 'undefined') {
+ const blob = new Blob(['blob'])
+ const expectedByteLength = blob.size
+ const duplex = Duplex.from(blob)
+ duplex.on(
+ 'data',
+ common.mustCall((arrayBuffer) => {
+ assert.strictEqual(arrayBuffer.byteLength, expectedByteLength)
+ })
+ )
+}
+
+// Ensure that given a promise rejection it emits an error
+{
+ const myErrorMessage = 'myCustomError'
+ Duplex.from(Promise.reject(myErrorMessage)).on(
+ 'error',
+ common.mustCall((error) => {
+ assert.strictEqual(error, myErrorMessage)
+ })
+ )
+}
+
+// Ensure that given a promise rejection on an async function it emits an error
+{
+ const myErrorMessage = 'myCustomError'
+ async function asyncFn() {
+ return Promise.reject(myErrorMessage)
+ }
+ Duplex.from(asyncFn).on(
+ 'error',
+ common.mustCall((error) => {
+ assert.strictEqual(error, myErrorMessage)
+ })
+ )
+}
+
+// Ensure that Duplex.from throws an Invalid return value when function is void
+{
+ assert.throws(() => Duplex.from(() => {}), {
+ code: 'ERR_INVALID_RETURN_VALUE'
+ })
+}
+
+// Ensure data if a sub object has a readable stream it's duplexified
+{
+ const msg = Buffer.from('hello')
+ const duplex = Duplex.from({
+ readable: Readable({
+ read() {
+ this.push(msg)
+ this.push(null)
+ }
+ })
+ }).on(
+ 'data',
+ common.mustCall((data) => {
+ assert.strictEqual(data, msg)
+ })
+ )
+ assert.strictEqual(duplex.writable, false)
+}
+
+// Ensure data if a sub object has a writable stream it's duplexified
+{
+ const msg = Buffer.from('hello')
+ const duplex = Duplex.from({
+ writable: Writable({
+ write: common.mustCall((data) => {
+ assert.strictEqual(data, msg)
+ })
+ })
+ })
+ duplex.write(msg)
+ assert.strictEqual(duplex.readable, false)
+}
+
+// Ensure data if a sub object has a writable and readable stream it's duplexified
+{
+ const msg = Buffer.from('hello')
+ const duplex = Duplex.from({
+ readable: Readable({
+ read() {
+ this.push(msg)
+ this.push(null)
+ }
+ }),
+ writable: Writable({
+ write: common.mustCall((data) => {
+ assert.strictEqual(data, msg)
+ })
+ })
+ })
+ duplex
+ .pipe(duplex)
+ .on(
+ 'data',
+ common.mustCall((data) => {
+ assert.strictEqual(data, msg)
+ assert.strictEqual(duplex.readable, true)
+ assert.strictEqual(duplex.writable, true)
+ })
+ )
+ .on('end', common.mustCall())
+}
+
+// Ensure that given readable stream that throws an error it calls destroy
+{
+ const myErrorMessage = 'error!'
+ const duplex = Duplex.from(
+ Readable({
+ read() {
+ throw new Error(myErrorMessage)
+ }
+ })
+ )
+ duplex.on(
+ 'error',
+ common.mustCall((msg) => {
+ assert.strictEqual(msg.message, myErrorMessage)
+ })
+ )
+}
+
+// Ensure that given writable stream that throws an error it calls destroy
+{
+ const myErrorMessage = 'error!'
+ const duplex = Duplex.from(
+ Writable({
+ write(chunk, enc, cb) {
+ cb(myErrorMessage)
+ }
+ })
+ )
+ duplex.on(
+ 'error',
+ common.mustCall((msg) => {
+ assert.strictEqual(msg, myErrorMessage)
+ })
+ )
+ duplex.write('test')
+}
+{
+ const through = new PassThrough({
+ objectMode: true
+ })
+ let res = ''
+ const d = Readable.from(['foo', 'bar'], {
+ objectMode: true
+ }).pipe(
+ Duplex.from({
+ writable: through,
+ readable: through
+ })
+ )
+ d.on('data', (data) => {
+ d.pause()
+ setImmediate(() => {
+ d.resume()
+ })
+ res += data
+ })
+ .on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(res, 'foobar')
+ })
+ )
+ .on('close', common.mustCall())
+}
+function makeATestReadableStream(value) { // helper: one-chunk web ReadableStream
+  return new ReadableStream({
+    start(controller) {
+      controller.enqueue(value)
+      controller.close()
+    }
+  })
+}
+function makeATestWritableStream(writeFunc) { // helper: web WritableStream delegating to writeFunc
+  return new WritableStream({
+    write(chunk) {
+      writeFunc(chunk)
+    }
+  })
+}
+{
+ const d = Duplex.from({
+ readable: makeATestReadableStream('foo')
+ })
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, false)
+ d.on(
+ 'data',
+ common.mustCall((data) => {
+ assert.strictEqual(data.toString(), 'foo')
+ })
+ )
+ d.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+}
+{
+ const d = Duplex.from(makeATestReadableStream('foo'))
+ assert.strictEqual(d.readable, true)
+ assert.strictEqual(d.writable, false)
+ d.on(
+ 'data',
+ common.mustCall((data) => {
+ assert.strictEqual(data.toString(), 'foo')
+ })
+ )
+ d.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from({
+ writable: makeATestWritableStream((chunk) => (ret += chunk))
+ })
+ assert.strictEqual(d.readable, false)
+ assert.strictEqual(d.writable, true)
+ d.end('foo')
+ d.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(ret, 'foo')
+ assert.strictEqual(d.writable, false)
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from(makeATestWritableStream((chunk) => (ret += chunk)))
+ assert.strictEqual(d.readable, false)
+ assert.strictEqual(d.writable, true)
+ d.end('foo')
+ d.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(ret, 'foo')
+ assert.strictEqual(d.writable, false)
+ })
+ )
+}
+{
+ let ret = ''
+ const d = Duplex.from({
+ readable: makeATestReadableStream('foo'),
+ writable: makeATestWritableStream((chunk) => (ret += chunk))
+ })
+ d.end('bar')
+ d.on(
+ 'data',
+ common.mustCall((data) => {
+ assert.strictEqual(data.toString(), 'foo')
+ })
+ )
+ d.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(d.readable, false)
+ })
+ )
+ d.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(ret, 'bar')
+ assert.strictEqual(d.writable, false)
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-props.js b/test/parallel/test-stream-duplex-props.js
new file mode 100644
index 0000000000..1a976f6687
--- /dev/null
+++ b/test/parallel/test-stream-duplex-props.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Duplex } = require('../../lib/ours/index')
+{
+ const d = new Duplex({
+ objectMode: true,
+ highWaterMark: 100
+ })
+ assert.strictEqual(d.writableObjectMode, true)
+ assert.strictEqual(d.writableHighWaterMark, 100)
+ assert.strictEqual(d.readableObjectMode, true)
+ assert.strictEqual(d.readableHighWaterMark, 100)
+}
+{
+ const d = new Duplex({
+ readableObjectMode: false,
+ readableHighWaterMark: 10,
+ writableObjectMode: true,
+ writableHighWaterMark: 100
+ })
+ assert.strictEqual(d.writableObjectMode, true)
+ assert.strictEqual(d.writableHighWaterMark, 100)
+ assert.strictEqual(d.readableObjectMode, false)
+ assert.strictEqual(d.readableHighWaterMark, 10)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-readable-end.js b/test/parallel/test-stream-duplex-readable-end.js
new file mode 100644
index 0000000000..ecb15381f0
--- /dev/null
+++ b/test/parallel/test-stream-duplex-readable-end.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+// https://github.com/nodejs/node/issues/35926
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+let loops = 5
+const src = new stream.Readable({
+ read() {
+ if (loops--) this.push(Buffer.alloc(20000))
+ }
+})
+const dst = new stream.Transform({
+ transform(chunk, output, fn) {
+ this.push(null)
+ fn()
+ }
+})
+src.pipe(dst)
+dst.on('data', () => {})
+dst.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(loops, 3)
+ assert.ok(src.isPaused())
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-readable-writable.js b/test/parallel/test-stream-duplex-readable-writable.js
new file mode 100644
index 0000000000..90425acbdb
--- /dev/null
+++ b/test/parallel/test-stream-duplex-readable-writable.js
@@ -0,0 +1,64 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Duplex } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const duplex = new Duplex({
+ readable: false
+ })
+ assert.strictEqual(duplex.readable, false)
+ duplex.push('asd')
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PUSH_AFTER_EOF')
+ })
+ )
+ duplex.on('data', common.mustNotCall())
+ duplex.on('end', common.mustNotCall())
+}
+{
+ const duplex = new Duplex({
+ writable: false,
+ write: common.mustNotCall()
+ })
+ assert.strictEqual(duplex.writable, false)
+ duplex.write('asd')
+ duplex.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ duplex.on('finish', common.mustNotCall())
+}
+{
+ const duplex = new Duplex({
+ readable: false
+ })
+ assert.strictEqual(duplex.readable, false)
+ duplex.on('data', common.mustNotCall())
+ duplex.on('end', common.mustNotCall())
+ async function run() {
+ for await (const chunk of duplex) {
+ assert(false, chunk)
+ }
+ }
+ run().then(common.mustCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-duplex-writable-finished.js b/test/parallel/test-stream-duplex-writable-finished.js
new file mode 100644
index 0000000000..b78ba23935
--- /dev/null
+++ b/test/parallel/test-stream-duplex-writable-finished.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Duplex } = require('../../lib/ours/index')
+const assert = require('assert')
+
+// basic
+{
+ // Find it on Duplex.prototype
+ assert(Reflect.has(Duplex.prototype, 'writableFinished'))
+}
+
+// event
+{
+ const duplex = new Duplex()
+ duplex._write = (chunk, encoding, cb) => {
+ // The state finished should start in false.
+ assert.strictEqual(duplex.writableFinished, false)
+ cb()
+ }
+ duplex.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(duplex.writableFinished, true)
+ })
+ )
+ duplex.end(
+ 'testing finished state',
+ common.mustCall(() => {
+ assert.strictEqual(duplex.writableFinished, true)
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-end-of-streams.js b/test/parallel/test-stream-end-of-streams.js
new file mode 100644
index 0000000000..e2af91b8ec
--- /dev/null
+++ b/test/parallel/test-stream-end-of-streams.js
@@ -0,0 +1,35 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Duplex, finished } = require('../../lib/ours/index')
+assert.throws(
+ () => {
+ // Passing empty object to mock invalid stream
+ // should throw error
+ finished({}, () => {})
+ },
+ {
+ code: 'ERR_INVALID_ARG_TYPE'
+ }
+)
+const streamObj = new Duplex()
+streamObj.end()
+// Below code should not throw any errors as the
+// streamObj is `Stream`
+finished(streamObj, () => {})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js
new file mode 100644
index 0000000000..5ca5d84f76
--- /dev/null
+++ b/test/parallel/test-stream-end-paused.js
@@ -0,0 +1,66 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+
+// Make sure we don't miss the end event for paused 0-length streams
+
+const Readable = require('../../lib/ours/index').Readable
+const stream = new Readable()
+let calledRead = false
+stream._read = function () {
+ assert(!calledRead)
+ calledRead = true
+ this.push(null)
+}
+stream.on('data', function () {
+ throw new Error('should not ever get data')
+})
+stream.pause()
+setTimeout(
+ common.mustCall(function () {
+ stream.on('end', common.mustCall())
+ stream.resume()
+ }),
+ 1
+)
+process.on('exit', function () {
+ assert(calledRead)
+ silentConsole.log('ok')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-error-once.js b/test/parallel/test-stream-error-once.js
new file mode 100644
index 0000000000..dadfc2d677
--- /dev/null
+++ b/test/parallel/test-stream-error-once.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable, Readable } = require('../../lib/ours/index')
+{
+ const writable = new Writable()
+ writable.on('error', common.mustCall())
+ writable.end()
+ writable.write('h')
+ writable.write('h')
+}
+{
+ const readable = new Readable()
+ readable.on('error', common.mustCall())
+ readable.push(null)
+ readable.push('h')
+ readable.push('h')
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-events-prepend.js b/test/parallel/test-stream-events-prepend.js
new file mode 100644
index 0000000000..4ad0c24c2b
--- /dev/null
+++ b/test/parallel/test-stream-events-prepend.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+class Writable extends stream.Writable {
+ constructor() {
+ super()
+ this.prependListener = undefined
+ }
+ _write(chunk, end, cb) {
+ cb()
+ }
+}
+class Readable extends stream.Readable {
+ _read() {
+ this.push(null)
+ }
+}
+const w = new Writable()
+w.on('pipe', common.mustCall())
+const r = new Readable()
+r.pipe(w)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-filter.js b/test/parallel/test-stream-filter.js
new file mode 100644
index 0000000000..a6b30f897d
--- /dev/null
+++ b/test/parallel/test-stream-filter.js
@@ -0,0 +1,218 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+const { once } = require('events')
+const st = require('timers').setTimeout
+function setTimeout(ms) {
+ return new Promise((resolve) => {
+ st(resolve, ms)
+ })
+}
+{
+ // Filter works on synchronous streams with a synchronous predicate
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => x < 3)
+ const result = [1, 2]
+ ;(async () => {
+ for await (const item of stream) {
+ assert.strictEqual(item, result.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Filter works on synchronous streams with an asynchronous predicate
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => {
+ await Promise.resolve()
+ return x > 3
+ })
+ const result = [4, 5]
+ ;(async () => {
+ for await (const item of stream) {
+ assert.strictEqual(item, result.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Map works on asynchronous streams with a asynchronous mapper
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ .map(async (x) => {
+ await Promise.resolve()
+ return x + x
+ })
+ .filter((x) => x > 5)
+ const result = [6, 8, 10]
+ ;(async () => {
+ for await (const item of stream) {
+ assert.strictEqual(item, result.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Filter works on an infinite stream
+ const stream = Readable.from(
+ (async function* () {
+ while (true) yield 1
+ })()
+ ).filter(
+ common.mustCall(async (x) => {
+ return x < 3
+ }, 5)
+ )
+ ;(async () => {
+ let i = 1
+ for await (const item of stream) {
+ assert.strictEqual(item, 1)
+ if (++i === 5) break
+ }
+ })().then(common.mustCall())
+}
+{
+ // Filter works on constructor created streams
+ let i = 0
+ const stream = new Readable({
+ read() {
+ if (i === 10) {
+ this.push(null)
+ return
+ }
+ this.push(Uint8Array.from([i]))
+ i++
+ },
+ highWaterMark: 0
+ }).filter(
+ common.mustCall(async ([x]) => {
+ return x !== 5
+ }, 10)
+ )
+ ;(async () => {
+ const result = (await stream.toArray()).map((x) => x[0])
+ const expected = [...Array(10).keys()].filter((x) => x !== 5)
+ assert.deepStrictEqual(result, expected)
+ })().then(common.mustCall())
+}
+{
+ // Throwing an error during `filter` (sync)
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => {
+ if (x === 3) {
+ throw new Error('boom')
+ }
+ return true
+ })
+ assert.rejects(stream.map((x) => x + x).toArray(), /boom/).then(common.mustCall())
+}
+{
+ // Throwing an error during `filter` (async)
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter(async (x) => {
+ if (x === 3) {
+ throw new Error('boom')
+ }
+ return true
+ })
+ assert.rejects(stream.filter(() => true).toArray(), /boom/).then(common.mustCall())
+}
+{
+ // Concurrency + AbortSignal
+ const ac = new AbortController()
+ let calls = 0
+ const stream = Readable.from([1, 2, 3, 4]).filter(
+ async (_, { signal }) => {
+ calls++
+ await once(signal, 'abort')
+ },
+ {
+ signal: ac.signal,
+ concurrency: 2
+ }
+ )
+ // pump
+ assert
+ .rejects(
+ async () => {
+ for await (const item of stream) {
+ // nope
+ silentConsole.log(item)
+ }
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+ setImmediate(() => {
+ ac.abort()
+ assert.strictEqual(calls, 2)
+ })
+}
+{
+ // Concurrency result order
+ const stream = Readable.from([1, 2]).filter(
+ async (item, { signal }) => {
+ await setTimeout(10 - item, {
+ signal
+ })
+ return true
+ },
+ {
+ concurrency: 2
+ }
+ )
+ ;(async () => {
+ const expected = [1, 2]
+ for await (const item of stream) {
+ assert.strictEqual(item, expected.shift())
+ }
+ })().then(common.mustCall())
+}
+{
+ // Error cases
+ assert.throws(() => Readable.from([1]).filter(1), /ERR_INVALID_ARG_TYPE/)
+ assert.throws(
+ () =>
+ Readable.from([1]).filter((x) => x, {
+ concurrency: 'Foo'
+ }),
+ /ERR_OUT_OF_RANGE/
+ )
+ assert.throws(() => Readable.from([1]).filter((x) => x, 1), /ERR_INVALID_ARG_TYPE/)
+}
+{
+ // Test result is a Readable
+ const stream = Readable.from([1, 2, 3, 4, 5]).filter((x) => true)
+ assert.strictEqual(stream.readable, true)
+}
+{
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ Object.defineProperty(stream, 'map', {
+ value: common.mustNotCall()
+ })
+ // Check that map isn't getting called.
+ stream.filter(() => true)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-finished.js b/test/parallel/test-stream-finished.js
new file mode 100644
index 0000000000..26f286af58
--- /dev/null
+++ b/test/parallel/test-stream-finished.js
@@ -0,0 +1,773 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable, Readable, Transform, finished, Duplex, PassThrough, Stream } = require('../../lib/ours/index')
+const assert = require('assert')
+const EE = require('events')
+const fs = require('fs')
+const { promisify } = require('util')
+const http = require('http')
+{
+ const rs = new Readable({
+ read() {}
+ })
+ finished(rs, common.mustSucceed())
+ rs.push(null)
+ rs.resume()
+}
+{
+ const ws = new Writable({
+ write(data, enc, cb) {
+ cb()
+ }
+ })
+ finished(ws, common.mustSucceed())
+ ws.end()
+}
+{
+ const tr = new Transform({
+ transform(data, enc, cb) {
+ cb()
+ }
+ })
+ let finish = false
+ let ended = false
+ tr.on('end', () => {
+ ended = true
+ })
+ tr.on('finish', () => {
+ finish = true
+ })
+ finished(
+ tr,
+ common.mustSucceed(() => {
+ assert(finish)
+ assert(ended)
+ })
+ )
+ tr.end()
+ tr.resume()
+}
+{
+ const rs = fs.createReadStream(__filename)
+ rs.resume()
+ finished(rs, common.mustCall())
+}
+{
+ const finishedPromise = promisify(finished)
+ async function run() {
+ const rs = fs.createReadStream(__filename)
+ const done = common.mustCall()
+ let ended = false
+ rs.resume()
+ rs.on('end', () => {
+ ended = true
+ })
+ await finishedPromise(rs)
+ assert(ended)
+ done()
+ }
+ run()
+}
+{
+ // Check pre-cancelled
+ const signal = new EventTarget()
+ signal.aborted = true
+ const rs = Readable.from((function* () {})())
+ finished(
+ rs,
+ {
+ signal
+ },
+ common.mustCall((err) => {
+ assert.strictEqual(err.name, 'AbortError')
+ })
+ )
+}
+{
+ // Check cancelled before the stream ends sync.
+ const ac = new AbortController()
+ const { signal } = ac
+ const rs = Readable.from((function* () {})())
+ finished(
+ rs,
+ {
+ signal
+ },
+ common.mustCall((err) => {
+ assert.strictEqual(err.name, 'AbortError')
+ })
+ )
+ ac.abort()
+}
+{
+ // Check cancelled before the stream ends async.
+ const ac = new AbortController()
+ const { signal } = ac
+ const rs = Readable.from((function* () {})())
+ setTimeout(() => ac.abort(), 1)
+ finished(
+ rs,
+ {
+ signal
+ },
+ common.mustCall((err) => {
+ assert.strictEqual(err.name, 'AbortError')
+ })
+ )
+}
+{
+ // Check cancelled after doesn't throw.
+ const ac = new AbortController()
+ const { signal } = ac
+ const rs = Readable.from(
+ (function* () {
+ yield 5
+ setImmediate(() => ac.abort())
+ })()
+ )
+ rs.resume()
+ finished(
+ rs,
+ {
+ signal
+ },
+ common.mustSucceed()
+ )
+}
+{
+ // Promisified abort works
+ const finishedPromise = promisify(finished)
+ async function run() {
+ const ac = new AbortController()
+ const { signal } = ac
+ const rs = Readable.from((function* () {})())
+ setImmediate(() => ac.abort())
+ await finishedPromise(rs, {
+ signal
+ })
+ }
+ assert
+ .rejects(run, {
+ name: 'AbortError'
+ })
+ .then(common.mustCall())
+}
+{
+ // Promisified pre-aborted works
+ const finishedPromise = promisify(finished)
+ async function run() {
+ const signal = new EventTarget()
+ signal.aborted = true
+ const rs = Readable.from((function* () {})())
+ await finishedPromise(rs, {
+ signal
+ })
+ }
+ assert
+ .rejects(run, {
+ name: 'AbortError'
+ })
+ .then(common.mustCall())
+}
+{
+ const rs = fs.createReadStream('file-does-not-exist')
+ finished(
+ rs,
+ common.expectsError({
+ code: 'ENOENT'
+ })
+ )
+}
+{
+ const rs = new Readable()
+ finished(rs, common.mustSucceed())
+ rs.push(null)
+ rs.emit('close') // Should not trigger an error
+ rs.resume()
+}
+{
+ const rs = new Readable()
+ finished(
+ rs,
+ common.mustCall((err) => {
+ assert(err, 'premature close error')
+ })
+ )
+ rs.emit('close') // Should trigger error
+ rs.push(null)
+ rs.resume()
+}
+
+// Test faulty input values and options.
+{
+ const rs = new Readable({
+ read() {}
+ })
+ assert.throws(() => finished(rs, 'foo'), {
+ code: 'ERR_INVALID_ARG_TYPE',
+ message: /callback/
+ })
+ assert.throws(() => finished(rs, 'foo', () => {}), {
+ code: 'ERR_INVALID_ARG_TYPE',
+ message: /options/
+ })
+ assert.throws(() => finished(rs, {}, 'foo'), {
+ code: 'ERR_INVALID_ARG_TYPE',
+ message: /callback/
+ })
+ finished(rs, null, common.mustCall())
+ rs.push(null)
+ rs.resume()
+}
+
+// Test that calling returned function removes listeners
+{
+ const ws = new Writable({
+ write(data, env, cb) {
+ cb()
+ }
+ })
+ const removeListener = finished(ws, common.mustNotCall())
+ removeListener()
+ ws.end()
+}
+{
+ const rs = new Readable()
+ const removeListeners = finished(rs, common.mustNotCall())
+ removeListeners()
+ rs.emit('close')
+ rs.push(null)
+ rs.resume()
+}
+{
+ const streamLike = new EE()
+ streamLike.readableEnded = true
+ streamLike.readable = true
+ assert.throws(
+ () => {
+ finished(streamLike, () => {})
+ },
+ {
+ code: 'ERR_INVALID_ARG_TYPE'
+ }
+ )
+ streamLike.emit('close')
+}
+{
+ const writable = new Writable({
+ write() {}
+ })
+ writable.writable = false
+ writable.destroy()
+ finished(
+ writable,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+}
+{
+ const readable = new Readable()
+ readable.readable = false
+ readable.destroy()
+ finished(
+ readable,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+}
+{
+ const w = new Writable({
+ write(chunk, encoding, callback) {
+ setImmediate(callback)
+ }
+ })
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+ w.end('asd')
+ w.destroy()
+}
+function testClosed(factory) {
+ {
+ // If already destroyed but finished is cancelled in same tick
+ // don't invoke the callback,
+
+ const s = factory()
+ s.destroy()
+ const dispose = finished(s, common.mustNotCall())
+ dispose()
+ }
+ {
+ // If already destroyed invoked callback.
+
+ const s = factory()
+ s.destroy()
+ finished(s, common.mustCall())
+ }
+ {
+ // Don't invoke until destroy has completed.
+
+ let destroyed = false
+ const s = factory({
+ destroy(err, cb) {
+ setImmediate(() => {
+ destroyed = true
+ cb()
+ })
+ }
+ })
+ s.destroy()
+ finished(
+ s,
+ common.mustCall(() => {
+ assert.strictEqual(destroyed, true)
+ })
+ )
+ }
+ {
+ // Invoke callback even if close is inhibited.
+
+ const s = factory({
+ emitClose: false,
+ destroy(err, cb) {
+ cb()
+ finished(s, common.mustCall())
+ }
+ })
+ s.destroy()
+ }
+ {
+ // Invoke with deep async.
+
+ const s = factory({
+ destroy(err, cb) {
+ setImmediate(() => {
+ cb()
+ setImmediate(() => {
+ finished(s, common.mustCall())
+ })
+ })
+ }
+ })
+ s.destroy()
+ }
+}
+testClosed(
+ (opts) =>
+ new Readable({
+ ...opts
+ })
+)
+testClosed(
+ (opts) =>
+ new Writable({
+ write() {},
+ ...opts
+ })
+)
+{
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ },
+ autoDestroy: false
+ })
+ w.end('asd')
+ process.nextTick(() => {
+ finished(w, common.mustCall())
+ })
+}
+{
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb(new Error())
+ },
+ autoDestroy: false
+ })
+ w.write('asd')
+ w.on(
+ 'error',
+ common.mustCall(() => {
+ finished(w, common.mustCall())
+ })
+ )
+}
+{
+ const r = new Readable({
+ autoDestroy: false
+ })
+ r.push(null)
+ r.resume()
+ r.on(
+ 'end',
+ common.mustCall(() => {
+ finished(r, common.mustCall())
+ })
+ )
+}
+{
+ const rs = fs.createReadStream(__filename, {
+ autoClose: false
+ })
+ rs.resume()
+ rs.on('close', common.mustNotCall())
+ rs.on(
+ 'end',
+ common.mustCall(() => {
+ finished(rs, common.mustCall())
+ })
+ )
+}
+{
+ const d = new EE()
+ d._writableState = {}
+ d._writableState.finished = true
+ finished(
+ d,
+ {
+ readable: false,
+ writable: true
+ },
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+ d._writableState.errored = true
+ d.emit('close')
+}
+{
+ const r = new Readable()
+ finished(
+ r,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+ r.push('asd')
+ r.push(null)
+ r.destroy()
+}
+{
+ const d = new Duplex({
+ final(cb) {},
+ // Never close writable side for test purpose
+ read() {
+ this.push(null)
+ }
+ })
+ d.on('end', common.mustCall())
+ finished(
+ d,
+ {
+ readable: true,
+ writable: false
+ },
+ common.mustCall()
+ )
+ d.end()
+ d.resume()
+}
+{
+ const d = new Duplex({
+ final(cb) {},
+ // Never close writable side for test purpose
+ read() {
+ this.push(null)
+ }
+ })
+ d.on('end', common.mustCall())
+ d.end()
+ finished(
+ d,
+ {
+ readable: true,
+ writable: false
+ },
+ common.mustCall()
+ )
+ d.resume()
+}
+{
+ // Test for compat for e.g. fd-slicer which implements
+ // non standard destroy behavior which might not emit
+ // 'close'.
+ const r = new Readable()
+ finished(r, common.mustCall())
+ r.resume()
+ r.push('asd')
+ r.destroyed = true
+ r.push(null)
+}
+{
+ // Regression https://github.com/nodejs/node/issues/33130
+ const response = new PassThrough()
+ class HelloWorld extends Duplex {
+ constructor(response) {
+ super({
+ autoDestroy: false
+ })
+ this.response = response
+ this.readMore = false
+ response.once('end', () => {
+ this.push(null)
+ })
+ response.on('readable', () => {
+ if (this.readMore) {
+ this._read()
+ }
+ })
+ }
+ _read() {
+ const { response } = this
+ this.readMore = true
+ if (response.readableLength) {
+ this.readMore = false
+ }
+ let data
+ while ((data = response.read()) !== null) {
+ this.push(data)
+ }
+ }
+ }
+ const instance = new HelloWorld(response)
+ instance.setEncoding('utf8')
+ instance.end()
+ ;(async () => {
+ await EE.once(instance, 'finish')
+ setImmediate(() => {
+ response.write('chunk 1')
+ response.write('chunk 2')
+ response.write('chunk 3')
+ response.end()
+ })
+ let res = ''
+ for await (const data of instance) {
+ res += data
+ }
+ assert.strictEqual(res, 'chunk 1chunk 2chunk 3')
+ })().then(common.mustCall())
+}
+{
+ const p = new PassThrough()
+ p.end()
+ finished(p, common.mustNotCall())
+}
+{
+ const p = new PassThrough()
+ p.end()
+ p.on(
+ 'finish',
+ common.mustCall(() => {
+ finished(p, common.mustNotCall())
+ })
+ )
+}
+{
+ const server = http
+ .createServer(
+ common.mustCall((req, res) => {
+ res.on(
+ 'finish',
+ common.mustCall(() => {
+ finished(
+ res,
+ common.mustCall(() => {
+ server.close()
+ })
+ )
+ })
+ )
+ res.end()
+ })
+ )
+ .listen(0, function () {
+ http
+ .request({
+ method: 'GET',
+ port: this.address().port
+ })
+ .end()
+ .on('response', common.mustCall())
+ })
+}
+{
+ const server = http
+ .createServer(
+ common.mustCall((req, res) => {
+ req.on(
+ 'close',
+ common.mustCall(() => {
+ finished(
+ req,
+ common.mustCall(() => {
+ server.close()
+ })
+ )
+ })
+ )
+ req.destroy()
+ })
+ )
+ .listen(0, function () {
+ http
+ .request({
+ method: 'GET',
+ port: this.address().port
+ })
+ .end()
+ .on('error', common.mustCall())
+ })
+}
+{
+ const w = new Writable({
+ write(chunk, encoding, callback) {
+ process.nextTick(callback)
+ }
+ })
+ w.aborted = false
+ w.end()
+ let closed = false
+ w.on('finish', () => {
+ assert.strictEqual(closed, false)
+ w.emit('aborted')
+ })
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ closed = true
+ })
+ )
+ finished(
+ w,
+ common.mustCall(() => {
+ assert.strictEqual(closed, true)
+ })
+ )
+}
+{
+ const w = new Writable()
+ const _err = new Error()
+ w.destroy(_err)
+ assert.strictEqual(w.errored, _err)
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(_err, err)
+ assert.strictEqual(w.closed, true)
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(_err, err)
+ })
+ )
+ })
+ )
+}
+{
+ const w = new Writable()
+ w.destroy()
+ assert.strictEqual(w.errored, null)
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(w.closed, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ finished(
+ w,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+ )
+ })
+ )
+}
+{
+ // Legacy Streams do not inherit from Readable or Writable.
+ // We cannot really assume anything about them, so we cannot close them
+ // automatically.
+ const s = new Stream()
+ finished(s, common.mustNotCall())
+}
+{
+ const server = http
+ .createServer(
+ common.mustCall(function (req, res) {
+ fs.createReadStream(__filename).pipe(res)
+ finished(
+ res,
+ common.mustCall(function (err) {
+ assert.strictEqual(err, undefined)
+ })
+ )
+ })
+ )
+ .listen(0, function () {
+ http
+ .request(
+ {
+ method: 'GET',
+ port: this.address().port
+ },
+ common.mustCall(function (res) {
+ res.resume()
+ finished(
+ res,
+ common.mustCall(() => {
+ server.close()
+ })
+ )
+ })
+ )
+ .end()
+ })
+}
+{
+ const stream = new Duplex({
+ write(chunk, enc, cb) {
+ setImmediate(cb)
+ }
+ })
+ stream.end('foo')
+ finished(
+ stream,
+ {
+ readable: false
+ },
+ common.mustCall((err) => {
+ assert(!err)
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-flatMap.js b/test/parallel/test-stream-flatMap.js
new file mode 100644
index 0000000000..446f30dc92
--- /dev/null
+++ b/test/parallel/test-stream-flatMap.js
@@ -0,0 +1,195 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const fixtures = require('../common/fixtures')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+const st = require('timers').setTimeout
+function setTimeout(ms) {
+ return new Promise((resolve) => {
+ st(resolve, ms)
+ })
+}
+const { createReadStream } = require('fs')
+function oneTo5() {
+ return Readable.from([1, 2, 3, 4, 5])
+}
+{
+ // flatMap works on synchronous streams with a synchronous mapper
+ ;(async () => {
+ assert.deepStrictEqual(
+ await oneTo5()
+ .flatMap((x) => [x + x])
+ .toArray(),
+ [2, 4, 6, 8, 10]
+ )
+ assert.deepStrictEqual(
+ await oneTo5()
+ .flatMap(() => [])
+ .toArray(),
+ []
+ )
+ assert.deepStrictEqual(
+ await oneTo5()
+ .flatMap((x) => [x, x])
+ .toArray(),
+ [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]
+ )
+ })().then(common.mustCall())
+}
+{
+ // flatMap works on sync/async streams with an asynchronous mapper
+ ;(async () => {
+ assert.deepStrictEqual(
+ await oneTo5()
+ .flatMap(async (x) => [x, x])
+ .toArray(),
+ [1, 1, 2, 2, 3, 3, 4, 4, 5, 5]
+ )
+ const asyncOneTo5 = oneTo5().map(async (x) => x)
+ assert.deepStrictEqual(await asyncOneTo5.flatMap(async (x) => [x, x]).toArray(), [1, 1, 2, 2, 3, 3, 4, 4, 5, 5])
+ })().then(common.mustCall())
+}
+{
+ // flatMap works on a stream where mapping returns a stream
+ ;(async () => {
+ const result = await oneTo5()
+ .flatMap(async (x) => {
+ return Readable.from([x, x])
+ })
+ .toArray()
+ assert.deepStrictEqual(result, [1, 1, 2, 2, 3, 3, 4, 4, 5, 5])
+ })().then(common.mustCall())
+  // flatMap works on an objectMode stream where mapping returns a stream
+ ;(async () => {
+ const result = await oneTo5()
+ .flatMap(() => {
+ return createReadStream(fixtures.path('x.txt'))
+ })
+ .toArray()
+ // The resultant stream is in object mode so toArray shouldn't flatten
+ assert.strictEqual(result.length, 5)
+ assert.deepStrictEqual(
+ Buffer.concat(result).toString(),
+ (process.platform === 'win32' ? 'xyz\r\n' : 'xyz\n').repeat(5)
+ )
+ })().then(common.mustCall())
+}
+{
+ // Concurrency + AbortSignal
+ const ac = new AbortController()
+ const stream = oneTo5().flatMap(
+ common.mustNotCall(async (_, { signal }) => {
+ await setTimeout(100, {
+ signal
+ })
+ }),
+ {
+ signal: ac.signal,
+ concurrency: 2
+ }
+ )
+ // pump
+ assert
+ .rejects(
+ async () => {
+ for await (const item of stream) {
+ // nope
+ silentConsole.log(item)
+ }
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+ queueMicrotask(() => {
+ ac.abort()
+ })
+}
+{
+ // Already aborted AbortSignal
+ const stream = oneTo5().flatMap(
+ common.mustNotCall(async (_, { signal }) => {
+ await setTimeout(100, {
+ signal
+ })
+ }),
+ {
+ signal: AbortSignal.abort()
+ }
+ )
+ // pump
+ assert
+ .rejects(
+ async () => {
+ for await (const item of stream) {
+ // nope
+ silentConsole.log(item)
+ }
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+}
+{
+ // Error cases
+ assert.throws(() => Readable.from([1]).flatMap(1), /ERR_INVALID_ARG_TYPE/)
+ assert.throws(
+ () =>
+ Readable.from([1]).flatMap((x) => x, {
+ concurrency: 'Foo'
+ }),
+ /ERR_OUT_OF_RANGE/
+ )
+ assert.throws(() => Readable.from([1]).flatMap((x) => x, 1), /ERR_INVALID_ARG_TYPE/)
+ assert.throws(
+ () =>
+ Readable.from([1]).flatMap((x) => x, {
+ signal: true
+ }),
+ /ERR_INVALID_ARG_TYPE/
+ )
+}
+{
+ // Test result is a Readable
+ const stream = oneTo5().flatMap((x) => x)
+ assert.strictEqual(stream.readable, true)
+}
+{
+ const stream = oneTo5()
+ Object.defineProperty(stream, 'map', {
+ value: common.mustNotCall()
+ })
+ // Check that map isn't getting called.
+ stream.flatMap(() => true)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js
new file mode 100644
index 0000000000..80115e9174
--- /dev/null
+++ b/test/parallel/test-stream-forEach.js
@@ -0,0 +1,200 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+const { once } = require('events')
+{
+ // forEach works on synchronous streams with a synchronous predicate
+ const stream = Readable.from([1, 2, 3])
+ const result = [1, 2, 3]
+ ;(async () => {
+ await stream.forEach((value) => assert.strictEqual(value, result.shift()))
+ })().then(common.mustCall())
+}
+{
+  // forEach works on asynchronous streams
+ const stream = Readable.from([1, 2, 3]).filter(async (x) => {
+ await Promise.resolve()
+ return true
+ })
+ const result = [1, 2, 3]
+ ;(async () => {
+ await stream.forEach((value) => assert.strictEqual(value, result.shift()))
+ })().then(common.mustCall())
+}
+{
+  // forEach works on asynchronous streams with an asynchronous forEach fn
+ const stream = Readable.from([1, 2, 3]).filter(async (x) => {
+ await Promise.resolve()
+ return true
+ })
+ const result = [1, 2, 3]
+ ;(async () => {
+ await stream.forEach(async (value) => {
+ await Promise.resolve()
+ assert.strictEqual(value, result.shift())
+ })
+ })().then(common.mustCall())
+}
+{
+ // forEach works on an infinite stream
+ const ac = new AbortController()
+ const { signal } = ac
+ const stream = Readable.from(
+ (async function* () {
+ while (true) yield 1
+ })(),
+ {
+ signal
+ }
+ )
+ let i = 0
+ assert
+ .rejects(
+ stream.forEach(
+ common.mustCall((x) => {
+ i++
+ if (i === 10) ac.abort()
+ assert.strictEqual(x, 1)
+ }, 10)
+ ),
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+}
+{
+ // Emitting an error during `forEach`
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ assert
+ .rejects(
+ stream.forEach(async (x) => {
+ if (x === 3) {
+ stream.emit('error', new Error('boom'))
+ }
+ }),
+ /boom/
+ )
+ .then(common.mustCall())
+}
+{
+ // Throwing an error during `forEach` (sync)
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ assert
+ .rejects(
+ stream.forEach((x) => {
+ if (x === 3) {
+ throw new Error('boom')
+ }
+ }),
+ /boom/
+ )
+ .then(common.mustCall())
+}
+{
+ // Throwing an error during `forEach` (async)
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ assert
+ .rejects(
+ stream.forEach(async (x) => {
+ if (x === 3) {
+ return Promise.reject(new Error('boom'))
+ }
+ }),
+ /boom/
+ )
+ .then(common.mustCall())
+}
+{
+ // Concurrency + AbortSignal
+ const ac = new AbortController()
+ let calls = 0
+ const forEachPromise = Readable.from([1, 2, 3, 4]).forEach(
+ async (_, { signal }) => {
+ calls++
+ await once(signal, 'abort')
+ },
+ {
+ signal: ac.signal,
+ concurrency: 2,
+ highWaterMark: 0
+ }
+ )
+ // pump
+ assert
+ .rejects(
+ async () => {
+ await forEachPromise
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+ setImmediate(() => {
+ ac.abort()
+ assert.strictEqual(calls, 2)
+ })
+}
+{
+ // Error cases
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).forEach(1)
+ }, /ERR_INVALID_ARG_TYPE/)
+ .then(common.mustCall())
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).forEach((x) => x, {
+ concurrency: 'Foo'
+ })
+ }, /ERR_OUT_OF_RANGE/)
+ .then(common.mustCall())
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).forEach((x) => x, 1)
+ }, /ERR_INVALID_ARG_TYPE/)
+ .then(common.mustCall())
+}
+{
+ // Test result is a Promise
+ const stream = Readable.from([1, 2, 3, 4, 5]).forEach((_) => true)
+ assert.strictEqual(typeof stream.then, 'function')
+}
+{
+ const stream = Readable.from([1, 2, 3, 4, 5])
+ Object.defineProperty(stream, 'map', {
+ value: common.mustNotCall()
+ })
+ // Check that map isn't getting called.
+ stream.forEach(() => true)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-inheritance.js b/test/parallel/test-stream-inheritance.js
new file mode 100644
index 0000000000..150a43a74f
--- /dev/null
+++ b/test/parallel/test-stream-inheritance.js
@@ -0,0 +1,68 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Readable, Writable, Duplex, Transform } = require('../../lib/ours/index')
+const readable = new Readable({
+ read() {}
+})
+const writable = new Writable({
+ write() {}
+})
+const duplex = new Duplex({
+ read() {},
+ write() {}
+})
+const transform = new Transform({
+ transform() {}
+})
+assert.ok(readable instanceof Readable)
+assert.ok(!(writable instanceof Readable))
+assert.ok(duplex instanceof Readable)
+assert.ok(transform instanceof Readable)
+assert.ok(!(readable instanceof Writable))
+assert.ok(writable instanceof Writable)
+assert.ok(duplex instanceof Writable)
+assert.ok(transform instanceof Writable)
+assert.ok(!(readable instanceof Duplex))
+assert.ok(!(writable instanceof Duplex))
+assert.ok(duplex instanceof Duplex)
+assert.ok(transform instanceof Duplex)
+assert.ok(!(readable instanceof Transform))
+assert.ok(!(writable instanceof Transform))
+assert.ok(!(duplex instanceof Transform))
+assert.ok(transform instanceof Transform)
+assert.ok(!(null instanceof Writable))
+assert.ok(!(undefined instanceof Writable))
+
+// Simple inheritance check for `Writable` works fine in a subclass constructor.
+function CustomWritable() {
+ assert.ok(this instanceof CustomWritable, `${this} does not inherit from CustomWritable`)
+ assert.ok(this instanceof Writable, `${this} does not inherit from Writable`)
+}
+Object.setPrototypeOf(CustomWritable, Writable)
+Object.setPrototypeOf(CustomWritable.prototype, Writable.prototype)
+new CustomWritable()
+assert.throws(CustomWritable, {
+ code: 'ERR_ASSERTION',
+ constructor: assert.AssertionError,
+ message: 'undefined does not inherit from CustomWritable'
+})
+class OtherCustomWritable extends Writable {}
+assert(!(new OtherCustomWritable() instanceof CustomWritable))
+assert(!(new CustomWritable() instanceof OtherCustomWritable))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-ispaused.js b/test/parallel/test-stream-ispaused.js
new file mode 100644
index 0000000000..32a8711c34
--- /dev/null
+++ b/test/parallel/test-stream-ispaused.js
@@ -0,0 +1,58 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const readable = new stream.Readable()
+
+// _read is a noop, here.
+readable._read = Function()
+
+// Default state of a stream is not "paused"
+assert.ok(!readable.isPaused())
+
+// Make the stream start flowing...
+readable.on('data', Function())
+
+// still not paused.
+assert.ok(!readable.isPaused())
+readable.pause()
+assert.ok(readable.isPaused())
+readable.resume()
+assert.ok(!readable.isPaused())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-iterator-helpers-test262-tests.mjs b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs
new file mode 100644
index 0000000000..6bddd42918
--- /dev/null
+++ b/test/parallel/test-stream-iterator-helpers-test262-tests.mjs
@@ -0,0 +1,174 @@
+import { mustCall } from '../common/index.mjs'
+import { Readable } from '../../lib/ours/index.js'
+import assert from 'assert'
+import tap from 'tap'
+
+// These tests are manually ported from the draft PR for the test262 test suite
+// Authored by Rick Waldron in https://github.com/tc39/test262/pull/2818/files
+
+// test262 license:
+// The << Software identified by reference to the Ecma Standard* ("Software)">>
+// is protected by copyright and is being made available under the
+// "BSD License", included below. This Software may be subject to third party
+// rights (rights from parties other than Ecma International), including patent
+// rights, and no licenses under such third party rights are granted under this
+// license even if the third party concerned is a member of Ecma International.
+// SEE THE ECMA CODE OF CONDUCT IN PATENT MATTERS AVAILABLE AT
+// http://www.ecma-international.org/memento/codeofconduct.htm FOR INFORMATION
+// REGARDING THE LICENSING OF PATENT CLAIMS THAT ARE REQUIRED TO IMPLEMENT ECMA
+// INTERNATIONAL STANDARDS*
+
+// Copyright (C) 2012-2013 Ecma International
+// All rights reserved.
+
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+// 1. Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+// 3. Neither the name of the authors nor Ecma International may be used to
+// endorse or promote products derived from this software without specific
+// prior written permission.
+
+// THIS SOFTWARE IS PROVIDED BY THE ECMA INTERNATIONAL "AS IS" AND ANY EXPRESS
+// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
+// NO EVENT SHALL ECMA INTERNATIONAL BE LIABLE FOR ANY DIRECT, INDIRECT,
+// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
+// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// * Ecma International Standards hereafter means Ecma International Standards
+// as well as Ecma Technical Reports
+
+// Note all the tests that check AsyncIterator's prototype itself and things
+// that happen before stream conversion were not ported.
+{
+ // asIndexedPairs/is-function
+ assert.strictEqual(typeof Readable.prototype.asIndexedPairs, 'function')
+ // asIndexedPairs/indexed-pairs.js
+ const iterator = Readable.from([0, 1])
+ const indexedPairs = iterator.asIndexedPairs()
+
+ for await (const [i, v] of indexedPairs) {
+ assert.strictEqual(i, v)
+ }
+ // asIndexedPairs/length.js
+ assert.strictEqual(Readable.prototype.asIndexedPairs.length, 0)
+ const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'asIndexedPairs')
+ assert.strictEqual(descriptor.enumerable, false)
+ assert.strictEqual(descriptor.configurable, true)
+ assert.strictEqual(descriptor.writable, true)
+}
+{
+ // drop/length
+ assert.strictEqual(Readable.prototype.drop.length, 1)
+ const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'drop')
+ assert.strictEqual(descriptor.enumerable, false)
+ assert.strictEqual(descriptor.configurable, true)
+ assert.strictEqual(descriptor.writable, true)
+ // drop/limit-equals-total
+ const iterator = Readable.from([1, 2]).drop(2)
+ const result = await iterator[Symbol.asyncIterator]().next()
+ assert.deepStrictEqual(result, { done: true, value: undefined })
+ // drop/limit-greater-than-total.js
+ const iterator2 = Readable.from([1, 2]).drop(3)
+ const result2 = await iterator2[Symbol.asyncIterator]().next()
+ assert.deepStrictEqual(result2, { done: true, value: undefined })
+ // drop/limit-less-than-total.js
+ const iterator3 = Readable.from([1, 2]).drop(1)
+ const result3 = await iterator3[Symbol.asyncIterator]().next()
+ assert.deepStrictEqual(result3, { done: false, value: 2 })
+ // drop/limit-rangeerror
+ assert.throws(() => Readable.from([1]).drop(-1), RangeError)
+ assert.throws(() => {
+ Readable.from([1]).drop({
+ valueOf() {
+ throw new Error('boom')
+ }
+ })
+ }, /boom/)
+ // drop/limit-tointeger
+ const two = await Readable.from([1, 2])
+ .drop({ valueOf: () => 1 })
+ .toArray()
+ assert.deepStrictEqual(two, [2])
+ // drop/name
+ assert.strictEqual(Readable.prototype.drop.name, 'drop')
+ // drop/non-constructible
+ assert.throws(() => new Readable.prototype.drop(1), TypeError)
+ // drop/proto
+ const proto = Object.getPrototypeOf(Readable.prototype.drop)
+ assert.strictEqual(proto, Function.prototype)
+}
+{
+ // every/abrupt-iterator-close
+ const stream = Readable.from([1, 2, 3])
+ const e = new Error()
+ await assert.rejects(
+ stream.every(
+ mustCall(() => {
+ throw e
+ }, 1)
+ ),
+ e
+ )
+}
+{
+ // every/callable-fn
+ await assert.rejects(Readable.from([1, 2]).every({}), TypeError)
+}
+{
+ // every/callable
+ Readable.prototype.every.call(Readable.from([]), () => {})
+ // eslint-disable-next-line array-callback-return
+ Readable.from([]).every(() => {})
+ assert.throws(() => {
+ const r = Readable.from([])
+ new r.every(() => {})
+ }, TypeError)
+}
+
+{
+ // every/false
+ const iterator = Readable.from([1, 2, 3])
+ const result = await iterator.every((v) => v === 1)
+ assert.strictEqual(result, false)
+}
+{
+ // every/every
+ const iterator = Readable.from([1, 2, 3])
+ const result = await iterator.every((v) => true)
+ assert.strictEqual(result, true)
+}
+
+{
+ // every/is-function
+ assert.strictEqual(typeof Readable.prototype.every, 'function')
+}
+{
+ // every/length
+ assert.strictEqual(Readable.prototype.every.length, 1)
+ // every/name
+ assert.strictEqual(Readable.prototype.every.name, 'every')
+ // every/propdesc
+ const descriptor = Object.getOwnPropertyDescriptor(Readable.prototype, 'every')
+ assert.strictEqual(descriptor.enumerable, false)
+ assert.strictEqual(descriptor.configurable, true)
+ assert.strictEqual(descriptor.writable, true)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-objectmode-undefined.js b/test/parallel/test-stream-objectmode-undefined.js
new file mode 100644
index 0000000000..7e3b28b83d
--- /dev/null
+++ b/test/parallel/test-stream-objectmode-undefined.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable, Transform } = require('../../lib/ours/index')
+{
+ const stream = new Readable({
+ objectMode: true,
+ read: common.mustCall(() => {
+ stream.push(undefined)
+ stream.push(null)
+ })
+ })
+ stream.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk, undefined)
+ })
+ )
+}
+{
+ const stream = new Writable({
+ objectMode: true,
+ write: common.mustCall((chunk) => {
+ assert.strictEqual(chunk, undefined)
+ })
+ })
+ stream.write(undefined)
+}
+{
+ const stream = new Transform({
+ objectMode: true,
+ transform: common.mustCall((chunk) => {
+ stream.push(chunk)
+ })
+ })
+ stream.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk, undefined)
+ })
+ )
+ stream.write(undefined)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-once-readable-pipe.js b/test/parallel/test-stream-once-readable-pipe.js
new file mode 100644
index 0000000000..097bcae05b
--- /dev/null
+++ b/test/parallel/test-stream-once-readable-pipe.js
@@ -0,0 +1,74 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+
+// This test ensures that having a 'readable' listener
+// on a Readable instance will not disrupt the pipe.
+
+{
+ let receivedData = ''
+ const w = new Writable({
+ write: (chunk, env, callback) => {
+ receivedData += chunk
+ callback()
+ }
+ })
+ const data = ['foo', 'bar', 'baz']
+ const r = new Readable({
+ read: () => {}
+ })
+ r.once('readable', common.mustCall())
+ r.pipe(w)
+ r.push(data[0])
+ r.push(data[1])
+ r.push(data[2])
+ r.push(null)
+ w.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(receivedData, data.join(''))
+ })
+ )
+}
+{
+ let receivedData = ''
+ const w = new Writable({
+ write: (chunk, env, callback) => {
+ receivedData += chunk
+ callback()
+ }
+ })
+ const data = ['foo', 'bar', 'baz']
+ const r = new Readable({
+ read: () => {}
+ })
+ r.pipe(w)
+ r.push(data[0])
+ r.push(data[1])
+ r.push(data[2])
+ r.push(null)
+ r.once('readable', common.mustCall())
+ w.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(receivedData, data.join(''))
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-passthrough-drain.js b/test/parallel/test-stream-passthrough-drain.js
new file mode 100644
index 0000000000..2433cb3730
--- /dev/null
+++ b/test/parallel/test-stream-passthrough-drain.js
@@ -0,0 +1,27 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { PassThrough } = require('../../lib/ours/index')
+const pt = new PassThrough({
+ highWaterMark: 0
+})
+pt.on('drain', common.mustCall())
+assert(!pt.write('hello1'))
+pt.read()
+pt.read()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js
new file mode 100644
index 0000000000..d0df35184f
--- /dev/null
+++ b/test/parallel/test-stream-pipe-after-end.js
@@ -0,0 +1,81 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+class TestReadable extends Readable {
+ constructor(opt) {
+ super(opt)
+ this._ended = false
+ }
+ _read() {
+ if (this._ended) this.emit('error', new Error('_read called twice'))
+ this._ended = true
+ this.push(null)
+ }
+}
+class TestWritable extends Writable {
+ constructor(opt) {
+ super(opt)
+ this._written = []
+ }
+ _write(chunk, encoding, cb) {
+ this._written.push(chunk)
+ cb()
+ }
+}
+
+// This one should not emit 'end' until we read() from it later.
+const ender = new TestReadable()
+
+// What happens when you pipe() a Readable that's already ended?
+const piper = new TestReadable()
+// pushes EOF null, and length=0, so this will trigger 'end'
+piper.read()
+setTimeout(
+ common.mustCall(function () {
+ ender.on('end', common.mustCall())
+ const c = ender.read()
+ assert.strictEqual(c, null)
+ const w = new TestWritable()
+ w.on('finish', common.mustCall())
+ piper.pipe(w)
+ }),
+ 1
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-await-drain-manual-resume.js b/test/parallel/test-stream-pipe-await-drain-manual-resume.js
new file mode 100644
index 0000000000..18e62b6c90
--- /dev/null
+++ b/test/parallel/test-stream-pipe-await-drain-manual-resume.js
@@ -0,0 +1,94 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+
+// A consumer stream with a very low highWaterMark, which starts in a state
+// where it buffers the chunk it receives rather than indicating that they
+// have been consumed.
+const writable = new stream.Writable({
+ highWaterMark: 5
+})
+let isCurrentlyBufferingWrites = true
+const queue = []
+writable._write = (chunk, encoding, cb) => {
+ if (isCurrentlyBufferingWrites)
+ queue.push({
+ chunk,
+ cb
+ })
+ else cb()
+}
+const readable = new stream.Readable({
+ read() {}
+})
+readable.pipe(writable)
+readable.once(
+ 'pause',
+ common.mustCall(() => {
+ assert.strictEqual(
+ readable._readableState.awaitDrainWriters,
+ writable,
+ 'Expected awaitDrainWriters to be a Writable but instead got ' + `${readable._readableState.awaitDrainWriters}`
+ )
+ // First pause, resume manually. The next write() to writable will still
+ // return false, because chunks are still being buffered, so it will increase
+ // the awaitDrain counter again.
+
+ process.nextTick(
+ common.mustCall(() => {
+ readable.resume()
+ })
+ )
+ readable.once(
+ 'pause',
+ common.mustCall(() => {
+ assert.strictEqual(
+ readable._readableState.awaitDrainWriters,
+ writable,
+ '.resume() should not reset the awaitDrainWriters, but instead got ' +
+ `${readable._readableState.awaitDrainWriters}`
+ )
+ // Second pause, handle all chunks from now on. Once all callbacks that
+ // are currently queued up are handled, the awaitDrain drain counter should
+ // fall back to 0 and all chunks that are pending on the readable side
+ // should be flushed.
+ isCurrentlyBufferingWrites = false
+ for (const queued of queue) queued.cb()
+ })
+ )
+ })
+)
+readable.push(Buffer.alloc(100)) // Fill the writable HWM, first 'pause'.
+readable.push(Buffer.alloc(100)) // Second 'pause'.
+readable.push(Buffer.alloc(100)) // Should get through to the writable.
+readable.push(null)
+writable.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(
+ readable._readableState.awaitDrainWriters,
+ null,
+ `awaitDrainWriters should be reset to null
+ after all chunks are written but instead got
+ ${readable._readableState.awaitDrainWriters}`
+ )
+ // Everything okay, all chunks were written.
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-await-drain-push-while-write.js b/test/parallel/test-stream-pipe-await-drain-push-while-write.js
new file mode 100644
index 0000000000..130ae08568
--- /dev/null
+++ b/test/parallel/test-stream-pipe-await-drain-push-while-write.js
@@ -0,0 +1,46 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+const writable = new stream.Writable({
+ write: common.mustCall(function (chunk, encoding, cb) {
+ assert.strictEqual(readable._readableState.awaitDrainWriters, null)
+ if (chunk.length === 32 * 1024) {
+ // first chunk
+ readable.push(Buffer.alloc(34 * 1024)) // above hwm
+ // We should check if awaitDrain counter is increased in the next
+ // tick, because awaitDrain is incremented after this method finished
+ process.nextTick(() => {
+ assert.strictEqual(readable._readableState.awaitDrainWriters, writable)
+ })
+ }
+ process.nextTick(cb)
+ }, 3)
+})
+
+// A readable stream which produces two buffers.
+const bufs = [Buffer.alloc(32 * 1024), Buffer.alloc(33 * 1024)] // above hwm
+const readable = new stream.Readable({
+ read: function () {
+ while (bufs.length > 0) {
+ this.push(bufs.shift())
+ }
+ }
+})
+readable.pipe(writable)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-await-drain.js b/test/parallel/test-stream-pipe-await-drain.js
new file mode 100644
index 0000000000..912e37eef8
--- /dev/null
+++ b/test/parallel/test-stream-pipe-await-drain.js
@@ -0,0 +1,75 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+
+// This is very similar to test-stream-pipe-cleanup-pause.js.
+
+const reader = new stream.Readable()
+const writer1 = new stream.Writable()
+const writer2 = new stream.Writable()
+const writer3 = new stream.Writable()
+
+// 560000 is chosen here because it is larger than the (default) highWaterMark
+// and will cause `.write()` to return false
+// See: https://github.com/nodejs/node/issues/5820
+const buffer = Buffer.allocUnsafe(560000)
+reader._read = () => {}
+writer1._write = common.mustCall(function (chunk, encoding, cb) {
+ this.emit('chunk-received')
+ process.nextTick(cb)
+}, 1)
+writer1.once('chunk-received', () => {
+ assert.strictEqual(
+ reader._readableState.awaitDrainWriters.size,
+ 0,
+ 'awaitDrain initial value should be 0, actual is ' + reader._readableState.awaitDrainWriters.size
+ )
+ setImmediate(() => {
+ // This one should *not* get through to writer1 because writer2 is not
+ // "done" processing.
+ reader.push(buffer)
+ })
+})
+
+// A "slow" consumer:
+writer2._write = common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(
+ reader._readableState.awaitDrainWriters.size,
+ 1,
+ 'awaitDrain should be 1 after first push, actual is ' + reader._readableState.awaitDrainWriters.size
+ )
+ // Not calling cb here to "simulate" a slow stream.
+ // This should be called exactly once, since the first .write() call
+ // will return false.
+}, 1)
+writer3._write = common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(
+ reader._readableState.awaitDrainWriters.size,
+ 2,
+ 'awaitDrain should be 2 after second push, actual is ' + reader._readableState.awaitDrainWriters.size
+ )
+ // Not calling cb here to "simulate" slow stream.
+ // This should be called exactly once, since the first .write() call
+ // will return false.
+}, 1)
+reader.pipe(writer1)
+reader.pipe(writer2)
+reader.pipe(writer3)
+reader.push(buffer)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-cleanup-pause.js b/test/parallel/test-stream-pipe-cleanup-pause.js
new file mode 100644
index 0000000000..0b7326dd51
--- /dev/null
+++ b/test/parallel/test-stream-pipe-cleanup-pause.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const reader = new stream.Readable()
+const writer1 = new stream.Writable()
+const writer2 = new stream.Writable()
+
+// 560000 is chosen here because it is larger than the (default) highWaterMark
+// and will cause `.write()` to return false
+// See: https://github.com/nodejs/node/issues/2323
+const buffer = Buffer.allocUnsafe(560000)
+reader._read = () => {}
+writer1._write = common.mustCall(function (chunk, encoding, cb) {
+ this.emit('chunk-received')
+ cb()
+}, 1)
+writer1.once('chunk-received', function () {
+ reader.unpipe(writer1)
+ reader.pipe(writer2)
+ reader.push(buffer)
+ setImmediate(function () {
+ reader.push(buffer)
+ setImmediate(function () {
+ reader.push(buffer)
+ })
+ })
+})
+writer2._write = common.mustCall(function (chunk, encoding, cb) {
+ cb()
+}, 3)
+reader.pipe(writer1)
+reader.push(buffer)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js
new file mode 100644
index 0000000000..72b3875e79
--- /dev/null
+++ b/test/parallel/test-stream-pipe-cleanup.js
@@ -0,0 +1,127 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+// This test asserts that Stream.prototype.pipe does not leave listeners
+// hanging on the source or dest.
+require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+function Writable() {
+ this.writable = true
+ this.endCalls = 0
+ stream.Stream.call(this)
+}
+Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype)
+Object.setPrototypeOf(Writable, stream.Stream)
+Writable.prototype.end = function () {
+ this.endCalls++
+}
+Writable.prototype.destroy = function () {
+ this.endCalls++
+}
+function Readable() {
+ this.readable = true
+ stream.Stream.call(this)
+}
+Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype)
+Object.setPrototypeOf(Readable, stream.Stream)
+function Duplex() {
+ this.readable = true
+ Writable.call(this)
+}
+Object.setPrototypeOf(Duplex.prototype, Writable.prototype)
+Object.setPrototypeOf(Duplex, Writable)
+let i = 0
+const limit = 100
+let w = new Writable()
+let r
+for (i = 0; i < limit; i++) {
+ r = new Readable()
+ r.pipe(w)
+ r.emit('end')
+}
+assert.strictEqual(r.listeners('end').length, 0)
+assert.strictEqual(w.endCalls, limit)
+w.endCalls = 0
+for (i = 0; i < limit; i++) {
+ r = new Readable()
+ r.pipe(w)
+ r.emit('close')
+}
+assert.strictEqual(r.listeners('close').length, 0)
+assert.strictEqual(w.endCalls, limit)
+w.endCalls = 0
+r = new Readable()
+for (i = 0; i < limit; i++) {
+ w = new Writable()
+ r.pipe(w)
+ w.emit('close')
+}
+assert.strictEqual(w.listeners('close').length, 0)
+r = new Readable()
+w = new Writable()
+const d = new Duplex()
+r.pipe(d) // pipeline A
+d.pipe(w) // pipeline B
+assert.strictEqual(r.listeners('end').length, 2) // A.onend, A.cleanup
+assert.strictEqual(r.listeners('close').length, 2) // A.onclose, A.cleanup
+assert.strictEqual(d.listeners('end').length, 2) // B.onend, B.cleanup
+// A.cleanup, B.onclose, B.cleanup
+assert.strictEqual(d.listeners('close').length, 3)
+assert.strictEqual(w.listeners('end').length, 0)
+assert.strictEqual(w.listeners('close').length, 1) // B.cleanup
+
+r.emit('end')
+assert.strictEqual(d.endCalls, 1)
+assert.strictEqual(w.endCalls, 0)
+assert.strictEqual(r.listeners('end').length, 0)
+assert.strictEqual(r.listeners('close').length, 0)
+assert.strictEqual(d.listeners('end').length, 2) // B.onend, B.cleanup
+assert.strictEqual(d.listeners('close').length, 2) // B.onclose, B.cleanup
+assert.strictEqual(w.listeners('end').length, 0)
+assert.strictEqual(w.listeners('close').length, 1) // B.cleanup
+
+d.emit('end')
+assert.strictEqual(d.endCalls, 1)
+assert.strictEqual(w.endCalls, 1)
+assert.strictEqual(r.listeners('end').length, 0)
+assert.strictEqual(r.listeners('close').length, 0)
+assert.strictEqual(d.listeners('end').length, 0)
+assert.strictEqual(d.listeners('close').length, 0)
+assert.strictEqual(w.listeners('end').length, 0)
+assert.strictEqual(w.listeners('close').length, 0)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-deadlock.js b/test/parallel/test-stream-pipe-deadlock.js
new file mode 100644
index 0000000000..e5414f9f05
--- /dev/null
+++ b/test/parallel/test-stream-pipe-deadlock.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable, Writable } = require('../../lib/ours/index')
+
+// https://github.com/nodejs/node/issues/48666
+;(async () => {
+ // Prepare src that is internally ended, with buffered data pending
+ const src = new Readable({
+ read() {}
+ })
+ src.push(Buffer.alloc(100))
+ src.push(null)
+ src.pause()
+
+ // Give it time to settle
+ await new Promise((resolve) => setImmediate(resolve))
+ const dst = new Writable({
+ highWaterMark: 1000,
+ write(buf, enc, cb) {
+ process.nextTick(cb)
+ }
+ })
+ dst.write(Buffer.alloc(1000)) // Fill write buffer
+ dst.on('finish', common.mustCall())
+ src.pipe(dst)
+})().then(common.mustCall())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js
new file mode 100644
index 0000000000..e399a5f744
--- /dev/null
+++ b/test/parallel/test-stream-pipe-error-handling.js
@@ -0,0 +1,130 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Stream, PassThrough } = require('../../lib/ours/index')
+{
+ const source = new Stream()
+ const dest = new Stream()
+ source.pipe(dest)
+ let gotErr = null
+ source.on('error', function (err) {
+ gotErr = err
+ })
+ const err = new Error('This stream turned into bacon.')
+ source.emit('error', err)
+ assert.strictEqual(gotErr, err)
+}
+{
+ const source = new Stream()
+ const dest = new Stream()
+ source.pipe(dest)
+ const err = new Error('This stream turned into bacon.')
+ let gotErr = null
+ try {
+ source.emit('error', err)
+ } catch (e) {
+ gotErr = e
+ }
+ assert.strictEqual(gotErr, err)
+}
+{
+ const R = Stream.Readable
+ const W = Stream.Writable
+ const r = new R({
+ autoDestroy: false
+ })
+ const w = new W({
+ autoDestroy: false
+ })
+ let removed = false
+ r._read = common.mustCall(function () {
+ setTimeout(
+ common.mustCall(function () {
+ assert(removed)
+ assert.throws(function () {
+ w.emit('error', new Error('fail'))
+ }, /^Error: fail$/)
+ }),
+ 1
+ )
+ })
+ w.on('error', myOnError)
+ r.pipe(w)
+ w.removeListener('error', myOnError)
+ removed = true
+ function myOnError() {
+ throw new Error('this should not happen')
+ }
+}
+{
+ const R = Stream.Readable
+ const W = Stream.Writable
+ const r = new R()
+ const w = new W()
+ let removed = false
+ r._read = common.mustCall(function () {
+ setTimeout(
+ common.mustCall(function () {
+ assert(removed)
+ w.emit('error', new Error('fail'))
+ }),
+ 1
+ )
+ })
+ w.on('error', common.mustCall())
+ w._write = () => {}
+ r.pipe(w)
+ // Removing some OTHER random listener should not do anything
+ w.removeListener('error', () => {})
+ removed = true
+}
+{
+ const _err = new Error('this should be handled')
+ const destination = new PassThrough()
+ destination.once(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ const stream = new Stream()
+ stream.pipe(destination)
+ destination.destroy(_err)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-error-unhandled.js b/test/parallel/test-stream-pipe-error-unhandled.js
new file mode 100644
index 0000000000..76d4b2128a
--- /dev/null
+++ b/test/parallel/test-stream-pipe-error-unhandled.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+process.on(
+ 'uncaughtException',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'asd')
+ })
+)
+const r = new Readable({
+ read() {
+ this.push('asd')
+ }
+})
+const w = new Writable({
+ autoDestroy: true,
+ write() {}
+})
+r.pipe(w)
+w.destroy(new Error('asd'))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js
new file mode 100644
index 0000000000..19d716d924
--- /dev/null
+++ b/test/parallel/test-stream-pipe-event.js
@@ -0,0 +1,61 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+function Writable() {
+ this.writable = true
+ stream.Stream.call(this)
+}
+Object.setPrototypeOf(Writable.prototype, stream.Stream.prototype)
+Object.setPrototypeOf(Writable, stream.Stream)
+function Readable() {
+ this.readable = true
+ stream.Stream.call(this)
+}
+Object.setPrototypeOf(Readable.prototype, stream.Stream.prototype)
+Object.setPrototypeOf(Readable, stream.Stream)
+let passed = false
+const w = new Writable()
+w.on('pipe', function (src) {
+ passed = true
+})
+const r = new Readable()
+r.pipe(w)
+assert.ok(passed)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-flow-after-unpipe.js b/test/parallel/test-stream-pipe-flow-after-unpipe.js
new file mode 100644
index 0000000000..760bdcbc97
--- /dev/null
+++ b/test/parallel/test-stream-pipe-flow-after-unpipe.js
@@ -0,0 +1,44 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable, Writable } = require('../../lib/ours/index')
+
+// Tests that calling .unpipe() un-blocks a stream that is paused because
+// it is waiting on the writable side to finish a write().
+
+const rs = new Readable({
+ highWaterMark: 1,
+ // That this gets called at least 20 times is the real test here.
+ read: common.mustCallAtLeast(() => rs.push('foo'), 20)
+})
+const ws = new Writable({
+ highWaterMark: 1,
+ write: common.mustCall(() => {
+ // Ignore the callback; this write() simply never finishes.
+ setImmediate(() => rs.unpipe(ws))
+ })
+})
+let chunks = 0
+rs.on(
+ 'data',
+ common.mustCallAtLeast(() => {
+ chunks++
+ if (chunks >= 20) rs.pause() // Finish this test.
+ })
+)
+rs.pipe(ws)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-flow.js b/test/parallel/test-stream-pipe-flow.js
new file mode 100644
index 0000000000..56ccef69fd
--- /dev/null
+++ b/test/parallel/test-stream-pipe-flow.js
@@ -0,0 +1,107 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable, PassThrough } = require('../../lib/ours/index')
+{
+ let ticks = 17
+ const rs = new Readable({
+ objectMode: true,
+ read: () => {
+ if (ticks-- > 0) return process.nextTick(() => rs.push({}))
+ rs.push({})
+ rs.push(null)
+ }
+ })
+ const ws = new Writable({
+ highWaterMark: 0,
+ objectMode: true,
+ write: (data, end, cb) => setImmediate(cb)
+ })
+ rs.on('end', common.mustCall())
+ ws.on('finish', common.mustCall())
+ rs.pipe(ws)
+}
+{
+ let missing = 8
+ const rs = new Readable({
+ objectMode: true,
+ read: () => {
+ if (missing--) rs.push({})
+ else rs.push(null)
+ }
+ })
+ const pt = rs
+ .pipe(
+ new PassThrough({
+ objectMode: true,
+ highWaterMark: 2
+ })
+ )
+ .pipe(
+ new PassThrough({
+ objectMode: true,
+ highWaterMark: 2
+ })
+ )
+ pt.on('end', () => {
+ wrapper.push(null)
+ })
+ const wrapper = new Readable({
+ objectMode: true,
+ read: () => {
+ process.nextTick(() => {
+ let data = pt.read()
+ if (data === null) {
+ pt.once('readable', () => {
+ data = pt.read()
+ if (data !== null) wrapper.push(data)
+ })
+ } else {
+ wrapper.push(data)
+ }
+ })
+ }
+ })
+ wrapper.resume()
+ wrapper.on('end', common.mustCall())
+}
+{
+ // Only register drain if there is backpressure.
+ const rs = new Readable({
+ read() {}
+ })
+ const pt = rs.pipe(
+ new PassThrough({
+ objectMode: true,
+ highWaterMark: 2
+ })
+ )
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ pt.on('finish', () => {
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ })
+ rs.push('asd')
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ process.nextTick(() => {
+ rs.push('asd')
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ rs.push(null)
+ assert.strictEqual(pt.listenerCount('drain'), 0)
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-manual-resume.js b/test/parallel/test-stream-pipe-manual-resume.js
new file mode 100644
index 0000000000..d4f09f7b68
--- /dev/null
+++ b/test/parallel/test-stream-pipe-manual-resume.js
@@ -0,0 +1,47 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+function test(throwCodeInbetween) {
+ // Check that a pipe does not stall if .read() is called unexpectedly
+ // (i.e. the stream is not resumed by the pipe).
+
+ const n = 1000
+ let counter = n
+ const rs = stream.Readable({
+ objectMode: true,
+ read: common.mustCallAtLeast(() => {
+ if (--counter >= 0)
+ rs.push({
+ counter
+ })
+ else rs.push(null)
+ }, n)
+ })
+ const ws = stream.Writable({
+ objectMode: true,
+ write: common.mustCall((data, enc, cb) => {
+ setImmediate(cb)
+ }, n)
+ })
+ setImmediate(() => throwCodeInbetween(rs, ws))
+ rs.pipe(ws)
+}
+test((rs) => rs.read())
+test((rs) => rs.resume())
+test(() => 0)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-multiple-pipes.js b/test/parallel/test-stream-pipe-multiple-pipes.js
new file mode 100644
index 0000000000..39bde03aca
--- /dev/null
+++ b/test/parallel/test-stream-pipe-multiple-pipes.js
@@ -0,0 +1,61 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+const readable = new stream.Readable({
+ read: () => {}
+})
+const writables = []
+for (let i = 0; i < 5; i++) {
+ const target = new stream.Writable({
+ write: common.mustCall((chunk, encoding, callback) => {
+ target.output.push(chunk)
+ callback()
+ }, 1)
+ })
+ target.output = []
+ target.on('pipe', common.mustCall())
+ readable.pipe(target)
+ writables.push(target)
+}
+const input = Buffer.from([1, 2, 3, 4, 5])
+readable.push(input)
+
+// The pipe() calls will postpone emission of the 'resume' event using nextTick,
+// so no data will be available to the writable streams until then.
+process.nextTick(
+ common.mustCall(() => {
+ for (const target of writables) {
+ assert.deepStrictEqual(target.output, [input])
+ target.on('unpipe', common.mustCall())
+ readable.unpipe(target)
+ }
+ readable.push('something else') // This does not get through.
+ readable.push(null)
+ readable.resume() // Make sure the 'end' event gets emitted.
+ })
+)
+readable.on(
+ 'end',
+ common.mustCall(() => {
+ for (const target of writables) {
+ assert.deepStrictEqual(target.output, [input])
+ }
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-needDrain.js b/test/parallel/test-stream-pipe-needDrain.js
new file mode 100644
index 0000000000..e4c9ee2131
--- /dev/null
+++ b/test/parallel/test-stream-pipe-needDrain.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+
+// Pipe should pause temporarily if writable needs drain.
+{
+ const w = new Writable({
+ write(buf, encoding, callback) {
+ process.nextTick(callback)
+ },
+ highWaterMark: 1
+ })
+ while (w.write('asd'));
+ assert.strictEqual(w.writableNeedDrain, true)
+ const r = new Readable({
+ read() {
+ this.push('asd')
+ this.push(null)
+ }
+ })
+ r.on('pause', common.mustCall(2))
+ r.on('end', common.mustCall())
+ r.pipe(w)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-same-destination-twice.js b/test/parallel/test-stream-pipe-same-destination-twice.js
new file mode 100644
index 0000000000..63c1797d6c
--- /dev/null
+++ b/test/parallel/test-stream-pipe-same-destination-twice.js
@@ -0,0 +1,78 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+
+// Regression test for https://github.com/nodejs/node/issues/12718.
+// Tests that piping a source stream twice to the same destination stream
+// works, and that a subsequent unpipe() call only removes the pipe *once*.
+const assert = require('assert')
+const { PassThrough, Writable } = require('../../lib/ours/index')
+{
+ const passThrough = new PassThrough()
+ const dest = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(`${chunk}`, 'foobar')
+ cb()
+ })
+ })
+ passThrough.pipe(dest)
+ passThrough.pipe(dest)
+ assert.strictEqual(passThrough._events.data.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes[0], dest)
+ assert.strictEqual(passThrough._readableState.pipes[1], dest)
+ passThrough.unpipe(dest)
+ assert.strictEqual(passThrough._events.data.length, 1)
+ assert.strictEqual(passThrough._readableState.pipes.length, 1)
+ assert.deepStrictEqual(passThrough._readableState.pipes, [dest])
+ passThrough.write('foobar')
+ passThrough.pipe(dest)
+}
+{
+ const passThrough = new PassThrough()
+ const dest = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert.strictEqual(`${chunk}`, 'foobar')
+ cb()
+ }, 2)
+ })
+ passThrough.pipe(dest)
+ passThrough.pipe(dest)
+ assert.strictEqual(passThrough._events.data.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes[0], dest)
+ assert.strictEqual(passThrough._readableState.pipes[1], dest)
+ passThrough.write('foobar')
+}
+{
+ const passThrough = new PassThrough()
+ const dest = new Writable({
+ write: common.mustNotCall()
+ })
+ passThrough.pipe(dest)
+ passThrough.pipe(dest)
+ assert.strictEqual(passThrough._events.data.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes.length, 2)
+ assert.strictEqual(passThrough._readableState.pipes[0], dest)
+ assert.strictEqual(passThrough._readableState.pipes[1], dest)
+ passThrough.unpipe(dest)
+ passThrough.unpipe(dest)
+ assert.strictEqual(passThrough._events.data, undefined)
+ assert.strictEqual(passThrough._readableState.pipes.length, 0)
+ passThrough.write('foobar')
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-unpipe-streams.js b/test/parallel/test-stream-pipe-unpipe-streams.js
new file mode 100644
index 0000000000..86b66f67fd
--- /dev/null
+++ b/test/parallel/test-stream-pipe-unpipe-streams.js
@@ -0,0 +1,108 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+const source = Readable({
+ read: () => {}
+})
+const dest1 = Writable({
+ write: () => {}
+})
+const dest2 = Writable({
+ write: () => {}
+})
+source.pipe(dest1)
+source.pipe(dest2)
+dest1.on('unpipe', common.mustCall())
+dest2.on('unpipe', common.mustCall())
+assert.strictEqual(source._readableState.pipes[0], dest1)
+assert.strictEqual(source._readableState.pipes[1], dest2)
+assert.strictEqual(source._readableState.pipes.length, 2)
+
+// Should be able to unpipe them in the reverse order that they were piped.
+
+source.unpipe(dest2)
+assert.deepStrictEqual(source._readableState.pipes, [dest1])
+assert.notStrictEqual(source._readableState.pipes, dest2)
+dest2.on('unpipe', common.mustNotCall())
+source.unpipe(dest2)
+source.unpipe(dest1)
+assert.strictEqual(source._readableState.pipes.length, 0)
+{
+ // Test `cleanup()` if we unpipe all streams.
+ const source = Readable({
+ read: () => {}
+ })
+ const dest1 = Writable({
+ write: () => {}
+ })
+ const dest2 = Writable({
+ write: () => {}
+ })
+ let destCount = 0
+ const srcCheckEventNames = ['end', 'data']
+ const destCheckEventNames = ['close', 'finish', 'drain', 'error', 'unpipe']
+ const checkSrcCleanup = common.mustCall(() => {
+ assert.strictEqual(source._readableState.pipes.length, 0)
+ assert.strictEqual(source._readableState.flowing, false)
+ srcCheckEventNames.forEach((eventName) => {
+ assert.strictEqual(source.listenerCount(eventName), 0, `source's '${eventName}' event listeners not removed`)
+ })
+ })
+ function checkDestCleanup(dest) {
+ const currentDestId = ++destCount
+ source.pipe(dest)
+ const unpipeChecker = common.mustCall(() => {
+ assert.deepStrictEqual(
+ dest.listeners('unpipe'),
+ [unpipeChecker],
+ `destination{${currentDestId}} should have a 'unpipe' event ` + 'listener which is `unpipeChecker`'
+ )
+ dest.removeListener('unpipe', unpipeChecker)
+ destCheckEventNames.forEach((eventName) => {
+ assert.strictEqual(
+ dest.listenerCount(eventName),
+ 0,
+ `destination{${currentDestId}}'s '${eventName}' event ` + 'listeners not removed'
+ )
+ })
+ if (--destCount === 0) checkSrcCleanup()
+ })
+ dest.on('unpipe', unpipeChecker)
+ }
+ checkDestCleanup(dest1)
+ checkDestCleanup(dest2)
+ source.unpipe()
+}
+{
+ const src = Readable({
+ read: () => {}
+ })
+ const dst = Writable({
+ write: () => {}
+ })
+ src.pipe(dst)
+ src.on(
+ 'resume',
+ common.mustCall(() => {
+ src.on('pause', common.mustCall())
+ src.unpipe(dst)
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipe-without-listenerCount.js b/test/parallel/test-stream-pipe-without-listenerCount.js
new file mode 100644
index 0000000000..3c2c6f4da3
--- /dev/null
+++ b/test/parallel/test-stream-pipe-without-listenerCount.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const r = new stream.Stream()
+r.listenerCount = undefined
+const w = new stream.Stream()
+w.listenerCount = undefined
+w.on('pipe', function () {
+ r.emit('error', new Error('Readable Error'))
+ w.emit('error', new Error('Writable Error'))
+})
+r.on('error', common.mustCall())
+w.on('error', common.mustCall())
+r.pipe(w)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-async-iterator.js b/test/parallel/test-stream-pipeline-async-iterator.js
new file mode 100644
index 0000000000..dea0bdbf3d
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-async-iterator.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable, PassThrough, pipeline } = require('../../lib/ours/index')
+const assert = require('assert')
+const _err = new Error('kaboom')
+async function run() {
+ const source = new Readable({
+ read() {}
+ })
+ source.push('hello')
+ source.push('world')
+ setImmediate(() => {
+ source.destroy(_err)
+ })
+ const iterator = pipeline(source, new PassThrough(), () => {})
+ iterator.setEncoding('utf8')
+ for await (const k of iterator) {
+ assert.strictEqual(k, 'helloworld')
+ }
+}
+run().catch(common.mustCall((err) => assert.strictEqual(err, _err)))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-duplex.js b/test/parallel/test-stream-pipeline-duplex.js
new file mode 100644
index 0000000000..833848ed00
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-duplex.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { pipeline, Duplex, PassThrough } = require('../../lib/ours/index')
+const assert = require('assert')
+const remote = new PassThrough()
+const local = new Duplex({
+ read() {},
+ write(chunk, enc, callback) {
+ callback()
+ }
+})
+pipeline(
+ remote,
+ local,
+ remote,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+)
+setImmediate(() => {
+ remote.end()
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-http2.js b/test/parallel/test-stream-pipeline-http2.js
new file mode 100644
index 0000000000..5cd16d1843
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-http2.js
@@ -0,0 +1,51 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+if (!common.hasCrypto) common.skip('missing crypto')
+const { Readable, pipeline } = require('../../lib/ours/index')
+const http2 = require('http2')
+{
+ const server = http2.createServer((req, res) => {
+ pipeline(req, res, common.mustCall())
+ })
+ server.listen(0, () => {
+ const url = `http://localhost:${server.address().port}`
+ const client = http2.connect(url)
+ const req = client.request({
+ ':method': 'POST'
+ })
+ const rs = new Readable({
+ read() {
+ rs.push('hello')
+ }
+ })
+ pipeline(
+ rs,
+ req,
+ common.mustCall((err) => {
+ server.close()
+ client.close()
+ })
+ )
+ let cnt = 10
+ req.on('data', (data) => {
+ cnt--
+ if (cnt === 0) rs.destroy()
+ })
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-listeners.js b/test/parallel/test-stream-pipeline-listeners.js
new file mode 100644
index 0000000000..0b40366d35
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-listeners.js
@@ -0,0 +1,98 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { pipeline, Duplex, PassThrough, Writable } = require('../../lib/ours/index')
+const assert = require('assert')
+process.on(
+ 'uncaughtException',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'no way')
+ }, 2)
+)
+
+// Ensure that listeners is removed if last stream is readable
+// And other stream's listeners unchanged
+const a = new PassThrough()
+a.end('foobar')
+const b = new Duplex({
+ write(chunk, encoding, callback) {
+ callback()
+ }
+})
+pipeline(
+ a,
+ b,
+ common.mustCall((error) => {
+ if (error) {
+ assert.ifError(error)
+ }
+ assert(a.listenerCount('error') > 0)
+ assert.strictEqual(b.listenerCount('error'), 0)
+ setTimeout(() => {
+ assert.strictEqual(b.listenerCount('error'), 0)
+ b.destroy(new Error('no way'))
+ }, 100)
+ })
+)
+
+// Async generators
+const c = new PassThrough()
+c.end('foobar')
+const d = pipeline(
+ c,
+ async function* (source) {
+ for await (const chunk of source) {
+ yield String(chunk).toUpperCase()
+ }
+ },
+ common.mustCall((error) => {
+ if (error) {
+ assert.ifError(error)
+ }
+ assert(c.listenerCount('error') > 0)
+ assert.strictEqual(d.listenerCount('error'), 0)
+ setTimeout(() => {
+      assert.strictEqual(d.listenerCount('error'), 0)
+ d.destroy(new Error('no way'))
+ }, 100)
+ })
+)
+
+// If last stream is not readable, will not throw and remove listeners
+const e = new PassThrough()
+e.end('foobar')
+const f = new Writable({
+ write(chunk, encoding, callback) {
+ callback()
+ }
+})
+pipeline(
+ e,
+ f,
+ common.mustCall((error) => {
+ if (error) {
+ assert.ifError(error)
+ }
+ assert(e.listenerCount('error') > 0)
+ assert(f.listenerCount('error') > 0)
+ setTimeout(() => {
+ assert(f.listenerCount('error') > 0)
+ f.destroy(new Error('no way'))
+ }, 100)
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-process.js b/test/parallel/test-stream-pipeline-process.js
new file mode 100644
index 0000000000..d047b179e8
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-process.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const os = require('os')
+if (process.argv[2] === 'child') {
+ const { pipeline } = require('../../lib/ours/index')
+ pipeline(process.stdin, process.stdout, common.mustSucceed())
+} else {
+ const cp = require('child_process')
+ cp.exec(
+ ['echo', 'hello', '|', `"${process.execPath}"`, `"${__filename}"`, 'child'].join(' '),
+ common.mustSucceed((stdout) => {
+ assert.strictEqual(stdout.split(os.EOL).shift().trim(), 'hello')
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-queued-end-in-destroy.js b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js
new file mode 100644
index 0000000000..1d5b71baf2
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-queued-end-in-destroy.js
@@ -0,0 +1,56 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Duplex, pipeline } = require('../../lib/ours/index')
+
+// Test that the callback for pipeline() is called even when the ._destroy()
+// method of the stream places an .end() request to itself that does not
+// get processed before the destruction of the stream (i.e. the 'close' event).
+// Refs: https://github.com/nodejs/node/issues/24456
+
+const readable = new Readable({
+ read: common.mustCall()
+})
+const duplex = new Duplex({
+ write(chunk, enc, cb) {
+ // Simulate messages queueing up.
+ },
+ read() {},
+ destroy(err, cb) {
+ // Call end() from inside the destroy() method, like HTTP/2 streams
+ // do at the time of writing.
+ this.end()
+ cb(err)
+ }
+})
+duplex.on('finish', common.mustNotCall())
+pipeline(
+ readable,
+ duplex,
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE')
+ })
+)
+
+// Write one chunk of data, and destroy the stream later.
+// That should trigger the pipeline destruction.
+readable.push('foo')
+setImmediate(() => {
+ readable.destroy()
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-uncaught.js b/test/parallel/test-stream-pipeline-uncaught.js
new file mode 100644
index 0000000000..5b45eb1fcb
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-uncaught.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { pipeline, PassThrough } = require('../../lib/ours/index')
+const assert = require('assert')
+process.on(
+ 'uncaughtException',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'error')
+ })
+)
+
+// Ensure that pipeline that ends with Promise
+// still propagates error to uncaughtException.
+const s = new PassThrough()
+s.end('data')
+pipeline(
+ s,
+ async function (source) {
+ for await (const chunk of source) {
+ } // eslint-disable-line no-unused-vars, no-empty
+ },
+ common.mustSucceed(() => {
+ throw new Error('error')
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-pipeline-with-empty-string.js b/test/parallel/test-stream-pipeline-with-empty-string.js
new file mode 100644
index 0000000000..2e650b324f
--- /dev/null
+++ b/test/parallel/test-stream-pipeline-with-empty-string.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { pipeline, PassThrough } = require('../../lib/ours/index')
+async function runTest() {
+ await pipeline(
+ '',
+ new PassThrough({
+ objectMode: true
+ }),
+ common.mustCall()
+ )
+}
+runTest().then(common.mustCall())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-preprocess.js b/test/parallel/test-stream-preprocess.js
new file mode 100644
index 0000000000..736043131e
--- /dev/null
+++ b/test/parallel/test-stream-preprocess.js
@@ -0,0 +1,87 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const fs = require('fs')
+const rl = require('readline')
+const fixtures = require('../common/fixtures')
+const BOM = '\uFEFF'
+
+// Get the data using a non-stream way to compare with the streamed data.
+const modelData = fixtures.readSync('file-to-read-without-bom.txt', 'utf8')
+const modelDataFirstCharacter = modelData[0]
+
+// Detect the number of forthcoming 'line' events for mustCall() 'expected' arg.
+const lineCount = modelData.match(/\n/g).length
+
+// Ensure both without-bom and with-bom test files are textwise equal.
+assert.strictEqual(fixtures.readSync('file-to-read-with-bom.txt', 'utf8'), `${BOM}${modelData}`)
+
+// An unjustified BOM stripping with a non-BOM character unshifted to a stream.
+const inputWithoutBOM = fs.createReadStream(fixtures.path('file-to-read-without-bom.txt'), 'utf8')
+inputWithoutBOM.once(
+ 'readable',
+ common.mustCall(() => {
+ const maybeBOM = inputWithoutBOM.read(1)
+ assert.strictEqual(maybeBOM, modelDataFirstCharacter)
+ assert.notStrictEqual(maybeBOM, BOM)
+ inputWithoutBOM.unshift(maybeBOM)
+ let streamedData = ''
+ rl.createInterface({
+ input: inputWithoutBOM
+ })
+ .on(
+ 'line',
+ common.mustCall((line) => {
+ streamedData += `${line}\n`
+ }, lineCount)
+ )
+ .on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData)
+ })
+ )
+ })
+)
+
+// A justified BOM stripping.
+const inputWithBOM = fs.createReadStream(fixtures.path('file-to-read-with-bom.txt'), 'utf8')
+inputWithBOM.once(
+ 'readable',
+ common.mustCall(() => {
+ const maybeBOM = inputWithBOM.read(1)
+ assert.strictEqual(maybeBOM, BOM)
+ let streamedData = ''
+ rl.createInterface({
+ input: inputWithBOM
+ })
+ .on(
+ 'line',
+ common.mustCall((line) => {
+ streamedData += `${line}\n`
+ }, lineCount)
+ )
+ .on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(streamedData, process.platform === 'win32' ? modelData.replace(/\r\n/g, '\n') : modelData)
+ })
+ )
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-promises.js b/test/parallel/test-stream-promises.js
new file mode 100644
index 0000000000..1dc23f91ed
--- /dev/null
+++ b/test/parallel/test-stream-promises.js
@@ -0,0 +1,168 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const { Readable, Writable, promises } = stream
+const { finished, pipeline } = require('../../lib/stream/promises')
+const fs = require('fs')
+const assert = require('assert')
+const { promisify } = require('util')
+assert.strictEqual(promises.pipeline, pipeline)
+assert.strictEqual(promises.finished, finished)
+assert.strictEqual(pipeline, promisify(stream.pipeline))
+assert.strictEqual(finished, promisify(stream.finished))
+
+// pipeline success
+{
+ let finished = false
+ const processed = []
+ const expected = [Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]
+ const read = new Readable({
+ read() {}
+ })
+ const write = new Writable({
+ write(data, enc, cb) {
+ processed.push(data)
+ cb()
+ }
+ })
+ write.on('finish', () => {
+ finished = true
+ })
+ for (let i = 0; i < expected.length; i++) {
+ read.push(expected[i])
+ }
+ read.push(null)
+ pipeline(read, write).then(
+ common.mustCall((value) => {
+ assert.ok(finished)
+ assert.deepStrictEqual(processed, expected)
+ })
+ )
+}
+
+// pipeline error
+{
+ const read = new Readable({
+ read() {}
+ })
+ const write = new Writable({
+ write(data, enc, cb) {
+ cb()
+ }
+ })
+ read.push('data')
+ setImmediate(() => read.destroy())
+ pipeline(read, write).catch(
+ common.mustCall((err) => {
+ assert.ok(err, 'should have an error')
+ })
+ )
+}
+
+// finished success
+{
+ async function run() {
+ const rs = fs.createReadStream(__filename)
+ let ended = false
+ rs.resume()
+ rs.on('end', () => {
+ ended = true
+ })
+ await finished(rs)
+ assert(ended)
+ }
+ run().then(common.mustCall())
+}
+
+// finished error
+{
+ const rs = fs.createReadStream('file-does-not-exist')
+ assert
+ .rejects(finished(rs), {
+ code: 'ENOENT'
+ })
+ .then(common.mustCall())
+}
+{
+ const streamObj = new Readable()
+ assert.throws(
+ () => {
+ // Passing cleanup option not as boolean
+ // should throw error
+ finished(streamObj, {
+ cleanup: 2
+ })
+ },
+ {
+ code: 'ERR_INVALID_ARG_TYPE'
+ }
+ )
+}
+
+// Below code should not throw any errors as the
+// streamObj is `Stream` and cleanup is boolean
+{
+ const streamObj = new Readable()
+ finished(streamObj, {
+ cleanup: true
+ })
+}
+
+// Cleanup function should not be called when cleanup is set to false
+// listenerCount should be 1 after calling finish
+{
+ const streamObj = new Writable()
+ assert.strictEqual(streamObj.listenerCount('end'), 0)
+ finished(streamObj, {
+ cleanup: false
+ }).then(
+ common.mustCall(() => {
+ assert.strictEqual(streamObj.listenerCount('end'), 1)
+ })
+ )
+ streamObj.end()
+}
+
+// Cleanup function should be called when cleanup is set to true
+// listenerCount should be 0 after calling finish
+{
+ const streamObj = new Writable()
+ assert.strictEqual(streamObj.listenerCount('end'), 0)
+ finished(streamObj, {
+ cleanup: true
+ }).then(
+ common.mustCall(() => {
+ assert.strictEqual(streamObj.listenerCount('end'), 0)
+ })
+ )
+ streamObj.end()
+}
+
+// Cleanup function should not be called when cleanup has not been set
+// listenerCount should be 1 after calling finish
+{
+ const streamObj = new Writable()
+ assert.strictEqual(streamObj.listenerCount('end'), 0)
+ finished(streamObj).then(
+ common.mustCall(() => {
+ assert.strictEqual(streamObj.listenerCount('end'), 1)
+ })
+ )
+ streamObj.end()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-push-order.js b/test/parallel/test-stream-push-order.js
new file mode 100644
index 0000000000..5ed3c5c679
--- /dev/null
+++ b/test/parallel/test-stream-push-order.js
@@ -0,0 +1,64 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const Readable = require('../../lib/ours/index').Readable
+const assert = require('assert')
+const s = new Readable({
+ highWaterMark: 20,
+ encoding: 'ascii'
+})
+const list = ['1', '2', '3', '4', '5', '6']
+s._read = function (n) {
+ const one = list.shift()
+ if (!one) {
+ s.push(null)
+ } else {
+ const two = list.shift()
+ s.push(one)
+ s.push(two)
+ }
+}
+s.read(0)
+
+// ACTUALLY [1, 3, 5, 6, 4, 2]
+
+process.on('exit', function () {
+ assert.strictEqual(s.readableBuffer.join(','), '1,2,3,4,5,6')
+ silentConsole.log('ok')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js
new file mode 100644
index 0000000000..d2ceafeb0a
--- /dev/null
+++ b/test/parallel/test-stream-push-strings.js
@@ -0,0 +1,77 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const Readable = require('../../lib/ours/index').Readable
+class MyStream extends Readable {
+ constructor(options) {
+ super(options)
+ this._chunks = 3
+ }
+ _read(n) {
+ switch (this._chunks--) {
+ case 0:
+ return this.push(null)
+ case 1:
+ return setTimeout(() => {
+ this.push('last chunk')
+ }, 100)
+ case 2:
+ return this.push('second to last chunk')
+ case 3:
+ return process.nextTick(() => {
+ this.push('first chunk')
+ })
+ default:
+ throw new Error('?')
+ }
+ }
+}
+const ms = new MyStream()
+const results = []
+ms.on('readable', function () {
+ let chunk
+ while (null !== (chunk = ms.read())) results.push(String(chunk))
+})
+const expect = ['first chunksecond to last chunk', 'last chunk']
+process.on('exit', function () {
+ assert.strictEqual(ms._chunks, -1)
+ assert.deepStrictEqual(results, expect)
+ silentConsole.log('ok')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-aborted.js b/test/parallel/test-stream-readable-aborted.js
new file mode 100644
index 0000000000..84b126df8e
--- /dev/null
+++ b/test/parallel/test-stream-readable-aborted.js
@@ -0,0 +1,75 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Duplex } = require('../../lib/ours/index')
+{
+ const readable = new Readable({
+ read() {}
+ })
+ assert.strictEqual(readable.readableAborted, false)
+ readable.destroy()
+ assert.strictEqual(readable.readableAborted, true)
+}
+{
+ const readable = new Readable({
+ read() {}
+ })
+ assert.strictEqual(readable.readableAborted, false)
+ readable.push(null)
+ readable.destroy()
+ assert.strictEqual(readable.readableAborted, true)
+}
+{
+ const readable = new Readable({
+ read() {}
+ })
+ assert.strictEqual(readable.readableAborted, false)
+ readable.push('asd')
+ readable.destroy()
+ assert.strictEqual(readable.readableAborted, true)
+}
+{
+ const readable = new Readable({
+ read() {}
+ })
+ assert.strictEqual(readable.readableAborted, false)
+ readable.push('asd')
+ readable.push(null)
+ assert.strictEqual(readable.readableAborted, false)
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(readable.readableAborted, false)
+ readable.destroy()
+ assert.strictEqual(readable.readableAborted, false)
+ queueMicrotask(() => {
+ assert.strictEqual(readable.readableAborted, false)
+ })
+ })
+ )
+ readable.resume()
+}
+{
+ const duplex = new Duplex({
+ readable: false,
+ write() {}
+ })
+ duplex.destroy()
+ assert.strictEqual(duplex.readableAborted, false)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-add-chunk-during-data.js b/test/parallel/test-stream-readable-add-chunk-during-data.js
new file mode 100644
index 0000000000..d5c571f04a
--- /dev/null
+++ b/test/parallel/test-stream-readable-add-chunk-during-data.js
@@ -0,0 +1,43 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+
+// Verify that .push() and .unshift() can be called from 'data' listeners.
+
+for (const method of ['push', 'unshift']) {
+ const r = new Readable({
+ read() {}
+ })
+ r.once(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(r.readableLength, 0)
+ r[method](chunk)
+ assert.strictEqual(r.readableLength, chunk.length)
+ r.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk.toString(), 'Hello, world')
+ })
+ )
+ })
+ )
+ r.push('Hello, world')
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js
new file mode 100644
index 0000000000..3a80f9618e
--- /dev/null
+++ b/test/parallel/test-stream-readable-constructor-set-methods.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const Readable = require('../../lib/ours/index').Readable
+const _read = common.mustCall(function _read(n) {
+ this.push(null)
+})
+const r = new Readable({
+ read: _read
+})
+r.resume()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-data.js b/test/parallel/test-stream-readable-data.js
new file mode 100644
index 0000000000..430475d3e4
--- /dev/null
+++ b/test/parallel/test-stream-readable-data.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const readable = new Readable({
+ read() {}
+})
+function read() {}
+readable.setEncoding('utf8')
+readable.on('readable', read)
+readable.removeListener('readable', read)
+process.nextTick(function () {
+ readable.on('data', common.mustCall())
+ readable.push('hello')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-destroy.js b/test/parallel/test-stream-readable-destroy.js
new file mode 100644
index 0000000000..94b90791c2
--- /dev/null
+++ b/test/parallel/test-stream-readable-destroy.js
@@ -0,0 +1,420 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable, addAbortSignal } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read.on('close', common.mustCall())
+ read.destroy()
+ assert.strictEqual(read.errored, null)
+ assert.strictEqual(read.destroyed, true)
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ const expected = new Error('kaboom')
+ read.on('end', common.mustNotCall('no end event'))
+ read.on('close', common.mustCall())
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ read.destroy(expected)
+ assert.strictEqual(read.errored, expected)
+ assert.strictEqual(read.destroyed, true)
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb(err)
+ })
+ const expected = new Error('kaboom')
+ read.on('end', common.mustNotCall('no end event'))
+ read.on('close', common.mustCall())
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ read.destroy(expected)
+ assert.strictEqual(read.destroyed, true)
+}
+{
+ const read = new Readable({
+ read() {},
+ destroy: common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb()
+ })
+ })
+ const expected = new Error('kaboom')
+ read.on('end', common.mustNotCall('no end event'))
+
+ // Error is swallowed by the custom _destroy
+ read.on('error', common.mustNotCall('no error event'))
+ read.on('close', common.mustCall())
+ read.destroy(expected)
+ assert.strictEqual(read.destroyed, true)
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb()
+ })
+ read.destroy()
+ assert.strictEqual(read.destroyed, true)
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ process.nextTick(() => {
+ this.push(null)
+ cb()
+ })
+ })
+ const fail = common.mustNotCall('no end event')
+ read.on('end', fail)
+ read.on('close', common.mustCall())
+ read.destroy()
+ read.removeListener('end', fail)
+ read.on('end', common.mustNotCall())
+ assert.strictEqual(read.destroyed, true)
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ const expected = new Error('kaboom')
+ read._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb(expected)
+ })
+ let ticked = false
+ read.on('end', common.mustNotCall('no end event'))
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(read._readableState.errorEmitted, true)
+ assert.strictEqual(read._readableState.errored, expected)
+ assert.strictEqual(err, expected)
+ })
+ )
+ read.destroy()
+ assert.strictEqual(read._readableState.errorEmitted, false)
+ assert.strictEqual(read._readableState.errored, expected)
+ assert.strictEqual(read.destroyed, true)
+ ticked = true
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read.destroyed = true
+ assert.strictEqual(read.destroyed, true)
+
+ // The internal destroy() mechanism should not be triggered
+ read.on('end', common.mustNotCall())
+ read.destroy()
+}
+{
+ function MyReadable() {
+ assert.strictEqual(this.destroyed, false)
+ this.destroyed = false
+ Readable.call(this)
+ }
+ Object.setPrototypeOf(MyReadable.prototype, Readable.prototype)
+ Object.setPrototypeOf(MyReadable, Readable)
+ new MyReadable()
+}
+{
+ // Destroy and destroy callback
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ const expected = new Error('kaboom')
+ let ticked = false
+ read.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(read._readableState.errorEmitted, true)
+ assert.strictEqual(ticked, true)
+ })
+ )
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ assert.strictEqual(read._readableState.errored, null)
+ assert.strictEqual(read._readableState.errorEmitted, false)
+ read.destroy(
+ expected,
+ common.mustCall(function (err) {
+ assert.strictEqual(read._readableState.errored, expected)
+ assert.strictEqual(err, expected)
+ })
+ )
+ assert.strictEqual(read._readableState.errorEmitted, false)
+ assert.strictEqual(read._readableState.errored, expected)
+ ticked = true
+}
+{
+ const readable = new Readable({
+ destroy: common.mustCall(function (err, cb) {
+ process.nextTick(cb, new Error('kaboom 1'))
+ }),
+ read() {}
+ })
+ let ticked = false
+ readable.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(readable._readableState.errorEmitted, true)
+ })
+ )
+ readable.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.message, 'kaboom 1')
+ assert.strictEqual(readable._readableState.errorEmitted, true)
+ })
+ )
+ readable.destroy()
+ assert.strictEqual(readable.destroyed, true)
+ assert.strictEqual(readable._readableState.errored, null)
+ assert.strictEqual(readable._readableState.errorEmitted, false)
+
+ // Test case where `readable.destroy()` is called again with an error before
+ // the `_destroy()` callback is called.
+ readable.destroy(new Error('kaboom 2'))
+ assert.strictEqual(readable._readableState.errorEmitted, false)
+ assert.strictEqual(readable._readableState.errored, null)
+ ticked = true
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.destroy()
+ read.push('hi')
+ read.on('data', common.mustNotCall())
+}
+{
+ const read = new Readable({
+ read: common.mustNotCall()
+ })
+ read.destroy()
+ assert.strictEqual(read.destroyed, true)
+ read.read()
+}
+{
+ const read = new Readable({
+ autoDestroy: false,
+ read() {
+ this.push(null)
+ this.push('asd')
+ }
+ })
+ read.on(
+ 'error',
+ common.mustCall(() => {
+ assert(read._readableState.errored)
+ })
+ )
+ read.resume()
+}
+{
+ const controller = new AbortController()
+ const read = addAbortSignal(
+ controller.signal,
+ new Readable({
+ read() {
+ this.push('asd')
+ }
+ })
+ )
+ read.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ })
+ )
+ controller.abort()
+ read.on('data', common.mustNotCall())
+}
+{
+ const controller = new AbortController()
+ const read = new Readable({
+ signal: controller.signal,
+ read() {
+ this.push('asd')
+ }
+ })
+ read.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ })
+ )
+ controller.abort()
+ read.on('data', common.mustNotCall())
+}
+{
+ const controller = new AbortController()
+ const read = addAbortSignal(
+ controller.signal,
+ new Readable({
+ objectMode: true,
+ read() {
+ return false
+ }
+ })
+ )
+ read.push('asd')
+ read.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ })
+ )
+ assert.rejects(
+ (async () => {
+ // eslint-disable-next-line no-unused-vars, no-empty
+ for await (const chunk of read) {
+ }
+ })(),
+ /AbortError/
+ )
+ setTimeout(() => controller.abort(), 0)
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.on(
+ 'error',
+ common.mustCall((e) => {
+ read.push('asd')
+ read.read()
+ })
+ )
+ read.on(
+ 'close',
+ common.mustCall((e) => {
+ read.push('asd')
+ read.read()
+ })
+ )
+ read.destroy(new Error('asd'))
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.on(
+ 'close',
+ common.mustCall((e) => {
+ read.push('asd')
+ read.read()
+ })
+ )
+ read.destroy()
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.on(
+ 'close',
+ common.mustCall((e) => {
+ read.push('asd')
+ read.unshift('asd')
+ })
+ )
+ read.destroy()
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.destroy()
+ read.unshift('asd')
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read.on('data', common.mustNotCall())
+ read.on(
+ 'close',
+ common.mustCall((e) => {
+ read.push('asd')
+ })
+ )
+ read.destroy()
+}
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.on('data', common.mustNotCall())
+ read.destroy()
+ read.push('asd')
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-didRead.js b/test/parallel/test-stream-readable-didRead.js
new file mode 100644
index 0000000000..38cbd48047
--- /dev/null
+++ b/test/parallel/test-stream-readable-didRead.js
@@ -0,0 +1,119 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { isDisturbed, isErrored, Readable } = require('../../lib/ours/index')
+function noop() {}
+function check(readable, data, fn) {
+ assert.strictEqual(readable.readableDidRead, false)
+ assert.strictEqual(isDisturbed(readable), false)
+ assert.strictEqual(isErrored(readable), false)
+ if (data === -1) {
+ readable.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(isErrored(readable), true)
+ })
+ )
+ readable.on('data', common.mustNotCall())
+ readable.on('end', common.mustNotCall())
+ } else {
+ readable.on('error', common.mustNotCall())
+ if (data === -2) {
+ readable.on('end', common.mustNotCall())
+ } else {
+ readable.on('end', common.mustCall())
+ }
+ if (data > 0) {
+ readable.on('data', common.mustCallAtLeast(data))
+ } else {
+ readable.on('data', common.mustNotCall())
+ }
+ }
+ readable.on('close', common.mustCall())
+ fn()
+ setImmediate(() => {
+ assert.strictEqual(readable.readableDidRead, data > 0)
+ if (data > 0) {
+ assert.strictEqual(isDisturbed(readable), true)
+ }
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push(null)
+ }
+ })
+ check(readable, 0, () => {
+ readable.read()
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push(null)
+ }
+ })
+ check(readable, 0, () => {
+ readable.resume()
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push(null)
+ }
+ })
+ check(readable, -2, () => {
+ readable.destroy()
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push(null)
+ }
+ })
+ check(readable, -1, () => {
+ readable.destroy(new Error())
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push('data')
+ this.push(null)
+ }
+ })
+ check(readable, 1, () => {
+ readable.on('data', noop)
+ })
+}
+{
+ const readable = new Readable({
+ read() {
+ this.push('data')
+ this.push(null)
+ }
+ })
+ check(readable, 1, () => {
+ readable.on('data', noop)
+ readable.off('data', noop)
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-dispose.js b/test/parallel/test-stream-readable-dispose.js
new file mode 100644
index 0000000000..3ba69074bf
--- /dev/null
+++ b/test/parallel/test-stream-readable-dispose.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const read = new Readable({
+ read() {}
+ })
+ read.resume()
+ read.on('end', common.mustNotCall('no end event'))
+ read.on('close', common.mustCall())
+ read.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.name, 'AbortError')
+ })
+ )
+ read[require('../../lib/ours/primordials').SymbolAsyncDispose]().then(
+ common.mustCall(() => {
+ assert.strictEqual(read.errored.name, 'AbortError')
+ assert.strictEqual(read.destroyed, true)
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-emit-readable-short-stream.js b/test/parallel/test-stream-readable-emit-readable-short-stream.js
new file mode 100644
index 0000000000..224023fd77
--- /dev/null
+++ b/test/parallel/test-stream-readable-emit-readable-short-stream.js
@@ -0,0 +1,152 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const r = new stream.Readable({
+ read: common.mustCall(function () {
+ this.push('content')
+ this.push(null)
+ })
+ })
+ const t = new stream.Transform({
+ transform: common.mustCall(function (chunk, encoding, callback) {
+ this.push(chunk)
+ return callback()
+ }),
+ flush: common.mustCall(function (callback) {
+ return callback()
+ })
+ })
+ r.pipe(t)
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ }, 2)
+ )
+}
+{
+ const t = new stream.Transform({
+ transform: common.mustCall(function (chunk, encoding, callback) {
+ this.push(chunk)
+ return callback()
+ }),
+ flush: common.mustCall(function (callback) {
+ return callback()
+ })
+ })
+ t.end('content')
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ })
+ )
+}
+{
+ const t = new stream.Transform({
+ transform: common.mustCall(function (chunk, encoding, callback) {
+ this.push(chunk)
+ return callback()
+ }),
+ flush: common.mustCall(function (callback) {
+ return callback()
+ })
+ })
+ t.write('content')
+ t.end()
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ })
+ )
+}
+{
+ const t = new stream.Readable({
+ read() {}
+ })
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ })
+ )
+ t.push('content')
+ t.push(null)
+}
+{
+ const t = new stream.Readable({
+ read() {}
+ })
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ }, 2)
+ )
+ process.nextTick(() => {
+ t.push('content')
+ t.push(null)
+ })
+}
+{
+ const t = new stream.Transform({
+ transform: common.mustCall(function (chunk, encoding, callback) {
+ this.push(chunk)
+ return callback()
+ }),
+ flush: common.mustCall(function (callback) {
+ return callback()
+ })
+ })
+ t.on(
+ 'readable',
+ common.mustCall(function () {
+ while (true) {
+ const chunk = t.read()
+ if (!chunk) break
+ assert.strictEqual(chunk.toString(), 'content')
+ }
+ }, 2)
+ )
+ t.write('content')
+ t.end()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-emittedReadable.js b/test/parallel/test-stream-readable-emittedReadable.js
new file mode 100644
index 0000000000..61bcd75243
--- /dev/null
+++ b/test/parallel/test-stream-readable-emittedReadable.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const Readable = require('../../lib/ours/index').Readable
+const readable = new Readable({
+ read: () => {}
+})
+
+// Initialized to false.
+assert.strictEqual(readable._readableState.emittedReadable, false)
+const expected = [Buffer.from('foobar'), Buffer.from('quo'), null]
+readable.on(
+ 'readable',
+ common.mustCall(() => {
+ // emittedReadable should be true when the readable event is emitted
+ assert.strictEqual(readable._readableState.emittedReadable, true)
+ assert.deepStrictEqual(readable.read(), expected.shift())
+ // emittedReadable is reset to false during read()
+ assert.strictEqual(readable._readableState.emittedReadable, false)
+ }, 3)
+)
+
+// When the first readable listener is just attached,
+// emittedReadable should be false
+assert.strictEqual(readable._readableState.emittedReadable, false)
+
+// These trigger a single 'readable', as things are batched up
+process.nextTick(
+ common.mustCall(() => {
+ readable.push('foo')
+ })
+)
+process.nextTick(
+ common.mustCall(() => {
+ readable.push('bar')
+ })
+)
+
+// These trigger two readable events
+setImmediate(
+ common.mustCall(() => {
+ readable.push('quo')
+ process.nextTick(
+ common.mustCall(() => {
+ readable.push(null)
+ })
+ )
+ })
+)
+const noRead = new Readable({
+ read: () => {}
+})
+noRead.on(
+ 'readable',
+ common.mustCall(() => {
+ // emittedReadable should be true when the readable event is emitted
+ assert.strictEqual(noRead._readableState.emittedReadable, true)
+ noRead.read(0)
+ // emittedReadable is not reset during read(0)
+ assert.strictEqual(noRead._readableState.emittedReadable, true)
+ })
+)
+noRead.push('foo')
+noRead.push(null)
+const flowing = new Readable({
+ read: () => {}
+})
+flowing.on(
+ 'data',
+ common.mustCall(() => {
+ // When in flowing mode, emittedReadable is always false.
+ assert.strictEqual(flowing._readableState.emittedReadable, false)
+ flowing.read()
+ assert.strictEqual(flowing._readableState.emittedReadable, false)
+ }, 3)
+)
+flowing.push('foooo')
+flowing.push('bar')
+flowing.push('quo')
+process.nextTick(
+ common.mustCall(() => {
+ flowing.push(null)
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-end-destroyed.js b/test/parallel/test-stream-readable-end-destroyed.js
new file mode 100644
index 0000000000..a4936e04ef
--- /dev/null
+++ b/test/parallel/test-stream-readable-end-destroyed.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+{
+ // Don't emit 'end' after 'close'.
+
+ const r = new Readable()
+ r.on('end', common.mustNotCall())
+ r.resume()
+ r.destroy()
+ r.on(
+ 'close',
+ common.mustCall(() => {
+ r.push(null)
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-ended.js b/test/parallel/test-stream-readable-ended.js
new file mode 100644
index 0000000000..a18cef4bdc
--- /dev/null
+++ b/test/parallel/test-stream-readable-ended.js
@@ -0,0 +1,64 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+
+// basic
+{
+ // Find it on Readable.prototype
+ assert(Reflect.has(Readable.prototype, 'readableEnded'))
+}
+
+// event
+{
+ const readable = new Readable()
+ readable._read = () => {
+ // The state ended should start in false.
+ assert.strictEqual(readable.readableEnded, false)
+ readable.push('asd')
+ assert.strictEqual(readable.readableEnded, false)
+ readable.push(null)
+ assert.strictEqual(readable.readableEnded, false)
+ }
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(readable.readableEnded, true)
+ })
+ )
+ readable.on(
+ 'data',
+ common.mustCall(() => {
+ assert.strictEqual(readable.readableEnded, false)
+ })
+ )
+}
+
+// Verifies no `error` triggered on multiple .push(null) invocations
+{
+ const readable = new Readable()
+ readable.on('readable', () => {
+ readable.read()
+ })
+ readable.on('error', common.mustNotCall())
+ readable.on('end', common.mustCall())
+ readable.push('a')
+ readable.push(null)
+ readable.push(null)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-error-end.js b/test/parallel/test-stream-readable-error-end.js
new file mode 100644
index 0000000000..a164e9b916
--- /dev/null
+++ b/test/parallel/test-stream-readable-error-end.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+{
+ const r = new Readable({
+ read() {}
+ })
+ r.on('end', common.mustNotCall())
+ r.on('data', common.mustCall())
+ r.on('error', common.mustCall())
+ r.push('asd')
+ r.push(null)
+ r.destroy(new Error('kaboom'))
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js
new file mode 100644
index 0000000000..75fbbe8408
--- /dev/null
+++ b/test/parallel/test-stream-readable-event.js
@@ -0,0 +1,130 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const Readable = require('../../lib/ours/index').Readable
+{
+ // First test, not reading when the readable is added.
+ // make sure that on('readable', ...) triggers a readable event.
+ const r = new Readable({
+ highWaterMark: 3
+ })
+ r._read = common.mustNotCall()
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('blerg'))
+ setTimeout(function () {
+ // We're testing what we think we are
+ assert(!r._readableState.reading)
+ r.on('readable', common.mustCall())
+ }, 1)
+}
+{
+ // Second test, make sure that readable is re-emitted if there's
+ // already a length, while it IS reading.
+
+ const r = new Readable({
+ highWaterMark: 3
+ })
+ r._read = common.mustCall()
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('bl'))
+ setTimeout(function () {
+ // Assert we're testing what we think we are
+ assert(r._readableState.reading)
+ r.on('readable', common.mustCall())
+ }, 1)
+}
+{
+ // Third test, not reading when the stream has not passed
+ // the highWaterMark but *has* reached EOF.
+ const r = new Readable({
+ highWaterMark: 30
+ })
+ r._read = common.mustNotCall()
+
+ // This triggers a 'readable' event, which is lost.
+ r.push(Buffer.from('blerg'))
+ r.push(null)
+ setTimeout(function () {
+ // Assert we're testing what we think we are
+ assert(!r._readableState.reading)
+ r.on('readable', common.mustCall())
+ }, 1)
+}
+{
+ // Pushing an empty string in non-objectMode should
+ // trigger next `read()`.
+ const underlyingData = ['', 'x', 'y', '', 'z']
+ const expected = underlyingData.filter((data) => data)
+ const result = []
+ const r = new Readable({
+ encoding: 'utf8'
+ })
+ r._read = function () {
+ process.nextTick(() => {
+ if (!underlyingData.length) {
+ this.push(null)
+ } else {
+ this.push(underlyingData.shift())
+ }
+ })
+ }
+ r.on('readable', () => {
+ const data = r.read()
+ if (data !== null) result.push(data)
+ })
+ r.on(
+ 'end',
+ common.mustCall(() => {
+ assert.deepStrictEqual(result, expected)
+ })
+ )
+}
+{
+ // #20923
+ const r = new Readable()
+ r._read = function () {
+ // Actually doing thing here
+ }
+ r.on('data', function () {})
+ r.removeAllListeners()
+ assert.strictEqual(r.eventNames().length, 0)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js
new file mode 100644
index 0000000000..6783852662
--- /dev/null
+++ b/test/parallel/test-stream-readable-flow-recursion.js
@@ -0,0 +1,84 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+
+// This test verifies that passing a huge number to read(size)
+// will push up the highWaterMark, and cause the stream to read
+// more data continuously, but without triggering a nextTick
+// warning or RangeError.
+
+const Readable = require('../../lib/ours/index').Readable
+
+// Throw an error if we trigger a nextTick warning.
+process.throwDeprecation = true
+const stream = new Readable({
+ highWaterMark: 2
+})
+let reads = 0
+let total = 5000
+stream._read = function (size) {
+ reads++
+ size = Math.min(size, total)
+ total -= size
+ if (size === 0) stream.push(null)
+ else stream.push(Buffer.allocUnsafe(size))
+}
+let depth = 0
+function flow(stream, size, callback) {
+ depth += 1
+ const chunk = stream.read(size)
+ if (!chunk) stream.once('readable', flow.bind(null, stream, size, callback))
+ else callback(chunk)
+ depth -= 1
+ silentConsole.log(`flow(${depth}): exit`)
+}
+flow(stream, 5000, function () {
+ silentConsole.log(`complete (${depth})`)
+})
+process.on('exit', function (code) {
+ assert.strictEqual(reads, 2)
+ // We pushed up the high water mark
+ assert.strictEqual(stream.readableHighWaterMark, 8192)
+ // Length is 0 right now, because we pulled it all out.
+ assert.strictEqual(stream.readableLength, 0)
+ assert(!code)
+ assert.strictEqual(depth, 0)
+ silentConsole.log('ok')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-hwm-0-async.js b/test/parallel/test-stream-readable-hwm-0-async.js
new file mode 100644
index 0000000000..859a11329f
--- /dev/null
+++ b/test/parallel/test-stream-readable-hwm-0-async.js
@@ -0,0 +1,39 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+
+// This test ensures that Readable stream will continue to call _read
+// for streams with highWaterMark === 0 once the stream returns data
+// by calling push() asynchronously.
+
+const { Readable } = require('../../lib/ours/index')
+let count = 5
+const r = new Readable({
+ // Called 6 times: First 5 return data, last one signals end of stream.
+ read: common.mustCall(() => {
+ process.nextTick(
+ common.mustCall(() => {
+ if (count--) r.push('a')
+ else r.push(null)
+ })
+ )
+ }, 6),
+ highWaterMark: 0
+})
+r.on('end', common.mustCall())
+r.on('data', common.mustCall(5))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-hwm-0-no-flow-data.js b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js
new file mode 100644
index 0000000000..9c0cf2f053
--- /dev/null
+++ b/test/parallel/test-stream-readable-hwm-0-no-flow-data.js
@@ -0,0 +1,120 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+
+// Ensure that subscribing to the 'data' event will not make the stream flow.
+// The 'data' event will require calling read() by hand.
+//
+// The test is written for the (somewhat rare) highWaterMark: 0 streams to
+// specifically catch any regressions that might occur with these streams.
+
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+const streamData = ['a', null]
+
+// Track the calls so we can assert their order later.
+const calls = []
+const r = new Readable({
+ read: common.mustCall(() => {
+ calls.push('_read:' + streamData[0])
+ process.nextTick(() => {
+ calls.push('push:' + streamData[0])
+ r.push(streamData.shift())
+ })
+ }, streamData.length),
+ highWaterMark: 0,
+ // Object mode is used here just for testing convenience. It really
+ // shouldn't affect the order of events. Just the data and its format.
+ objectMode: true
+})
+assert.strictEqual(r.readableFlowing, null)
+r.on(
+ 'readable',
+ common.mustCall(() => {
+ calls.push('readable')
+ }, 2)
+)
+assert.strictEqual(r.readableFlowing, false)
+r.on(
+ 'data',
+ common.mustCall((data) => {
+ calls.push('data:' + data)
+ }, 1)
+)
+r.on(
+ 'end',
+ common.mustCall(() => {
+ calls.push('end')
+ })
+)
+assert.strictEqual(r.readableFlowing, false)
+
+// The stream emits the events asynchronously but that's not guaranteed to
+// happen on the next tick (especially since the _read implementation above
+// uses process.nextTick).
+//
+// We use setImmediate here to give the stream enough time to emit all the
+// events it's about to emit.
+setImmediate(() => {
+ // Only the _read, push, readable calls have happened. No data must be
+ // emitted yet.
+ assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable'])
+
+ // Calling 'r.read()' should trigger the data event.
+ assert.strictEqual(r.read(), 'a')
+ assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a'])
+
+ // The next 'read()' will return null because hwm: 0 does not buffer any
+ // data and the _read implementation above does the push() asynchronously.
+ //
+ // Note: This 'null' signals "no data available". It isn't the end-of-stream
+ // null value as the stream doesn't know yet that it is about to reach the
+ // end.
+ //
+ // Using setImmediate again to give the stream enough time to emit all the
+ // events it wants to emit.
+ assert.strictEqual(r.read(), null)
+ setImmediate(() => {
+ // There's a new 'readable' event after the data has been pushed.
+ // The 'end' event will be emitted only after a 'read()'.
+ //
+ // This is somewhat special for the case where the '_read' implementation
+ // calls 'push' asynchronously. If 'push' was synchronous, the 'end' event
+ // would be emitted here _before_ we call read().
+ assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable'])
+ assert.strictEqual(r.read(), null)
+
+ // While it isn't really specified whether the 'end' event should happen
+ // synchronously with read() or not, we'll assert the current behavior
+ // ('end' event happening on the next tick after read()) so any changes
+ // to it are noted and acknowledged in the future.
+ assert.deepStrictEqual(calls, ['_read:a', 'push:a', 'readable', 'data:a', '_read:null', 'push:null', 'readable'])
+ process.nextTick(() => {
+ assert.deepStrictEqual(calls, [
+ '_read:a',
+ 'push:a',
+ 'readable',
+ 'data:a',
+ '_read:null',
+ 'push:null',
+ 'readable',
+ 'end'
+ ])
+ })
+ })
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-hwm-0.js b/test/parallel/test-stream-readable-hwm-0.js
new file mode 100644
index 0000000000..192788e5ad
--- /dev/null
+++ b/test/parallel/test-stream-readable-hwm-0.js
@@ -0,0 +1,46 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+
+// This test ensures that Readable stream will call _read() for streams
+// with highWaterMark === 0 upon .read(0) instead of just trying to
+// emit 'readable' event.
+
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+const r = new Readable({
+ // Must be called only once upon setting 'readable' listener
+ read: common.mustCall(),
+ highWaterMark: 0
+})
+let pushedNull = false
+// This will trigger read(0) but must only be called after push(null)
+// because we haven't pushed any data
+r.on(
+ 'readable',
+ common.mustCall(() => {
+ assert.strictEqual(r.read(), null)
+ assert.strictEqual(pushedNull, true)
+ })
+)
+r.on('end', common.mustCall())
+process.nextTick(() => {
+ assert.strictEqual(r.read(), null)
+ pushedNull = true
+ r.push(null)
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-infinite-read.js b/test/parallel/test-stream-readable-infinite-read.js
new file mode 100644
index 0000000000..233168ca72
--- /dev/null
+++ b/test/parallel/test-stream-readable-infinite-read.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+const buf = Buffer.alloc(8192)
+const readable = new Readable({
+ read: common.mustCall(function () {
+ this.push(buf)
+ }, 31)
+})
+let i = 0
+readable.on(
+ 'readable',
+ common.mustCall(function () {
+ if (i++ === 10) {
+ // We will just terminate now.
+ process.removeAllListeners('readable')
+ return
+ }
+ const data = readable.read()
+ // TODO(mcollina): there is something odd in the highWaterMark logic
+ // investigate.
+ if (i === 1) {
+ assert.strictEqual(data.length, 8192 * 2)
+ } else {
+ assert.strictEqual(data.length, 8192 * 3)
+ }
+ }, 11)
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-invalid-chunk.js b/test/parallel/test-stream-readable-invalid-chunk.js
new file mode 100644
index 0000000000..d93153b229
--- /dev/null
+++ b/test/parallel/test-stream-readable-invalid-chunk.js
@@ -0,0 +1,51 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+function testPushArg(val) {
+ const readable = new stream.Readable({
+ read: () => {}
+ })
+ readable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_INVALID_ARG_TYPE',
+ name: 'TypeError'
+ })
+ )
+ readable.push(val)
+}
+testPushArg([])
+testPushArg({})
+testPushArg(0)
+function testUnshiftArg(val) {
+ const readable = new stream.Readable({
+ read: () => {}
+ })
+ readable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_INVALID_ARG_TYPE',
+ name: 'TypeError'
+ })
+ )
+ readable.unshift(val)
+}
+testUnshiftArg([])
+testUnshiftArg({})
+testUnshiftArg(0)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-needReadable.js b/test/parallel/test-stream-readable-needReadable.js
new file mode 100644
index 0000000000..14981a600d
--- /dev/null
+++ b/test/parallel/test-stream-readable-needReadable.js
@@ -0,0 +1,135 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const Readable = require('../../lib/ours/index').Readable
+const readable = new Readable({
+ read: () => {}
+})
+
+// Initialized to false.
+assert.strictEqual(readable._readableState.needReadable, false)
+readable.on(
+ 'readable',
+ common.mustCall(() => {
+ // When the readable event fires, needReadable is reset.
+ assert.strictEqual(readable._readableState.needReadable, false)
+ readable.read()
+ })
+)
+
+// If a readable listener is attached, then a readable event is needed.
+assert.strictEqual(readable._readableState.needReadable, true)
+readable.push('foo')
+readable.push(null)
+readable.on(
+ 'end',
+ common.mustCall(() => {
+ // No need to emit readable anymore when the stream ends.
+ assert.strictEqual(readable._readableState.needReadable, false)
+ })
+)
+const asyncReadable = new Readable({
+ read: () => {}
+})
+asyncReadable.on(
+ 'readable',
+ common.mustCall(() => {
+ if (asyncReadable.read() !== null) {
+ // After each read(), the buffer is empty.
+ // If the stream doesn't end now,
+ // then we need to notify the reader on future changes.
+ assert.strictEqual(asyncReadable._readableState.needReadable, true)
+ }
+ }, 2)
+)
+process.nextTick(
+ common.mustCall(() => {
+ asyncReadable.push('foooo')
+ })
+)
+process.nextTick(
+ common.mustCall(() => {
+ asyncReadable.push('bar')
+ })
+)
+setImmediate(
+ common.mustCall(() => {
+ asyncReadable.push(null)
+ assert.strictEqual(asyncReadable._readableState.needReadable, false)
+ })
+)
+const flowing = new Readable({
+ read: () => {}
+})
+
+// Notice this must be above the on('data') call.
+flowing.push('foooo')
+flowing.push('bar')
+flowing.push('quo')
+process.nextTick(
+ common.mustCall(() => {
+ flowing.push(null)
+ })
+)
+
+// When the buffer already has enough data, and the stream is
+// in flowing mode, there is no need for the readable event.
+flowing.on(
+ 'data',
+ common.mustCall(function (data) {
+ assert.strictEqual(flowing._readableState.needReadable, false)
+ }, 3)
+)
+const slowProducer = new Readable({
+ read: () => {}
+})
+slowProducer.on(
+ 'readable',
+ common.mustCall(() => {
+ const chunk = slowProducer.read(8)
+ const state = slowProducer._readableState
+ if (chunk === null) {
+ // The buffer doesn't have enough data, and the stream has not ended,
+ // we need to notify the reader when data arrives.
+ assert.strictEqual(state.needReadable, true)
+ } else {
+ assert.strictEqual(state.needReadable, false)
+ }
+ }, 4)
+)
+process.nextTick(
+ common.mustCall(() => {
+ slowProducer.push('foo')
+ process.nextTick(
+ common.mustCall(() => {
+ slowProducer.push('foo')
+ process.nextTick(
+ common.mustCall(() => {
+ slowProducer.push('foo')
+ process.nextTick(
+ common.mustCall(() => {
+ slowProducer.push(null)
+ })
+ )
+ })
+ )
+ })
+ )
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-next-no-null.js b/test/parallel/test-stream-readable-next-no-null.js
new file mode 100644
index 0000000000..4f7d01e3d7
--- /dev/null
+++ b/test/parallel/test-stream-readable-next-no-null.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const { mustNotCall, expectsError } = require('../common')
+const { Readable } = require('../../lib/ours/index')
+async function* generate() {
+ yield null
+}
+const stream = Readable.from(generate())
+stream.on(
+ 'error',
+ expectsError({
+ code: 'ERR_STREAM_NULL_VALUES',
+ name: 'TypeError',
+ message: 'May not write null values to stream'
+ })
+)
+stream.on('data', mustNotCall())
+stream.on('end', mustNotCall())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-no-unneeded-readable.js b/test/parallel/test-stream-readable-no-unneeded-readable.js
new file mode 100644
index 0000000000..67f71b89c5
--- /dev/null
+++ b/test/parallel/test-stream-readable-no-unneeded-readable.js
@@ -0,0 +1,69 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable, PassThrough } = require('../../lib/ours/index')
+function test(r) {
+ const wrapper = new Readable({
+ read: () => {
+ let data = r.read()
+ if (data) {
+ wrapper.push(data)
+ return
+ }
+ r.once('readable', function () {
+ data = r.read()
+ if (data) {
+ wrapper.push(data)
+ }
+ // else: the end event should fire
+ })
+ }
+ })
+ r.once('end', function () {
+ wrapper.push(null)
+ })
+ wrapper.resume()
+ wrapper.once('end', common.mustCall())
+}
+{
+ const source = new Readable({
+ read: () => {}
+ })
+ source.push('foo')
+ source.push('bar')
+ source.push(null)
+ const pt = source.pipe(new PassThrough())
+ test(pt)
+}
+{
+ // This is the underlying cause of the above test case.
+ const pushChunks = ['foo', 'bar']
+ const r = new Readable({
+ read: () => {
+ const chunk = pushChunks.shift()
+ if (chunk) {
+ // synchronous call
+ r.push(chunk)
+ } else {
+ // asynchronous call
+ process.nextTick(() => r.push(null))
+ }
+ }
+ })
+ test(r)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-object-multi-push-async.js b/test/parallel/test-stream-readable-object-multi-push-async.js
new file mode 100644
index 0000000000..eefacf1d28
--- /dev/null
+++ b/test/parallel/test-stream-readable-object-multi-push-async.js
@@ -0,0 +1,182 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+const MAX = 42
+const BATCH = 10
+{
+ const readable = new Readable({
+ objectMode: true,
+ read: common.mustCall(function () {
+ silentConsole.log('>> READ')
+ fetchData((err, data) => {
+ if (err) {
+ this.destroy(err)
+ return
+ }
+ if (data.length === 0) {
+ silentConsole.log('pushing null')
+ this.push(null)
+ return
+ }
+ silentConsole.log('pushing')
+ data.forEach((d) => this.push(d))
+ })
+ }, Math.floor(MAX / BATCH) + 2)
+ })
+ let i = 0
+ function fetchData(cb) {
+ if (i > MAX) {
+ setTimeout(cb, 10, null, [])
+ } else {
+ const array = []
+ const max = i + BATCH
+ for (; i < max; i++) {
+ array.push(i)
+ }
+ setTimeout(cb, 10, null, array)
+ }
+ }
+ readable.on('readable', () => {
+ let data
+ silentConsole.log('readable emitted')
+ while ((data = readable.read()) !== null) {
+ silentConsole.log(data)
+ }
+ })
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH)
+ })
+ )
+}
+{
+ const readable = new Readable({
+ objectMode: true,
+ read: common.mustCall(function () {
+ silentConsole.log('>> READ')
+ fetchData((err, data) => {
+ if (err) {
+ this.destroy(err)
+ return
+ }
+ if (data.length === 0) {
+ silentConsole.log('pushing null')
+ this.push(null)
+ return
+ }
+ silentConsole.log('pushing')
+ data.forEach((d) => this.push(d))
+ })
+ }, Math.floor(MAX / BATCH) + 2)
+ })
+ let i = 0
+ function fetchData(cb) {
+ if (i > MAX) {
+ setTimeout(cb, 10, null, [])
+ } else {
+ const array = []
+ const max = i + BATCH
+ for (; i < max; i++) {
+ array.push(i)
+ }
+ setTimeout(cb, 10, null, array)
+ }
+ }
+ readable.on('data', (data) => {
+ silentConsole.log('data emitted', data)
+ })
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH)
+ })
+ )
+}
+{
+ const readable = new Readable({
+ objectMode: true,
+ read: common.mustCall(function () {
+ silentConsole.log('>> READ')
+ fetchData((err, data) => {
+ if (err) {
+ this.destroy(err)
+ return
+ }
+ silentConsole.log('pushing')
+ data.forEach((d) => this.push(d))
+ if (data[BATCH - 1] >= MAX) {
+ silentConsole.log('pushing null')
+ this.push(null)
+ }
+ })
+ }, Math.floor(MAX / BATCH) + 1)
+ })
+ let i = 0
+ function fetchData(cb) {
+ const array = []
+ const max = i + BATCH
+ for (; i < max; i++) {
+ array.push(i)
+ }
+ setTimeout(cb, 10, null, array)
+ }
+ readable.on('data', (data) => {
+ silentConsole.log('data emitted', data)
+ })
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(i, (Math.floor(MAX / BATCH) + 1) * BATCH)
+ })
+ )
+}
+{
+ const readable = new Readable({
+ objectMode: true,
+ read: common.mustNotCall()
+ })
+ readable.on('data', common.mustNotCall())
+ readable.push(null)
+ let nextTickPassed = false
+ process.nextTick(() => {
+ nextTickPassed = true
+ })
+ readable.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(nextTickPassed, true)
+ })
+ )
+}
+{
+ const readable = new Readable({
+ objectMode: true,
+ read: common.mustCall()
+ })
+ readable.on('data', (data) => {
+ silentConsole.log('data emitted', data)
+ })
+ readable.on('end', common.mustCall())
+ setImmediate(() => {
+ readable.push('aaa')
+ readable.push(null)
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-pause-and-resume.js b/test/parallel/test-stream-readable-pause-and-resume.js
new file mode 100644
index 0000000000..464f0c3abd
--- /dev/null
+++ b/test/parallel/test-stream-readable-pause-and-resume.js
@@ -0,0 +1,76 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+let ticks = 18
+let expectedData = 19
+const rs = new Readable({
+ objectMode: true,
+ read: () => {
+ if (ticks-- > 0) return process.nextTick(() => rs.push({}))
+ rs.push({})
+ rs.push(null)
+ }
+})
+rs.on('end', common.mustCall())
+readAndPause()
+function readAndPause() {
+  // Does an on('data') -> pause -> wait -> resume -> on('data') ... loop.
+ // Expects on(data) to never fire if the stream is paused.
+ const ondata = common.mustCall((data) => {
+ rs.pause()
+ expectedData--
+ if (expectedData <= 0) return
+ setImmediate(function () {
+ rs.removeListener('data', ondata)
+ readAndPause()
+ rs.resume()
+ })
+ }, 1) // Only call ondata once
+
+ rs.on('data', ondata)
+}
+{
+ const readable = new Readable({
+ read() {}
+ })
+ function read() {}
+ readable.setEncoding('utf8')
+ readable.on('readable', read)
+ readable.removeListener('readable', read)
+ readable.pause()
+ process.nextTick(function () {
+ assert(readable.isPaused())
+ })
+}
+{
+ const { PassThrough } = require('../../lib/ours/index')
+ const source3 = new PassThrough()
+ const target3 = new PassThrough()
+ const chunk = Buffer.allocUnsafe(1000)
+ while (target3.write(chunk));
+ source3.pipe(target3)
+ target3.on(
+ 'drain',
+ common.mustCall(() => {
+ assert(!source3.isPaused())
+ })
+ )
+ target3.on('data', () => {})
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-readable-then-resume.js b/test/parallel/test-stream-readable-readable-then-resume.js
new file mode 100644
index 0000000000..8926f40576
--- /dev/null
+++ b/test/parallel/test-stream-readable-readable-then-resume.js
@@ -0,0 +1,46 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+
+// This test verifies that a stream could be resumed after
+// removing the readable event in the same tick
+
+check(
+ new Readable({
+ objectMode: true,
+ highWaterMark: 1,
+ read() {
+ if (!this.first) {
+ this.push('hello')
+ this.first = true
+ return
+ }
+ this.push(null)
+ }
+ })
+)
+function check(s) {
+ const readableListener = common.mustNotCall()
+ s.on('readable', readableListener)
+ s.on('end', common.mustCall())
+ assert.strictEqual(s.removeListener, s.off)
+ s.removeListener('readable', readableListener)
+ s.resume()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-readable.js b/test/parallel/test-stream-readable-readable.js
new file mode 100644
index 0000000000..65dc93d72e
--- /dev/null
+++ b/test/parallel/test-stream-readable-readable.js
@@ -0,0 +1,63 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+{
+ const r = new Readable({
+ read() {}
+ })
+ assert.strictEqual(r.readable, true)
+ r.destroy()
+ assert.strictEqual(r.readable, false)
+}
+{
+ const mustNotCall = common.mustNotCall()
+ const r = new Readable({
+ read() {}
+ })
+ assert.strictEqual(r.readable, true)
+ r.on('end', mustNotCall)
+ r.resume()
+ r.push(null)
+ assert.strictEqual(r.readable, true)
+ r.off('end', mustNotCall)
+ r.on(
+ 'end',
+ common.mustCall(() => {
+ assert.strictEqual(r.readable, false)
+ })
+ )
+}
+{
+ const r = new Readable({
+ read: common.mustCall(() => {
+ process.nextTick(() => {
+ r.destroy(new Error())
+ assert.strictEqual(r.readable, false)
+ })
+ })
+ })
+ r.resume()
+ r.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(r.readable, false)
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-reading-readingMore.js b/test/parallel/test-stream-readable-reading-readingMore.js
new file mode 100644
index 0000000000..804ac81670
--- /dev/null
+++ b/test/parallel/test-stream-readable-reading-readingMore.js
@@ -0,0 +1,175 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const Readable = require('../../lib/ours/index').Readable
+{
+ const readable = new Readable({
+ read(size) {}
+ })
+ const state = readable._readableState
+
+ // Starting off with false initially.
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.readingMore, false)
+ readable.on(
+ 'data',
+ common.mustCall((data) => {
+      // While in a flowing state with a 'readable' listener
+      // we should be reading more
+ if (readable.readableFlowing) assert.strictEqual(state.readingMore, true)
+
+ // Reading as long as we've not ended
+ assert.strictEqual(state.reading, !state.ended)
+ }, 2)
+ )
+ function onStreamEnd() {
+ // End of stream; state.reading is false
+ // And so should be readingMore.
+ assert.strictEqual(state.readingMore, false)
+ assert.strictEqual(state.reading, false)
+ }
+ const expectedReadingMore = [true, true, false]
+ readable.on(
+ 'readable',
+ common.mustCall(() => {
+ // There is only one readingMore scheduled from on('data'),
+ // after which everything is governed by the .read() call
+ assert.strictEqual(state.readingMore, expectedReadingMore.shift())
+
+ // If the stream has ended, we shouldn't be reading
+ assert.strictEqual(state.ended, !state.reading)
+
+ // Consume all the data
+ while (readable.read() !== null);
+ if (expectedReadingMore.length === 0)
+ // Reached end of stream
+ process.nextTick(common.mustCall(onStreamEnd, 1))
+ }, 3)
+ )
+ readable.on('end', common.mustCall(onStreamEnd))
+ readable.push('pushed')
+ readable.read(6)
+
+ // reading
+ assert.strictEqual(state.reading, true)
+ assert.strictEqual(state.readingMore, true)
+
+ // add chunk to front
+ readable.unshift('unshifted')
+
+ // end
+ readable.push(null)
+}
+{
+ const readable = new Readable({
+ read(size) {}
+ })
+ const state = readable._readableState
+
+ // Starting off with false initially.
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.readingMore, false)
+ readable.on(
+ 'data',
+ common.mustCall((data) => {
+ // While in a flowing state without a 'readable' listener
+ // we should be reading more
+ if (readable.readableFlowing) assert.strictEqual(state.readingMore, true)
+
+ // Reading as long as we've not ended
+ assert.strictEqual(state.reading, !state.ended)
+ }, 2)
+ )
+ function onStreamEnd() {
+ // End of stream; state.reading is false
+ // And so should be readingMore.
+ assert.strictEqual(state.readingMore, false)
+ assert.strictEqual(state.reading, false)
+ }
+ readable.on('end', common.mustCall(onStreamEnd))
+ readable.push('pushed')
+
+ // Stop emitting 'data' events
+ assert.strictEqual(state.flowing, true)
+ readable.pause()
+
+ // paused
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.flowing, false)
+ readable.resume()
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.flowing, true)
+
+ // add chunk to front
+ readable.unshift('unshifted')
+
+ // end
+ readable.push(null)
+}
+{
+ const readable = new Readable({
+ read(size) {}
+ })
+ const state = readable._readableState
+
+ // Starting off with false initially.
+ assert.strictEqual(state.reading, false)
+ assert.strictEqual(state.readingMore, false)
+ const onReadable = common.mustNotCall()
+ readable.on('readable', onReadable)
+ readable.on(
+ 'data',
+ common.mustCall((data) => {
+ // Reading as long as we've not ended
+ assert.strictEqual(state.reading, !state.ended)
+ }, 2)
+ )
+ readable.removeListener('readable', onReadable)
+ function onStreamEnd() {
+ // End of stream; state.reading is false
+ // And so should be readingMore.
+ assert.strictEqual(state.readingMore, false)
+ assert.strictEqual(state.reading, false)
+ }
+ readable.on('end', common.mustCall(onStreamEnd))
+ readable.push('pushed')
+
+ // We are still not flowing, we will be resuming in the next tick
+ assert.strictEqual(state.flowing, false)
+
+ // Wait for nextTick, so the readableListener flag resets
+ process.nextTick(function () {
+ readable.resume()
+
+ // Stop emitting 'data' events
+ assert.strictEqual(state.flowing, true)
+ readable.pause()
+
+ // paused
+ assert.strictEqual(state.flowing, false)
+ readable.resume()
+ assert.strictEqual(state.flowing, true)
+
+ // add chunk to front
+ readable.unshift('unshifted')
+
+ // end
+ readable.push(null)
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-resume-hwm.js b/test/parallel/test-stream-readable-resume-hwm.js
new file mode 100644
index 0000000000..9dd9df57e7
--- /dev/null
+++ b/test/parallel/test-stream-readable-resume-hwm.js
@@ -0,0 +1,39 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+
+// readable.resume() should not lead to a ._read() call being scheduled
+// when we exceed the high water mark already.
+
+const readable = new Readable({
+ read: common.mustNotCall(),
+ highWaterMark: 100
+})
+
+// Fill up the internal buffer so that we definitely exceed the HWM:
+for (let i = 0; i < 10; i++) readable.push('a'.repeat(200))
+
+// Call resume, and pause after one chunk.
+// The .pause() is just so that we don’t empty the buffer fully, which would
+// be a valid reason to call ._read().
+readable.resume()
+readable.once(
+ 'data',
+ common.mustCall(() => readable.pause())
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-resumeScheduled.js b/test/parallel/test-stream-readable-resumeScheduled.js
new file mode 100644
index 0000000000..1f50e3960f
--- /dev/null
+++ b/test/parallel/test-stream-readable-resumeScheduled.js
@@ -0,0 +1,91 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+
+// Testing Readable Stream resumeScheduled state
+
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+{
+ // pipe() test case
+ const r = new Readable({
+ read() {}
+ })
+ const w = new Writable()
+
+ // resumeScheduled should start = `false`.
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+
+ // Calling pipe() should change the state value = true.
+ r.pipe(w)
+ assert.strictEqual(r._readableState.resumeScheduled, true)
+ process.nextTick(
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
+}
+{
+ // 'data' listener test case
+ const r = new Readable({
+ read() {}
+ })
+
+ // resumeScheduled should start = `false`.
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ r.push(Buffer.from([1, 2, 3]))
+
+ // Adding 'data' listener should change the state value
+ r.on(
+ 'data',
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
+ assert.strictEqual(r._readableState.resumeScheduled, true)
+ process.nextTick(
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
+}
+{
+ // resume() test case
+ const r = new Readable({
+ read() {}
+ })
+
+ // resumeScheduled should start = `false`.
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+
+ // Calling resume() should change the state value.
+ r.resume()
+ assert.strictEqual(r._readableState.resumeScheduled, true)
+ r.on(
+ 'resume',
+ common.mustCall(() => {
+ // The state value should be `false` again
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
+ process.nextTick(
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.resumeScheduled, false)
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-setEncoding-existing-buffers.js b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js
new file mode 100644
index 0000000000..fc391bf64e
--- /dev/null
+++ b/test/parallel/test-stream-readable-setEncoding-existing-buffers.js
@@ -0,0 +1,68 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ // Call .setEncoding() while there are bytes already in the buffer.
+ const r = new Readable({
+ read() {}
+ })
+ r.push(Buffer.from('a'))
+ r.push(Buffer.from('b'))
+ r.setEncoding('utf8')
+ const chunks = []
+ r.on('data', (chunk) => chunks.push(chunk))
+ process.nextTick(() => {
+ assert.deepStrictEqual(chunks, ['ab'])
+ })
+}
+{
+ // Call .setEncoding() while the buffer contains a complete,
+ // but chunked character.
+ const r = new Readable({
+ read() {}
+ })
+ r.push(Buffer.from([0xf0]))
+ r.push(Buffer.from([0x9f]))
+ r.push(Buffer.from([0x8e]))
+ r.push(Buffer.from([0x89]))
+ r.setEncoding('utf8')
+ const chunks = []
+ r.on('data', (chunk) => chunks.push(chunk))
+ process.nextTick(() => {
+ assert.deepStrictEqual(chunks, ['🎉'])
+ })
+}
+{
+ // Call .setEncoding() while the buffer contains an incomplete character,
+ // and finish the character later.
+ const r = new Readable({
+ read() {}
+ })
+ r.push(Buffer.from([0xf0]))
+ r.push(Buffer.from([0x9f]))
+ r.setEncoding('utf8')
+ r.push(Buffer.from([0x8e]))
+ r.push(Buffer.from([0x89]))
+ const chunks = []
+ r.on('data', (chunk) => chunks.push(chunk))
+ process.nextTick(() => {
+ assert.deepStrictEqual(chunks, ['🎉'])
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-setEncoding-null.js b/test/parallel/test-stream-readable-setEncoding-null.js
new file mode 100644
index 0000000000..6063705002
--- /dev/null
+++ b/test/parallel/test-stream-readable-setEncoding-null.js
@@ -0,0 +1,28 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+{
+ const readable = new Readable({
+ encoding: 'hex'
+ })
+ assert.strictEqual(readable._readableState.encoding, 'hex')
+ readable.setEncoding(null)
+ assert.strictEqual(readable._readableState.encoding, 'utf8')
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-unpipe-resume.js b/test/parallel/test-stream-readable-unpipe-resume.js
new file mode 100644
index 0000000000..acbf6720c6
--- /dev/null
+++ b/test/parallel/test-stream-readable-unpipe-resume.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const fs = require('fs')
+const readStream = fs.createReadStream(process.execPath)
+const transformStream = new stream.Transform({
+ transform: common.mustCall(() => {
+ readStream.unpipe()
+ readStream.resume()
+ })
+})
+readStream.on('end', common.mustCall())
+readStream.pipe(transformStream).resume()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-unshift.js b/test/parallel/test-stream-readable-unshift.js
new file mode 100644
index 0000000000..624de8bb11
--- /dev/null
+++ b/test/parallel/test-stream-readable-unshift.js
@@ -0,0 +1,185 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+{
+ // Check that strings are saved as Buffer
+ const readable = new Readable({
+ read() {}
+ })
+ const string = 'abc'
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert(Buffer.isBuffer(chunk))
+ assert.strictEqual(chunk.toString('utf8'), string)
+ }, 1)
+ )
+ readable.unshift(string)
+}
+{
+ // Check that data goes at the beginning
+ const readable = new Readable({
+ read() {}
+ })
+ const unshift = 'front'
+ const push = 'back'
+ const expected = [unshift, push]
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk.toString('utf8'), expected.shift())
+ }, 2)
+ )
+ readable.push(push)
+ readable.unshift(unshift)
+}
+{
+ // Check that buffer is saved with correct encoding
+ const readable = new Readable({
+ read() {}
+ })
+ const encoding = 'base64'
+ const string = Buffer.from('abc').toString(encoding)
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk.toString(encoding), string)
+ }, 1)
+ )
+ readable.unshift(string, encoding)
+}
+{
+ const streamEncoding = 'base64'
+ function checkEncoding(readable) {
+ // chunk encodings
+ const encodings = ['utf8', 'binary', 'hex', 'base64']
+ const expected = []
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ const { encoding, string } = expected.pop()
+ assert.strictEqual(chunk.toString(encoding), string)
+ }, encodings.length)
+ )
+ for (const encoding of encodings) {
+ const string = 'abc'
+
+ // If encoding is the same as the state.encoding the string is
+ // saved as is
+ const expect = encoding !== streamEncoding ? Buffer.from(string, encoding).toString(streamEncoding) : string
+ expected.push({
+ encoding,
+ string: expect
+ })
+ readable.unshift(string, encoding)
+ }
+ }
+ const r1 = new Readable({
+ read() {}
+ })
+ r1.setEncoding(streamEncoding)
+ checkEncoding(r1)
+ const r2 = new Readable({
+ read() {},
+ encoding: streamEncoding
+ })
+ checkEncoding(r2)
+}
+{
+ // Both .push & .unshift should have the same behaviour
+ // When setting an encoding, each chunk should be emitted with that encoding
+ const encoding = 'base64'
+ function checkEncoding(readable) {
+ const string = 'abc'
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk, Buffer.from(string).toString(encoding))
+ }, 2)
+ )
+ readable.push(string)
+ readable.unshift(string)
+ }
+ const r1 = new Readable({
+ read() {}
+ })
+ r1.setEncoding(encoding)
+ checkEncoding(r1)
+ const r2 = new Readable({
+ read() {},
+ encoding
+ })
+ checkEncoding(r2)
+}
+{
+ // Check that ObjectMode works
+ const readable = new Readable({
+ objectMode: true,
+ read() {}
+ })
+ const chunks = ['a', 1, {}, []]
+ readable.on(
+ 'data',
+ common.mustCall((chunk) => {
+ assert.strictEqual(chunk, chunks.pop())
+ }, chunks.length)
+ )
+ for (const chunk of chunks) {
+ readable.unshift(chunk)
+ }
+}
+{
+ // Should not throw: https://github.com/nodejs/node/issues/27192
+ const highWaterMark = 50
+ class ArrayReader extends Readable {
+ constructor(opt) {
+ super({
+ highWaterMark
+ })
+ // The error happened only when pushing above hwm
+ this.buffer = new Array(highWaterMark * 2).fill(0).map(String)
+ }
+ _read(size) {
+ while (this.buffer.length) {
+ const chunk = this.buffer.shift()
+ if (!this.buffer.length) {
+ this.push(chunk)
+ this.push(null)
+ return true
+ }
+ if (!this.push(chunk)) return
+ }
+ }
+ }
+ function onRead() {
+ while (null !== stream.read()) {
+ // Remove the 'readable' listener before unshifting
+ stream.removeListener('readable', onRead)
+ stream.unshift('a')
+ stream.on('data', (chunk) => {
+ silentConsole.log(chunk.length)
+ })
+ break
+ }
+ }
+ const stream = new ArrayReader()
+ stream.once('readable', common.mustCall(onRead))
+ stream.on('end', common.mustCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readable-with-unimplemented-_read.js b/test/parallel/test-stream-readable-with-unimplemented-_read.js
new file mode 100644
index 0000000000..1fccc976a8
--- /dev/null
+++ b/test/parallel/test-stream-readable-with-unimplemented-_read.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const readable = new Readable()
+readable.read()
+readable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_METHOD_NOT_IMPLEMENTED',
+ name: 'Error',
+ message: 'The _read() method is not implemented'
+ })
+)
+readable.on('close', common.mustCall())
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-readableListening-state.js b/test/parallel/test-stream-readableListening-state.js
new file mode 100644
index 0000000000..29e6acc17f
--- /dev/null
+++ b/test/parallel/test-stream-readableListening-state.js
@@ -0,0 +1,49 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const r = new stream.Readable({
+ read: () => {}
+})
+
+// readableListening state should start in `false`.
+assert.strictEqual(r._readableState.readableListening, false)
+r.on(
+ 'readable',
+ common.mustCall(() => {
+ // Inside the readable event this state should be true.
+ assert.strictEqual(r._readableState.readableListening, true)
+ })
+)
+r.push(Buffer.from('Testing readableListening state'))
+const r2 = new stream.Readable({
+ read: () => {}
+})
+
+// readableListening state should start in `false`.
+assert.strictEqual(r2._readableState.readableListening, false)
+r2.on(
+ 'data',
+ common.mustCall((chunk) => {
+ // readableListening should be false because we don't have
+ // a `readable` listener
+ assert.strictEqual(r2._readableState.readableListening, false)
+ })
+)
+r2.push(Buffer.from('Testing readableListening state'))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-reduce.js b/test/parallel/test-stream-reduce.js
new file mode 100644
index 0000000000..079e2a44ce
--- /dev/null
+++ b/test/parallel/test-stream-reduce.js
@@ -0,0 +1,199 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+function sum(p, c) {
+ return p + c
+}
+{
+ // Does the same thing as `(await stream.toArray()).reduce(...)`
+ ;(async () => {
+ const tests = [
+ [[], sum, 0],
+ [[1], sum, 0],
+ [[1, 2, 3, 4, 5], sum, 0],
+ [[...Array(100).keys()], sum, 0],
+ [['a', 'b', 'c'], sum, ''],
+ [[1, 2], sum],
+ [[1, 2, 3], (x, y) => y]
+ ]
+ for (const [values, fn, initial] of tests) {
+ const streamReduce = await Readable.from(values).reduce(fn, initial)
+ const arrayReduce = values.reduce(fn, initial)
+ assert.deepStrictEqual(streamReduce, arrayReduce)
+ }
+ // Does the same thing as `(await stream.toArray()).reduce(...)` with an
+ // asynchronous reducer
+ for (const [values, fn, initial] of tests) {
+ const streamReduce = await Readable.from(values)
+ .map(async (x) => x)
+ .reduce(fn, initial)
+ const arrayReduce = values.reduce(fn, initial)
+ assert.deepStrictEqual(streamReduce, arrayReduce)
+ }
+ })().then(common.mustCall())
+}
+{
+ // Works with an async reducer, with or without initial value
+ ;(async () => {
+ const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c, 0)
+ assert.strictEqual(six, 6)
+ })().then(common.mustCall())
+ ;(async () => {
+ const six = await Readable.from([1, 2, 3]).reduce(async (p, c) => p + c)
+ assert.strictEqual(six, 6)
+ })().then(common.mustCall())
+}
+{
+ // Works lazily
+ assert
+ .rejects(
+ Readable.from([1, 2, 3, 4, 5, 6])
+ .map(
+ common.mustCall((x) => {
+ return x
+ }, 3)
+ ) // Two consumed and one buffered by `map` due to default concurrency
+ .reduce(async (p, c) => {
+ if (p === 1) {
+ throw new Error('boom')
+ }
+ return c
+ }, 0),
+ /boom/
+ )
+ .then(common.mustCall())
+}
+{
+ // Support for AbortSignal
+ const ac = new AbortController()
+ assert
+ .rejects(
+ async () => {
+ await Readable.from([1, 2, 3]).reduce(
+ async (p, c) => {
+ if (c === 3) {
+ await new Promise(() => {}) // Explicitly do not pass signal here
+ }
+ return Promise.resolve()
+ },
+ 0,
+ {
+ signal: ac.signal
+ }
+ )
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(common.mustCall())
+ ac.abort()
+}
+{
+ // Support for AbortSignal - pre aborted
+ const stream = Readable.from([1, 2, 3])
+ assert
+ .rejects(
+ async () => {
+ await stream.reduce(
+ async (p, c) => {
+ if (c === 3) {
+ await new Promise(() => {}) // Explicitly do not pass signal here
+ }
+ return Promise.resolve()
+ },
+ 0,
+ {
+ signal: AbortSignal.abort()
+ }
+ )
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(
+ common.mustCall(() => {
+ assert.strictEqual(stream.destroyed, true)
+ })
+ )
+}
+{
+ // Support for AbortSignal - deep
+ const stream = Readable.from([1, 2, 3])
+ assert
+ .rejects(
+ async () => {
+ await stream.reduce(
+ async (p, c, { signal }) => {
+ signal.addEventListener('abort', common.mustCall(), {
+ once: true
+ })
+ if (c === 3) {
+ await new Promise(() => {}) // Explicitly do not pass signal here
+ }
+ return Promise.resolve()
+ },
+ 0,
+ {
+ signal: AbortSignal.abort()
+ }
+ )
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(
+ common.mustCall(() => {
+ assert.strictEqual(stream.destroyed, true)
+ })
+ )
+}
+{
+ // Error cases
+ assert.rejects(() => Readable.from([]).reduce(1), /TypeError/)
+ assert.rejects(() => Readable.from([]).reduce('5'), /TypeError/)
+ assert.rejects(() => Readable.from([]).reduce((x, y) => x + y, 0, 1), /ERR_INVALID_ARG_TYPE/)
+ assert.rejects(
+ () =>
+ Readable.from([]).reduce((x, y) => x + y, 0, {
+ signal: true
+ }),
+ /ERR_INVALID_ARG_TYPE/
+ )
+}
+{
+ // Test result is a Promise
+ const result = Readable.from([1, 2, 3, 4, 5]).reduce(sum, 0)
+ assert.ok(result instanceof Promise)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-set-default-hwm.js b/test/parallel/test-stream-set-default-hwm.js
new file mode 100644
index 0000000000..4ef13346cf
--- /dev/null
+++ b/test/parallel/test-stream-set-default-hwm.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const {
+ setDefaultHighWaterMark,
+ getDefaultHighWaterMark,
+ Writable,
+ Readable,
+ Transform
+} = require('../../lib/ours/index')
+assert.notStrictEqual(getDefaultHighWaterMark(false), 32 * 1000)
+setDefaultHighWaterMark(false, 32 * 1000)
+assert.strictEqual(getDefaultHighWaterMark(false), 32 * 1000)
+assert.notStrictEqual(getDefaultHighWaterMark(true), 32)
+setDefaultHighWaterMark(true, 32)
+assert.strictEqual(getDefaultHighWaterMark(true), 32)
+const w = new Writable({
+ write() {}
+})
+assert.strictEqual(w.writableHighWaterMark, 32 * 1000)
+const r = new Readable({
+ read() {}
+})
+assert.strictEqual(r.readableHighWaterMark, 32 * 1000)
+const t = new Transform({
+ transform() {}
+})
+assert.strictEqual(t.writableHighWaterMark, 32 * 1000)
+assert.strictEqual(t.readableHighWaterMark, 32 * 1000)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-some-find-every.mjs b/test/parallel/test-stream-some-find-every.mjs
new file mode 100644
index 0000000000..f68c45e3eb
--- /dev/null
+++ b/test/parallel/test-stream-some-find-every.mjs
@@ -0,0 +1,215 @@
+import * as common from '../common/index.mjs'
+import { setTimeout } from 'timers/promises'
+import { Readable } from '../../lib/ours/index.js'
+import assert from 'assert'
+import tap from 'tap'
+
+function oneTo5() {
+ return Readable.from([1, 2, 3, 4, 5])
+}
+
+function oneTo5Async() {
+ return oneTo5().map(async (x) => {
+ await Promise.resolve()
+ return x
+ })
+}
+{
+ // Some, find, and every work with a synchronous stream and predicate
+ assert.strictEqual(await oneTo5().some((x) => x > 3), true)
+ assert.strictEqual(await oneTo5().every((x) => x > 3), false)
+ assert.strictEqual(await oneTo5().find((x) => x > 3), 4)
+ assert.strictEqual(await oneTo5().some((x) => x > 6), false)
+ assert.strictEqual(await oneTo5().every((x) => x < 6), true)
+ assert.strictEqual(await oneTo5().find((x) => x > 6), undefined)
+ assert.strictEqual(await Readable.from([]).some(() => true), false)
+ assert.strictEqual(await Readable.from([]).every(() => true), true)
+ assert.strictEqual(await Readable.from([]).find(() => true), undefined)
+}
+
+{
+ // Some, find, and every work with an asynchronous stream and synchronous predicate
+ assert.strictEqual(await oneTo5Async().some((x) => x > 3), true)
+ assert.strictEqual(await oneTo5Async().every((x) => x > 3), false)
+ assert.strictEqual(await oneTo5Async().find((x) => x > 3), 4)
+ assert.strictEqual(await oneTo5Async().some((x) => x > 6), false)
+ assert.strictEqual(await oneTo5Async().every((x) => x < 6), true)
+ assert.strictEqual(await oneTo5Async().find((x) => x > 6), undefined)
+}
+
+{
+ // Some, find, and every work on synchronous streams with an asynchronous predicate
+ assert.strictEqual(await oneTo5().some(async (x) => x > 3), true)
+ assert.strictEqual(await oneTo5().every(async (x) => x > 3), false)
+ assert.strictEqual(await oneTo5().find(async (x) => x > 3), 4)
+ assert.strictEqual(await oneTo5().some(async (x) => x > 6), false)
+ assert.strictEqual(await oneTo5().every(async (x) => x < 6), true)
+ assert.strictEqual(await oneTo5().find(async (x) => x > 6), undefined)
+}
+
+{
+ // Some, find, and every work on asynchronous streams with an asynchronous predicate
+ assert.strictEqual(await oneTo5Async().some(async (x) => x > 3), true)
+ assert.strictEqual(await oneTo5Async().every(async (x) => x > 3), false)
+ assert.strictEqual(await oneTo5Async().find(async (x) => x > 3), 4)
+ assert.strictEqual(await oneTo5Async().some(async (x) => x > 6), false)
+ assert.strictEqual(await oneTo5Async().every(async (x) => x < 6), true)
+ assert.strictEqual(await oneTo5Async().find(async (x) => x > 6), undefined)
+}
+
+{
+ async function checkDestroyed(stream) {
+ await setTimeout()
+ assert.strictEqual(stream.destroyed, true)
+ }
+
+ {
+ // Some, find, and every short circuit
+ const someStream = oneTo5()
+ await someStream.some(common.mustCall((x) => x > 2, 3))
+ await checkDestroyed(someStream)
+
+ const everyStream = oneTo5()
+ await everyStream.every(common.mustCall((x) => x < 3, 3))
+ await checkDestroyed(everyStream)
+
+ const findStream = oneTo5()
+ await findStream.find(common.mustCall((x) => x > 1, 2))
+ await checkDestroyed(findStream)
+
+ // When short circuit isn't possible the whole stream is iterated
+ await oneTo5().some(common.mustCall(() => false, 5))
+ await oneTo5().every(common.mustCall(() => true, 5))
+ await oneTo5().find(common.mustCall(() => false, 5))
+ }
+
+ {
+ // Some, find, and every short circuit async stream/predicate
+ const someStream = oneTo5Async()
+ await someStream.some(common.mustCall(async (x) => x > 2, 3))
+ await checkDestroyed(someStream)
+
+ const everyStream = oneTo5Async()
+ await everyStream.every(common.mustCall(async (x) => x < 3, 3))
+ await checkDestroyed(everyStream)
+
+ const findStream = oneTo5Async()
+ await findStream.find(common.mustCall(async (x) => x > 1, 2))
+ await checkDestroyed(findStream)
+
+ // When short circuit isn't possible the whole stream is iterated
+ await oneTo5Async().some(common.mustCall(async () => false, 5))
+ await oneTo5Async().every(common.mustCall(async () => true, 5))
+ await oneTo5Async().find(common.mustCall(async () => false, 5))
+ }
+}
+
+{
+ // Concurrency doesn't affect which value is found.
+ const found = await Readable.from([1, 2]).find(
+ async (val) => {
+ if (val === 1) {
+ await setTimeout(100)
+ }
+ return true
+ },
+ { concurrency: 2 }
+ )
+ assert.strictEqual(found, 1)
+}
+
+{
+ // Support for AbortSignal
+ for (const op of ['some', 'every', 'find']) {
+ {
+ const ac = new AbortController()
+ assert
+ .rejects(
+ Readable.from([1, 2, 3])[op](() => new Promise(() => {}), { signal: ac.signal }),
+ {
+ name: 'AbortError'
+ },
+ `${op} should abort correctly with sync abort`
+ )
+ .then(common.mustCall())
+ ac.abort()
+ }
+ {
+ // Support for pre-aborted AbortSignal
+ assert
+ .rejects(
+ Readable.from([1, 2, 3])[op](() => new Promise(() => {}), { signal: AbortSignal.abort() }),
+ {
+ name: 'AbortError'
+ },
+ `${op} should abort with pre-aborted abort controller`
+ )
+ .then(common.mustCall())
+ }
+ }
+}
+{
+ // Error cases
+ for (const op of ['some', 'every', 'find']) {
+ assert
+ .rejects(
+ async () => {
+ await Readable.from([1])[op](1)
+ },
+ /ERR_INVALID_ARG_TYPE/,
+ `${op} should throw for invalid function`
+ )
+ .then(common.mustCall())
+ assert
+ .rejects(
+ async () => {
+ await Readable.from([1])[op]((x) => x, {
+ concurrency: 'Foo'
+ })
+ },
+ /ERR_OUT_OF_RANGE/,
+ `${op} should throw for invalid concurrency`
+ )
+ .then(common.mustCall())
+ assert
+ .rejects(
+ async () => {
+ await Readable.from([1])[op]((x) => x, 1)
+ },
+ /ERR_INVALID_ARG_TYPE/,
+ `${op} should throw for invalid concurrency`
+ )
+ .then(common.mustCall())
+ assert
+ .rejects(
+ async () => {
+ await Readable.from([1])[op]((x) => x, {
+ signal: true
+ })
+ },
+ /ERR_INVALID_ARG_TYPE/,
+ `${op} should throw for invalid signal`
+ )
+ .then(common.mustCall())
+ }
+}
+{
+ for (const op of ['some', 'every', 'find']) {
+ const stream = oneTo5()
+ Object.defineProperty(stream, 'map', {
+ value: common.mustNotCall()
+ })
+ // Check that map isn't getting called.
+ stream[op](() => {})
+ }
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-toArray.js b/test/parallel/test-stream-toArray.js
new file mode 100644
index 0000000000..f4f87f0c95
--- /dev/null
+++ b/test/parallel/test-stream-toArray.js
@@ -0,0 +1,130 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ // Works on a synchronous stream
+ ;(async () => {
+ const tests = [
+ [],
+ [1],
+ [1, 2, 3],
+ Array(100)
+ .fill()
+ .map((_, i) => i)
+ ]
+ for (const test of tests) {
+ const stream = Readable.from(test)
+ const result = await stream.toArray()
+ assert.deepStrictEqual(result, test)
+ }
+ })().then(common.mustCall())
+}
+{
+ // Works on a non-object-mode stream
+ ;(async () => {
+ const firstBuffer = Buffer.from([1, 2, 3])
+ const secondBuffer = Buffer.from([4, 5, 6])
+ const stream = Readable.from([firstBuffer, secondBuffer], {
+ objectMode: false
+ })
+ const result = await stream.toArray()
+ assert.strictEqual(Array.isArray(result), true)
+ assert.deepStrictEqual(result, [firstBuffer, secondBuffer])
+ })().then(common.mustCall())
+}
+{
+ // Works on an asynchronous stream
+ ;(async () => {
+ const tests = [
+ [],
+ [1],
+ [1, 2, 3],
+ Array(100)
+ .fill()
+ .map((_, i) => i)
+ ]
+ for (const test of tests) {
+ const stream = Readable.from(test).map((x) => Promise.resolve(x))
+ const result = await stream.toArray()
+ assert.deepStrictEqual(result, test)
+ }
+ })().then(common.mustCall())
+}
+{
+ // Support for AbortSignal
+ const ac = new AbortController()
+ let stream
+ assert
+ .rejects(
+ async () => {
+ stream = Readable.from([1, 2, 3]).map(async (x) => {
+ if (x === 3) {
+ await new Promise(() => {}) // Explicitly do not pass signal here
+ }
+ return Promise.resolve(x)
+ })
+ await stream.toArray({
+ signal: ac.signal
+ })
+ },
+ {
+ name: 'AbortError'
+ }
+ )
+ .then(
+ common.mustCall(() => {
+ // Only stops toArray, does not destroy the stream
+ assert(stream.destroyed, false)
+ })
+ )
+ ac.abort()
+}
+{
+ // Test result is a Promise
+ const result = Readable.from([1, 2, 3, 4, 5]).toArray()
+ assert.strictEqual(result instanceof Promise, true)
+}
+{
+ // Error cases
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).toArray(1)
+ }, /ERR_INVALID_ARG_TYPE/)
+ .then(common.mustCall())
+ assert
+ .rejects(async () => {
+ await Readable.from([1]).toArray({
+ signal: true
+ })
+ }, /ERR_INVALID_ARG_TYPE/)
+ .then(common.mustCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-callback-twice.js b/test/parallel/test-stream-transform-callback-twice.js
new file mode 100644
index 0000000000..f1437576ca
--- /dev/null
+++ b/test/parallel/test-stream-transform-callback-twice.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Transform } = require('../../lib/ours/index')
+const stream = new Transform({
+ transform(chunk, enc, cb) {
+ cb()
+ cb()
+ }
+})
+stream.on(
+ 'error',
+ common.expectsError({
+ name: 'Error',
+ message: 'Callback called multiple times',
+ code: 'ERR_MULTIPLE_CALLBACK'
+ })
+)
+stream.write('foo')
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js
new file mode 100644
index 0000000000..9ab0c48c7b
--- /dev/null
+++ b/test/parallel/test-stream-transform-constructor-set-methods.js
@@ -0,0 +1,50 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Transform } = require('../../lib/ours/index')
+const t = new Transform()
+assert.throws(
+ () => {
+ t.end(Buffer.from('blerg'))
+ },
+ {
+ name: 'Error',
+ code: 'ERR_METHOD_NOT_IMPLEMENTED',
+ message: 'The _transform() method is not implemented'
+ }
+)
+const _transform = common.mustCall((chunk, _, next) => {
+ next()
+})
+const _final = common.mustCall((next) => {
+ next()
+})
+const _flush = common.mustCall((next) => {
+ next()
+})
+const t2 = new Transform({
+ transform: _transform,
+ flush: _flush,
+ final: _final
+})
+assert.strictEqual(t2._transform, _transform)
+assert.strictEqual(t2._flush, _flush)
+assert.strictEqual(t2._final, _final)
+t2.end(Buffer.from('blerg'))
+t2.resume()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-destroy.js b/test/parallel/test-stream-transform-destroy.js
new file mode 100644
index 0000000000..b635df131f
--- /dev/null
+++ b/test/parallel/test-stream-transform-destroy.js
@@ -0,0 +1,137 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Transform } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform.resume()
+ transform.on('end', common.mustNotCall())
+ transform.on('close', common.mustCall())
+ transform.on('finish', common.mustNotCall())
+ transform.destroy()
+}
+{
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform.resume()
+ const expected = new Error('kaboom')
+ transform.on('end', common.mustNotCall())
+ transform.on('finish', common.mustNotCall())
+ transform.on('close', common.mustCall())
+ transform.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ transform.destroy(expected)
+}
+{
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb(err)
+ }, 1)
+ const expected = new Error('kaboom')
+ transform.on('finish', common.mustNotCall('no finish event'))
+ transform.on('close', common.mustCall())
+ transform.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ transform.destroy(expected)
+}
+{
+ const expected = new Error('kaboom')
+ const transform = new Transform({
+ transform(chunk, enc, cb) {},
+ destroy: common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb()
+ }, 1)
+ })
+ transform.resume()
+ transform.on('end', common.mustNotCall('no end event'))
+ transform.on('close', common.mustCall())
+ transform.on('finish', common.mustNotCall('no finish event'))
+
+ // Error is swallowed by the custom _destroy
+ transform.on('error', common.mustNotCall('no error event'))
+ transform.destroy(expected)
+}
+{
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb()
+ }, 1)
+ transform.destroy()
+}
+{
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ transform.resume()
+ transform._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ process.nextTick(() => {
+ this.push(null)
+ this.end()
+ cb()
+ })
+ }, 1)
+ const fail = common.mustNotCall('no event')
+ transform.on('finish', fail)
+ transform.on('end', fail)
+ transform.on('close', common.mustCall())
+ transform.destroy()
+ transform.removeListener('end', fail)
+ transform.removeListener('finish', fail)
+ transform.on('end', common.mustCall())
+ transform.on('finish', common.mustNotCall())
+}
+{
+ const transform = new Transform({
+ transform(chunk, enc, cb) {}
+ })
+ const expected = new Error('kaboom')
+ transform._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb(expected)
+ }, 1)
+ transform.on('close', common.mustCall())
+ transform.on('finish', common.mustNotCall('no finish event'))
+ transform.on('end', common.mustNotCall('no end event'))
+ transform.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ transform.destroy()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-final-sync.js b/test/parallel/test-stream-transform-final-sync.js
new file mode 100644
index 0000000000..c532898202
--- /dev/null
+++ b/test/parallel/test-stream-transform-final-sync.js
@@ -0,0 +1,136 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+let state = 0
+
+// What you do
+//
+// const stream = new stream.Transform({
+// transform: function transformCallback(chunk, _, next) {
+// // part 1
+// this.push(chunk);
+// //part 2
+// next();
+// },
+// final: function endCallback(done) {
+// // part 1
+// process.nextTick(function () {
+// // part 2
+// done();
+// });
+// },
+// flush: function flushCallback(done) {
+// // part 1
+// process.nextTick(function () {
+// // part 2
+// done();
+// });
+// }
+// });
+// t.on('data', dataListener);
+// t.on('end', endListener);
+// t.on('finish', finishListener);
+// t.write(1);
+// t.write(4);
+// t.end(7, endMethodCallback);
+//
+// The order things are called
+//
+// 1. transformCallback part 1
+// 2. dataListener
+// 3. transformCallback part 2
+// 4. transformCallback part 1
+// 5. dataListener
+// 6. transformCallback part 2
+// 7. transformCallback part 1
+// 8. dataListener
+// 9. transformCallback part 2
+// 10. finalCallback part 1
+// 11. finalCallback part 2
+// 12. flushCallback part 1
+// 13. finishListener
+// 14. endMethodCallback
+// 15. flushCallback part 2
+// 16. endListener
+
+const t = new stream.Transform({
+ objectMode: true,
+ transform: common.mustCall(function (chunk, _, next) {
+ // transformCallback part 1
+ assert.strictEqual(++state, chunk)
+ this.push(state)
+ // transformCallback part 2
+ assert.strictEqual(++state, chunk + 2)
+ process.nextTick(next)
+ }, 3),
+ final: common.mustCall(function (done) {
+ state++
+ // finalCallback part 1
+ assert.strictEqual(state, 10)
+ state++
+ // finalCallback part 2
+ assert.strictEqual(state, 11)
+ done()
+ }, 1),
+ flush: common.mustCall(function (done) {
+ state++
+    // flushCallback part 1
+ assert.strictEqual(state, 12)
+ process.nextTick(function () {
+ state++
+      // flushCallback part 2
+ assert.strictEqual(state, 13)
+ done()
+ })
+ }, 1)
+})
+t.on(
+ 'finish',
+ common.mustCall(function () {
+ state++
+ // finishListener
+ assert.strictEqual(state, 15)
+ }, 1)
+)
+t.on(
+ 'end',
+ common.mustCall(function () {
+ state++
+ // endEvent
+ assert.strictEqual(state, 16)
+ }, 1)
+)
+t.on(
+ 'data',
+ common.mustCall(function (d) {
+ // dataListener
+ assert.strictEqual(++state, d + 1)
+ }, 3)
+)
+t.write(1)
+t.write(4)
+t.end(
+ 7,
+ common.mustCall(function () {
+ state++
+ // endMethodCallback
+ assert.strictEqual(state, 14)
+ }, 1)
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-final.js b/test/parallel/test-stream-transform-final.js
new file mode 100644
index 0000000000..d8536708e3
--- /dev/null
+++ b/test/parallel/test-stream-transform-final.js
@@ -0,0 +1,138 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+let state = 0
+
+// What you do:
+//
+// const stream = new stream.Transform({
+// transform: function transformCallback(chunk, _, next) {
+// // part 1
+// this.push(chunk);
+// //part 2
+// next();
+// },
+// final: function endCallback(done) {
+// // part 1
+// process.nextTick(function () {
+// // part 2
+// done();
+// });
+// },
+// flush: function flushCallback(done) {
+// // part 1
+// process.nextTick(function () {
+// // part 2
+// done();
+// });
+// }
+// });
+// t.on('data', dataListener);
+// t.on('end', endListener);
+// t.on('finish', finishListener);
+// t.write(1);
+// t.write(4);
+// t.end(7, endMethodCallback);
+//
+// The order things are called
+
+// 1. transformCallback part 1
+// 2. dataListener
+// 3. transformCallback part 2
+// 4. transformCallback part 1
+// 5. dataListener
+// 6. transformCallback part 2
+// 7. transformCallback part 1
+// 8. dataListener
+// 9. transformCallback part 2
+// 10. finalCallback part 1
+// 11. finalCallback part 2
+// 12. flushCallback part 1
+// 13. finishListener
+// 14. endMethodCallback
+// 15. flushCallback part 2
+// 16. endListener
+
+const t = new stream.Transform({
+ objectMode: true,
+ transform: common.mustCall(function (chunk, _, next) {
+ // transformCallback part 1
+ assert.strictEqual(++state, chunk)
+ this.push(state)
+ // transformCallback part 2
+ assert.strictEqual(++state, chunk + 2)
+ process.nextTick(next)
+ }, 3),
+ final: common.mustCall(function (done) {
+ state++
+ // finalCallback part 1
+ assert.strictEqual(state, 10)
+ setTimeout(function () {
+ state++
+ // finalCallback part 2
+ assert.strictEqual(state, 11)
+ done()
+ }, 100)
+ }, 1),
+ flush: common.mustCall(function (done) {
+ state++
+ // flushCallback part 1
+ assert.strictEqual(state, 12)
+ process.nextTick(function () {
+ state++
+ // flushCallback part 2
+ assert.strictEqual(state, 13)
+ done()
+ })
+ }, 1)
+})
+t.on(
+ 'finish',
+ common.mustCall(function () {
+ state++
+ // finishListener
+ assert.strictEqual(state, 15)
+ }, 1)
+)
+t.on(
+ 'end',
+ common.mustCall(function () {
+ state++
+ // end event
+ assert.strictEqual(state, 16)
+ }, 1)
+)
+t.on(
+ 'data',
+ common.mustCall(function (d) {
+ // dataListener
+ assert.strictEqual(++state, d + 1)
+ }, 3)
+)
+t.write(1)
+t.write(4)
+t.end(
+ 7,
+ common.mustCall(function () {
+ state++
+ // endMethodCallback
+ assert.strictEqual(state, 14)
+ }, 1)
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-flush-data.js b/test/parallel/test-stream-transform-flush-data.js
new file mode 100644
index 0000000000..de90e053a7
--- /dev/null
+++ b/test/parallel/test-stream-transform-flush-data.js
@@ -0,0 +1,35 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const Transform = require('../../lib/ours/index').Transform
+const expected = 'asdf'
+function _transform(d, e, n) {
+ n()
+}
+function _flush(n) {
+ n(null, expected)
+}
+const t = new Transform({
+ transform: _transform,
+ flush: _flush
+})
+t.end(Buffer.from('blerg'))
+t.on('data', (data) => {
+ assert.strictEqual(data.toString(), expected)
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-hwm0.js b/test/parallel/test-stream-transform-hwm0.js
new file mode 100644
index 0000000000..30fafec0bc
--- /dev/null
+++ b/test/parallel/test-stream-transform-hwm0.js
@@ -0,0 +1,52 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Transform } = require('../../lib/ours/index')
+const t = new Transform({
+ objectMode: true,
+ highWaterMark: 0,
+ transform(chunk, enc, callback) {
+ process.nextTick(() => callback(null, chunk, enc))
+ }
+})
+assert.strictEqual(t.write(1), false)
+t.on(
+ 'drain',
+ common.mustCall(() => {
+ assert.strictEqual(t.write(2), false)
+ t.end()
+ })
+)
+t.once(
+ 'readable',
+ common.mustCall(() => {
+ assert.strictEqual(t.read(), 1)
+ setImmediate(
+ common.mustCall(() => {
+ assert.strictEqual(t.read(), null)
+ t.once(
+ 'readable',
+ common.mustCall(() => {
+ assert.strictEqual(t.read(), 2)
+ })
+ )
+ })
+ )
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js
new file mode 100644
index 0000000000..7942c64421
--- /dev/null
+++ b/test/parallel/test-stream-transform-objectmode-falsey-value.js
@@ -0,0 +1,73 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const PassThrough = stream.PassThrough
+const src = new PassThrough({
+ objectMode: true
+})
+const tx = new PassThrough({
+ objectMode: true
+})
+const dest = new PassThrough({
+ objectMode: true
+})
+const expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+const results = []
+dest.on(
+ 'data',
+ common.mustCall(function (x) {
+ results.push(x)
+ }, expect.length)
+)
+src.pipe(tx).pipe(dest)
+let i = -1
+const int = setInterval(
+ common.mustCall(function () {
+ if (results.length === expect.length) {
+ src.end()
+ clearInterval(int)
+ assert.deepStrictEqual(results, expect)
+ } else {
+ src.write(i++)
+ }
+ }, expect.length + 1),
+ 1
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-split-highwatermark.js b/test/parallel/test-stream-transform-split-highwatermark.js
new file mode 100644
index 0000000000..7ab990a4fd
--- /dev/null
+++ b/test/parallel/test-stream-transform-split-highwatermark.js
@@ -0,0 +1,111 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Transform, Readable, Writable } = require('../../lib/ours/index')
+const DEFAULT = 16 * 1024
+function testTransform(expectedReadableHwm, expectedWritableHwm, options) {
+ const t = new Transform(options)
+ assert.strictEqual(t._readableState.highWaterMark, expectedReadableHwm)
+ assert.strictEqual(t._writableState.highWaterMark, expectedWritableHwm)
+}
+
+// Test overriding defaultHwm
+testTransform(666, DEFAULT, {
+ readableHighWaterMark: 666
+})
+testTransform(DEFAULT, 777, {
+ writableHighWaterMark: 777
+})
+testTransform(666, 777, {
+ readableHighWaterMark: 666,
+ writableHighWaterMark: 777
+})
+
+// Test highWaterMark overriding
+testTransform(555, 555, {
+ highWaterMark: 555,
+ readableHighWaterMark: 666
+})
+testTransform(555, 555, {
+ highWaterMark: 555,
+ writableHighWaterMark: 777
+})
+testTransform(555, 555, {
+ highWaterMark: 555,
+ readableHighWaterMark: 666,
+ writableHighWaterMark: 777
+})
+
+// Test undefined, null
+;[undefined, null].forEach((v) => {
+ testTransform(DEFAULT, DEFAULT, {
+ readableHighWaterMark: v
+ })
+ testTransform(DEFAULT, DEFAULT, {
+ writableHighWaterMark: v
+ })
+ testTransform(666, DEFAULT, {
+ highWaterMark: v,
+ readableHighWaterMark: 666
+ })
+ testTransform(DEFAULT, 777, {
+ highWaterMark: v,
+ writableHighWaterMark: 777
+ })
+})
+
+// test NaN
+{
+ assert.throws(
+ () => {
+ new Transform({
+ readableHighWaterMark: NaN
+ })
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_INVALID_ARG_VALUE',
+ message: "The property 'options.readableHighWaterMark' is invalid. " + 'Received NaN'
+ }
+ )
+ assert.throws(
+ () => {
+ new Transform({
+ writableHighWaterMark: NaN
+ })
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_INVALID_ARG_VALUE',
+ message: "The property 'options.writableHighWaterMark' is invalid. " + 'Received NaN'
+ }
+ )
+}
+
+// Test non Duplex streams ignore the options
+{
+ const r = new Readable({
+ readableHighWaterMark: 666
+ })
+ assert.strictEqual(r._readableState.highWaterMark, DEFAULT)
+ const w = new Writable({
+ writableHighWaterMark: 777
+ })
+ assert.strictEqual(w._writableState.highWaterMark, DEFAULT)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js
new file mode 100644
index 0000000000..869596375f
--- /dev/null
+++ b/test/parallel/test-stream-transform-split-objectmode.js
@@ -0,0 +1,85 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const Transform = require('../../lib/ours/index').Transform
+const parser = new Transform({
+ readableObjectMode: true
+})
+assert(parser._readableState.objectMode)
+assert(!parser._writableState.objectMode)
+assert.strictEqual(parser.readableHighWaterMark, 16)
+assert.strictEqual(parser.writableHighWaterMark, 16 * 1024)
+assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark)
+assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark)
+parser._transform = function (chunk, enc, callback) {
+ callback(null, {
+ val: chunk[0]
+ })
+}
+let parsed
+parser.on('data', function (obj) {
+ parsed = obj
+})
+parser.end(Buffer.from([42]))
+process.on('exit', function () {
+ assert.strictEqual(parsed.val, 42)
+})
+const serializer = new Transform({
+ writableObjectMode: true
+})
+assert(!serializer._readableState.objectMode)
+assert(serializer._writableState.objectMode)
+assert.strictEqual(serializer.readableHighWaterMark, 16 * 1024)
+assert.strictEqual(serializer.writableHighWaterMark, 16)
+assert.strictEqual(parser.readableHighWaterMark, parser._readableState.highWaterMark)
+assert.strictEqual(parser.writableHighWaterMark, parser._writableState.highWaterMark)
+serializer._transform = function (obj, _, callback) {
+ callback(null, Buffer.from([obj.val]))
+}
+let serialized
+serializer.on('data', function (chunk) {
+ serialized = chunk
+})
+serializer.write({
+ val: 42
+})
+process.on('exit', function () {
+ assert.strictEqual(serialized[0], 42)
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-uint8array.js b/test/parallel/test-stream-uint8array.js
new file mode 100644
index 0000000000..d770a86828
--- /dev/null
+++ b/test/parallel/test-stream-uint8array.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+const ABC = new Uint8Array([0x41, 0x42, 0x43])
+const DEF = new Uint8Array([0x44, 0x45, 0x46])
+const GHI = new Uint8Array([0x47, 0x48, 0x49])
+{
+ // Simple Writable test.
+
+ let n = 0
+ const writable = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert(chunk instanceof Buffer)
+ if (n++ === 0) {
+ assert.strictEqual(String(chunk), 'ABC')
+ } else {
+ assert.strictEqual(String(chunk), 'DEF')
+ }
+ cb()
+ }, 2)
+ })
+ writable.write(ABC)
+ writable.end(DEF)
+}
+{
+ // Writable test, pass in Uint8Array in object mode.
+
+ const writable = new Writable({
+ objectMode: true,
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert(!(chunk instanceof Buffer))
+ assert(chunk instanceof Uint8Array)
+ assert.strictEqual(chunk, ABC)
+ assert.strictEqual(encoding, 'utf8')
+ cb()
+ })
+ })
+ writable.end(ABC)
+}
+{
+ // Writable test, multiple writes carried out via writev.
+ let callback
+ const writable = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ assert(chunk instanceof Buffer)
+ assert.strictEqual(encoding, 'buffer')
+ assert.strictEqual(String(chunk), 'ABC')
+ callback = cb
+ }),
+ writev: common.mustCall((chunks, cb) => {
+ assert.strictEqual(chunks.length, 2)
+ assert.strictEqual(chunks[0].encoding, 'buffer')
+ assert.strictEqual(chunks[1].encoding, 'buffer')
+ assert.strictEqual(chunks[0].chunk + chunks[1].chunk, 'DEFGHI')
+ })
+ })
+ writable.write(ABC)
+ writable.write(DEF)
+ writable.end(GHI)
+ callback()
+}
+{
+ // Simple Readable test.
+ const readable = new Readable({
+ read() {}
+ })
+ readable.push(DEF)
+ readable.unshift(ABC)
+ const buf = readable.read()
+ assert(buf instanceof Buffer)
+ assert.deepStrictEqual([...buf], [...ABC, ...DEF])
+}
+{
+ // Readable test, setEncoding.
+ const readable = new Readable({
+ read() {}
+ })
+ readable.setEncoding('utf8')
+ readable.push(DEF)
+ readable.unshift(ABC)
+ const out = readable.read()
+ assert.strictEqual(out, 'ABCDEF')
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-unpipe-event.js b/test/parallel/test-stream-unpipe-event.js
new file mode 100644
index 0000000000..58a3bbf17d
--- /dev/null
+++ b/test/parallel/test-stream-unpipe-event.js
@@ -0,0 +1,101 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Writable, Readable } = require('../../lib/ours/index')
+class NullWriteable extends Writable {
+ _write(chunk, encoding, callback) {
+ return callback()
+ }
+}
+class QuickEndReadable extends Readable {
+ _read() {
+ this.push(null)
+ }
+}
+class NeverEndReadable extends Readable {
+ _read() {}
+}
+{
+ const dest = new NullWriteable()
+ const src = new QuickEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustCall())
+ src.pipe(dest)
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 0)
+ })
+}
+{
+ const dest = new NullWriteable()
+ const src = new NeverEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted'))
+ src.pipe(dest)
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 1)
+ })
+}
+{
+ const dest = new NullWriteable()
+ const src = new NeverEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustCall())
+ src.pipe(dest)
+ src.unpipe(dest)
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 0)
+ })
+}
+{
+ const dest = new NullWriteable()
+ const src = new QuickEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustCall())
+ src.pipe(dest, {
+ end: false
+ })
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 0)
+ })
+}
+{
+ const dest = new NullWriteable()
+ const src = new NeverEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustNotCall('unpipe should not have been emitted'))
+ src.pipe(dest, {
+ end: false
+ })
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 1)
+ })
+}
+{
+ const dest = new NullWriteable()
+ const src = new NeverEndReadable()
+ dest.on('pipe', common.mustCall())
+ dest.on('unpipe', common.mustCall())
+ src.pipe(dest, {
+ end: false
+ })
+ src.unpipe(dest)
+ setImmediate(() => {
+ assert.strictEqual(src._readableState.pipes.length, 0)
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js
new file mode 100644
index 0000000000..2e9c312fad
--- /dev/null
+++ b/test/parallel/test-stream-unshift-empty-chunk.js
@@ -0,0 +1,92 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+
+// This test verifies that stream.unshift(Buffer.alloc(0)) or
+// stream.unshift('') does not set state.reading=false.
+const Readable = require('../../lib/ours/index').Readable
+const r = new Readable()
+let nChunks = 10
+const chunk = Buffer.alloc(10, 'x')
+r._read = function (n) {
+ setImmediate(() => {
+ r.push(--nChunks === 0 ? null : chunk)
+ })
+}
+let readAll = false
+const seen = []
+r.on('readable', () => {
+ let chunk
+ while ((chunk = r.read()) !== null) {
+ seen.push(chunk.toString())
+ // Simulate only reading a certain amount of the data,
+ // and then putting the rest of the chunk back into the
+ // stream, like a parser might do. We just fill it with
+ // 'y' so that it's easy to see which bits were touched,
+ // and which were not.
+ const putBack = Buffer.alloc(readAll ? 0 : 5, 'y')
+ readAll = !readAll
+ r.unshift(putBack)
+ }
+})
+const expect = [
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy',
+ 'xxxxxxxxxx',
+ 'yyyyy'
+]
+r.on('end', () => {
+ assert.deepStrictEqual(seen, expect)
+ silentConsole.log('ok')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js
new file mode 100644
index 0000000000..75c9b6b838
--- /dev/null
+++ b/test/parallel/test-stream-unshift-read-race.js
@@ -0,0 +1,149 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+
+// This test verifies that:
+// 1. unshift() does not cause colliding _read() calls.
+// 2. unshift() after the 'end' event is an error, but after the EOF
+// signalling null, it is ok, and just creates a new readable chunk.
+// 3. push() after the EOF signaling null is an error.
+// 4. _read() is not called after pushing the EOF null chunk.
+
+const stream = require('../../lib/ours/index')
+const hwm = 10
+const r = stream.Readable({
+ highWaterMark: hwm,
+ autoDestroy: false
+})
+const chunks = 10
+const data = Buffer.allocUnsafe(chunks * hwm + Math.ceil(hwm / 2))
+for (let i = 0; i < data.length; i++) {
+ const c = 'asdf'.charCodeAt(i % 4)
+ data[i] = c
+}
+let pos = 0
+let pushedNull = false
+r._read = function (n) {
+ assert(!pushedNull, '_read after null push')
+
+ // Every third chunk is fast
+ push(!(chunks % 3))
+ function push(fast) {
+ assert(!pushedNull, 'push() after null push')
+ const c = pos >= data.length ? null : data.slice(pos, pos + n)
+ pushedNull = c === null
+ if (fast) {
+ pos += n
+ r.push(c)
+ if (c === null) pushError()
+ } else {
+ setTimeout(function () {
+ pos += n
+ r.push(c)
+ if (c === null) pushError()
+ }, 1)
+ }
+ }
+}
+function pushError() {
+ r.unshift(Buffer.allocUnsafe(1))
+ w.end()
+ assert.throws(
+ () => {
+ r.push(Buffer.allocUnsafe(1))
+ },
+ {
+ code: 'ERR_STREAM_PUSH_AFTER_EOF',
+ name: 'Error',
+ message: 'stream.push() after EOF'
+ }
+ )
+}
+const w = stream.Writable()
+const written = []
+w._write = function (chunk, encoding, cb) {
+ written.push(chunk.toString())
+ cb()
+}
+r.on('end', common.mustNotCall())
+r.on('readable', function () {
+ let chunk
+ while (null !== (chunk = r.read(10))) {
+ w.write(chunk)
+ if (chunk.length > 4) r.unshift(Buffer.from('1234'))
+ }
+})
+w.on(
+ 'finish',
+ common.mustCall(function () {
+ // Each chunk should start with 1234, and then be asfdasdfasdf...
+ // The first got pulled out before the first unshift('1234'), so it's
+ // lacking that piece.
+ assert.strictEqual(written[0], 'asdfasdfas')
+ let asdf = 'd'
+ silentConsole.error(`0: ${written[0]}`)
+ for (let i = 1; i < written.length; i++) {
+ silentConsole.error(`${i.toString(32)}: ${written[i]}`)
+ assert.strictEqual(written[i].slice(0, 4), '1234')
+ for (let j = 4; j < written[i].length; j++) {
+ const c = written[i].charAt(j)
+ assert.strictEqual(c, asdf)
+ switch (asdf) {
+ case 'a':
+ asdf = 's'
+ break
+ case 's':
+ asdf = 'd'
+ break
+ case 'd':
+ asdf = 'f'
+ break
+ case 'f':
+ asdf = 'a'
+ break
+ }
+ }
+ }
+ })
+)
+process.on('exit', function () {
+ assert.strictEqual(written.length, 18)
+ silentConsole.log('ok')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-aborted.js b/test/parallel/test-stream-writable-aborted.js
new file mode 100644
index 0000000000..c2d298f21d
--- /dev/null
+++ b/test/parallel/test-stream-writable-aborted.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Writable } = require('../../lib/ours/index')
+{
+ const writable = new Writable({
+ write() {}
+ })
+ assert.strictEqual(writable.writableAborted, false)
+ writable.destroy()
+ assert.strictEqual(writable.writableAborted, true)
+}
+{
+ const writable = new Writable({
+ write() {}
+ })
+ assert.strictEqual(writable.writableAborted, false)
+ writable.end()
+ writable.destroy()
+ assert.strictEqual(writable.writableAborted, true)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js
new file mode 100644
index 0000000000..31935af345
--- /dev/null
+++ b/test/parallel/test-stream-writable-change-default-encoding.js
@@ -0,0 +1,106 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+class MyWritable extends stream.Writable {
+ constructor(fn, options) {
+ super(options)
+ this.fn = fn
+ }
+ _write(chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+ }
+}
+;(function defaultCondingIsUtf8() {
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert.strictEqual(enc, 'utf8')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.write('foo')
+ m.end()
+})()
+;(function changeDefaultEncodingToAscii() {
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert.strictEqual(enc, 'ascii')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.setDefaultEncoding('ascii')
+ m.write('bar')
+ m.end()
+})()
+
+// Change default encoding to invalid value.
+assert.throws(
+ () => {
+ const m = new MyWritable((isBuffer, type, enc) => {}, {
+ decodeStrings: false
+ })
+ m.setDefaultEncoding({})
+ m.write('bar')
+ m.end()
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_UNKNOWN_ENCODING',
+ message: 'Unknown encoding: {}'
+ }
+)
+;(function checkVariableCaseEncoding() {
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert.strictEqual(enc, 'ascii')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.setDefaultEncoding('AsCii')
+ m.write('bar')
+ m.end()
+})()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-clear-buffer.js b/test/parallel/test-stream-writable-clear-buffer.js
new file mode 100644
index 0000000000..7be01a19fa
--- /dev/null
+++ b/test/parallel/test-stream-writable-clear-buffer.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+// This test ensures that the _writeableState.bufferedRequestCount and
+// the actual buffered request count are the same.
+
+const common = require('../common')
+const Stream = require('../../lib/ours/index')
+const assert = require('assert')
+class StreamWritable extends Stream.Writable {
+ constructor() {
+ super({
+ objectMode: true
+ })
+ }
+
+ // Refs: https://github.com/nodejs/node/issues/6758
+ // We need a timer like on the original issue thread.
+ // Otherwise the code will never reach our test case.
+ _write(chunk, encoding, cb) {
+ setImmediate(cb)
+ }
+}
+const testStream = new StreamWritable()
+testStream.cork()
+for (let i = 1; i <= 5; i++) {
+ testStream.write(
+ i,
+ common.mustCall(() => {
+ assert.strictEqual(testStream._writableState.bufferedRequestCount, testStream._writableState.getBuffer().length)
+ })
+ )
+}
+testStream.end()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js
new file mode 100644
index 0000000000..20064e6838
--- /dev/null
+++ b/test/parallel/test-stream-writable-constructor-set-methods.js
@@ -0,0 +1,50 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Writable } = require('../../lib/ours/index')
+const bufferBlerg = Buffer.from('blerg')
+const w = new Writable()
+assert.throws(
+ () => {
+ w.end(bufferBlerg)
+ },
+ {
+ name: 'Error',
+ code: 'ERR_METHOD_NOT_IMPLEMENTED',
+ message: 'The _write() method is not implemented'
+ }
+)
+const _write = common.mustCall((chunk, _, next) => {
+ next()
+})
+const _writev = common.mustCall((chunks, next) => {
+ assert.strictEqual(chunks.length, 2)
+ next()
+})
+const w2 = new Writable({
+ write: _write,
+ writev: _writev
+})
+assert.strictEqual(w2._write, _write)
+assert.strictEqual(w2._writev, _writev)
+w2.write(bufferBlerg)
+w2.cork()
+w2.write(bufferBlerg)
+w2.write(bufferBlerg)
+w2.end()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js
new file mode 100644
index 0000000000..6d2deb9462
--- /dev/null
+++ b/test/parallel/test-stream-writable-decoded-encoding.js
@@ -0,0 +1,79 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+class MyWritable extends stream.Writable {
+ constructor(fn, options) {
+ super(options)
+ this.fn = fn
+ }
+ _write(chunk, encoding, callback) {
+ this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding)
+ callback()
+ }
+}
+{
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert(isBuffer)
+ assert.strictEqual(type, 'object')
+ assert.strictEqual(enc, 'buffer')
+ },
+ {
+ decodeStrings: true
+ }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+}
+{
+ const m = new MyWritable(
+ function (isBuffer, type, enc) {
+ assert(!isBuffer)
+ assert.strictEqual(type, 'string')
+ assert.strictEqual(enc, 'utf8')
+ },
+ {
+ decodeStrings: false
+ }
+ )
+ m.write('some-text', 'utf8')
+ m.end()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-destroy.js b/test/parallel/test-stream-writable-destroy.js
new file mode 100644
index 0000000000..cc5afb2f8d
--- /dev/null
+++ b/test/parallel/test-stream-writable-destroy.js
@@ -0,0 +1,566 @@
+/* replacement start */
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
+const EventTarget = globalThis.EventTarget || require('event-target-shim').EventTarget
+if (typeof AbortSignal.abort !== 'function') {
+ AbortSignal.abort = function () {
+ const controller = new AbortController()
+ controller.abort()
+ return controller.signal
+ }
+}
+/* replacement end */
+
+;('use strict')
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable, addAbortSignal } = require('../../lib/ours/index')
+const assert = require('assert')
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on('finish', common.mustNotCall())
+ write.on('close', common.mustCall())
+ write.destroy()
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ this.destroy(new Error('asd'))
+ cb()
+ }
+ })
+ write.on('error', common.mustCall())
+ write.on('finish', common.mustNotCall())
+ write.end('asd')
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ const expected = new Error('kaboom')
+ write.on('finish', common.mustNotCall())
+ write.on('close', common.mustCall())
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ write.destroy(expected)
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write._destroy = function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb(err)
+ }
+ const expected = new Error('kaboom')
+ write.on('finish', common.mustNotCall('no finish event'))
+ write.on('close', common.mustCall())
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ write.destroy(expected)
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ },
+ destroy: common.mustCall(function (err, cb) {
+ assert.strictEqual(err, expected)
+ cb()
+ })
+ })
+ const expected = new Error('kaboom')
+ write.on('finish', common.mustNotCall('no finish event'))
+ write.on('close', common.mustCall())
+
+ // Error is swallowed by the custom _destroy
+ write.on('error', common.mustNotCall('no error event'))
+ write.destroy(expected)
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb()
+ })
+ write.destroy()
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ process.nextTick(() => {
+ this.end()
+ cb()
+ })
+ })
+ const fail = common.mustNotCall('no finish event')
+ write.on('finish', fail)
+ write.on('close', common.mustCall())
+ write.destroy()
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ const expected = new Error('kaboom')
+ write._destroy = common.mustCall(function (err, cb) {
+ assert.strictEqual(err, null)
+ cb(expected)
+ })
+ write.on('close', common.mustCall())
+ write.on('finish', common.mustNotCall('no finish event'))
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, expected)
+ })
+ )
+ write.destroy()
+ assert.strictEqual(write.destroyed, true)
+}
+{
+ // double error case
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ let ticked = false
+ write.on(
+ 'close',
+ common.mustCall(() => {
+ assert.strictEqual(ticked, true)
+ })
+ )
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.message, 'kaboom 1')
+ assert.strictEqual(write._writableState.errorEmitted, true)
+ })
+ )
+ const expected = new Error('kaboom 1')
+ write.destroy(expected)
+ write.destroy(new Error('kaboom 2'))
+ assert.strictEqual(write._writableState.errored, expected)
+ assert.strictEqual(write._writableState.errorEmitted, false)
+ assert.strictEqual(write.destroyed, true)
+ ticked = true
+}
+{
+ const writable = new Writable({
+ destroy: common.mustCall(function (err, cb) {
+ process.nextTick(cb, new Error('kaboom 1'))
+ }),
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ let ticked = false
+ writable.on(
+ 'close',
+ common.mustCall(() => {
+ writable.on('error', common.mustNotCall())
+ writable.destroy(new Error('hello'))
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(writable._writableState.errorEmitted, true)
+ })
+ )
+ writable.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.message, 'kaboom 1')
+ assert.strictEqual(writable._writableState.errorEmitted, true)
+ })
+ )
+ writable.destroy()
+ assert.strictEqual(writable.destroyed, true)
+ assert.strictEqual(writable._writableState.errored, null)
+ assert.strictEqual(writable._writableState.errorEmitted, false)
+
+ // Test case where `writable.destroy()` is called again with an error before
+ // the `_destroy()` callback is called.
+ writable.destroy(new Error('kaboom 2'))
+ assert.strictEqual(writable._writableState.errorEmitted, false)
+ assert.strictEqual(writable._writableState.errored, null)
+ ticked = true
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.destroyed = true
+ assert.strictEqual(write.destroyed, true)
+
+ // The internal destroy() mechanism should not be triggered
+ write.on('close', common.mustNotCall())
+ write.destroy()
+}
+{
+ function MyWritable() {
+ assert.strictEqual(this.destroyed, false)
+ this.destroyed = false
+ Writable.call(this)
+ }
+ Object.setPrototypeOf(MyWritable.prototype, Writable.prototype)
+ Object.setPrototypeOf(MyWritable, Writable)
+ new MyWritable()
+}
+{
+ // Destroy and destroy callback
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.destroy()
+ const expected = new Error('kaboom')
+ write.destroy(
+ expected,
+ common.mustCall((err) => {
+ assert.strictEqual(err, undefined)
+ })
+ )
+}
+{
+ // Checks that `._undestroy()` restores the state so that `final` will be
+ // called again.
+ const write = new Writable({
+ write: common.mustNotCall(),
+ final: common.mustCall((cb) => cb(), 2),
+ autoDestroy: true
+ })
+ write.end()
+ write.once(
+ 'close',
+ common.mustCall(() => {
+ write._undestroy()
+ write.end()
+ })
+ )
+}
+{
+ const write = new Writable()
+ write.destroy()
+ write.on('error', common.mustNotCall())
+ write.write(
+ 'asd',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_STREAM_DESTROYED',
+ message: 'Cannot call write after a stream was destroyed'
+ })
+ )
+}
+{
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on('error', common.mustNotCall())
+ write.cork()
+ write.write('asd', common.mustCall())
+ write.uncork()
+ write.cork()
+ write.write(
+ 'asd',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_STREAM_DESTROYED',
+ message: 'Cannot call write after a stream was destroyed'
+ })
+ )
+ write.destroy()
+ write.write(
+ 'asd',
+ common.expectsError({
+ name: 'Error',
+ code: 'ERR_STREAM_DESTROYED',
+ message: 'Cannot call write after a stream was destroyed'
+ })
+ )
+ write.uncork()
+}
+{
+ // Call end(cb) after error & destroy
+
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb(new Error('asd'))
+ }
+ })
+ write.on(
+ 'error',
+ common.mustCall(() => {
+ write.destroy()
+ let ticked = false
+ write.end(
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED')
+ })
+ )
+ ticked = true
+ })
+ )
+ write.write('asd')
+}
+{
+ // Call end(cb) after finish & destroy
+
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on(
+ 'finish',
+ common.mustCall(() => {
+ write.destroy()
+ let ticked = false
+ write.end(
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED')
+ })
+ )
+ ticked = true
+ })
+ )
+ write.end()
+}
+{
+ // Call end(cb) after error & destroy and don't trigger
+ // unhandled exception.
+
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ process.nextTick(cb)
+ }
+ })
+ const _err = new Error('asd')
+ write.once(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'asd')
+ })
+ )
+ write.end(
+ 'asd',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ write.destroy(_err)
+}
+{
+ // Call buffered write callback with error
+
+ const _err = new Error('asd')
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ process.nextTick(cb, _err)
+ },
+ autoDestroy: false
+ })
+ write.cork()
+ write.write(
+ 'asd',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ write.write(
+ 'asd',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ write.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ write.uncork()
+}
+{
+ // Ensure callback order.
+
+ let state = 0
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ // `setImmediate()` is used on purpose to ensure the callback is called
+ // after `process.nextTick()` callbacks.
+ setImmediate(cb)
+ }
+ })
+ write.write(
+ 'asd',
+ common.mustCall(() => {
+ assert.strictEqual(state++, 0)
+ })
+ )
+ write.write(
+ 'asd',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED')
+ assert.strictEqual(state++, 1)
+ })
+ )
+ write.destroy()
+}
+{
+ const write = new Writable({
+ autoDestroy: false,
+ write(chunk, enc, cb) {
+ cb()
+ cb()
+ }
+ })
+ write.on(
+ 'error',
+ common.mustCall(() => {
+ assert(write._writableState.errored)
+ })
+ )
+ write.write('asd')
+}
+{
+ const ac = new AbortController()
+ const write = addAbortSignal(
+ ac.signal,
+ new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ )
+ write.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ assert.strictEqual(write.destroyed, true)
+ })
+ )
+ write.write('asd')
+ ac.abort()
+}
+{
+ const ac = new AbortController()
+ const write = new Writable({
+ signal: ac.signal,
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ assert.strictEqual(write.destroyed, true)
+ })
+ )
+ write.write('asd')
+ ac.abort()
+}
+{
+ const signal = AbortSignal.abort()
+ const write = new Writable({
+ signal,
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.on(
+ 'error',
+ common.mustCall((e) => {
+ assert.strictEqual(e.name, 'AbortError')
+ assert.strictEqual(write.destroyed, true)
+ })
+ )
+}
+{
+ // Destroy twice
+ const write = new Writable({
+ write(chunk, enc, cb) {
+ cb()
+ }
+ })
+ write.end(common.mustCall())
+ write.destroy()
+ write.destroy()
+}
+{
+ // https://github.com/nodejs/node/issues/39356
+ const s = new Writable({
+ final() {}
+ })
+ const _err = new Error('oh no')
+ // Remove `callback` and it works
+ s.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ s.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ s.destroy(_err)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-end-cb-error.js b/test/parallel/test-stream-writable-end-cb-error.js
new file mode 100644
index 0000000000..17c23ae44c
--- /dev/null
+++ b/test/parallel/test-stream-writable-end-cb-error.js
@@ -0,0 +1,112 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+{
+ // Invoke end callback on failure.
+ const writable = new stream.Writable()
+ const _err = new Error('kaboom')
+ writable._write = (chunk, encoding, cb) => {
+ process.nextTick(cb, _err)
+ }
+ writable.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ writable.write('asd')
+ writable.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+ writable.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+ )
+}
+{
+ // Don't invoke end callback twice
+ const writable = new stream.Writable()
+ writable._write = (chunk, encoding, cb) => {
+ process.nextTick(cb)
+ }
+ let called = false
+ writable.end(
+ 'asd',
+ common.mustCall((err) => {
+ called = true
+ assert.strictEqual(err, undefined)
+ })
+ )
+ writable.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'kaboom')
+ })
+ )
+ writable.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(called, true)
+ writable.emit('error', new Error('kaboom'))
+ })
+ )
+}
+{
+ const w = new stream.Writable({
+ write(chunk, encoding, callback) {
+ setImmediate(callback)
+ },
+ finish(callback) {
+ setImmediate(callback)
+ }
+ })
+ w.end(
+ 'testing ended state',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ assert.strictEqual(w.destroyed, false)
+ assert.strictEqual(w.writableEnded, true)
+ w.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ assert.strictEqual(w.destroyed, false)
+ assert.strictEqual(w.writableEnded, true)
+ w.end(
+ 'end',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ assert.strictEqual(w.destroyed, true)
+ w.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ w.on('finish', common.mustNotCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-end-cb-uncaught.js b/test/parallel/test-stream-writable-end-cb-uncaught.js
new file mode 100644
index 0000000000..04d331c4e0
--- /dev/null
+++ b/test/parallel/test-stream-writable-end-cb-uncaught.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+process.on(
+ 'uncaughtException',
+ common.mustCall((err) => {
+ assert.strictEqual(err.message, 'kaboom')
+ })
+)
+const writable = new stream.Writable()
+const _err = new Error('kaboom')
+writable._write = (chunk, encoding, cb) => {
+ cb()
+}
+writable._final = (cb) => {
+ cb(_err)
+}
+writable.write('asd')
+writable.end(
+ common.mustCall((err) => {
+ assert.strictEqual(err, _err)
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-end-multiple.js b/test/parallel/test-stream-writable-end-multiple.js
new file mode 100644
index 0000000000..8986f083ef
--- /dev/null
+++ b/test/parallel/test-stream-writable-end-multiple.js
@@ -0,0 +1,39 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const writable = new stream.Writable()
+writable._write = (chunk, encoding, cb) => {
+ setTimeout(() => cb(), 10)
+}
+writable.end('testing ended state', common.mustCall())
+writable.end(common.mustCall())
+writable.on(
+ 'finish',
+ common.mustCall(() => {
+ let ticked = false
+ writable.end(
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_ALREADY_FINISHED')
+ })
+ )
+ ticked = true
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-ended-state.js b/test/parallel/test-stream-writable-ended-state.js
new file mode 100644
index 0000000000..0dbc4d1c2d
--- /dev/null
+++ b/test/parallel/test-stream-writable-ended-state.js
@@ -0,0 +1,44 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const writable = new stream.Writable()
+writable._write = (chunk, encoding, cb) => {
+ assert.strictEqual(writable._writableState.ended, false)
+ assert.strictEqual(writable._writableState.writable, undefined)
+ assert.strictEqual(writable.writableEnded, false)
+ cb()
+}
+assert.strictEqual(writable._writableState.ended, false)
+assert.strictEqual(writable._writableState.writable, undefined)
+assert.strictEqual(writable.writable, true)
+assert.strictEqual(writable.writableEnded, false)
+writable.end(
+ 'testing ended state',
+ common.mustCall(() => {
+ assert.strictEqual(writable._writableState.ended, true)
+ assert.strictEqual(writable._writableState.writable, undefined)
+ assert.strictEqual(writable.writable, false)
+ assert.strictEqual(writable.writableEnded, true)
+ })
+)
+assert.strictEqual(writable._writableState.ended, true)
+assert.strictEqual(writable._writableState.writable, undefined)
+assert.strictEqual(writable.writable, false)
+assert.strictEqual(writable.writableEnded, true)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-final-async.js b/test/parallel/test-stream-writable-final-async.js
new file mode 100644
index 0000000000..4be04adc5d
--- /dev/null
+++ b/test/parallel/test-stream-writable-final-async.js
@@ -0,0 +1,40 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Duplex } = require('../../lib/ours/index')
+const st = require('timers').setTimeout
+function setTimeout(ms) {
+ return new Promise((resolve) => {
+ st(resolve, ms)
+ })
+}
+{
+ class Foo extends Duplex {
+ async _final(callback) {
+ await setTimeout(common.platformTimeout(1))
+ callback()
+ }
+ _read() {}
+ }
+ const foo = new Foo()
+ foo._write = common.mustCall((chunk, encoding, cb) => {
+ cb()
+ })
+ foo.end('test', common.mustCall())
+ foo.on('error', common.mustNotCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-final-destroy.js b/test/parallel/test-stream-writable-final-destroy.js
new file mode 100644
index 0000000000..fd0c6a27f3
--- /dev/null
+++ b/test/parallel/test-stream-writable-final-destroy.js
@@ -0,0 +1,34 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable } = require('../../lib/ours/index')
+{
+ const w = new Writable({
+ write(chunk, encoding, callback) {
+ callback(null)
+ },
+ final(callback) {
+ queueMicrotask(callback)
+ }
+ })
+ w.end()
+ w.destroy()
+ w.on('prefinish', common.mustNotCall())
+ w.on('finish', common.mustNotCall())
+ w.on('close', common.mustCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-final-throw.js b/test/parallel/test-stream-writable-final-throw.js
new file mode 100644
index 0000000000..8a6d316c0f
--- /dev/null
+++ b/test/parallel/test-stream-writable-final-throw.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Duplex } = require('../../lib/ours/index')
+{
+ class Foo extends Duplex {
+ _final(callback) {
+ throw new Error('fhqwhgads')
+ }
+ _read() {}
+ }
+ const foo = new Foo()
+ foo._write = common.mustCall((chunk, encoding, cb) => {
+ cb()
+ })
+ foo.end(
+ 'test',
+ common.expectsError({
+ message: 'fhqwhgads'
+ })
+ )
+ foo.on('error', common.mustCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-finish-destroyed.js b/test/parallel/test-stream-writable-finish-destroyed.js
new file mode 100644
index 0000000000..7ef5eed4be
--- /dev/null
+++ b/test/parallel/test-stream-writable-finish-destroyed.js
@@ -0,0 +1,58 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable } = require('../../lib/ours/index')
+{
+ const w = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ cb()
+ })
+ )
+ })
+ })
+ w.on('finish', common.mustNotCall())
+ w.end('asd')
+ w.destroy()
+}
+{
+ const w = new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ w.on(
+ 'close',
+ common.mustCall(() => {
+ cb()
+ w.end()
+ })
+ )
+ })
+ })
+ w.on('finish', common.mustNotCall())
+ w.write('asd')
+ w.destroy()
+}
+{
+ const w = new Writable({
+ write() {}
+ })
+ w.on('finish', common.mustNotCall())
+ w.end()
+ w.destroy()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-finished-state.js b/test/parallel/test-stream-writable-finished-state.js
new file mode 100644
index 0000000000..2d1d8e8a7f
--- /dev/null
+++ b/test/parallel/test-stream-writable-finished-state.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const writable = new stream.Writable()
+writable._write = (chunk, encoding, cb) => {
+ // The state finished should start in false.
+ assert.strictEqual(writable._writableState.finished, false)
+ cb()
+}
+writable.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(writable._writableState.finished, true)
+ })
+)
+writable.end(
+ 'testing finished state',
+ common.mustCall(() => {
+ assert.strictEqual(writable._writableState.finished, true)
+ })
+)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-finished.js b/test/parallel/test-stream-writable-finished.js
new file mode 100644
index 0000000000..fe1cda8e09
--- /dev/null
+++ b/test/parallel/test-stream-writable-finished.js
@@ -0,0 +1,114 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable } = require('../../lib/ours/index')
+const assert = require('assert')
+
+// basic
+{
+ // Find it on Writable.prototype
+ assert(Reflect.has(Writable.prototype, 'writableFinished'))
+}
+
+// event
+{
+ const writable = new Writable()
+ writable._write = (chunk, encoding, cb) => {
+ // The state finished should start in false.
+ assert.strictEqual(writable.writableFinished, false)
+ cb()
+ }
+ writable.on(
+ 'finish',
+ common.mustCall(() => {
+ assert.strictEqual(writable.writableFinished, true)
+ })
+ )
+ writable.end(
+ 'testing finished state',
+ common.mustCall(() => {
+ assert.strictEqual(writable.writableFinished, true)
+ })
+ )
+}
+{
+ // Emit finish asynchronously.
+
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ }
+ })
+ w.end()
+ w.on('finish', common.mustCall())
+}
+{
+ // Emit prefinish synchronously.
+
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ }
+ })
+ let sync = true
+ w.on(
+ 'prefinish',
+ common.mustCall(() => {
+ assert.strictEqual(sync, true)
+ })
+ )
+ w.end()
+ sync = false
+}
+{
+ // Emit prefinish synchronously w/ final.
+
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ },
+ final(cb) {
+ cb()
+ }
+ })
+ let sync = true
+ w.on(
+ 'prefinish',
+ common.mustCall(() => {
+ assert.strictEqual(sync, true)
+ })
+ )
+ w.end()
+ sync = false
+}
+{
+ // Call _final synchronously.
+
+ let sync = true
+ const w = new Writable({
+ write(chunk, encoding, cb) {
+ cb()
+ },
+ final: common.mustCall((cb) => {
+ assert.strictEqual(sync, true)
+ cb()
+ })
+ })
+ w.end()
+ sync = false
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-invalid-chunk.js b/test/parallel/test-stream-writable-invalid-chunk.js
new file mode 100644
index 0000000000..32247b7c41
--- /dev/null
+++ b/test/parallel/test-stream-writable-invalid-chunk.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+function testWriteType(val, objectMode, code) {
+ const writable = new stream.Writable({
+ objectMode,
+ write: () => {}
+ })
+ writable.on('error', common.mustNotCall())
+ if (code) {
+ assert.throws(
+ () => {
+ writable.write(val)
+ },
+ {
+ code
+ }
+ )
+ } else {
+ writable.write(val)
+ }
+}
+testWriteType([], false, 'ERR_INVALID_ARG_TYPE')
+testWriteType({}, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(0, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(true, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(0.0, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(undefined, false, 'ERR_INVALID_ARG_TYPE')
+testWriteType(null, false, 'ERR_STREAM_NULL_VALUES')
+testWriteType([], true)
+testWriteType({}, true)
+testWriteType(0, true)
+testWriteType(true, true)
+testWriteType(0.0, true)
+testWriteType(undefined, true)
+testWriteType(null, true, 'ERR_STREAM_NULL_VALUES')
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-needdrain-state.js b/test/parallel/test-stream-writable-needdrain-state.js
new file mode 100644
index 0000000000..fea3566269
--- /dev/null
+++ b/test/parallel/test-stream-writable-needdrain-state.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+const assert = require('assert')
+const transform = new stream.Transform({
+ transform: _transform,
+ highWaterMark: 1
+})
+function _transform(chunk, encoding, cb) {
+ process.nextTick(() => {
+ assert.strictEqual(transform._writableState.needDrain, true)
+ cb()
+ })
+}
+assert.strictEqual(transform._writableState.needDrain, false)
+transform.write(
+ 'asdasd',
+ common.mustCall(() => {
+ assert.strictEqual(transform._writableState.needDrain, false)
+ })
+)
+assert.strictEqual(transform._writableState.needDrain, true)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-null.js b/test/parallel/test-stream-writable-null.js
new file mode 100644
index 0000000000..60706dd482
--- /dev/null
+++ b/test/parallel/test-stream-writable-null.js
@@ -0,0 +1,74 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+class MyWritable extends stream.Writable {
+ constructor(options) {
+ super({
+ autoDestroy: false,
+ ...options
+ })
+ }
+ _write(chunk, encoding, callback) {
+ assert.notStrictEqual(chunk, null)
+ callback()
+ }
+}
+{
+ const m = new MyWritable({
+ objectMode: true
+ })
+ m.on('error', common.mustNotCall())
+ assert.throws(
+ () => {
+ m.write(null)
+ },
+ {
+ code: 'ERR_STREAM_NULL_VALUES'
+ }
+ )
+}
+{
+ const m = new MyWritable()
+ m.on('error', common.mustNotCall())
+ assert.throws(
+ () => {
+ m.write(false)
+ },
+ {
+ code: 'ERR_INVALID_ARG_TYPE'
+ }
+ )
+}
+{
+ // Should not throw.
+ const m = new MyWritable({
+ objectMode: true
+ })
+ m.write(false, assert.ifError)
+}
+{
+ // Should not throw.
+ const m = new MyWritable({
+ objectMode: true
+ }).on('error', (e) => {
+ assert.ifError(e || new Error('should not get here'))
+ })
+ m.write(false, assert.ifError)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-properties.js b/test/parallel/test-stream-writable-properties.js
new file mode 100644
index 0000000000..b926687632
--- /dev/null
+++ b/test/parallel/test-stream-writable-properties.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Writable } = require('../../lib/ours/index')
+{
+ const w = new Writable()
+ assert.strictEqual(w.writableCorked, 0)
+ w.uncork()
+ assert.strictEqual(w.writableCorked, 0)
+ w.cork()
+ assert.strictEqual(w.writableCorked, 1)
+ w.cork()
+ assert.strictEqual(w.writableCorked, 2)
+ w.uncork()
+ assert.strictEqual(w.writableCorked, 1)
+ w.uncork()
+ assert.strictEqual(w.writableCorked, 0)
+ w.uncork()
+ assert.strictEqual(w.writableCorked, 0)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-samecb-singletick.js b/test/parallel/test-stream-writable-samecb-singletick.js
new file mode 100644
index 0000000000..e462dba854
--- /dev/null
+++ b/test/parallel/test-stream-writable-samecb-singletick.js
@@ -0,0 +1,52 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Console } = require('console')
+const { Writable } = require('../../lib/ours/index')
+const async_hooks = require('async_hooks')
+
+// Make sure that repeated calls to silentConsole.log(), and by extension
+// stream.write() for the underlying stream, allocate exactly 1 tick object.
+// At the time of writing, that is enough to ensure a flat memory profile
+// from repeated silentConsole.log() calls, rather than having callbacks pile up
+// over time, assuming that data can be written synchronously.
+// Refs: https://github.com/nodejs/node/issues/18013
+// Refs: https://github.com/nodejs/node/issues/18367
+
+const checkTickCreated = common.mustCall()
+const hook = async_hooks
+ .createHook({
+ init(id, type, triggerId, resource) {
+ if (type === 'TickObject') checkTickCreated()
+ }
+ })
+ .enable()
+const console = new Console(
+ new Writable({
+ write: common.mustCall((chunk, encoding, cb) => {
+ cb()
+ }, 100)
+ })
+)
+for (let i = 0; i < 100; i++) console.log(i)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ hook.disable()
+})
+/* replacement end */
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-writable.js b/test/parallel/test-stream-writable-writable.js
new file mode 100644
index 0000000000..66d46fdab4
--- /dev/null
+++ b/test/parallel/test-stream-writable-writable.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Writable } = require('../../lib/ours/index')
+{
+ const w = new Writable({
+ write() {}
+ })
+ assert.strictEqual(w.writable, true)
+ w.destroy()
+ assert.strictEqual(w.writable, false)
+}
+{
+ const w = new Writable({
+ write: common.mustCall((chunk, encoding, callback) => {
+ callback(new Error())
+ })
+ })
+ assert.strictEqual(w.writable, true)
+ w.write('asd')
+ assert.strictEqual(w.writable, false)
+ w.on('error', common.mustCall())
+}
+{
+ const w = new Writable({
+ write: common.mustCall((chunk, encoding, callback) => {
+ process.nextTick(() => {
+ callback(new Error())
+ assert.strictEqual(w.writable, false)
+ })
+ })
+ })
+ w.write('asd')
+ w.on('error', common.mustCall())
+}
+{
+ const w = new Writable({
+ write: common.mustNotCall()
+ })
+ assert.strictEqual(w.writable, true)
+ w.end()
+ assert.strictEqual(w.writable, false)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-write-cb-error.js b/test/parallel/test-stream-writable-write-cb-error.js
new file mode 100644
index 0000000000..f14e4da48f
--- /dev/null
+++ b/test/parallel/test-stream-writable-write-cb-error.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable } = require('../../lib/ours/index')
+const assert = require('assert')
+
+// Ensure callback is always invoked before
+// error is emitted. Regardless if error was
+// sync or async.
+
+{
+ let callbackCalled = false
+ // Sync Error
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ cb(new Error())
+ })
+ })
+ writable.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(callbackCalled, true)
+ })
+ )
+ writable.write(
+ 'hi',
+ common.mustCall(() => {
+ callbackCalled = true
+ })
+ )
+}
+{
+ let callbackCalled = false
+ // Async Error
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ process.nextTick(cb, new Error())
+ })
+ })
+ writable.on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(callbackCalled, true)
+ })
+ )
+ writable.write(
+ 'hi',
+ common.mustCall(() => {
+ callbackCalled = true
+ })
+ )
+}
+{
+ // Sync Error
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ cb(new Error())
+ })
+ })
+ writable.on('error', common.mustCall())
+ let cnt = 0
+  // Ensure we don't livelock on a sync error
+ while (writable.write('a')) cnt++
+ assert.strictEqual(cnt, 0)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-write-cb-twice.js b/test/parallel/test-stream-writable-write-cb-twice.js
new file mode 100644
index 0000000000..b2b6ff6986
--- /dev/null
+++ b/test/parallel/test-stream-writable-write-cb-twice.js
@@ -0,0 +1,74 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable } = require('../../lib/ours/index')
+{
+ // Sync + Sync
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ cb()
+ cb()
+ })
+ })
+ writable.write('hi')
+ writable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_MULTIPLE_CALLBACK',
+ name: 'Error'
+ })
+ )
+}
+{
+ // Sync + Async
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ cb()
+ process.nextTick(() => {
+ cb()
+ })
+ })
+ })
+ writable.write('hi')
+ writable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_MULTIPLE_CALLBACK',
+ name: 'Error'
+ })
+ )
+}
+{
+ // Async + Async
+ const writable = new Writable({
+ write: common.mustCall((buf, enc, cb) => {
+ process.nextTick(cb)
+ process.nextTick(() => {
+ cb()
+ })
+ })
+ })
+ writable.write('hi')
+ writable.on(
+ 'error',
+ common.expectsError({
+ code: 'ERR_MULTIPLE_CALLBACK',
+ name: 'Error'
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-write-error.js b/test/parallel/test-stream-writable-write-error.js
new file mode 100644
index 0000000000..2ba22f083c
--- /dev/null
+++ b/test/parallel/test-stream-writable-write-error.js
@@ -0,0 +1,91 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Writable } = require('../../lib/ours/index')
+function expectError(w, args, code, sync) {
+ if (sync) {
+ if (code) {
+ assert.throws(() => w.write(...args), {
+ code
+ })
+ } else {
+ w.write(...args)
+ }
+ } else {
+ let errorCalled = false
+ let ticked = false
+ w.write(
+ ...args,
+ common.mustCall((err) => {
+ assert.strictEqual(ticked, true)
+ assert.strictEqual(errorCalled, false)
+ assert.strictEqual(err.code, code)
+ })
+ )
+ ticked = true
+ w.on(
+ 'error',
+ common.mustCall((err) => {
+ errorCalled = true
+ assert.strictEqual(err.code, code)
+ })
+ )
+ }
+}
+function test(autoDestroy) {
+ {
+ const w = new Writable({
+ autoDestroy,
+ _write() {}
+ })
+ w.end()
+ expectError(w, ['asd'], 'ERR_STREAM_WRITE_AFTER_END')
+ }
+ {
+ const w = new Writable({
+ autoDestroy,
+ _write() {}
+ })
+ w.destroy()
+ }
+ {
+ const w = new Writable({
+ autoDestroy,
+ _write() {}
+ })
+ expectError(w, [null], 'ERR_STREAM_NULL_VALUES', true)
+ }
+ {
+ const w = new Writable({
+ autoDestroy,
+ _write() {}
+ })
+ expectError(w, [{}], 'ERR_INVALID_ARG_TYPE', true)
+ }
+ {
+ const w = new Writable({
+ decodeStrings: false,
+ autoDestroy,
+ _write() {}
+ })
+ expectError(w, ['asd', 'noencoding'], 'ERR_UNKNOWN_ENCODING', true)
+ }
+}
+test(false)
+test(true)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writable-write-writev-finish.js b/test/parallel/test-stream-writable-write-writev-finish.js
new file mode 100644
index 0000000000..77e782355d
--- /dev/null
+++ b/test/parallel/test-stream-writable-write-writev-finish.js
@@ -0,0 +1,151 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+
+// Ensure consistency between the finish event when using cork()
+// and writev and when not using them
+
+{
+ const writable = new stream.Writable()
+ writable._write = (chunks, encoding, cb) => {
+ cb(new Error('write test error'))
+ }
+ writable.on('finish', common.mustNotCall())
+ writable.on('prefinish', common.mustNotCall())
+ writable.on(
+ 'error',
+ common.mustCall((er) => {
+ assert.strictEqual(er.message, 'write test error')
+ })
+ )
+ writable.end('test')
+}
+{
+ const writable = new stream.Writable()
+ writable._write = (chunks, encoding, cb) => {
+ setImmediate(cb, new Error('write test error'))
+ }
+ writable.on('finish', common.mustNotCall())
+ writable.on('prefinish', common.mustNotCall())
+ writable.on(
+ 'error',
+ common.mustCall((er) => {
+ assert.strictEqual(er.message, 'write test error')
+ })
+ )
+ writable.end('test')
+}
+{
+ const writable = new stream.Writable()
+ writable._write = (chunks, encoding, cb) => {
+ cb(new Error('write test error'))
+ }
+ writable._writev = (chunks, cb) => {
+ cb(new Error('writev test error'))
+ }
+ writable.on('finish', common.mustNotCall())
+ writable.on('prefinish', common.mustNotCall())
+ writable.on(
+ 'error',
+ common.mustCall((er) => {
+ assert.strictEqual(er.message, 'writev test error')
+ })
+ )
+ writable.cork()
+ writable.write('test')
+ setImmediate(function () {
+ writable.end('test')
+ })
+}
+{
+ const writable = new stream.Writable()
+ writable._write = (chunks, encoding, cb) => {
+ setImmediate(cb, new Error('write test error'))
+ }
+ writable._writev = (chunks, cb) => {
+ setImmediate(cb, new Error('writev test error'))
+ }
+ writable.on('finish', common.mustNotCall())
+ writable.on('prefinish', common.mustNotCall())
+ writable.on(
+ 'error',
+ common.mustCall((er) => {
+ assert.strictEqual(er.message, 'writev test error')
+ })
+ )
+ writable.cork()
+ writable.write('test')
+ setImmediate(function () {
+ writable.end('test')
+ })
+}
+
+// Regression test for
+// https://github.com/nodejs/node/issues/13812
+
+{
+ const rs = new stream.Readable()
+ rs.push('ok')
+ rs.push(null)
+ rs._read = () => {}
+ const ws = new stream.Writable()
+ ws.on('finish', common.mustNotCall())
+ ws.on('error', common.mustCall())
+ ws._write = (chunk, encoding, done) => {
+ setImmediate(done, new Error())
+ }
+ rs.pipe(ws)
+}
+{
+ const rs = new stream.Readable()
+ rs.push('ok')
+ rs.push(null)
+ rs._read = () => {}
+ const ws = new stream.Writable()
+ ws.on('finish', common.mustNotCall())
+ ws.on('error', common.mustCall())
+ ws._write = (chunk, encoding, done) => {
+ done(new Error())
+ }
+ rs.pipe(ws)
+}
+{
+ const w = new stream.Writable()
+ w._write = (chunk, encoding, cb) => {
+ process.nextTick(cb)
+ }
+ w.on('error', common.mustCall())
+ w.on('finish', common.mustNotCall())
+ w.on('prefinish', () => {
+ w.write("shouldn't write in prefinish listener")
+ })
+ w.end()
+}
+{
+ const w = new stream.Writable()
+ w._write = (chunk, encoding, cb) => {
+ process.nextTick(cb)
+ }
+ w.on('error', common.mustCall())
+ w.on('finish', () => {
+ w.write("shouldn't write in finish listener")
+ })
+ w.end()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writableState-ending.js b/test/parallel/test-stream-writableState-ending.js
new file mode 100644
index 0000000000..6b7bd5c3bc
--- /dev/null
+++ b/test/parallel/test-stream-writableState-ending.js
@@ -0,0 +1,46 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const writable = new stream.Writable()
+function testStates(ending, finished, ended) {
+ assert.strictEqual(writable._writableState.ending, ending)
+ assert.strictEqual(writable._writableState.finished, finished)
+ assert.strictEqual(writable._writableState.ended, ended)
+}
+writable._write = (chunk, encoding, cb) => {
+  // Ending, finished, and ended all start as false.
+ testStates(false, false, false)
+ cb()
+}
+writable.on('finish', () => {
+ // Ending, finished, ended = true.
+ testStates(true, true, true)
+})
+const result = writable.end('testing function end()', () => {
+ // Ending, finished, ended = true.
+ testStates(true, true, true)
+})
+
+// End returns the writable instance
+assert.strictEqual(result, writable)
+
+// Ending, ended = true.
+// finished = false.
+testStates(true, false, true)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js
new file mode 100644
index 0000000000..1f38765e12
--- /dev/null
+++ b/test/parallel/test-stream-writableState-uncorked-bufferedRequestCount.js
@@ -0,0 +1,67 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const writable = new stream.Writable()
+writable._writev = common.mustCall((chunks, cb) => {
+ assert.strictEqual(chunks.length, 2)
+ cb()
+}, 1)
+writable._write = common.mustCall((chunk, encoding, cb) => {
+ cb()
+}, 1)
+
+// first cork
+writable.cork()
+assert.strictEqual(writable._writableState.corked, 1)
+assert.strictEqual(writable._writableState.bufferedRequestCount, 0)
+
+// cork again
+writable.cork()
+assert.strictEqual(writable._writableState.corked, 2)
+
+// The first chunk is buffered
+writable.write('first chunk')
+assert.strictEqual(writable._writableState.bufferedRequestCount, 1)
+
+// First uncork does nothing
+writable.uncork()
+assert.strictEqual(writable._writableState.corked, 1)
+assert.strictEqual(writable._writableState.bufferedRequestCount, 1)
+process.nextTick(uncork)
+
+// The second chunk is buffered because we uncork at the end of the tick
+writable.write('second chunk')
+assert.strictEqual(writable._writableState.corked, 1)
+assert.strictEqual(writable._writableState.bufferedRequestCount, 2)
+function uncork() {
+ // Second uncork flushes the buffer
+ writable.uncork()
+ assert.strictEqual(writable._writableState.corked, 0)
+ assert.strictEqual(writable._writableState.bufferedRequestCount, 0)
+
+ // Verify that end() uncorks correctly
+ writable.cork()
+ writable.write('third chunk')
+ writable.end()
+
+ // End causes an uncork() as well
+ assert.strictEqual(writable._writableState.corked, 0)
+ assert.strictEqual(writable._writableState.bufferedRequestCount, 0)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-write-destroy.js b/test/parallel/test-stream-write-destroy.js
new file mode 100644
index 0000000000..1af41d0057
--- /dev/null
+++ b/test/parallel/test-stream-write-destroy.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Writable } = require('../../lib/ours/index')
+
+// Test interaction between calling .destroy() on a writable and pending
+// writes.
+
+for (const withPendingData of [false, true]) {
+ for (const useEnd of [false, true]) {
+ const callbacks = []
+ const w = new Writable({
+ write(data, enc, cb) {
+ callbacks.push(cb)
+ },
+ // Effectively disable the HWM to observe 'drain' events more easily.
+ highWaterMark: 1
+ })
+ let chunksWritten = 0
+ let drains = 0
+ w.on('drain', () => drains++)
+ function onWrite(err) {
+ if (err) {
+ assert.strictEqual(w.destroyed, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED')
+ } else {
+ chunksWritten++
+ }
+ }
+ w.write('abc', onWrite)
+ assert.strictEqual(chunksWritten, 0)
+ assert.strictEqual(drains, 0)
+ callbacks.shift()()
+ assert.strictEqual(chunksWritten, 1)
+ assert.strictEqual(drains, 1)
+ if (withPendingData) {
+ // Test 2 cases: There either is or is not data still in the write queue.
+ // (The second write will never actually get executed either way.)
+ w.write('def', onWrite)
+ }
+ if (useEnd) {
+ // Again, test 2 cases: Either we indicate that we want to end the
+ // writable or not.
+ w.end('ghi', onWrite)
+ } else {
+ w.write('ghi', onWrite)
+ }
+ assert.strictEqual(chunksWritten, 1)
+ w.destroy()
+ assert.strictEqual(chunksWritten, 1)
+ callbacks.shift()()
+ assert.strictEqual(chunksWritten, useEnd && !withPendingData ? 1 : 2)
+ assert.strictEqual(callbacks.length, 0)
+ assert.strictEqual(drains, 1)
+ }
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-write-drain.js b/test/parallel/test-stream-write-drain.js
new file mode 100644
index 0000000000..769cbd6415
--- /dev/null
+++ b/test/parallel/test-stream-write-drain.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable } = require('../../lib/ours/index')
+
+// Don't emit 'drain' if ended
+
+const w = new Writable({
+ write(data, enc, cb) {
+ process.nextTick(cb)
+ },
+ highWaterMark: 1
+})
+w.on('drain', common.mustNotCall())
+w.write('asd')
+w.end()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-write-final.js b/test/parallel/test-stream-write-final.js
new file mode 100644
index 0000000000..ef549dc8b4
--- /dev/null
+++ b/test/parallel/test-stream-write-final.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+let shutdown = false
+const w = new stream.Writable({
+ final: common.mustCall(function (cb) {
+ assert.strictEqual(this, w)
+ setTimeout(function () {
+ shutdown = true
+ cb()
+ }, 100)
+ }),
+ write: function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
+})
+w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert(shutdown)
+ })
+)
+w.write(Buffer.allocUnsafe(1))
+w.end(Buffer.allocUnsafe(0))
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js
new file mode 100644
index 0000000000..19c0e2a559
--- /dev/null
+++ b/test/parallel/test-stream-writev.js
@@ -0,0 +1,152 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const queue = []
+for (let decode = 0; decode < 2; decode++) {
+ for (let uncork = 0; uncork < 2; uncork++) {
+ for (let multi = 0; multi < 2; multi++) {
+ queue.push([!!decode, !!uncork, !!multi])
+ }
+ }
+}
+run()
+function run() {
+ const t = queue.pop()
+ if (t) test(t[0], t[1], t[2], run)
+ else silentConsole.log('ok')
+}
+function test(decode, uncork, multi, next) {
+ silentConsole.log(`# decode=${decode} uncork=${uncork} multi=${multi}`)
+ let counter = 0
+ let expectCount = 0
+ function cnt(msg) {
+ expectCount++
+ const expect = expectCount
+ return function (er) {
+ assert.ifError(er)
+ counter++
+ assert.strictEqual(counter, expect)
+ }
+ }
+ const w = new stream.Writable({
+ decodeStrings: decode
+ })
+ w._write = common.mustNotCall('Should not call _write')
+ const expectChunks = decode
+ ? [
+ {
+ encoding: 'buffer',
+ chunk: [104, 101, 108, 108, 111, 44, 32]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [119, 111, 114, 108, 100]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [33]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46]
+ },
+ {
+ encoding: 'buffer',
+ chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]
+ }
+ ]
+ : [
+ {
+ encoding: 'ascii',
+ chunk: 'hello, '
+ },
+ {
+ encoding: 'utf8',
+ chunk: 'world'
+ },
+ {
+ encoding: 'buffer',
+ chunk: [33]
+ },
+ {
+ encoding: 'latin1',
+ chunk: '\nand then...'
+ },
+ {
+ encoding: 'hex',
+ chunk: 'facebea7deadbeefdecafbad'
+ }
+ ]
+ let actualChunks
+ w._writev = function (chunks, cb) {
+ actualChunks = chunks.map(function (chunk) {
+ return {
+ encoding: chunk.encoding,
+ chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk
+ }
+ })
+ cb()
+ }
+ w.cork()
+ w.write('hello, ', 'ascii', cnt('hello'))
+ w.write('world', 'utf8', cnt('world'))
+ if (multi) w.cork()
+ w.write(Buffer.from('!'), 'buffer', cnt('!'))
+ w.write('\nand then...', 'latin1', cnt('and then'))
+ if (multi) w.uncork()
+ w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'))
+ if (uncork) w.uncork()
+ w.end(cnt('end'))
+ w.on('finish', function () {
+    // Make sure 'finish' comes after all the write callbacks
+ cnt('finish')()
+ assert.deepStrictEqual(actualChunks, expectChunks)
+ next()
+ })
+}
+{
+ const w = new stream.Writable({
+ writev: common.mustCall(function (chunks, cb) {
+ cb()
+ })
+ })
+ w.write('asd', common.mustCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js
new file mode 100644
index 0000000000..df875b828f
--- /dev/null
+++ b/test/parallel/test-stream2-base64-single-char-read-end.js
@@ -0,0 +1,68 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const { Readable: R, Writable: W } = require('../../lib/ours/index')
+const assert = require('assert')
+const src = new R({
+ encoding: 'base64'
+})
+const dst = new W()
+let hasRead = false
+const accum = []
+src._read = function (n) {
+ if (!hasRead) {
+ hasRead = true
+ process.nextTick(function () {
+ src.push(Buffer.from('1'))
+ src.push(null)
+ })
+ }
+}
+dst._write = function (chunk, enc, cb) {
+ accum.push(chunk)
+ cb()
+}
+src.on('end', function () {
+ assert.strictEqual(String(Buffer.concat(accum)), 'MQ==')
+ clearTimeout(timeout)
+})
+src.pipe(dst)
+const timeout = setTimeout(function () {
+ assert.fail('timed out waiting for _write')
+}, 100)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-basic.js b/test/parallel/test-stream2-basic.js
new file mode 100644
index 0000000000..753e97a685
--- /dev/null
+++ b/test/parallel/test-stream2-basic.js
@@ -0,0 +1,386 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable: R, Writable: W } = require('../../lib/ours/index')
+const assert = require('assert')
+const EE = require('events').EventEmitter
+class TestReader extends R {
+ constructor(n) {
+ super()
+ this._buffer = Buffer.alloc(n || 100, 'x')
+ this._pos = 0
+ this._bufs = 10
+ }
+ _read(n) {
+ const max = this._buffer.length - this._pos
+ n = Math.max(n, 0)
+ const toRead = Math.min(n, max)
+ if (toRead === 0) {
+ // Simulate the read buffer filling up with some more bytes some time
+ // in the future.
+ setTimeout(() => {
+ this._pos = 0
+ this._bufs -= 1
+ if (this._bufs <= 0) {
+ // read them all!
+ if (!this.ended) this.push(null)
+ } else {
+ // now we have more.
+ // kinda cheating by calling _read, but whatever,
+ // it's just fake anyway.
+ this._read(n)
+ }
+ }, 10)
+ return
+ }
+ const ret = this._buffer.slice(this._pos, this._pos + toRead)
+ this._pos += toRead
+ this.push(ret)
+ }
+}
+class TestWriter extends EE {
+ constructor() {
+ super()
+ this.received = []
+ this.flush = false
+ }
+ write(c) {
+ this.received.push(c.toString())
+ this.emit('write', c)
+ return true
+ }
+ end(c) {
+ if (c) this.write(c)
+ this.emit('end', this.received)
+ }
+}
+{
+ // Test basic functionality
+ const r = new TestReader(20)
+ const reads = []
+ const expect = [
+ 'x',
+ 'xx',
+ 'xxx',
+ 'xxxx',
+ 'xxxxx',
+ 'xxxxxxxxx',
+ 'xxxxxxxxxx',
+ 'xxxxxxxxxxxx',
+ 'xxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxxxxxxxx',
+ 'xxxxxxxxxxxxxxxxxxxxx'
+ ]
+ r.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(reads, expect)
+ })
+ )
+ let readSize = 1
+ function flow() {
+ let res
+ while (null !== (res = r.read(readSize++))) {
+ reads.push(res.toString())
+ }
+ r.once('readable', flow)
+ }
+ flow()
+}
+{
+ // Verify pipe
+ const r = new TestReader(5)
+ const expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']
+ const w = new TestWriter()
+ w.on(
+ 'end',
+ common.mustCall(function (received) {
+ assert.deepStrictEqual(received, expect)
+ })
+ )
+ r.pipe(w)
+}
+;[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function (SPLIT) {
+ // Verify unpipe
+ const r = new TestReader(5)
+
+ // Unpipe after 3 writes, then write to another stream instead.
+ let expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']
+ expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)]
+ const w = [new TestWriter(), new TestWriter()]
+ let writes = SPLIT
+ w[0].on('write', function () {
+ if (--writes === 0) {
+ r.unpipe()
+ assert.deepStrictEqual(r._readableState.pipes, [])
+ w[0].end()
+ r.pipe(w[1])
+ assert.deepStrictEqual(r._readableState.pipes, [w[1]])
+ }
+ })
+ let ended = 0
+ w[0].on(
+ 'end',
+ common.mustCall(function (results) {
+ ended++
+ assert.strictEqual(ended, 1)
+ assert.deepStrictEqual(results, expect[0])
+ })
+ )
+ w[1].on(
+ 'end',
+ common.mustCall(function (results) {
+ ended++
+ assert.strictEqual(ended, 2)
+ assert.deepStrictEqual(results, expect[1])
+ })
+ )
+ r.pipe(w[0])
+})
+{
+ // Verify both writers get the same data when piping to destinations
+ const r = new TestReader(5)
+ const w = [new TestWriter(), new TestWriter()]
+ const expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']
+ w[0].on(
+ 'end',
+ common.mustCall(function (received) {
+ assert.deepStrictEqual(received, expect)
+ })
+ )
+ w[1].on(
+ 'end',
+ common.mustCall(function (received) {
+ assert.deepStrictEqual(received, expect)
+ })
+ )
+ r.pipe(w[0])
+ r.pipe(w[1])
+}
+;[1, 2, 3, 4, 5, 6, 7, 8, 9].forEach(function (SPLIT) {
+ // Verify multi-unpipe
+ const r = new TestReader(5)
+
+ // Unpipe after 3 writes, then write to another stream instead.
+ let expect = ['xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx', 'xxxxx']
+ expect = [expect.slice(0, SPLIT), expect.slice(SPLIT)]
+ const w = [new TestWriter(), new TestWriter(), new TestWriter()]
+ let writes = SPLIT
+ w[0].on('write', function () {
+ if (--writes === 0) {
+ r.unpipe()
+ w[0].end()
+ r.pipe(w[1])
+ }
+ })
+ let ended = 0
+ w[0].on(
+ 'end',
+ common.mustCall(function (results) {
+ ended++
+ assert.strictEqual(ended, 1)
+ assert.deepStrictEqual(results, expect[0])
+ })
+ )
+ w[1].on(
+ 'end',
+ common.mustCall(function (results) {
+ ended++
+ assert.strictEqual(ended, 2)
+ assert.deepStrictEqual(results, expect[1])
+ })
+ )
+ r.pipe(w[0])
+ r.pipe(w[2])
+})
+{
+ // Verify that back pressure is respected
+ const r = new R({
+ objectMode: true
+ })
+ r._read = common.mustNotCall()
+ let counter = 0
+ r.push(['one'])
+ r.push(['two'])
+ r.push(['three'])
+ r.push(['four'])
+ r.push(null)
+ const w1 = new R()
+ w1.write = function (chunk) {
+ assert.strictEqual(chunk[0], 'one')
+ w1.emit('close')
+ process.nextTick(function () {
+ r.pipe(w2)
+ r.pipe(w3)
+ })
+ }
+ w1.end = common.mustNotCall()
+ r.pipe(w1)
+ const expected = ['two', 'two', 'three', 'three', 'four', 'four']
+ const w2 = new R()
+ w2.write = function (chunk) {
+ assert.strictEqual(chunk[0], expected.shift())
+ assert.strictEqual(counter, 0)
+ counter++
+ if (chunk[0] === 'four') {
+ return true
+ }
+ setTimeout(function () {
+ counter--
+ w2.emit('drain')
+ }, 10)
+ return false
+ }
+ w2.end = common.mustCall()
+ const w3 = new R()
+ w3.write = function (chunk) {
+ assert.strictEqual(chunk[0], expected.shift())
+ assert.strictEqual(counter, 1)
+ counter++
+ if (chunk[0] === 'four') {
+ return true
+ }
+ setTimeout(function () {
+ counter--
+ w3.emit('drain')
+ }, 50)
+ return false
+ }
+ w3.end = common.mustCall(function () {
+ assert.strictEqual(counter, 2)
+ assert.strictEqual(expected.length, 0)
+ })
+}
+{
+ // Verify read(0) behavior for ended streams
+ const r = new R()
+ let written = false
+ let ended = false
+ r._read = common.mustNotCall()
+ r.push(Buffer.from('foo'))
+ r.push(null)
+ const v = r.read(0)
+ assert.strictEqual(v, null)
+ const w = new R()
+ w.write = function (buffer) {
+ written = true
+ assert.strictEqual(ended, false)
+ assert.strictEqual(buffer.toString(), 'foo')
+ }
+ w.end = common.mustCall(function () {
+ ended = true
+ assert.strictEqual(written, true)
+ })
+ r.pipe(w)
+}
+{
+ // Verify synchronous _read ending
+ const r = new R()
+ let called = false
+ r._read = function (n) {
+ r.push(null)
+ }
+ r.once('end', function () {
+ // Verify that this is called before the next tick
+ called = true
+ })
+ r.read()
+ process.nextTick(function () {
+ assert.strictEqual(called, true)
+ })
+}
+{
+ // Verify that adding readable listeners trigger data flow
+ const r = new R({
+ highWaterMark: 5
+ })
+ let onReadable = false
+ let readCalled = 0
+ r._read = function (n) {
+ if (readCalled++ === 2) r.push(null)
+ else r.push(Buffer.from('asdf'))
+ }
+ r.on('readable', function () {
+ onReadable = true
+ r.read()
+ })
+ r.on(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(readCalled, 3)
+ assert.ok(onReadable)
+ })
+ )
+}
+{
+ // Verify that streams are chainable
+ const r = new R()
+ r._read = common.mustCall()
+ const r2 = r.setEncoding('utf8').pause().resume().pause()
+ assert.strictEqual(r, r2)
+}
+{
+ // Verify readableEncoding property
+ assert(Reflect.has(R.prototype, 'readableEncoding'))
+ const r = new R({
+ encoding: 'utf8'
+ })
+ assert.strictEqual(r.readableEncoding, 'utf8')
+}
+{
+ // Verify readableObjectMode property
+ assert(Reflect.has(R.prototype, 'readableObjectMode'))
+ const r = new R({
+ objectMode: true
+ })
+ assert.strictEqual(r.readableObjectMode, true)
+}
+{
+ // Verify writableObjectMode property
+ assert(Reflect.has(W.prototype, 'writableObjectMode'))
+ const w = new W({
+ objectMode: true
+ })
+ assert.strictEqual(w.writableObjectMode, true)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js
new file mode 100644
index 0000000000..590f5817c1
--- /dev/null
+++ b/test/parallel/test-stream2-compatibility.js
@@ -0,0 +1,77 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const { Readable: R, Writable: W } = require('../../lib/ours/index')
+const assert = require('assert')
+let ondataCalled = 0
+class TestReader extends R {
+ constructor() {
+ super()
+ this._buffer = Buffer.alloc(100, 'x')
+ this.on('data', () => {
+ ondataCalled++
+ })
+ }
+ _read(n) {
+ this.push(this._buffer)
+ this._buffer = Buffer.alloc(0)
+ }
+}
+const reader = new TestReader()
+setImmediate(function () {
+ assert.strictEqual(ondataCalled, 1)
+ silentConsole.log('ok')
+ reader.push(null)
+})
+class TestWriter extends W {
+ constructor() {
+ super()
+ this.write('foo')
+ this.end()
+ }
+ _write(chunk, enc, cb) {
+ cb()
+ }
+}
+const writer = new TestWriter()
+process.on('exit', function () {
+ assert.strictEqual(reader.readable, false)
+ assert.strictEqual(writer.writable, false)
+ silentConsole.log('ok')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-decode-partial.js b/test/parallel/test-stream2-decode-partial.js
new file mode 100644
index 0000000000..8ebac01acf
--- /dev/null
+++ b/test/parallel/test-stream2-decode-partial.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const { Readable } = require('../../lib/ours/index')
+const assert = require('assert')
+let buf = ''
+const euro = Buffer.from([0xe2, 0x82, 0xac])
+const cent = Buffer.from([0xc2, 0xa2])
+const source = Buffer.concat([euro, cent])
+const readable = Readable({
+ encoding: 'utf8'
+})
+readable.push(source.slice(0, 2))
+readable.push(source.slice(2, 4))
+readable.push(source.slice(4, 6))
+readable.push(null)
+readable.on('data', function (data) {
+ buf += data
+})
+process.on('exit', function () {
+ assert.strictEqual(buf, '€¢')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-finish-pipe-error.js b/test/parallel/test-stream2-finish-pipe-error.js
new file mode 100644
index 0000000000..7e8cbe624f
--- /dev/null
+++ b/test/parallel/test-stream2-finish-pipe-error.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const stream = require('../../lib/ours/index')
+process.on('uncaughtException', common.mustCall())
+const r = new stream.Readable()
+r._read = function (size) {
+ r.push(Buffer.allocUnsafe(size))
+}
+const w = new stream.Writable()
+w._write = function (data, encoding, cb) {
+ cb(null)
+}
+r.pipe(w)
+
+// Calling end() immediately after pipe() triggers a write-after-end ('ERR_STREAM_WRITE_AFTER_END') error with no 'error' listener attached, which must surface as an uncaught exception (asserted by the mustCall handler above).
+w.end()
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-finish-pipe.js b/test/parallel/test-stream2-finish-pipe.js
new file mode 100644
index 0000000000..d3abfb4e05
--- /dev/null
+++ b/test/parallel/test-stream2-finish-pipe.js
@@ -0,0 +1,57 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const stream = require('../../lib/ours/index')
+const r = new stream.Readable()
+r._read = function (size) {
+ r.push(Buffer.allocUnsafe(size))
+}
+const w = new stream.Writable()
+w._write = function (data, encoding, cb) {
+ process.nextTick(cb, null)
+}
+r.pipe(w)
+
+// end() must be called in nextTick or a WRITE_AFTER_END error occurs.
+process.nextTick(() => {
+ // This might sound unrealistic, but it happens in net.js. When
+ // socket.allowHalfOpen === false, EOF will cause .destroySoon() call which
+ // ends the writable side of net.Socket.
+ w.end()
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-httpclient-response-end.js b/test/parallel/test-stream2-httpclient-response-end.js
new file mode 100644
index 0000000000..a492eb20e6
--- /dev/null
+++ b/test/parallel/test-stream2-httpclient-response-end.js
@@ -0,0 +1,56 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const http = require('http')
+const msg = 'Hello'
+const server = http
+ .createServer(function (req, res) {
+ res.writeHead(200, {
+ 'Content-Type': 'text/plain'
+ })
+ res.end(msg)
+ })
+ .listen(0, function () {
+ http.get(
+ {
+ port: this.address().port
+ },
+ function (res) {
+ let data = ''
+ res.on(
+ 'readable',
+ common.mustCall(function () {
+ silentConsole.log('readable event')
+ let chunk
+ while ((chunk = res.read()) !== null) {
+ data += chunk
+ }
+ })
+ )
+ res.on(
+ 'end',
+ common.mustCall(function () {
+ silentConsole.log('end event')
+ assert.strictEqual(msg, data)
+ server.close()
+ })
+ )
+ }
+ )
+ })
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js
new file mode 100644
index 0000000000..8208dcdf64
--- /dev/null
+++ b/test/parallel/test-stream2-large-read-stall.js
@@ -0,0 +1,80 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+
+// If everything aligns so that you do a read(n) of exactly the
+// remaining buffer, then make sure that 'end' still emits.
+
+const READSIZE = 100
+const PUSHSIZE = 20
+const PUSHCOUNT = 1000
+const HWM = 50
+const Readable = require('../../lib/ours/index').Readable
+const r = new Readable({
+ highWaterMark: HWM
+})
+const rs = r._readableState
+r._read = push
+r.on('readable', function () {
+ silentConsole.error('>> readable')
+ let ret
+ do {
+ silentConsole.error(` > read(${READSIZE})`)
+ ret = r.read(READSIZE)
+ silentConsole.error(` < ${ret && ret.length} (${rs.length} remain)`)
+ } while (ret && ret.length === READSIZE)
+ silentConsole.error('<< after read()', ret && ret.length, rs.needReadable, rs.length)
+})
+r.on(
+ 'end',
+ common.mustCall(function () {
+ assert.strictEqual(pushes, PUSHCOUNT + 1)
+ })
+)
+let pushes = 0
+function push() {
+ if (pushes > PUSHCOUNT) return
+ if (pushes++ === PUSHCOUNT) {
+ silentConsole.error(' push(EOF)')
+ return r.push(null)
+ }
+ silentConsole.error(` push #${pushes}`)
+ if (r.push(Buffer.allocUnsafe(PUSHSIZE))) setTimeout(push, 1)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js
new file mode 100644
index 0000000000..1acd4b641f
--- /dev/null
+++ b/test/parallel/test-stream2-objects.js
@@ -0,0 +1,365 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable, Writable } = require('../../lib/ours/index')
+const assert = require('assert')
+function toArray(callback) {
+ const stream = new Writable({
+ objectMode: true
+ })
+ const list = []
+ stream.write = function (chunk) {
+ list.push(chunk)
+ }
+ stream.end = common.mustCall(function () {
+ callback(list)
+ })
+ return stream
+}
+function fromArray(list) {
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = common.mustNotCall()
+ list.forEach(function (chunk) {
+ r.push(chunk)
+ })
+ r.push(null)
+ return r
+}
+{
+ // Verify that objects can be read from the stream
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ const v1 = r.read()
+ const v2 = r.read()
+ const v3 = r.read()
+ assert.deepStrictEqual(v1, {
+ one: '1'
+ })
+ assert.deepStrictEqual(v2, {
+ two: '2'
+ })
+ assert.strictEqual(v3, null)
+}
+{
+ // Verify that objects can be piped into the stream
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ r.pipe(
+ toArray(
+ common.mustCall(function (list) {
+ assert.deepStrictEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ )
+}
+{
+ // Verify that read(n) is ignored
+ const r = fromArray([
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ const value = r.read(2)
+ assert.deepStrictEqual(value, {
+ one: '1'
+ })
+}
+{
+ // Verify that objects can be synchronously read
+ const r = new Readable({
+ objectMode: true
+ })
+ const list = [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ]
+ r._read = function (n) {
+ const item = list.shift()
+ r.push(item || null)
+ }
+ r.pipe(
+ toArray(
+ common.mustCall(function (list) {
+ assert.deepStrictEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ )
+}
+{
+ // Verify that objects can be asynchronously read
+ const r = new Readable({
+ objectMode: true
+ })
+ const list = [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ]
+ r._read = function (n) {
+ const item = list.shift()
+ process.nextTick(function () {
+ r.push(item || null)
+ })
+ }
+ r.pipe(
+ toArray(
+ common.mustCall(function (list) {
+ assert.deepStrictEqual(list, [
+ {
+ one: '1'
+ },
+ {
+ two: '2'
+ }
+ ])
+ })
+ )
+ )
+}
+{
+ // Verify that strings can be read as objects
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = common.mustNotCall()
+ const list = ['one', 'two', 'three']
+ list.forEach(function (str) {
+ r.push(str)
+ })
+ r.push(null)
+ r.pipe(
+ toArray(
+ common.mustCall(function (array) {
+ assert.deepStrictEqual(array, list)
+ })
+ )
+ )
+}
+{
+ // Verify read(0) behavior for object streams
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = common.mustNotCall()
+ r.push('foobar')
+ r.push(null)
+ r.pipe(
+ toArray(
+ common.mustCall(function (array) {
+ assert.deepStrictEqual(array, ['foobar'])
+ })
+ )
+ )
+}
+{
+ // Verify the behavior of pushing falsey values
+ const r = new Readable({
+ objectMode: true
+ })
+ r._read = common.mustNotCall()
+ r.push(false)
+ r.push(0)
+ r.push('')
+ r.push(null)
+ r.pipe(
+ toArray(
+ common.mustCall(function (array) {
+ assert.deepStrictEqual(array, [false, 0, ''])
+ })
+ )
+ )
+}
+{
+ // Verify high watermark _read() behavior
+ const r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ })
+ let calls = 0
+ const list = ['1', '2', '3', '4', '5', '6', '7', '8']
+ r._read = function (n) {
+ calls++
+ }
+ list.forEach(function (c) {
+ r.push(c)
+ })
+ const v = r.read()
+ assert.strictEqual(calls, 0)
+ assert.strictEqual(v, '1')
+ const v2 = r.read()
+ assert.strictEqual(v2, '2')
+ const v3 = r.read()
+ assert.strictEqual(v3, '3')
+ assert.strictEqual(calls, 1)
+}
+{
+ // Verify high watermark push behavior
+ const r = new Readable({
+ highWaterMark: 6,
+ objectMode: true
+ })
+ r._read = common.mustNotCall()
+ for (let i = 0; i < 6; i++) {
+ const bool = r.push(i)
+ assert.strictEqual(bool, i !== 5)
+ }
+}
+{
+ // Verify that objects can be written to stream
+ const w = new Writable({
+ objectMode: true
+ })
+ w._write = function (chunk, encoding, cb) {
+ assert.deepStrictEqual(chunk, {
+ foo: 'bar'
+ })
+ cb()
+ }
+ w.on('finish', common.mustCall())
+ w.write({
+ foo: 'bar'
+ })
+ w.end()
+}
+{
+ // Verify that multiple objects can be written to stream
+ const w = new Writable({
+ objectMode: true
+ })
+ const list = []
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ cb()
+ }
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.deepStrictEqual(list, [0, 1, 2, 3, 4])
+ })
+ )
+ w.write(0)
+ w.write(1)
+ w.write(2)
+ w.write(3)
+ w.write(4)
+ w.end()
+}
+{
+ // Verify that strings can be written as objects
+ const w = new Writable({
+ objectMode: true
+ })
+ const list = []
+ w._write = function (chunk, encoding, cb) {
+ list.push(chunk)
+ process.nextTick(cb)
+ }
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.deepStrictEqual(list, ['0', '1', '2', '3', '4'])
+ })
+ )
+ w.write('0')
+ w.write('1')
+ w.write('2')
+ w.write('3')
+ w.write('4')
+ w.end()
+}
+{
+ // Verify that stream buffers finish until callback is called
+ const w = new Writable({
+ objectMode: true
+ })
+ let called = false
+ w._write = function (chunk, encoding, cb) {
+ assert.strictEqual(chunk, 'foo')
+ process.nextTick(function () {
+ called = true
+ cb()
+ })
+ }
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(called, true)
+ })
+ )
+ w.write('foo')
+ w.end()
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js
new file mode 100644
index 0000000000..5f3559b73f
--- /dev/null
+++ b/test/parallel/test-stream2-pipe-error-handling.js
@@ -0,0 +1,108 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+{
+ let count = 1000
+ const source = new stream.Readable()
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.allocUnsafe(n))
+ }
+ let unpipedDest
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
+ const dest = new stream.Writable()
+ dest._write = function (chunk, encoding, cb) {
+ cb()
+ }
+ source.pipe(dest)
+ let gotErr = null
+ dest.on('error', function (err) {
+ gotErr = err
+ })
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+ const err = new Error('This stream turned into bacon.')
+ dest.emit('error', err)
+ assert.strictEqual(gotErr, err)
+ assert.strictEqual(unpipedSource, source)
+ assert.strictEqual(unpipedDest, dest)
+}
+{
+ let count = 1000
+ const source = new stream.Readable()
+ source._read = function (n) {
+ n = Math.min(count, n)
+ count -= n
+ source.push(Buffer.allocUnsafe(n))
+ }
+ let unpipedDest
+ source.unpipe = function (dest) {
+ unpipedDest = dest
+ stream.Readable.prototype.unpipe.call(this, dest)
+ }
+ const dest = new stream.Writable({
+ autoDestroy: false
+ })
+ dest._write = function (chunk, encoding, cb) {
+ cb()
+ }
+ source.pipe(dest)
+ let unpipedSource
+ dest.on('unpipe', function (src) {
+ unpipedSource = src
+ })
+ const err = new Error('This stream turned into bacon.')
+ let gotErr = null
+ try {
+ dest.emit('error', err)
+ } catch (e) {
+ gotErr = e
+ }
+ assert.strictEqual(gotErr, err)
+ assert.strictEqual(unpipedSource, source)
+ assert.strictEqual(unpipedDest, dest)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-pipe-error-once-listener.js b/test/parallel/test-stream2-pipe-error-once-listener.js
new file mode 100644
index 0000000000..e4a4a0cd9c
--- /dev/null
+++ b/test/parallel/test-stream2-pipe-error-once-listener.js
@@ -0,0 +1,62 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const stream = require('../../lib/ours/index')
+class Read extends stream.Readable {
+ _read(size) {
+ this.push('x')
+ this.push(null)
+ }
+}
+class Write extends stream.Writable {
+ _write(buffer, encoding, cb) {
+ this.emit('error', new Error('boom'))
+ this.emit('alldone')
+ }
+}
+const read = new Read()
+const write = new Write()
+write.once('error', () => {})
+write.once('alldone', function (err) {
+ silentConsole.log('ok')
+})
+process.on('exit', function (c) {
+ silentConsole.error('error thrown even with listener')
+})
+read.pipe(write)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js
new file mode 100644
index 0000000000..7be29e96b3
--- /dev/null
+++ b/test/parallel/test-stream2-push.js
@@ -0,0 +1,130 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+const EE = require('events').EventEmitter
+
+// Mock the interaction between net.Socket and its underlying tcp_wrap handle: the source EventEmitter plays the handle, with readStart()/readStop() simulating flow control driven by push() backpressure.
+
+const stream = new Readable({
+ highWaterMark: 16,
+ encoding: 'utf8'
+})
+const source = new EE()
+stream._read = function () {
+ silentConsole.error('stream._read')
+ readStart()
+}
+let ended = false
+stream.on('end', function () {
+ ended = true
+})
+source.on('data', function (chunk) {
+ const ret = stream.push(chunk)
+ silentConsole.error('data', stream.readableLength)
+ if (!ret) readStop()
+})
+source.on('end', function () {
+ stream.push(null)
+})
+let reading = false
+function readStart() {
+ silentConsole.error('readStart')
+ reading = true
+}
+function readStop() {
+ silentConsole.error('readStop')
+ reading = false
+ process.nextTick(function () {
+ const r = stream.read()
+ if (r !== null) writer.write(r)
+ })
+}
+const writer = new Writable({
+ decodeStrings: false
+})
+const written = []
+const expectWritten = [
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg',
+ 'asdfgasdfgasdfgasdfg'
+]
+writer._write = function (chunk, encoding, cb) {
+ silentConsole.error(`WRITE ${chunk}`)
+ written.push(chunk)
+ process.nextTick(cb)
+}
+writer.on('finish', finish)
+
+// Now emit some chunks.
+
+const chunk = 'asdfg'
+let set = 0
+readStart()
+data()
+function data() {
+ assert(reading)
+ source.emit('data', chunk)
+ assert(reading)
+ source.emit('data', chunk)
+ assert(reading)
+ source.emit('data', chunk)
+ assert(reading)
+ source.emit('data', chunk)
+ assert(!reading)
+ if (set++ < 5) setTimeout(data, 10)
+ else end()
+}
+function finish() {
+ silentConsole.error('finish')
+ assert.deepStrictEqual(written, expectWritten)
+ silentConsole.log('ok')
+}
+function end() {
+ source.emit('end')
+ assert(!reading)
+ writer.end(stream.read())
+ setImmediate(function () {
+ assert(ended)
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-read-sync-stack.js b/test/parallel/test-stream2-read-sync-stack.js
new file mode 100644
index 0000000000..ce9a7690e5
--- /dev/null
+++ b/test/parallel/test-stream2-read-sync-stack.js
@@ -0,0 +1,57 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const Readable = require('../../lib/ours/index').Readable
+
+// This tests synchronous read callbacks and verifies that even if they nest
+// heavily the process handles it without an error
+
+const r = new Readable()
+const N = 256 * 1024
+let reads = 0
+r._read = function (n) {
+ const chunk = reads++ === N ? null : Buffer.allocUnsafe(1)
+ r.push(chunk)
+}
+r.on('readable', function onReadable() {
+ if (!(r.readableLength % 256)) silentConsole.error('readable', r.readableLength)
+ r.read(N * 2)
+})
+r.on('end', common.mustCall())
+r.read(0)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
new file mode 100644
index 0000000000..c4aa3b4a14
--- /dev/null
+++ b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
@@ -0,0 +1,126 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const Readable = require('../../lib/ours/index').Readable
+test1()
+test2()
+function test1() { // Zero-length pushes must not be treated as EOF
+ const r = new Readable()
+
+ // Should not end when we get a Buffer.alloc(0) or '' as the _read
+ // result that just means that there is *temporarily* no data, but to
+ // go ahead and try again later.
+ //
+ // Note that this is very unusual: it only works for crypto streams,
+ // because the other side of the stream will call read(0) to cycle
+ // data through openssl. That is why setImmediate() is used to call
+ // r.read(0) again later; otherwise there is no more work being done
+ // and the process just exits.
+
+ const buf = Buffer.alloc(5, 'x')
+ let reads = 5 // Countdown selecting a different push strategy per _read call
+ r._read = function (n) {
+ switch (reads--) {
+ case 5:
+ return setImmediate(() => {
+ return r.push(buf)
+ })
+ case 4: // Zero-length buffer pushed asynchronously must not signal EOF
+ setImmediate(() => {
+ return r.push(Buffer.alloc(0))
+ })
+ return setImmediate(r.read.bind(r, 0))
+ case 3:
+ setImmediate(r.read.bind(r, 0))
+ return process.nextTick(() => {
+ return r.push(Buffer.alloc(0))
+ })
+ case 2: // Zero-length buffer pushed synchronously must not signal EOF either
+ setImmediate(r.read.bind(r, 0))
+ return r.push(Buffer.alloc(0))
+ // Not-EOF!
+ case 1:
+ return r.push(buf)
+ case 0:
+ return r.push(null)
+ // EOF
+ default:
+ throw new Error('unreachable')
+ }
+ }
+ const results = []
+ function flow() { // Drain everything currently readable into results
+ let chunk
+ while (null !== (chunk = r.read())) results.push(String(chunk))
+ }
+ r.on('readable', flow)
+ r.on('end', () => {
+ results.push('EOF')
+ })
+ flow()
+ process.on('exit', () => {
+ assert.deepStrictEqual(results, ['xxxxx', 'xxxxx', 'EOF'])
+ silentConsole.log('ok')
+ })
+}
+function test2() { // Same idea, with a base64 string decoder attached
+ const r = new Readable({
+ encoding: 'base64'
+ })
+ let reads = 5
+ r._read = function (n) {
+ if (!reads--) return r.push(null) // EOF
+ return r.push(Buffer.from('x'))
+ }
+ const results = []
+ function flow() {
+ let chunk
+ while (null !== (chunk = r.read())) results.push(String(chunk))
+ }
+ r.on('readable', flow)
+ r.on('end', () => {
+ results.push('EOF')
+ })
+ flow()
+ process.on('exit', () => {
+ assert.deepStrictEqual(results, ['eHh4', 'eHg=', 'EOF'])
+ silentConsole.log('ok')
+ })
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js
new file mode 100644
index 0000000000..3d7b2aaeab
--- /dev/null
+++ b/test/parallel/test-stream2-readable-from-list.js
@@ -0,0 +1,129 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// Flags: --expose-internals
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const fromList = require('../../lib/ours/index').Readable._fromList
+const BufferList = require('../../lib/internal/streams/buffer_list')
+const util = require('util')
+function bufferListFromArray(arr) { // Helper: convert a plain array into the stream's internal BufferList
+ const bl = new BufferList()
+ for (let i = 0; i < arr.length; ++i) bl.push(arr[i])
+ return bl
+}
+{
+ // Verify behavior with buffers
+ let list = [Buffer.from('foog'), Buffer.from('bark'), Buffer.from('bazy'), Buffer.from('kuel')]
+ list = bufferListFromArray(list)
+ assert.strictEqual(typeof list.head, 'object')
+ assert.strictEqual(typeof list.tail, 'object')
+ assert.strictEqual(list.length, 4)
+
+ // Read more than the first element.
+ let ret = fromList(6, {
+ buffer: list,
+ length: 16
+ })
+ assert.strictEqual(ret.toString(), 'foogba')
+
+ // Read exactly the first element.
+ ret = fromList(2, {
+ buffer: list,
+ length: 10
+ })
+ assert.strictEqual(ret.toString(), 'rk')
+
+ // Read less than the first element.
+ ret = fromList(2, {
+ buffer: list,
+ length: 8
+ })
+ assert.strictEqual(ret.toString(), 'ba')
+
+ // Read more than we have.
+ ret = fromList(100, {
+ buffer: list,
+ length: 6
+ })
+ assert.strictEqual(ret.toString(), 'zykuel')
+
+ // All entries consumed; the list is empty again.
+ assert.deepStrictEqual(list, new BufferList())
+}
+{
+ // Verify behavior with strings
+ let list = ['foog', 'bark', 'bazy', 'kuel']
+ list = bufferListFromArray(list)
+
+ // Read more than the first element.
+ let ret = fromList(6, {
+ buffer: list,
+ length: 16,
+ decoder: true // Entries are strings, so fromList returns a string, not a Buffer
+ })
+ assert.strictEqual(ret, 'foogba')
+
+ // Read exactly the first element.
+ ret = fromList(2, {
+ buffer: list,
+ length: 10,
+ decoder: true
+ })
+ assert.strictEqual(ret, 'rk')
+
+ // Read less than the first element.
+ ret = fromList(2, {
+ buffer: list,
+ length: 8,
+ decoder: true
+ })
+ assert.strictEqual(ret, 'ba')
+
+ // Read more than we have.
+ ret = fromList(100, {
+ buffer: list,
+ length: 6,
+ decoder: true
+ })
+ assert.strictEqual(ret, 'zykuel')
+
+ // All entries consumed; the list is empty again.
+ assert.deepStrictEqual(list, new BufferList())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-legacy-drain.js b/test/parallel/test-stream2-readable-legacy-drain.js
new file mode 100644
index 0000000000..db68158add
--- /dev/null
+++ b/test/parallel/test-stream2-readable-legacy-drain.js
@@ -0,0 +1,64 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const Stream = require('../../lib/ours/index')
+const Readable = Stream.Readable
+const r = new Readable()
+const N = 256 // The N-th push is null (EOF)
+let reads = 0
+r._read = function (n) {
+ return r.push(++reads === N ? null : Buffer.allocUnsafe(1))
+}
+r.on('end', common.mustCall())
+const w = new Stream() // Old-style writable: a bare Stream with hand-rolled write()/end()
+w.writable = true
+let buffered = 0
+w.write = function (c) {
+ buffered += c.length
+ process.nextTick(drain)
+ return false // Signal backpressure; 'drain' is emitted on the next tick
+}
+function drain() {
+ assert(buffered <= 3) // Backpressure must keep the amount written between drains tiny
+ buffered = 0
+ w.emit('drain')
+}
+w.end = common.mustCall() // pipe() must call end() on the destination exactly once
+r.pipe(w)
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js
new file mode 100644
index 0000000000..f99a0a55f9
--- /dev/null
+++ b/test/parallel/test-stream2-readable-non-empty-end.js
@@ -0,0 +1,83 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+let len = 0 // Total bytes across all chunks (1 + 2 + ... + 10 = 55)
+const chunks = new Array(10)
+for (let i = 1; i <= 10; i++) {
+ chunks[i - 1] = Buffer.allocUnsafe(i)
+ len += i
+}
+const test = new Readable()
+let n = 0
+test._read = function (size) { // Deliver one prepared chunk per call, asynchronously; null once exhausted
+ const chunk = chunks[n++]
+ setTimeout(function () {
+ test.push(chunk === undefined ? null : chunk)
+ }, 1)
+}
+test.on('end', thrower) // 'end' must not fire while a byte is still buffered
+function thrower() {
+ throw new Error('this should not happen!')
+}
+let bytesread = 0
+test.on('readable', function () {
+ const b = len - bytesread - 1 // Ask for everything except the very last byte
+ const res = test.read(b)
+ if (res) {
+ bytesread += res.length
+ silentConsole.error(`br=${bytesread} len=${len}`)
+ setTimeout(next, 1)
+ }
+ test.read(0) // Keep the read loop cycling without consuming data
+})
+test.read(0)
+function next() {
+ // Now let's make 'end' happen
+ test.removeListener('end', thrower)
+ test.on('end', common.mustCall())
+
+ // One to get the last byte
+ let r = test.read()
+ assert(r)
+ assert.strictEqual(r.length, 1)
+ r = test.read()
+ assert.strictEqual(r, null)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-wrap-destroy.js b/test/parallel/test-stream2-readable-wrap-destroy.js
new file mode 100644
index 0000000000..2b93a9c79c
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap-destroy.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const EE = require('events').EventEmitter
+const oldStream = new EE() // Minimal legacy-stream stub: wrap() only needs pause()/resume()
+oldStream.pause = () => {}
+oldStream.resume = () => {}
+{
+ new Readable({
+ autoDestroy: false,
+ destroy: common.mustCall()
+ }).wrap(oldStream)
+ oldStream.emit('destroy') // 'destroy' on the source must invoke the wrapper's destroy
+}
+{
+ new Readable({
+ autoDestroy: false,
+ destroy: common.mustCall()
+ }).wrap(oldStream)
+ oldStream.emit('close') // 'close' on the source must invoke the wrapper's destroy as well
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-wrap-empty.js b/test/parallel/test-stream2-readable-wrap-empty.js
new file mode 100644
index 0000000000..69471dd7fc
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap-empty.js
@@ -0,0 +1,47 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Readable } = require('../../lib/ours/index')
+const EE = require('events').EventEmitter
+const oldStream = new EE() // Minimal legacy-stream stub: wrap() only needs pause()/resume()
+oldStream.pause = () => {}
+oldStream.resume = () => {}
+const newStream = new Readable().wrap(oldStream)
+newStream.on('readable', () => {}).on('end', common.mustCall())
+oldStream.emit('end') // EOF from the source with no data must still emit 'end' downstream
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-wrap-error.js b/test/parallel/test-stream2-readable-wrap-error.js
new file mode 100644
index 0000000000..e7979406e2
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap-error.js
@@ -0,0 +1,59 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable } = require('../../lib/ours/index')
+const EE = require('events').EventEmitter
+class LegacyStream extends EE { // Bare-bones old-style stream for wrap()
+ pause() {}
+ resume() {}
+}
+{
+ const err = new Error()
+ const oldStream = new LegacyStream()
+ const r = new Readable({
+ autoDestroy: true // A source error should destroy the wrapper
+ })
+ .wrap(oldStream)
+ .on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.errorEmitted, true)
+ assert.strictEqual(r._readableState.errored, err)
+ assert.strictEqual(r.destroyed, true)
+ })
+ )
+ oldStream.emit('error', err)
+}
+{
+ const err = new Error()
+ const oldStream = new LegacyStream()
+ const r = new Readable({
+ autoDestroy: false // A source error must NOT destroy the wrapper here
+ })
+ .wrap(oldStream)
+ .on(
+ 'error',
+ common.mustCall(() => {
+ assert.strictEqual(r._readableState.errorEmitted, true)
+ assert.strictEqual(r._readableState.errored, err)
+ assert.strictEqual(r.destroyed, false)
+ })
+ )
+ oldStream.emit('error', err)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-readable-wrap.js b/test/parallel/test-stream2-readable-wrap.js
new file mode 100644
index 0000000000..6290b6a9b3
--- /dev/null
+++ b/test/parallel/test-stream2-readable-wrap.js
@@ -0,0 +1,134 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable, Writable } = require('../../lib/ours/index')
+const EE = require('events').EventEmitter
+function runTest(highWaterMark, objectMode, produce) { // Pipe a wrapped legacy event stream into a Writable and verify every produced item arrives in order
+ const old = new EE()
+ const r = new Readable({
+ highWaterMark,
+ objectMode
+ })
+ assert.strictEqual(r, r.wrap(old)) // wrap() must return the stream itself for chaining
+ r.on('end', common.mustCall())
+ old.pause = function () { // Backpressure hooks installed by wrap() on the legacy source
+ old.emit('pause')
+ flowing = false
+ }
+ old.resume = function () {
+ old.emit('resume')
+ flow()
+ }
+
+ // Make sure pause is only emitted once.
+ let pausing = false
+ r.on('pause', () => {
+ assert.strictEqual(pausing, false)
+ pausing = true
+ process.nextTick(() => {
+ pausing = false
+ })
+ })
+ let flowing
+ let chunks = 10
+ let oldEnded = false
+ const expected = []
+ function flow() { // Emit 'data' until paused or the chunk budget is spent, then 'end'
+ flowing = true
+ while (flowing && chunks-- > 0) {
+ const item = produce()
+ expected.push(item)
+ old.emit('data', item)
+ }
+ if (chunks <= 0) {
+ oldEnded = true
+ old.emit('end')
+ }
+ }
+ const w = new Writable({
+ highWaterMark: highWaterMark * 2,
+ objectMode
+ })
+ const written = []
+ w._write = function (chunk, encoding, cb) {
+ written.push(chunk)
+ setTimeout(cb, 1)
+ }
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ performAsserts()
+ })
+ )
+ r.pipe(w)
+ flow()
+ function performAsserts() {
+ assert(oldEnded)
+ assert.deepStrictEqual(written, expected)
+ }
+}
+runTest(100, false, function () {
+ return Buffer.allocUnsafe(100)
+})
+runTest(10, false, function () {
+ return Buffer.from('xxxxxxxxxx')
+})
+runTest(1, true, function () {
+ return {
+ foo: 'bar'
+ }
+})
+const objectChunks = [ // Falsy values (false, 0, '') must survive object mode intact
+ 5,
+ 'a',
+ false,
+ 0,
+ '',
+ 'xyz',
+ {
+ x: 4
+ },
+ 7,
+ [],
+ 555
+]
+runTest(1, true, function () {
+ return objectChunks.shift()
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js
new file mode 100644
index 0000000000..dcff597c53
--- /dev/null
+++ b/test/parallel/test-stream2-set-encoding.js
@@ -0,0 +1,339 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { Readable: R } = require('../../lib/ours/index')
+class TestReader extends R { // Asynchronously produces up to len bytes filled with 'a'
+ constructor(n, opts) {
+ super(opts)
+ this.pos = 0
+ this.len = n || 100 // Total bytes to produce (default 100)
+ }
+ _read(n) {
+ setTimeout(() => {
+ if (this.pos >= this.len) {
+ // Double push(null) to test eos handling
+ this.push(null)
+ return this.push(null)
+ }
+ n = Math.min(n, this.len - this.pos)
+ if (n <= 0) {
+ // Double push(null) to test eos handling
+ this.push(null)
+ return this.push(null)
+ }
+ this.pos += n
+ const ret = Buffer.alloc(n, 'a')
+ return this.push(ret)
+ }, 1)
+ }
+}
+{
+ // Verify utf8 encoding (via setEncoding())
+ const tr = new TestReader(100)
+ tr.setEncoding('utf8')
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
+}
+{
+ // Verify hex encoding (via setEncoding())
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
+}
+{
+ // Verify hex encoding with read(13): chunk size not aligned to byte pairs
+ const tr = new TestReader(100)
+ tr.setEncoding('hex')
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while (null !== (chunk = tr.read(13))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
+}
+{
+ // Verify base64 encoding (via setEncoding())
+ const tr = new TestReader(100)
+ tr.setEncoding('base64')
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
+}
+{
+ // Verify utf8 encoding (via constructor option)
+ const tr = new TestReader(100, {
+ encoding: 'utf8'
+ })
+ const out = []
+ const expect = [
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa',
+ 'aaaaaaaaaa'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
+}
+{
+ // Verify hex encoding (via constructor option)
+ const tr = new TestReader(100, {
+ encoding: 'hex'
+ })
+ const out = []
+ const expect = [
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161',
+ '6161616161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
+}
+{
+ // Verify hex encoding with read(13) (via constructor option)
+ const tr = new TestReader(100, {
+ encoding: 'hex'
+ })
+ const out = []
+ const expect = [
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '1616161616161',
+ '6161616161616',
+ '16161'
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while (null !== (chunk = tr.read(13))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
+}
+{
+ // Verify base64 encoding (via constructor option)
+ const tr = new TestReader(100, {
+ encoding: 'base64'
+ })
+ const out = []
+ const expect = [
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYWFhYWFh',
+ 'YWFhYWFhYW',
+ 'FhYQ=='
+ ]
+ tr.on('readable', function flow() {
+ let chunk
+ while (null !== (chunk = tr.read(10))) out.push(chunk)
+ })
+ tr.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(out, expect)
+ })
+ )
+}
+{
+ // Verify chaining behavior: setEncoding() must return the stream itself
+ const tr = new TestReader(100)
+ assert.deepStrictEqual(tr.setEncoding('utf8'), tr)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js
new file mode 100644
index 0000000000..742ff01a26
--- /dev/null
+++ b/test/parallel/test-stream2-transform.js
@@ -0,0 +1,516 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const { PassThrough, Transform } = require('../../lib/ours/index')
+{
+ // Verify writable side consumption
+ const tx = new Transform({
+ highWaterMark: 10
+ })
+ let transformed = 0
+ tx._transform = function (chunk, encoding, cb) {
+ transformed += chunk.length
+ tx.push(chunk)
+ cb()
+ }
+ for (let i = 1; i <= 10; i++) {
+ tx.write(Buffer.allocUnsafe(i))
+ }
+ tx.end()
+ assert.strictEqual(tx.readableLength, 10)
+ assert.strictEqual(transformed, 10)
+ assert.deepStrictEqual(
+ tx.writableBuffer.map(function (c) {
+ return c.chunk.length
+ }),
+ [5, 6, 7, 8, 9, 10]
+ )
+}
+{
+ // Verify passthrough behavior
+ const pt = new PassThrough()
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ assert.strictEqual(pt.read(5).toString(), 'foogb')
+ assert.strictEqual(pt.read(5).toString(), 'arkba')
+ assert.strictEqual(pt.read(5).toString(), 'zykue')
+ assert.strictEqual(pt.read(5).toString(), 'l')
+}
+{
+ // Verify object passthrough behavior
+ const pt = new PassThrough({
+ objectMode: true
+ })
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({
+ a: 'b'
+ })
+ pt.end()
+ assert.strictEqual(pt.read(), 1)
+ assert.strictEqual(pt.read(), true)
+ assert.strictEqual(pt.read(), false)
+ assert.strictEqual(pt.read(), 0)
+ assert.strictEqual(pt.read(), 'foo')
+ assert.strictEqual(pt.read(), '')
+ assert.deepStrictEqual(pt.read(), {
+ a: 'b'
+ })
+}
+{
+ // Verify passthrough constructor behavior
+ const pt = PassThrough()
+ assert(pt instanceof PassThrough)
+}
+{
+ // Verify transform constructor behavior
+ const pt = Transform()
+ assert(pt instanceof Transform)
+}
+{
+ // Perform a simple transform
+ const pt = new Transform()
+ pt._transform = function (c, e, cb) {
+ const ret = Buffer.alloc(c.length, 'x')
+ pt.push(ret)
+ cb()
+ }
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ assert.strictEqual(pt.read(5).toString(), 'xxxxx')
+ assert.strictEqual(pt.read(5).toString(), 'xxxxx')
+ assert.strictEqual(pt.read(5).toString(), 'xxxxx')
+ assert.strictEqual(pt.read(5).toString(), 'x')
+}
+{
+ // Verify simple object transform
+ const pt = new Transform({
+ objectMode: true
+ })
+ pt._transform = function (c, e, cb) {
+ pt.push(JSON.stringify(c))
+ cb()
+ }
+ pt.write(1)
+ pt.write(true)
+ pt.write(false)
+ pt.write(0)
+ pt.write('foo')
+ pt.write('')
+ pt.write({
+ a: 'b'
+ })
+ pt.end()
+ assert.strictEqual(pt.read(), '1')
+ assert.strictEqual(pt.read(), 'true')
+ assert.strictEqual(pt.read(), 'false')
+ assert.strictEqual(pt.read(), '0')
+ assert.strictEqual(pt.read(), '"foo"')
+ assert.strictEqual(pt.read(), '""')
+ assert.strictEqual(pt.read(), '{"a":"b"}')
+}
+{
+ // Verify async passthrough
+ const pt = new Transform()
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ pt.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'foogb')
+ assert.strictEqual(pt.read(5).toString(), 'arkba')
+ assert.strictEqual(pt.read(5).toString(), 'zykue')
+ assert.strictEqual(pt.read(5).toString(), 'l')
+ })
+ )
+}
+{
+ // Verify asymmetric transform (expand)
+ const pt = new Transform()
+
+ // Emit each chunk 2 times.
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
+ pt.push(chunk)
+ setTimeout(function () {
+ pt.push(chunk)
+ cb()
+ }, 10)
+ }, 10)
+ }
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ pt.end()
+ pt.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'foogf')
+ assert.strictEqual(pt.read(5).toString(), 'oogba')
+ assert.strictEqual(pt.read(5).toString(), 'rkbar')
+ assert.strictEqual(pt.read(5).toString(), 'kbazy')
+ assert.strictEqual(pt.read(5).toString(), 'bazyk')
+ assert.strictEqual(pt.read(5).toString(), 'uelku')
+ assert.strictEqual(pt.read(5).toString(), 'el')
+ })
+ )
+}
+{
+ // Verify asymmetric transform (compress)
+ const pt = new Transform()
+
+ // Each output is the first char of 3 consecutive chunks,
+ // or whatever's left.
+ pt.state = ''
+ pt._transform = function (chunk, encoding, cb) {
+ if (!chunk) chunk = ''
+ const s = chunk.toString()
+ setTimeout(() => {
+ this.state += s.charAt(0)
+ if (this.state.length === 3) {
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ }
+ cb()
+ }, 10)
+ }
+ pt._flush = function (cb) {
+ // Just output whatever we have.
+ pt.push(Buffer.from(this.state))
+ this.state = ''
+ cb()
+ }
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.write(Buffer.from('eeee'))
+ pt.write(Buffer.from('aaaa'))
+ pt.write(Buffer.from('bbbb'))
+ pt.write(Buffer.from('cccc'))
+ pt.write(Buffer.from('dddd'))
+ pt.end()
+
+ // 'abcdeabcdeabcd'
+ pt.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'abcde')
+ assert.strictEqual(pt.read(5).toString(), 'abcde')
+ assert.strictEqual(pt.read(5).toString(), 'abcd')
+ })
+ )
+}
+
+// This tests for a stall when data is written to a full stream
+// that has empty transforms.
+{
+ // Verify complex transform behavior
+ let count = 0
+ let saved = null
+ const pt = new Transform({
+ highWaterMark: 3
+ })
+ pt._transform = function (c, e, cb) {
+ if (count++ === 1) saved = c
+ else {
+ if (saved) {
+ pt.push(saved)
+ saved = null
+ }
+ pt.push(c)
+ }
+ cb()
+ }
+ pt.once('readable', function () {
+ process.nextTick(function () {
+ pt.write(Buffer.from('d'))
+ pt.write(
+ Buffer.from('ef'),
+ common.mustCall(function () {
+ pt.end()
+ })
+ )
+ assert.strictEqual(pt.read().toString(), 'abcdef')
+ assert.strictEqual(pt.read(), null)
+ })
+ })
+ pt.write(Buffer.from('abc'))
+}
+{
+ // Verify passthrough event emission
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ emits++
+ })
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ assert.strictEqual(emits, 0)
+ assert.strictEqual(pt.read(5).toString(), 'foogb')
+ assert.strictEqual(String(pt.read(5)), 'null')
+ assert.strictEqual(emits, 0)
+ pt.write(Buffer.from('bazy'))
+ pt.write(Buffer.from('kuel'))
+ assert.strictEqual(emits, 0)
+ assert.strictEqual(pt.read(5).toString(), 'arkba')
+ assert.strictEqual(pt.read(5).toString(), 'zykue')
+ assert.strictEqual(pt.read(5), null)
+ pt.end()
+ assert.strictEqual(emits, 1)
+ assert.strictEqual(pt.read(5).toString(), 'l')
+ assert.strictEqual(pt.read(5), null)
+ assert.strictEqual(emits, 1)
+}
+{
+ // Verify passthrough event emission reordering
+ const pt = new PassThrough()
+ let emits = 0
+ pt.on('readable', function () {
+ emits++
+ })
+ pt.write(Buffer.from('foog'))
+ pt.write(Buffer.from('bark'))
+ assert.strictEqual(emits, 0)
+ assert.strictEqual(pt.read(5).toString(), 'foogb')
+ assert.strictEqual(pt.read(5), null)
+ pt.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'arkba')
+ assert.strictEqual(pt.read(5), null)
+ pt.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'zykue')
+ assert.strictEqual(pt.read(5), null)
+ pt.once(
+ 'readable',
+ common.mustCall(function () {
+ assert.strictEqual(pt.read(5).toString(), 'l')
+ assert.strictEqual(pt.read(5), null)
+ assert.strictEqual(emits, 3)
+ })
+ )
+ pt.end()
+ })
+ )
+ pt.write(Buffer.from('kuel'))
+ })
+ )
+ pt.write(Buffer.from('bazy'))
+}
+{
+ // Verify passthrough facade
+ const pt = new PassThrough()
+ const datas = []
+ pt.on('data', function (chunk) {
+ datas.push(chunk.toString())
+ })
+ pt.on(
+ 'end',
+ common.mustCall(function () {
+ assert.deepStrictEqual(datas, ['foog', 'bark', 'bazy', 'kuel'])
+ })
+ )
+ pt.write(Buffer.from('foog'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bark'))
+ setTimeout(function () {
+ pt.write(Buffer.from('bazy'))
+ setTimeout(function () {
+ pt.write(Buffer.from('kuel'))
+ setTimeout(function () {
+ pt.end()
+ }, 10)
+ }, 10)
+ }, 10)
+ }, 10)
+}
+{
+ // Verify object transform (JSON parse)
+ const jp = new Transform({
+ objectMode: true
+ })
+ jp._transform = function (data, encoding, cb) {
+ try {
+ jp.push(JSON.parse(data))
+ cb()
+ } catch (er) {
+ cb(er)
+ }
+ }
+
+ // Anything except null/undefined is fine.
+  // Those are "magic" in the stream API, because they signal EOF.
+ const objects = [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string',
+ {
+ nested: {
+ things: [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string'
+ ]
+ }
+ }
+ ]
+ let ended = false
+ jp.on('end', function () {
+ ended = true
+ })
+ objects.forEach(function (obj) {
+ jp.write(JSON.stringify(obj))
+ const res = jp.read()
+ assert.deepStrictEqual(res, obj)
+ })
+ jp.end()
+ // Read one more time to get the 'end' event
+ jp.read()
+ process.nextTick(
+ common.mustCall(function () {
+ assert.strictEqual(ended, true)
+ })
+ )
+}
+{
+ // Verify object transform (JSON stringify)
+ const js = new Transform({
+ objectMode: true
+ })
+ js._transform = function (data, encoding, cb) {
+ try {
+ js.push(JSON.stringify(data))
+ cb()
+ } catch (er) {
+ cb(er)
+ }
+ }
+
+ // Anything except null/undefined is fine.
+  // Those are "magic" in the stream API, because they signal EOF.
+ const objects = [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string',
+ {
+ nested: {
+ things: [
+ {
+ foo: 'bar'
+ },
+ 100,
+ 'string'
+ ]
+ }
+ }
+ ]
+ let ended = false
+ js.on('end', function () {
+ ended = true
+ })
+ objects.forEach(function (obj) {
+ js.write(obj)
+ const res = js.read()
+ assert.strictEqual(res, JSON.stringify(obj))
+ })
+ js.end()
+ // Read one more time to get the 'end' event
+ js.read()
+ process.nextTick(
+ common.mustCall(function () {
+ assert.strictEqual(ended, true)
+ })
+ )
+}
+{
+ const s = new Transform({
+ objectMode: true,
+ construct(callback) {
+ this.push('header from constructor')
+ callback()
+ },
+ transform: (row, encoding, callback) => {
+ callback(null, row)
+ }
+ })
+ const expected = ['header from constructor', 'firstLine', 'secondLine']
+ s.on(
+ 'data',
+ common.mustCall((data) => {
+ assert.strictEqual(data.toString(), expected.shift())
+ }, 3)
+ )
+ s.write('firstLine')
+ process.nextTick(() => s.write('secondLine'))
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js
new file mode 100644
index 0000000000..c183e69c5b
--- /dev/null
+++ b/test/parallel/test-stream2-unpipe-drain.js
@@ -0,0 +1,75 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+class TestWriter extends stream.Writable {
+ _write(buffer, encoding, callback) {
+ silentConsole.log('write called')
+ // Super slow write stream (callback never called)
+ }
+}
+const dest = new TestWriter()
+class TestReader extends stream.Readable {
+ constructor() {
+ super()
+ this.reads = 0
+ }
+ _read(size) {
+ this.reads += 1
+ this.push(Buffer.alloc(size))
+ }
+}
+const src1 = new TestReader()
+const src2 = new TestReader()
+src1.pipe(dest)
+src1.once('readable', () => {
+ process.nextTick(() => {
+ src2.pipe(dest)
+ src2.once('readable', () => {
+ process.nextTick(() => {
+ src1.unpipe(dest)
+ })
+ })
+ })
+})
+process.on('exit', () => {
+ assert.strictEqual(src1.reads, 2)
+ assert.strictEqual(src2.reads, 2)
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js
new file mode 100644
index 0000000000..790df11fbc
--- /dev/null
+++ b/test/parallel/test-stream2-unpipe-leak.js
@@ -0,0 +1,80 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const chunk = Buffer.from('hallo')
+class TestWriter extends stream.Writable {
+ _write(buffer, encoding, callback) {
+ callback(null)
+ }
+}
+const dest = new TestWriter()
+
+// Set this high so that we'd trigger a nextTick warning
+// and/or RangeError if we do maybeReadMore wrong.
+class TestReader extends stream.Readable {
+ constructor() {
+ super({
+ highWaterMark: 0x10000
+ })
+ }
+ _read(size) {
+ this.push(chunk)
+ }
+}
+const src = new TestReader()
+for (let i = 0; i < 10; i++) {
+ src.pipe(dest)
+ src.unpipe(dest)
+}
+assert.strictEqual(src.listeners('end').length, 0)
+assert.strictEqual(src.listeners('readable').length, 0)
+assert.strictEqual(dest.listeners('unpipe').length, 0)
+assert.strictEqual(dest.listeners('drain').length, 0)
+assert.strictEqual(dest.listeners('error').length, 0)
+assert.strictEqual(dest.listeners('close').length, 0)
+assert.strictEqual(dest.listeners('finish').length, 0)
+silentConsole.error(src._readableState)
+process.on('exit', function () {
+ src.readableBuffer.length = 0
+ silentConsole.error(src._readableState)
+ assert(src.readableLength >= src.readableHighWaterMark)
+ silentConsole.log('ok')
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js
new file mode 100644
index 0000000000..2e600650e4
--- /dev/null
+++ b/test/parallel/test-stream2-writable.js
@@ -0,0 +1,470 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const { Writable: W, Duplex: D } = require('../../lib/ours/index')
+const assert = require('assert')
+class TestWriter extends W {
+ constructor(opts) {
+ super(opts)
+ this.buffer = []
+ this.written = 0
+ }
+ _write(chunk, encoding, cb) {
+ // Simulate a small unpredictable latency
+ setTimeout(() => {
+ this.buffer.push(chunk.toString())
+ this.written += chunk.length
+ cb()
+ }, Math.floor(Math.random() * 10))
+ }
+}
+const chunks = new Array(50)
+for (let i = 0; i < chunks.length; i++) {
+ chunks[i] = 'x'.repeat(i)
+}
+{
+ // Verify fast writing
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ tw.on(
+ 'finish',
+ common.mustCall(function () {
+ // Got chunks in the right order
+ assert.deepStrictEqual(tw.buffer, chunks)
+ })
+ )
+ chunks.forEach(function (chunk) {
+ // Ignore backpressure. Just buffer it all up.
+ tw.write(chunk)
+ })
+ tw.end()
+}
+{
+ // Verify slow writing
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ tw.on(
+ 'finish',
+ common.mustCall(function () {
+ // Got chunks in the right order
+ assert.deepStrictEqual(tw.buffer, chunks)
+ })
+ )
+ let i = 0
+ ;(function W() {
+ tw.write(chunks[i++])
+ if (i < chunks.length) setTimeout(W, 10)
+ else tw.end()
+ })()
+}
+{
+ // Verify write backpressure
+ const tw = new TestWriter({
+ highWaterMark: 50
+ })
+ let drains = 0
+ tw.on(
+ 'finish',
+ common.mustCall(function () {
+ // Got chunks in the right order
+ assert.deepStrictEqual(tw.buffer, chunks)
+ assert.strictEqual(drains, 17)
+ })
+ )
+ tw.on('drain', function () {
+ drains++
+ })
+ let i = 0
+ ;(function W() {
+ let ret
+ do {
+ ret = tw.write(chunks[i++])
+ } while (ret !== false && i < chunks.length)
+ if (i < chunks.length) {
+ assert(tw.writableLength >= 50)
+ tw.once('drain', W)
+ } else {
+ tw.end()
+ }
+ })()
+}
+{
+ // Verify write buffersize
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'latin1',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+ tw.on('finish', function () {
+ // Got the expected chunks
+ assert.deepStrictEqual(tw.buffer, chunks)
+ })
+ chunks.forEach(function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+}
+{
+ // Verify write with no buffersize
+ const tw = new TestWriter({
+ highWaterMark: 100,
+ decodeStrings: false
+ })
+ tw._write = function (chunk, encoding, cb) {
+ assert.strictEqual(typeof chunk, 'string')
+ chunk = Buffer.from(chunk, encoding)
+ return TestWriter.prototype._write.call(this, chunk, encoding, cb)
+ }
+ const encodings = [
+ 'hex',
+ 'utf8',
+ 'utf-8',
+ 'ascii',
+ 'latin1',
+ 'binary',
+ 'base64',
+ 'ucs2',
+ 'ucs-2',
+ 'utf16le',
+ 'utf-16le',
+ undefined
+ ]
+ tw.on('finish', function () {
+ // Got the expected chunks
+ assert.deepStrictEqual(tw.buffer, chunks)
+ })
+ chunks.forEach(function (chunk, i) {
+ const enc = encodings[i % encodings.length]
+ chunk = Buffer.from(chunk)
+ tw.write(chunk.toString(enc), enc)
+ })
+}
+{
+ // Verify write callbacks
+ const callbacks = chunks
+ .map(function (chunk, i) {
+ return [
+ i,
+ function () {
+ callbacks._called[i] = chunk
+ }
+ ]
+ })
+ .reduce(function (set, x) {
+ set[`callback-${x[0]}`] = x[1]
+ return set
+ }, {})
+ callbacks._called = []
+ const tw = new TestWriter({
+ highWaterMark: 100
+ })
+ tw.on(
+ 'finish',
+ common.mustCall(function () {
+ process.nextTick(
+ common.mustCall(function () {
+ // Got chunks in the right order
+ assert.deepStrictEqual(tw.buffer, chunks)
+ // Called all callbacks
+ assert.deepStrictEqual(callbacks._called, chunks)
+ })
+ )
+ })
+ )
+ chunks.forEach(function (chunk, i) {
+ tw.write(chunk, callbacks[`callback-${i}`])
+ })
+ tw.end()
+}
+{
+ // Verify end() callback
+ const tw = new TestWriter()
+ tw.end(common.mustCall())
+}
+const helloWorldBuffer = Buffer.from('hello world')
+{
+ // Verify end() callback with chunk
+ const tw = new TestWriter()
+ tw.end(helloWorldBuffer, common.mustCall())
+}
+{
+ // Verify end() callback with chunk and encoding
+ const tw = new TestWriter()
+ tw.end('hello world', 'ascii', common.mustCall())
+}
+{
+ // Verify end() callback after write() call
+ const tw = new TestWriter()
+ tw.write(helloWorldBuffer)
+ tw.end(common.mustCall())
+}
+{
+ // Verify end() callback after write() callback
+ const tw = new TestWriter()
+ let writeCalledback = false
+ tw.write(helloWorldBuffer, function () {
+ writeCalledback = true
+ })
+ tw.end(
+ common.mustCall(function () {
+ assert.strictEqual(writeCalledback, true)
+ })
+ )
+}
+{
+ // Verify encoding is ignored for buffers
+ const tw = new W()
+ const hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'
+ tw._write = common.mustCall(function (chunk) {
+ assert.strictEqual(chunk.toString('hex'), hex)
+ })
+ const buf = Buffer.from(hex, 'hex')
+ tw.write(buf, 'latin1')
+}
+{
+ // Verify writables cannot be piped
+ const w = new W({
+ autoDestroy: false
+ })
+ w._write = common.mustNotCall()
+ let gotError = false
+ w.on('error', function () {
+ gotError = true
+ })
+ w.pipe(process.stdout)
+ assert.strictEqual(gotError, true)
+}
+{
+ // Verify that duplex streams cannot be piped
+ const d = new D()
+ d._read = common.mustCall()
+ d._write = common.mustNotCall()
+ let gotError = false
+ d.on('error', function () {
+ gotError = true
+ })
+ d.pipe(process.stdout)
+ assert.strictEqual(gotError, false)
+}
+{
+ // Verify that end(chunk) twice is an error
+ const w = new W()
+ w._write = common.mustCall((msg) => {
+ assert.strictEqual(msg.toString(), 'this is the end')
+ })
+ let gotError = false
+ w.on('error', function (er) {
+ gotError = true
+ assert.strictEqual(er.message, 'write after end')
+ })
+ w.end('this is the end')
+ w.end('and so is this')
+ process.nextTick(
+ common.mustCall(function () {
+ assert.strictEqual(gotError, true)
+ })
+ )
+}
+{
+ // Verify stream doesn't end while writing
+ const w = new W()
+ let wrote = false
+ w._write = function (chunk, e, cb) {
+ assert.strictEqual(this.writing, undefined)
+ wrote = true
+ this.writing = true
+ setTimeout(() => {
+ this.writing = false
+ cb()
+ }, 1)
+ }
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(wrote, true)
+ assert.strictEqual(this.writing, false)
+ })
+ )
+ w.write(Buffer.alloc(0))
+ w.end()
+}
+{
+ // Verify finish does not come before write() callback
+ const w = new W()
+ let writeCb = false
+ w._write = function (chunk, e, cb) {
+ setTimeout(function () {
+ writeCb = true
+ cb()
+ }, 10)
+ }
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(writeCb, true)
+ })
+ )
+ w.write(Buffer.alloc(0))
+ w.end()
+}
+{
+ // Verify finish does not come before synchronous _write() callback
+ const w = new W()
+ let writeCb = false
+ w._write = function (chunk, e, cb) {
+ cb()
+ }
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(writeCb, true)
+ })
+ )
+ w.write(Buffer.alloc(0), function () {
+ writeCb = true
+ })
+ w.end()
+}
+{
+ // Verify finish is emitted if the last chunk is empty
+ const w = new W()
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
+ w.on('finish', common.mustCall())
+ w.write(Buffer.allocUnsafe(1))
+ w.end(Buffer.alloc(0))
+}
+{
+ // Verify that finish is emitted after shutdown
+ const w = new W()
+ let shutdown = false
+ w._final = common.mustCall(function (cb) {
+ assert.strictEqual(this, w)
+ setTimeout(function () {
+ shutdown = true
+ cb()
+ }, 100)
+ })
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
+ w.on(
+ 'finish',
+ common.mustCall(function () {
+ assert.strictEqual(shutdown, true)
+ })
+ )
+ w.write(Buffer.allocUnsafe(1))
+ w.end(Buffer.allocUnsafe(0))
+}
+{
+ // Verify that error is only emitted once when failing in _finish.
+ const w = new W()
+ w._final = common.mustCall(function (cb) {
+ cb(new Error('test'))
+ })
+ w.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(w._writableState.errorEmitted, true)
+ assert.strictEqual(err.message, 'test')
+ w.on('error', common.mustNotCall())
+ w.destroy(new Error())
+ })
+ )
+ w.end()
+}
+{
+ // Verify that error is only emitted once when failing in write.
+ const w = new W()
+ w.on('error', common.mustNotCall())
+ assert.throws(
+ () => {
+ w.write(null)
+ },
+ {
+ code: 'ERR_STREAM_NULL_VALUES'
+ }
+ )
+}
+{
+ // Verify that error is only emitted once when failing in write after end.
+ const w = new W()
+ w.on(
+ 'error',
+ common.mustCall((err) => {
+ assert.strictEqual(w._writableState.errorEmitted, true)
+ assert.strictEqual(err.code, 'ERR_STREAM_WRITE_AFTER_END')
+ })
+ )
+ w.end()
+ w.write('hello')
+ w.destroy(new Error())
+}
+{
+ // Verify that finish is not emitted after error
+ const w = new W()
+ w._final = common.mustCall(function (cb) {
+ cb(new Error())
+ })
+ w._write = function (chunk, e, cb) {
+ process.nextTick(cb)
+ }
+ w.on('error', common.mustCall())
+ w.on('prefinish', common.mustNotCall())
+ w.on('finish', common.mustNotCall())
+ w.write(Buffer.allocUnsafe(1))
+ w.end(Buffer.allocUnsafe(0))
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream3-cork-end.js b/test/parallel/test-stream3-cork-end.js
new file mode 100644
index 0000000000..92849640bd
--- /dev/null
+++ b/test/parallel/test-stream3-cork-end.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const Writable = stream.Writable
+
+// Test the buffering behavior of Writable streams.
+//
+// The call to cork() triggers storing chunks which are flushed
+// on calling end() and the stream subsequently ended.
+//
+// node version target: 0.12
+
+const expectedChunks = ['please', 'buffer', 'me', 'kindly']
+const inputChunks = expectedChunks.slice(0)
+let seenChunks = []
+let seenEnd = false
+const w = new Writable()
+// Let's arrange to store the chunks.
+w._write = function (chunk, encoding, cb) {
+ // Stream end event is not seen before the last write.
+ assert.ok(!seenEnd)
+ // Default encoding given none was specified.
+ assert.strictEqual(encoding, 'buffer')
+ seenChunks.push(chunk)
+ cb()
+}
+// Let's record the stream end event.
+w.on('finish', () => {
+ seenEnd = true
+})
+function writeChunks(remainingChunks, callback) {
+ const writeChunk = remainingChunks.shift()
+ let writeState
+ if (writeChunk) {
+ setImmediate(() => {
+ writeState = w.write(writeChunk)
+ // We were not told to stop writing.
+ assert.ok(writeState)
+ writeChunks(remainingChunks, callback)
+ })
+ } else {
+ callback()
+ }
+}
+
+// Do an initial write.
+w.write('stuff')
+// The write was immediate.
+assert.strictEqual(seenChunks.length, 1)
+// Reset the seen chunks.
+seenChunks = []
+
+// Trigger stream buffering.
+w.cork()
+
+// Write the bufferedChunks.
+writeChunks(inputChunks, () => {
+ // Should not have seen anything yet.
+ assert.strictEqual(seenChunks.length, 0)
+
+ // Trigger flush and ending the stream.
+ w.end()
+
+  // Stream should not have ended in the current tick.
+ assert.ok(!seenEnd)
+
+ // Buffered bytes should be seen in current tick.
+ assert.strictEqual(seenChunks.length, 4)
+
+  // Verify the chunks match what was written.
+ for (let i = 0, l = expectedChunks.length; i < l; i++) {
+ const seen = seenChunks[i]
+ // There was a chunk.
+ assert.ok(seen)
+ const expected = Buffer.from(expectedChunks[i])
+ // It was what we expected.
+ assert.ok(seen.equals(expected))
+ }
+ setImmediate(() => {
+    // Stream should have ended by the next tick.
+ assert.ok(seenEnd)
+ })
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream3-cork-uncork.js b/test/parallel/test-stream3-cork-uncork.js
new file mode 100644
index 0000000000..e0a89eddb2
--- /dev/null
+++ b/test/parallel/test-stream3-cork-uncork.js
@@ -0,0 +1,95 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const Writable = stream.Writable
+
+// Test the buffering behavior of Writable streams.
+//
+// The call to cork() triggers storing chunks which are flushed
+// on calling uncork() in the same tick.
+//
+// node version target: 0.12
+
+const expectedChunks = ['please', 'buffer', 'me', 'kindly']
+const inputChunks = expectedChunks.slice(0)
+let seenChunks = []
+let seenEnd = false
+const w = new Writable()
+// Let's arrange to store the chunks.
+w._write = function (chunk, encoding, cb) {
+ // Default encoding given none was specified.
+ assert.strictEqual(encoding, 'buffer')
+ seenChunks.push(chunk)
+ cb()
+}
+// Let's record the stream end event.
+w.on('finish', () => {
+ seenEnd = true
+})
+function writeChunks(remainingChunks, callback) {
+ const writeChunk = remainingChunks.shift()
+ let writeState
+ if (writeChunk) {
+ setImmediate(() => {
+ writeState = w.write(writeChunk)
+ // We were not told to stop writing.
+ assert.ok(writeState)
+ writeChunks(remainingChunks, callback)
+ })
+ } else {
+ callback()
+ }
+}
+
+// Do an initial write.
+w.write('stuff')
+// The write was immediate.
+assert.strictEqual(seenChunks.length, 1)
+// Reset the chunks seen so far.
+seenChunks = []
+
+// Trigger stream buffering.
+w.cork()
+
+// Write the bufferedChunks.
+writeChunks(inputChunks, () => {
+ // Should not have seen anything yet.
+ assert.strictEqual(seenChunks.length, 0)
+
+ // Trigger writing out the buffer.
+ w.uncork()
+
+ // Buffered bytes should be seen in current tick.
+ assert.strictEqual(seenChunks.length, 4)
+
+ // Verify that each chunk matches what was expected.
+ for (let i = 0, l = expectedChunks.length; i < l; i++) {
+ const seen = seenChunks[i]
+ // There was a chunk.
+ assert.ok(seen)
+ const expected = Buffer.from(expectedChunks[i])
+ // It was what we expected.
+ assert.ok(seen.equals(expected))
+ }
+ setImmediate(() => {
+ // The stream should not have been ended.
+ assert.ok(!seenEnd)
+ })
+})
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js
new file mode 100644
index 0000000000..e5272baa5b
--- /dev/null
+++ b/test/parallel/test-stream3-pause-then-read.js
@@ -0,0 +1,175 @@
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const Readable = stream.Readable
+const Writable = stream.Writable
+const totalChunks = 100
+const chunkSize = 99
+const expectTotalData = totalChunks * chunkSize
+let expectEndingData = expectTotalData
+const r = new Readable({
+ highWaterMark: 1000
+})
+let chunks = totalChunks
+r._read = function (n) {
+ silentConsole.log('_read called', chunks)
+ if (!(chunks % 2)) setImmediate(push)
+ else if (!(chunks % 3)) process.nextTick(push)
+ else push()
+}
+let totalPushed = 0
+function push() {
+ const chunk = chunks-- > 0 ? Buffer.alloc(chunkSize, 'x') : null
+ if (chunk) {
+ totalPushed += chunk.length
+ }
+ silentConsole.log('chunks', chunks)
+ r.push(chunk)
+}
+read100()
+
+// First we read 100 bytes.
+function read100() {
+ readn(100, onData)
+}
+function readn(n, then) {
+ silentConsole.error(`read ${n}`)
+ expectEndingData -= n
+ ;(function read() {
+ const c = r.read(n)
+ silentConsole.error('c', c)
+ if (!c) r.once('readable', read)
+ else {
+ assert.strictEqual(c.length, n)
+ assert(!r.readableFlowing)
+ then()
+ }
+ })()
+}
+
+// Then we listen to some data events.
+function onData() {
+ expectEndingData -= 100
+ silentConsole.error('onData')
+ let seen = 0
+ r.on('data', function od(c) {
+ seen += c.length
+ if (seen >= 100) {
+ // Seen enough
+ r.removeListener('data', od)
+ r.pause()
+ if (seen > 100) {
+ // Oh no, seen too much!
+ // Put the extra back.
+ const diff = seen - 100
+ r.unshift(c.slice(c.length - diff))
+ silentConsole.error('seen too much', seen, diff)
+ }
+
+ // Nothing should be lost in-between.
+ setImmediate(pipeLittle)
+ }
+ })
+}
+
+// Just pipe 200 bytes, then unshift the extra and unpipe.
+function pipeLittle() {
+ expectEndingData -= 200
+ silentConsole.error('pipe a little')
+ const w = new Writable()
+ let written = 0
+ w.on('finish', () => {
+ assert.strictEqual(written, 200)
+ setImmediate(read1234)
+ })
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ if (written >= 200) {
+ r.unpipe(w)
+ w.end()
+ cb()
+ if (written > 200) {
+ const diff = written - 200
+ written -= diff
+ r.unshift(chunk.slice(chunk.length - diff))
+ }
+ } else {
+ setImmediate(cb)
+ }
+ }
+ r.pipe(w)
+}
+
+// Now read 1234 more bytes.
+function read1234() {
+ readn(1234, resumePause)
+}
+function resumePause() {
+ silentConsole.error('resumePause')
+ // Don't read anything, just resume and re-pause a whole bunch.
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ r.resume()
+ r.pause()
+ setImmediate(pipe)
+}
+function pipe() {
+ silentConsole.error('pipe the rest')
+ const w = new Writable()
+ let written = 0
+ w._write = function (chunk, encoding, cb) {
+ written += chunk.length
+ cb()
+ }
+ w.on('finish', () => {
+ silentConsole.error('written', written, totalPushed)
+ assert.strictEqual(written, expectEndingData)
+ assert.strictEqual(totalPushed, expectTotalData)
+ silentConsole.log('ok')
+ })
+ r.pipe(w)
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-stream3-pipeline-async-iterator.js b/test/parallel/test-stream3-pipeline-async-iterator.js
new file mode 100644
index 0000000000..981cfc919e
--- /dev/null
+++ b/test/parallel/test-stream3-pipeline-async-iterator.js
@@ -0,0 +1,39 @@
+/* eslint-disable node-core/require-common-first, require-yield */
+
+'use strict'
+
+const AbortController = globalThis.AbortController || require('abort-controller').AbortController
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const { pipeline } = require('../../lib/stream').promises
+{
+ // Ensure that async iterators can act as readable and writable streams
+ async function* myCustomReadable() {
+ yield 'Hello'
+ yield 'World'
+ }
+ const messages = []
+ async function* myCustomWritable(stream) {
+ for await (const chunk of stream) {
+ messages.push(chunk)
+ }
+ }
+ ;(async () => {
+ await pipeline(myCustomReadable, myCustomWritable)
+ // Importing here to avoid initializing streams
+ require('assert').deepStrictEqual(messages, ['Hello', 'World'])
+ })().then(require('../common').mustCall())
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/parallel/test-streams-highwatermark.js b/test/parallel/test-streams-highwatermark.js
new file mode 100644
index 0000000000..1ff481e5c2
--- /dev/null
+++ b/test/parallel/test-streams-highwatermark.js
@@ -0,0 +1,103 @@
+'use strict'
+
+const tap = require('tap')
+const silentConsole = {
+ log() {},
+ error() {}
+}
+const common = require('../common')
+const assert = require('assert')
+const stream = require('../../lib/ours/index')
+const { inspect } = require('util')
+{
+ // This test ensures that the stream implementation correctly handles values
+ // for highWaterMark which exceed the range of signed 32 bit integers and
+ // rejects invalid values.
+
+ // This number exceeds the range of 32 bit integer arithmetic but should still
+ // be handled correctly.
+ const ovfl = Number.MAX_SAFE_INTEGER
+ const readable = stream.Readable({
+ highWaterMark: ovfl
+ })
+ assert.strictEqual(readable._readableState.highWaterMark, ovfl)
+ const writable = stream.Writable({
+ highWaterMark: ovfl
+ })
+ assert.strictEqual(writable._writableState.highWaterMark, ovfl)
+ for (const invalidHwm of [true, false, '5', {}, -5, NaN]) {
+ for (const type of [stream.Readable, stream.Writable]) {
+ assert.throws(
+ () => {
+ type({
+ highWaterMark: invalidHwm
+ })
+ },
+ {
+ name: 'TypeError',
+ code: 'ERR_INVALID_ARG_VALUE',
+ message: "The property 'options.highWaterMark' is invalid. " + `Received ${inspect(invalidHwm)}`
+ }
+ )
+ }
+ }
+}
+{
+ // This test ensures that the push method's implementation
+ // correctly handles the edge case where the highWaterMark and
+ // the state.length are both zero
+
+ const readable = stream.Readable({
+ highWaterMark: 0
+ })
+ for (let i = 0; i < 3; i++) {
+ const needMoreData = readable.push()
+ assert.strictEqual(needMoreData, true)
+ }
+}
+{
+ // This test ensures that the read(n) method's implementation
+ // correctly handles the edge case where the highWaterMark, state.length
+ // and n are all zero
+
+ const readable = stream.Readable({
+ highWaterMark: 0
+ })
+ readable._read = common.mustCall()
+ readable.read(0)
+}
+{
+ // Parse size as decimal integer
+ ;['1', '1.0', 1].forEach((size) => {
+ const readable = new stream.Readable({
+ read: common.mustCall(),
+ highWaterMark: 0
+ })
+ readable.read(size)
+ assert.strictEqual(readable._readableState.highWaterMark, Number(size))
+ })
+}
+{
+ // Test highwatermark limit
+ const hwm = 0x40000000 + 1
+ const readable = stream.Readable({
+ read() {}
+ })
+ assert.throws(
+ () => readable.read(hwm),
+ common.expectsError({
+ code: 'ERR_OUT_OF_RANGE',
+ message: 'The value of "size" is out of range.' + ' It must be <= 1GiB. Received ' + hwm
+ })
+ )
+}
+
+/* replacement start */
+process.on('beforeExit', (code) => {
+ if (code === 0) {
+ tap.pass('test succeeded')
+ } else {
+ tap.fail(`test failed - exited code ${code}`)
+ }
+})
+/* replacement end */
diff --git a/test/passthrough.js b/test/passthrough.js
deleted file mode 100644
index 35a1293fd7..0000000000
--- a/test/passthrough.js
+++ /dev/null
@@ -1,81 +0,0 @@
-var PassThrough = require('../passthrough.js');
-var test = require('tap').test;
-
-test('passthrough', function(t) {
- var pt = new PassThrough;
-
- pt.write(new Buffer('foog'));
- pt.write(new Buffer('bark'));
- pt.write(new Buffer('bazy'));
- pt.write(new Buffer('kuel'));
-
- t.equal(pt.read(5).toString(), 'foogb');
- t.equal(pt.read(5).toString(), 'arkba');
- t.equal(pt.read(5).toString(), 'zykue');
- t.equal(pt.read(5).toString(), 'l');
- t.end();
-});
-
-test('passthrough with transform', function(t) {
- pt = new PassThrough;
- pt.transform = function(c) {
- var ret = new Buffer(c.length);
- ret.fill('x');
- return ret;
- };
-
- pt.write(new Buffer('foog'));
- pt.write(new Buffer('bark'));
- pt.write(new Buffer('bazy'));
- pt.write(new Buffer('kuel'));
-
- t.equal(pt.read(5).toString(), 'xxxxx');
- t.equal(pt.read(5).toString(), 'xxxxx');
- t.equal(pt.read(5).toString(), 'xxxxx');
- t.equal(pt.read(5).toString(), 'x');
- t.end();
-});
-
-test('passthrough reordered', function(t) {
- pt = new PassThrough;
- var emits = 0;
- pt.on('readable', function() {
- emits++;
- });
-
- pt.write(new Buffer('foog'));
- pt.write(new Buffer('bark'));
-
- t.equal(pt.read(5).toString(), 'foogb');
- t.equal(pt.read(5).toString(), 'ark');
- t.equal(pt.read(5), null);
-
- pt.write(new Buffer('bazy'));
- pt.write(new Buffer('kuel'));
-
- t.equal(pt.read(5).toString(), 'bazyk');
- t.equal(pt.read(5).toString(), 'uel');
- t.equal(pt.read(5), null);
-
- t.equal(emits, 2);
- t.end();
-});
-
-test('passthrough facaded', function(t) {
- var pt = new PassThrough;
- var datas = [];
- pt.on('data', function(chunk) {
- datas.push(chunk.toString());
- });
-
- pt.on('end', function() {
- t.same(datas, ['foog', 'bark', 'bazy', 'kuel']);
- t.end();
- });
-
- pt.write(new Buffer('foog'));
- pt.write(new Buffer('bark'));
- pt.write(new Buffer('bazy'));
- pt.write(new Buffer('kuel'));
- pt.end();
-});