diff --git a/.eslintrc.js b/.eslintrc.js index bda641797e2951..f5100fd3cc91b8 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -98,6 +98,14 @@ module.exports = { name: 'require', message: 'Use import instead', }, + { + name: 'Buffer', + message: 'Import Buffer instead of using the global' + }, + { + name: 'process', + message: 'Import process instead of using the global' + }, ] }, }, ], diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 95363251fe41a0..7fdbdc7ef86e95 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -22,7 +22,6 @@ /doc/guides/contributing/pull-requests.md @nodejs/tsc /doc/guides/collaborator-guide.md @nodejs/tsc /doc/guides/offboarding.md @nodejs/tsc -/doc/guides/onboarding-extras.md @nodejs/tsc # streams @@ -55,7 +54,7 @@ # tls/crypto /lib/internal/crypto/* @nodejs/crypto -/lib/internal/tls.js @nodejs/crypto @nodejs/net +/lib/internal/tls/* @nodejs/crypto @nodejs/net /lib/crypto.js @nodejs/crypto /lib/tls.js @nodejs/crypto @nodejs/net /src/node_crypto* @nodejs/crypto @@ -119,3 +118,8 @@ # V8 /deps/v8/* @nodejs/v8-update /tools/v8_gypfiles/* @nodejs/v8-update + +# Actions + +/.github/workflows/* @nodejs/actions +/tools/actions/* @nodejs/actions diff --git a/.github/ISSUE_TEMPLATE/1-bug-report.md b/.github/ISSUE_TEMPLATE/1-bug-report.md deleted file mode 100644 index 84b6daf665e522..00000000000000 --- a/.github/ISSUE_TEMPLATE/1-bug-report.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -name: "\U0001F41B Bug report" -about: Create a report to help us improve - ---- - - - -* **Version**: -* **Platform**: -* **Subsystem**: - -### What steps will reproduce the bug? - - - -### How often does it reproduce? Is there a required condition? - -### What is the expected behavior? - - - -### What do you see instead? 
- - - -### Additional information - - diff --git a/.github/ISSUE_TEMPLATE/1-bug-report.yml b/.github/ISSUE_TEMPLATE/1-bug-report.yml new file mode 100644 index 00000000000000..965a2eca45d978 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/1-bug-report.yml @@ -0,0 +1,45 @@ +name: "\U0001F41B Bug report" +description: Create a report to help us improve +body: + - type: markdown + attributes: + value: | + Thank you for reporting an issue. + + This issue tracker is for bugs and issues found within Node.js core. + If you require more general support please file an issue on our help repo. https://github.com/nodejs/help + + Please fill in as much of the form below as you're able. + - type: input + attributes: + label: Version + description: Output of `node -v` + - type: input + attributes: + label: Platform + description: | + UNIX: output of `uname -a` + Windows: output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in PowerShell console + - type: input + attributes: + label: Subsystem + description: If known, please specify affected core module name + - type: textarea + attributes: + label: What steps will reproduce the bug? + description: Enter details about your bug, preferably a simple code snippet that can be run using `node` directly without installing third-party dependencies. + - type: textarea + attributes: + label: How often does it reproduce? Is there a required condition? + - type: textarea + attributes: + label: What is the expected behavior? + description: If possible please provide textual output instead of screenshots. + - type: textarea + attributes: + label: What do you see instead? + description: If possible please provide textual output instead of screenshots. + - type: textarea + attributes: + label: Additional information + description: Tell us anything else you think we should know. 
diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 1f802ca12b56c6..7022b6c1142678 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -58,8 +58,8 @@ jobs: name: tarballs - name: Extract tarball run: | - tar xzf tarballs/*.tar.gz - echo "TAR_DIR=`basename tarballs/*.tar.gz .tar.gz`" >> $GITHUB_ENV + tar xzf tarballs/*.tar.gz -C $RUNNER_TEMP + echo "TAR_DIR=$RUNNER_TEMP/`basename tarballs/*.tar.gz .tar.gz`" >> $GITHUB_ENV - name: Copy directories needed for testing run: | cp -r tools/node_modules $TAR_DIR/tools diff --git a/.github/workflows/commit-lint-problem-matcher.json b/.github/workflows/commit-lint-problem-matcher.json new file mode 100644 index 00000000000000..72dd13b9e0929d --- /dev/null +++ b/.github/workflows/commit-lint-problem-matcher.json @@ -0,0 +1,13 @@ +{ + "problemMatcher": [ + { + "owner": "core-validate-commit", + "pattern": [ + { + "regexp": "^not ok \\d+ (.*)$", + "message": 1 + } + ] + } + ] +} diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml new file mode 100644 index 00000000000000..9ea6337b51f181 --- /dev/null +++ b/.github/workflows/commit-lint.yml @@ -0,0 +1,20 @@ +name: "Commit messages adheres to guidelines at https://goo.gl/p2fr5Q" + +on: [pull_request] + +jobs: + lint-commit-message: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + # Last 100 commits should be enough for a PR + fetch-depth: 100 + - name: Use Node.js 12 + uses: actions/setup-node@v1 + with: + node-version: 12.x + - name: Validate commit messages + run: | + echo "::add-matcher::.github/workflows/commit-lint-problem-matcher.json" + git log --oneline ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }} | grep -v -e fixup -e squash | awk '{ print $1 }' | xargs npx -q core-validate-commit --no-validate-metadata --tap diff --git a/.github/workflows/find-inactive-collaborators.yml 
b/.github/workflows/find-inactive-collaborators.yml new file mode 100644 index 00000000000000..8e459b5b8b2942 --- /dev/null +++ b/.github/workflows/find-inactive-collaborators.yml @@ -0,0 +1,17 @@ +name: Find inactive collaborators + +on: + schedule: + # Run on the 15th day of the month at 4:05 AM UTC. + - cron: '5 4 15 * *' + + workflow_dispatch: + +jobs: + find: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - run: tools/find-inactive-collaborators.mjs '1 year ago' diff --git a/.github/workflows/label-pr.yml b/.github/workflows/label-pr.yml index 4e56676e41e5ff..58e9b226dab0d0 100644 --- a/.github/workflows/label-pr.yml +++ b/.github/workflows/label-pr.yml @@ -11,4 +11,5 @@ jobs: steps: - uses: nodejs/node-pr-labeler@v1 with: + repo-token: ${{ secrets.GH_USER_TOKEN }} configuration-path: .github/label-pr-config.yml diff --git a/.gitignore b/.gitignore index b46679450bdbe6..08b44ff9561db3 100644 --- a/.gitignore +++ b/.gitignore @@ -133,7 +133,7 @@ tools/*/*.i.tmp /deps/uv/.github/ /deps/uv/docs/code/ /deps/uv/docs/src/guide/ -# Ignore dependencies fetched by deps/v8/tools/node/fetch_deps.py +# Ignore dependencies fetched by tools/v8/fetch_deps.py /deps/.cipd # === Rules for Windows vcbuild.bat === diff --git a/.mailmap b/.mailmap index 4597ae9e47f61d..c63bd511e6b5d9 100644 --- a/.mailmap +++ b/.mailmap @@ -63,6 +63,7 @@ Beth Griggs Bethany Griggs Beth Griggs Bethany N Griggs Beth Griggs BethGriggs Bidisha Pyne +bl-ue bl-ue <54780737+bl-ue@users.noreply.github.com> Brad Decker brad-decker Brad Larson BradLarson Bradley Meck Bradley Farias @@ -103,8 +104,10 @@ Daniel Paulino dpaulino Daniel Pihlström Daniel Wang firedfox Daniel Wang firedfox +Danielle Adams Danielle Adams Danny Nemer Danny Nemer +Darshan Sen Dave Pacheco David Cai DavidCai David Mark Clements @@ -207,6 +210,7 @@ Josh Erickson Josh Hunter jopann Joshua S. 
Weinstein Joyee Cheung joyeecheung +Joyee Cheung Joyee Cheung Juan Soto Julien Klepatch jklepatch Julien Waechter julien.waechter @@ -294,6 +298,7 @@ Nicholas Kinsey Nick Soggin Nikolai Vavilov Nils Kuhnhenn +Nitzan Uziely Nitzan Uziely Noah Rose Ledesma Noah Rose Noah Rose Ledesma Oluwaseun Omoyajowo @@ -356,6 +361,7 @@ Segu Riluvan Sergey Kryzhanovsky Shannen Saez Shaopeng Zhang szhang351 +Shelley Vohr Shelley Vohr Shigeki Ohtsu Shigeki Ohtsu Shivang Saxena @@ -371,6 +377,7 @@ Sreepurna Jasti sreepurnajasti Stefan Budeanu Stefan Bühler +Stephen Belanger Stephen Belanger Steve Mao Steven R. Loomis @@ -426,6 +433,7 @@ Ujjwal Sharma Viktor Karpov vitkarpov Vincent Voyer Vladimir de Turckheim +Voltrex Voltrex <62040526+VoltrexMaster@users.noreply.github.com> vsemozhetbyt Vse Mozhet Byt Wang Xinyong Weijia Wang <381152119@qq.com> diff --git a/AUTHORS b/AUTHORS index a61c4010be5d3c..6d9af83ca23a64 100644 --- a/AUTHORS +++ b/AUTHORS @@ -784,7 +784,7 @@ Sven Slootweg Dmitry Vasilyev Malcolm Ahoy Imran Iqbal -Stewart X Addison +Stewart X Addison Matt Harrison Christopher J. 
Brody Salman Aljammaz @@ -3003,7 +3003,7 @@ Conor ONeill tsabolov Swagat Konchada Yuhanun Citgez -Danielle Adams +Danielle Adams Andrey Pechkurov Jeff simon @@ -3139,5 +3139,186 @@ Shigma <33423008+Shigma@users.noreply.github.com> atian25@qq.com Amila Welihinda schamberg97 <50446906+schamberg97@users.noreply.github.com> +DrunkenPoney +Christoph Tavan +Clark Kozak +Michael Auderer +Linn Dahlgren +Ikko Ashimine +Anatoly Korniltsev +Victor Antonio Barzana Crespo +Matthieu Larcher +anlex N <1293006794@qq.com> +ThakurKarthik +Aastha Gupta +Yohanan Baruchel +Dmitry Gozman +Daniil Demidovich +Hussaina Begum Nandyala +Danny Sonnenschein +Sourav Shaw +H Adinarayana +lucasg +Brian 'bdougie' Douglas +Lee, Bonggi +Momtchil Momtchev +Josh Dague +Vincent Boivin +ax1 <16510021+ax1@users.noreply.github.com> +Shubham Parihar <51517103+iShibi@users.noreply.github.com> +Darshan Sen +Matthew Francis Brunetti +Chris Opperwall +Takuya Noguchi +tyankatsu +Ben Turner <7623873+ben-turner@users.noreply.github.com> +Bryan Field +krank2me +masx200 <34191203+masx200@users.noreply.github.com> +Baruch Odem (Rothkoff) +Mattias Runge-Broberg +Szymon Marczak <36894700+szmarczak@users.noreply.github.com> +Dmitry Semigradsky +Ole André Vadla Ravnås +rickyes <0x19951125@gmail.com> +Aleksandr Krutko +Brian Ingenito <28159742+bingenito@users.noreply.github.com> +FeelyChau +Darcy Clarke +mayank agarwal +woodfairy +Nikola Glavina +Rishabh Mehan +Anna Henningsen +Andrew Casey +Anders Kaseorg +Hollow Man +nlf +naortedgi +Narasimha Prasanna HN +Zijian Liu +inokawa <48897392+inokawa@users.noreply.github.com> +Michael Bashurov +Moshe vilner +Nicolai Stange +kai zhu +FrankQiu +Rock +Chinmoy Chakraborty +Maksym Baranovskyi +Michael Chen <4326639+mcgitty@users.noreply.github.com> +François-Denis Gonthier +Dr +Nitzan Uziely +Adrien Maret +Thiago Padilha +Joseph Hackman +Pranshu Jethmalani +Rohan Chougule +Mohamed Kamagate +Ajay Poshak +Isaac Levy +ugultopu +Nicholas Schamberg +Dimitris Halatsis +Mattia Pontonio 
<44380480+mattiapontonio@users.noreply.github.com> +Milad Fa +Emil Sivervik +alexbs +Ian Storm Taylor +Carlos Fuentes +Tyler Ang-Wanek +Matthew Mario Di Pasquale +ttzztztz +Romuald Brillout +Dave Cardwell +Akash Negi <55234838+NegiAkash890@users.noreply.github.com> +James Addison +Fabian Cook +Kalvin Vasconcellos +marsonya +Qingyu Deng +Matin Zadehdolatabad +Daniel Clark +Sajal Khandelwal +Cheng Liu +Utku Gultopu +Jay Tailor <60511316+JayvaScript@users.noreply.github.com> +Greg Ziskind +Dan Čermák +ttzztztz +Vít Ondruch +humanwebpl <58517331+humanwebpl@users.noreply.github.com> +Dawid Rusnak +obi-el +Merlin Luntke <22600241+Luntke@users.noreply.github.com> +Marko Kaznovac +Gabriel Schulhof +Ian Kerins +dbachko +Mattias Buelens +Dylan Elliott +Wassim Chegham +simov +wwwzbwcom +David Glasser +pezhmanparsaee +Hassaan Pasha +Darkripper214 +Anu Pasumarthy +HiroyukiYagihashi +Arkerone +Voltrex +ycjcl868 +Serkan Özel +Ferdi +eladkeyshawn +luyahan +Simon Knott +Siddharth +Cactysman +David Brownman +Michael Rommel +Chengzhong Wu +Andres +Jayden Seric +divlo +Rohit Gohri +Giora Guttsait +takayama +Rafael Gonzaga +Arnold Zokas +Nils Dralle +Jesse Chan +helloyou2012 +MrJithil +Rodolfo Carvalho +Jordan Baczuk +moander +Hitesh Sharma +Andreas Schwab +Moritz Kneilmann +fisker Cheung +Issam E. 
Maghni +TodorTotev <51530311+TodorTotev@users.noreply.github.com> +Wael Almattar +yotamselementor <83912471+yotamselementor@users.noreply.github.com> +pengjie <37610029@qq.com> +Philip +julianjany <54538266+julianjany@users.noreply.github.com> +bl-ue +npm-robot +Shaun Keys +Simone Busoli +ycjcl868 <45808948@qq.com> +Qingyu Deng +Derevianchenko Maksym <32910350+maks-white@users.noreply.github.com> +RA80533 <32469082+RA80533@users.noreply.github.com> +Mao Wtm +Houssem Chebab +Davidson Francis +Rohan Sharma +AkshayK +FrankEntriken <42781627+FrankEntriken@users.noreply.github.com> # Generated by tools/update-authors.js diff --git a/CHANGELOG.md b/CHANGELOG.md index e204dea1cb261b..a66acfc20b054e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,8 @@ release. -16.4.2
+16.5.0
+16.4.2
16.4.1
16.4.0
16.3.0
@@ -41,7 +42,9 @@ release. 16.0.0
-14.17.0
+14.17.2
+14.17.1
+14.17.0
14.16.1
14.16.0
14.15.5
@@ -69,7 +72,8 @@ release. 14.0.0
-12.22.1
+12.22.2
+12.22.1
12.22.0
12.21.0
12.20.2
diff --git a/LICENSE b/LICENSE index 41f9bc05611a82..18392b1d6a1a5f 100644 --- a/LICENSE +++ b/LICENSE @@ -55,30 +55,7 @@ The externally maintained libraries used by Node.js are: """ MIT License - Copyright (C) 2012-2018 by various contributors (see AUTHORS) - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. - """ - -- Acorn plugins, located at deps/acorn-plugins, is licensed as follows: - """ - Copyright (C) 2017-2018 by Adrian Heine + Copyright (C) 2012-2020 by various contributors (see AUTHORS) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -1274,7 +1251,7 @@ The externally maintained libraries used by Node.js are: WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -- gtest, located at test/cctest/gtest, is licensed as follows: +- gtest, located at src/gtest and test/cctest/gtest, is licensed as follows: """ Copyright 2008, Google Inc. 
All rights reserved. diff --git a/Makefile b/Makefile index 58260979e11458..e4d13a818a66b4 100644 --- a/Makefile +++ b/Makefile @@ -204,7 +204,7 @@ check: test # in place coverage-clean: $(RM) -r node_modules - $(RM) -r gcovr build + $(RM) -r gcovr $(RM) -r coverage/tmp $(FIND) out/$(BUILDTYPE)/obj.target \( -name "*.gcda" -o -name "*.gcno" \) \ -type f -exec $(RM) {} \; @@ -220,13 +220,7 @@ coverage: coverage-test ## Run the tests and generate a coverage report. .PHONY: coverage-build coverage-build: all -$(MAKE) coverage-build-js - if [ ! -d gcovr ]; then git clone -b 3.4 --depth=1 \ - --single-branch https://github.com/gcovr/gcovr.git; fi - if [ ! -d build ]; then git clone --depth=1 \ - --single-branch https://github.com/nodejs/build.git; fi - if [ ! -f gcovr/scripts/gcovr.orig ]; then \ - (cd gcovr && patch -N -p1 < \ - "$(CURDIR)/build/jenkins/scripts/coverage/gcovr-patches-3.4.diff"); fi + if [ ! -d gcovr ]; then $(PYTHON) -m pip install -t gcovr gcovr==4.2; fi $(MAKE) .PHONY: coverage-build-js @@ -238,16 +232,14 @@ coverage-build-js: .PHONY: coverage-test coverage-test: coverage-build - $(RM) out/$(BUILDTYPE)/obj.target/node/src/*.gcda - $(RM) out/$(BUILDTYPE)/obj.target/node/src/*/*.gcda - $(RM) out/$(BUILDTYPE)/obj.target/node_lib/src/*.gcda - $(RM) out/$(BUILDTYPE)/obj.target/node_lib/src/*/*.gcda + $(FIND) out/$(BUILDTYPE)/obj.target -name "*.gcda" -type f -exec $(RM) {} \; -NODE_V8_COVERAGE=coverage/tmp \ TEST_CI_ARGS="$(TEST_CI_ARGS) --type=coverage" $(MAKE) $(COVTESTS) $(MAKE) coverage-report-js - -(cd out && "../gcovr/scripts/gcovr" \ + -(cd out && PYTHONPATH=../gcovr $(PYTHON) -m gcovr \ --gcov-exclude='.*\b(deps|usr|out|cctest|embedding)\b' -v \ - -r Release/obj.target --html --html-detail -o ../coverage/cxxcoverage.html \ + -r ../src/ --object-directory Release/obj.target \ + --html --html-details -o ../coverage/cxxcoverage.html \ --gcov-executable="$(GCOV)") @printf "Javascript coverage %%: " @grep -B1 Lines coverage/index.html | head -n1 \ @@ 
-667,12 +659,12 @@ test-v8: v8 ## Runs the V8 test suite on deps/v8. test-v8-intl: v8 export PATH="$(NO_BIN_OVERRIDE_PATH)" && \ deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) \ - --mode=$(BUILDTYPE_LOWER) intl \ + intl \ $(TAP_V8_INTL) test-v8-benchmarks: v8 export PATH="$(NO_BIN_OVERRIDE_PATH)" && \ - deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) --mode=$(BUILDTYPE_LOWER) \ + deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) \ benchmarks \ $(TAP_V8_BENCHMARKS) @@ -847,6 +839,9 @@ else ifeq ($(findstring powerpc,$(shell uname -p)),powerpc) DESTCPU ?= ppc64 else +ifeq ($(findstring riscv64,$(UNAME_M)),riscv64) +DESTCPU ?= riscv64 +else DESTCPU ?= x86 endif endif @@ -857,6 +852,7 @@ endif endif endif endif +endif ifeq ($(DESTCPU),x64) ARCH=x64 else @@ -878,6 +874,9 @@ else ifeq ($(DESTCPU),s390x) ARCH=s390x else +ifeq ($(DESTCPU),riscv64) +ARCH=riscv64 +else ARCH=x86 endif endif @@ -886,6 +885,7 @@ endif endif endif endif +endif # node and v8 use different arch names (e.g. node 'x86' vs v8 'ia32'). # pass the proper v8 arch name to $V8_ARCH based on user-specified $DESTCPU. @@ -983,7 +983,7 @@ $(PKG): release-only ifneq ($(OSTYPE),darwin) $(warning Invalid OSTYPE) $(error OSTYPE should be `darwin` currently is $(OSTYPE)) -endif +endif ifneq ($(ARCHTYPE),arm64) $(warning Invalid ARCHTYPE) $(error ARCHTYPE should be `arm64` currently is $(ARCHTYPE)) @@ -1093,7 +1093,7 @@ $(TARBALL): release-only doc-only find $(TARNAME)/deps/v8/test -type f ! -regex '.*/test/torque/.*' | xargs $(RM) find $(TARNAME)/deps/zlib/contrib/* -type d ! 
-regex '.*/contrib/optimizations$$' | xargs $(RM) -r find $(TARNAME)/ -name ".eslint*" -maxdepth 2 | xargs $(RM) - find $(TARNAME)/ -type l | xargs $(RM) # annoying on windows + find $(TARNAME)/ -type l | xargs $(RM) tar -cf $(TARNAME).tar $(TARNAME) $(RM) -r $(TARNAME) gzip -c -f -9 $(TARNAME).tar > $(TARNAME).tar.gz diff --git a/README.md b/README.md index 39a47f0f7c6c3a..d6725488daeb47 100644 --- a/README.md +++ b/README.md @@ -200,6 +200,10 @@ For information about the governance of the Node.js project, see * [Trott](https://github.com/Trott) - **Rich Trott** <rtrott@gmail.com> (he/him) +
+ +Emeriti + ### TSC emeriti * [addaleax](https://github.com/addaleax) - @@ -243,6 +247,8 @@ For information about the governance of the Node.js project, see * [trevnorris](https://github.com/trevnorris) - **Trevor Norris** <trev.norris@gmail.com> +
+ ### Collaborators * [addaleax](https://github.com/addaleax) - @@ -251,8 +257,6 @@ For information about the governance of the Node.js project, see **Antoine du Hamel** <duhamelantoine1995@gmail.com> (he/him) * [ak239](https://github.com/ak239) - **Aleksei Koziatinskii** <ak239spb@gmail.com> -* [AndreasMadsen](https://github.com/AndreasMadsen) - -**Andreas Madsen** <amwebdk@gmail.com> (he/him) * [antsmartian](https://github.com/antsmartian) - **Anto Aravinth** <anto.aravinth.cse@gmail.com> (he/him) * [apapirovski](https://github.com/apapirovski) - @@ -316,7 +320,7 @@ For information about the governance of the Node.js project, see * [gabrielschulhof](https://github.com/gabrielschulhof) - **Gabriel Schulhof** <gabrielschulhof@gmail.com> * [gdams](https://github.com/gdams) - -**George Adams** <george.adams@uk.ibm.com> (he/him) +**George Adams** <george.adams@microsoft.com> (he/him) * [geek](https://github.com/geek) - **Wyatt Preul** <wpreul@gmail.com> * [gengjiawen](https://github.com/gengjiawen) - @@ -387,8 +391,6 @@ For information about the governance of the Node.js project, see **Brian White** <mscdex@mscdex.net> * [MylesBorins](https://github.com/MylesBorins) - **Myles Borins** <myles.borins@gmail.com> (he/him) -* [ofrobots](https://github.com/ofrobots) - -**Ali Ijaz Sheikh** <ofrobots@google.com> (he/him) * [oyyd](https://github.com/oyyd) - **Ouyang Yadong** <oyydoibh@gmail.com> (he/him) * [panva](https://github.com/panva) - @@ -411,8 +413,6 @@ For information about the governance of the Node.js project, see **Ricky Zhou** <0x19951125@gmail.com> (he/him) * [ronag](https://github.com/ronag) - **Robert Nagy** <ronagy@icloud.com> -* [rubys](https://github.com/rubys) - -**Sam Ruby** <rubys@intertwingly.net> * [ruyadorno](https://github.com/ruyadorno) - **Ruy Adorno** <ruyadorno@github.com> (he/him) * [rvagg](https://github.com/rvagg) - @@ -464,12 +464,18 @@ For information about the governance of the Node.js project, see * [ZYSzys](https://github.com/ZYSzys) - 
**Yongsheng Zhang** <zyszys98@gmail.com> (he/him) +
+ +Emeriti + ### Collaborator emeriti * [andrasq](https://github.com/andrasq) - **Andras** <andras@kinvey.com> * [AnnaMag](https://github.com/AnnaMag) - **Anna M. Kedzierska** <anna.m.kedzierska@gmail.com> +* [AndreasMadsen](https://github.com/AndreasMadsen) - +**Andreas Madsen** <amwebdk@gmail.com> (he/him) * [aqrln](https://github.com/aqrln) - **Alexey Orlenko** <eaglexrlnk@gmail.com> (he/him) * [bnoordhuis](https://github.com/bnoordhuis) - @@ -538,6 +544,8 @@ For information about the governance of the Node.js project, see **Chen Gang** <gangc.cxy@foxmail.com> * [not-an-aardvark](https://github.com/not-an-aardvark) - **Teddy Katz** <teddy.katz@gmail.com> (he/him) +* [ofrobots](https://github.com/ofrobots) - +**Ali Ijaz Sheikh** <ofrobots@google.com> (he/him) * [Olegas](https://github.com/Olegas) - **Oleg Elifantiev** <oleg@elifantiev.ru> * [orangemocha](https://github.com/orangemocha) - @@ -568,6 +576,8 @@ For information about the governance of the Node.js project, see **Ron Korving** <ron@ronkorving.nl> * [RReverser](https://github.com/RReverser) - **Ingvar Stepanyan** <me@rreverser.com> +* [rubys](https://github.com/rubys) - +**Sam Ruby** <rubys@intertwingly.net> * [sam-github](https://github.com/sam-github) - **Sam Roberts** <vieuxtech@gmail.com> * [sebdeckers](https://github.com/sebdeckers) - @@ -592,6 +602,8 @@ For information about the governance of the Node.js project, see **Vse Mozhet Byt** <vsemozhetbyt@gmail.com> (he/him) * [whitlockjc](https://github.com/whitlockjc) - **Jeremy Whitlock** <jwhitlock@apache.org> + +
Collaborators follow the [Collaborator Guide](./doc/guides/collaborator-guide.md) in @@ -655,7 +667,9 @@ gpg --keyserver pool.sks-keyservers.net --recv-keys B9E2F5981AA6E0CD28160D9FF139 See the section above on [Verifying Binaries](#verifying-binaries) for how to use these keys to verify a downloaded file. -Other keys used to sign some previous releases: +
+ +Other keys used to sign some previous releases * **Chris Dickinson** <christopher.s.dickinson@gmail.com> `9554F04D7259F04124DE6B476D5A82AC7E37093B` @@ -676,6 +690,8 @@ Other keys used to sign some previous releases: * **Timothy J Fontaine** <tjfontaine@gmail.com> `7937DFD2AB06298B2293C3187D33FF9D0246406D` +
+ ## License Node.js is available under the diff --git a/common.gypi b/common.gypi index aa42c69f96391b..71862791dae3be 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.14', + 'v8_embedder_string': '-node.20', ##### V8 defaults for Node.js ##### diff --git a/configure.py b/configure.py index 4bc790e2f24219..d69c52521d80fb 100755 --- a/configure.py +++ b/configure.py @@ -1066,6 +1066,7 @@ def host_arch_cc(): '__PPC__' : 'ppc64', '__x86_64__' : 'x64', '__s390x__' : 's390x', + '__riscv' : 'riscv', } rtn = 'ia32' # default @@ -1078,6 +1079,12 @@ def host_arch_cc(): if rtn == 'mipsel' and '_LP64' in k: rtn = 'mips64el' + if rtn == 'riscv': + if k['__riscv_xlen'] == '64': + rtn = 'riscv64' + else: + rtn = 'riscv32' + return rtn diff --git a/deps/acorn-plugins/acorn-class-fields/CHANGELOG.md b/deps/acorn-plugins/acorn-class-fields/CHANGELOG.md deleted file mode 100644 index de2c66b0c3bc65..00000000000000 --- a/deps/acorn-plugins/acorn-class-fields/CHANGELOG.md +++ /dev/null @@ -1,28 +0,0 @@ -## 0.3.1 (2019-02-09) - -* Restore compatibility with acorn-private-methods - -## 0.3.0 (2019-02-09) - -* Require acorn >= 6.1.0 - -## 0.2.1 (2018-11-06) - -* Adapt to changes in acorn 6.0.3 - -## 0.2.0 (2018-09-14) - -* Update to new acorn 6 interface -* Change license to MIT - -## 0.1.2 (2018-01-26) - -* Don't accept whitespace between hash and private name - -## 0.1.1 (2018-01-17) - -* Correctly parse all fields named `async` - -## 0.1.0 (2018-01-13) - -Initial release diff --git a/deps/acorn-plugins/acorn-class-fields/LICENSE b/deps/acorn-plugins/acorn-class-fields/LICENSE deleted file mode 100644 index 7c2b27a19c033c..00000000000000 --- a/deps/acorn-plugins/acorn-class-fields/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2017-2018 by Adrian Heine - -Permission is hereby granted, free of charge, to any person obtaining 
a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/deps/acorn-plugins/acorn-class-fields/README.md b/deps/acorn-plugins/acorn-class-fields/README.md deleted file mode 100644 index 60f3463e94d315..00000000000000 --- a/deps/acorn-plugins/acorn-class-fields/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Class fields support for Acorn - -[![NPM version](https://img.shields.io/npm/v/acorn-class-fields.svg)](https://www.npmjs.org/package/acorn-class-fields) - -This is a plugin for [Acorn](http://marijnhaverbeke.nl/acorn/) - a tiny, fast JavaScript parser, written completely in JavaScript. - -It implements support for class fields as defined in the stage 3 proposal [Class field declarations for JavaScript](https://github.com/tc39/proposal-class-fields). The emitted AST follows [an ESTree proposal](https://github.com/estree/estree/pull/180). 
- -## Usage - -This module provides a plugin that can be used to extend the Acorn `Parser` class: - -```javascript -const {Parser} = require('acorn'); -const classFields = require('acorn-class-fields'); -Parser.extend(classFields).parse('class X { x = 0 }'); -``` - -## License - -This plugin is released under an [MIT License](./LICENSE). diff --git a/deps/acorn-plugins/acorn-class-fields/index.js b/deps/acorn-plugins/acorn-class-fields/index.js deleted file mode 100644 index 20348c80c6bcbf..00000000000000 --- a/deps/acorn-plugins/acorn-class-fields/index.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict" - -const acorn = require('internal/deps/acorn/acorn/dist/acorn') -const tt = acorn.tokTypes -const privateClassElements = require('internal/deps/acorn-plugins/acorn-private-class-elements/index') - -function maybeParseFieldValue(field) { - if (this.eat(tt.eq)) { - const oldInFieldValue = this._inFieldValue - this._inFieldValue = true - field.value = this.parseExpression() - this._inFieldValue = oldInFieldValue - } else field.value = null -} - -module.exports = function(Parser) { - Parser = privateClassElements(Parser) - return class extends Parser { - // Parse fields - parseClassElement(_constructorAllowsSuper) { - if (this.options.ecmaVersion >= 8 && (this.type == tt.name || this.type == this.privateNameToken || this.type == tt.bracketL || this.type == tt.string)) { - const branch = this._branch() - if (branch.type == tt.bracketL) { - let count = 0 - do { - if (branch.eat(tt.bracketL)) ++count - else if (branch.eat(tt.bracketR)) --count - else branch.next() - } while (count > 0) - } else branch.next() - if (branch.type == tt.eq || branch.canInsertSemicolon() || branch.type == tt.semi) { - const node = this.startNode() - if (this.type == this.privateNameToken) { - this.parsePrivateClassElementName(node) - } else { - this.parsePropertyName(node) - } - if ((node.key.type === "Identifier" && node.key.name === "constructor") || - (node.key.type === "Literal" && 
node.key.value === "constructor")) { - this.raise(node.key.start, "Classes may not have a field called constructor") - } - maybeParseFieldValue.call(this, node) - this.finishNode(node, "FieldDefinition") - this.semicolon() - return node - } - } - - return super.parseClassElement.apply(this, arguments) - } - - // Prohibit arguments in class field initializers - parseIdent(liberal, isBinding) { - const ident = super.parseIdent(liberal, isBinding) - if (this._inFieldValue && ident.name == "arguments") this.raise(ident.start, "A class field initializer may not contain arguments") - return ident - } - } -} diff --git a/deps/acorn-plugins/acorn-class-fields/package.json b/deps/acorn-plugins/acorn-class-fields/package.json deleted file mode 100644 index 550511399f248d..00000000000000 --- a/deps/acorn-plugins/acorn-class-fields/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "acorn-class-fields", - "description": "Support for class fields in acorn", - "homepage": "https://github.com/acornjs/acorn-class-fields", - "contributors": [ - "Adrian Heine " - ], - "engines": { - "node": ">=4.8.2" - }, - "repository": { - "type": "git", - "url": "https://github.com/acornjs/acorn-class-fields" - }, - "license": "MIT", - "scripts": { - "test": "mocha", - "test:test262": "node run_test262.js", - "lint": "eslint -c .eslintrc.json ." 
- }, - "peerDependencies": { - "acorn": "^6.0.0" - }, - "version": "0.3.1", - "devDependencies": { - "acorn": "^6.1.0", - "eslint": "^5.13.0", - "eslint-plugin-node": "^8.0.1", - "mocha": "^5.2.0", - "test262": "git+https://github.com/tc39/test262.git#33a306d1026b72227eb50a918db19ada16f12b3d", - "test262-parser-runner": "^0.5.0" - }, - "dependencies": { - "acorn-private-class-elements": "^0.1.1" - } -} \ No newline at end of file diff --git a/deps/acorn-plugins/acorn-private-class-elements/CHANGELOG.md b/deps/acorn-plugins/acorn-private-class-elements/CHANGELOG.md deleted file mode 100644 index 6d4854429aacfa..00000000000000 --- a/deps/acorn-plugins/acorn-private-class-elements/CHANGELOG.md +++ /dev/null @@ -1,11 +0,0 @@ -## 0.2.0 (2020-03-07) - -* Mark as compatible with acorn v7 - -## 0.1.1 (2019-02-09) - -* Add \_branch() method - -## 0.1.0 (2019-02-09) - -Initial release diff --git a/deps/acorn-plugins/acorn-private-class-elements/LICENSE b/deps/acorn-plugins/acorn-private-class-elements/LICENSE deleted file mode 100644 index 7c2b27a19c033c..00000000000000 --- a/deps/acorn-plugins/acorn-private-class-elements/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2017-2018 by Adrian Heine - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/deps/acorn-plugins/acorn-private-class-elements/README.md b/deps/acorn-plugins/acorn-private-class-elements/README.md deleted file mode 100644 index 0d228820cd1d01..00000000000000 --- a/deps/acorn-plugins/acorn-private-class-elements/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Helpers for supporting private class methods and fields for Acorn - -[![NPM version](https://img.shields.io/npm/v/acorn-private-class-elements.svg)](https://www.npmjs.org/package/acorn-private-class-elements) - -This is a plugin for [Acorn](http://marijnhaverbeke.nl/acorn/) - a tiny, fast JavaScript parser, written completely in JavaScript. - -It provides helpers for implementing support for private class elements. The emitted AST follows [an ESTree proposal](https://github.com/estree/estree/pull/180). - -## License - -This plugin is released under an [MIT License](./LICENSE). diff --git a/deps/acorn-plugins/acorn-private-class-elements/index.js b/deps/acorn-plugins/acorn-private-class-elements/index.js deleted file mode 100644 index 1f40bda9baa2eb..00000000000000 --- a/deps/acorn-plugins/acorn-private-class-elements/index.js +++ /dev/null @@ -1,123 +0,0 @@ -"use strict" - -const acorn = require('internal/deps/acorn/acorn/dist/acorn') -if (acorn.version.indexOf("6.") != 0 && acorn.version.indexOf("6.0.") == 0 && acorn.version.indexOf("7.") != 0) { - throw new Error(`acorn-private-class-elements requires acorn@^6.1.0 or acorn@7.0.0, not ${acorn.version}`) -} -const tt = acorn.tokTypes -const TokenType = acorn.TokenType - -module.exports = function(Parser) { - // Only load this plugin once. 
- if (Parser.prototype.parsePrivateName) { - return Parser - } - - // Make sure `Parser` comes from the same acorn as our `tt`, - // otherwise the comparisons fail. - let cur = Parser - while (cur && cur !== acorn.Parser) { - cur = cur.__proto__ - } - if (cur !== acorn.Parser) { - throw new Error("acorn-private-class-elements does not support mixing different acorn copies") - } - - Parser = class extends Parser { - _branch() { - this.__branch = this.__branch || new Parser({ecmaVersion: this.options.ecmaVersion}, this.input) - this.__branch.end = this.end - this.__branch.pos = this.pos - this.__branch.type = this.type - this.__branch.value = this.value - this.__branch.containsEsc = this.containsEsc - return this.__branch - } - - parsePrivateClassElementName(element) { - element.computed = false - element.key = this.parsePrivateName() - if (element.key.name == "constructor") this.raise(element.key.start, "Classes may not have a private element named constructor") - const accept = {get: "set", set: "get"}[element.kind] - const privateBoundNames = this._privateBoundNamesStack[this._privateBoundNamesStack.length - 1] - if (Object.prototype.hasOwnProperty.call(privateBoundNames, element.key.name) && privateBoundNames[element.key.name] !== accept) { - this.raise(element.start, "Duplicate private element") - } - privateBoundNames[element.key.name] = element.kind || true - delete this._unresolvedPrivateNamesStack[this._unresolvedPrivateNamesStack.length - 1][element.key.name] - return element.key - } - - parsePrivateName() { - const node = this.startNode() - node.name = this.value - this.next() - this.finishNode(node, "PrivateName") - if (this.options.allowReserved == "never") this.checkUnreserved(node) - return node - } - - // Parse # token - getTokenFromCode(code) { - if (code === 35) { - ++this.pos - const word = this.readWord1() - return this.finishToken(this.privateNameToken, word) - } - return super.getTokenFromCode(code) - } - - // Manage stacks and check for 
undeclared private names - parseClass(node, isStatement) { - this._privateBoundNamesStack = this._privateBoundNamesStack || [] - const privateBoundNames = Object.create(this._privateBoundNamesStack[this._privateBoundNamesStack.length - 1] || null) - this._privateBoundNamesStack.push(privateBoundNames) - this._unresolvedPrivateNamesStack = this._unresolvedPrivateNamesStack || [] - const unresolvedPrivateNames = Object.create(null) - this._unresolvedPrivateNamesStack.push(unresolvedPrivateNames) - const _return = super.parseClass(node, isStatement) - this._privateBoundNamesStack.pop() - this._unresolvedPrivateNamesStack.pop() - if (!this._unresolvedPrivateNamesStack.length) { - const names = Object.keys(unresolvedPrivateNames) - if (names.length) { - names.sort((n1, n2) => unresolvedPrivateNames[n1] - unresolvedPrivateNames[n2]) - this.raise(unresolvedPrivateNames[names[0]], "Usage of undeclared private name") - } - } else Object.assign(this._unresolvedPrivateNamesStack[this._unresolvedPrivateNamesStack.length - 1], unresolvedPrivateNames) - return _return - } - - // Parse private element access - parseSubscript(base, startPos, startLoc, noCalls, maybeAsyncArrow) { - if (!this.eat(tt.dot)) { - return super.parseSubscript(base, startPos, startLoc, noCalls, maybeAsyncArrow) - } - let node = this.startNodeAt(startPos, startLoc) - node.object = base - node.computed = false - if (this.type == this.privateNameToken) { - node.property = this.parsePrivateName() - if (!this._privateBoundNamesStack.length || !this._privateBoundNamesStack[this._privateBoundNamesStack.length - 1][node.property.name]) { - this._unresolvedPrivateNamesStack[this._unresolvedPrivateNamesStack.length - 1][node.property.name] = node.property.start - } - } else { - node.property = this.parseIdent(true) - } - return this.finishNode(node, "MemberExpression") - } - - // Prohibit delete of private class elements - parseMaybeUnary(refDestructuringErrors, sawUnary) { - const _return = 
super.parseMaybeUnary(refDestructuringErrors, sawUnary) - if (_return.operator == "delete") { - if (_return.argument.type == "MemberExpression" && _return.argument.property.type == "PrivateName") { - this.raise(_return.start, "Private elements may not be deleted") - } - } - return _return - } - } - Parser.prototype.privateNameToken = new TokenType("privateName") - return Parser -} diff --git a/deps/acorn-plugins/acorn-private-class-elements/package.json b/deps/acorn-plugins/acorn-private-class-elements/package.json deleted file mode 100644 index b8b652a1e9e2f3..00000000000000 --- a/deps/acorn-plugins/acorn-private-class-elements/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "acorn-private-class-elements", - "description": "Helpers for supporting private class methods and fields in acorn", - "homepage": "https://github.com/acornjs/acorn-private-class-elements", - "contributors": [ - "Adrian Heine " - ], - "engines": { - "node": ">=4.8.2" - }, - "repository": { - "type": "git", - "url": "https://github.com/acornjs/acorn-private-class-elements" - }, - "license": "MIT", - "scripts": { - "test": "mocha", - "lint": "eslint -c .eslintrc.json ." 
- }, - "peerDependencies": { - "acorn": "^6.1.0 || ^7.0.0" - }, - "version": "0.2.0", - "devDependencies": { - "acorn": "^7.0.0", - "eslint": "^6.8.0", - "eslint-plugin-node": "^11.0.0", - "mocha": "^7.1.0" - } -} \ No newline at end of file diff --git a/deps/acorn-plugins/acorn-private-methods/CHANGELOG.md b/deps/acorn-plugins/acorn-private-methods/CHANGELOG.md deleted file mode 100644 index 5de4b97b8111b0..00000000000000 --- a/deps/acorn-plugins/acorn-private-methods/CHANGELOG.md +++ /dev/null @@ -1,29 +0,0 @@ -## 0.3.0 (2019-02-09) - -* Require acorn >= 6.1.0 - -## 0.2.3 (2019-02-09) - -* Forbid binding await in async arrow function's parameter list - -## 0.2.2 (2019-01-30) - -* Fix parsing of chained subscripts - -## 0.2.1 (2018-11-06) - -* Adapt to changes in acorn 6.0.3 - -## 0.2.0 (2018-09-14) - -* Update to new acorn 6 interface -* Change license to MIT -* Don't allow direct super() calls in private methods - -## 0.1.1 (2018-02-09) - -* Don't accept whitespace between hash and private name - -## 0.1.0 (2018-01-13) - -Initial release diff --git a/deps/acorn-plugins/acorn-private-methods/LICENSE b/deps/acorn-plugins/acorn-private-methods/LICENSE deleted file mode 100644 index 7c2b27a19c033c..00000000000000 --- a/deps/acorn-plugins/acorn-private-methods/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2017-2018 by Adrian Heine - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/deps/acorn-plugins/acorn-private-methods/README.md b/deps/acorn-plugins/acorn-private-methods/README.md deleted file mode 100644 index 6929e84ba620a8..00000000000000 --- a/deps/acorn-plugins/acorn-private-methods/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Private methods and getter/setters support for Acorn - -[![NPM version](https://img.shields.io/npm/v/acorn-private-methods.svg)](https://www.npmjs.org/package/acorn-private-methods) - -This is a plugin for [Acorn](http://marijnhaverbeke.nl/acorn/) - a tiny, fast JavaScript parser, written completely in JavaScript. - -It implements support for private methods, getters and setters as defined in the stage 3 proposal [Private methods and getter/setters for JavaScript classes](https://github.com/tc39/proposal-private-methods). The emitted AST follows [an ESTree proposal](https://github.com/estree/estree/pull/180). - -## Usage - -This module provides a plugin that can be used to extend the Acorn `Parser` class: - -```javascript -const {Parser} = require('acorn'); -const privateMethods = require('acorn-private-methods'); -Parser.extend(privateMethods).parse('class X { #a() {} }'); -``` - -## License - -This plugin is released under an [MIT License](./LICENSE). 
diff --git a/deps/acorn-plugins/acorn-private-methods/index.js b/deps/acorn-plugins/acorn-private-methods/index.js deleted file mode 100644 index a2964251680565..00000000000000 --- a/deps/acorn-plugins/acorn-private-methods/index.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict" - -const privateClassElements = require('internal/deps/acorn-plugins/acorn-private-class-elements/index') - -module.exports = function(Parser) { - const ExtendedParser = privateClassElements(Parser) - - return class extends ExtendedParser { - // Parse private methods - parseClassElement(_constructorAllowsSuper) { - const oldInClassMemberName = this._inClassMemberName - this._inClassMemberName = true - const result = super.parseClassElement.apply(this, arguments) - this._inClassMemberName = oldInClassMemberName - return result - } - - parsePropertyName(prop) { - const isPrivate = this.options.ecmaVersion >= 8 && this._inClassMemberName && this.type == this.privateNameToken - this._inClassMemberName = false - if (!isPrivate) return super.parsePropertyName(prop) - return this.parsePrivateClassElementName(prop) - } - } -} diff --git a/deps/acorn-plugins/acorn-private-methods/package.json b/deps/acorn-plugins/acorn-private-methods/package.json deleted file mode 100644 index e7b91592aaa9b9..00000000000000 --- a/deps/acorn-plugins/acorn-private-methods/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "acorn-private-methods", - "description": "Support for private methods in acorn", - "homepage": "https://github.com/acornjs/acorn-private-methods", - "contributors": [ - "Adrian Heine " - ], - "engines": { - "node": ">=4.8.2" - }, - "repository": { - "type": "git", - "url": "https://github.com/acornjs/acorn-private-methods" - }, - "license": "MIT", - "scripts": { - "test": "mocha", - "test:test262": "node run_test262.js", - "lint": "eslint -c .eslintrc.json ." 
- }, - "peerDependencies": { - "acorn": "^6.1.0" - }, - "dependencies": { - "acorn-private-class-elements": "^0.1.0" - }, - "version": "0.3.0", - "devDependencies": { - "acorn": "^6.1.0", - "eslint": "^5.13.0", - "eslint-plugin-node": "^8.0.1", - "mocha": "^5.2.0", - "test262": "git+https://github.com/tc39/test262.git#33a306d1026b72227eb50a918db19ada16f12b3d", - "test262-parser-runner": "^0.5.0" - } -} \ No newline at end of file diff --git a/deps/acorn-plugins/acorn-static-class-features/CHANGELOG.md b/deps/acorn-plugins/acorn-static-class-features/CHANGELOG.md deleted file mode 100644 index b9896a4bc5a45e..00000000000000 --- a/deps/acorn-plugins/acorn-static-class-features/CHANGELOG.md +++ /dev/null @@ -1,11 +0,0 @@ -## 0.2.0 (2019-02-09) - -* Require acorn >= 6.1.0 - -## 0.1.1 (2018-11-06) - -* Adapt to changes in acorn 6.0.3 - -## 0.1.0 (2018-09-14) - -Initial release diff --git a/deps/acorn-plugins/acorn-static-class-features/LICENSE b/deps/acorn-plugins/acorn-static-class-features/LICENSE deleted file mode 100644 index 7c2b27a19c033c..00000000000000 --- a/deps/acorn-plugins/acorn-static-class-features/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2017-2018 by Adrian Heine - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/deps/acorn-plugins/acorn-static-class-features/README.md b/deps/acorn-plugins/acorn-static-class-features/README.md deleted file mode 100644 index bb214fce164a1a..00000000000000 --- a/deps/acorn-plugins/acorn-static-class-features/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Static class features support for Acorn - -[![NPM version](https://img.shields.io/npm/v/acorn-class-fields.svg)](https://www.npmjs.org/package/acorn-static-class-features) - -This is a plugin for [Acorn](http://marijnhaverbeke.nl/acorn/) - a tiny, fast JavaScript parser, written completely in JavaScript. - -It implements support for static class features as defined in the stage 3 proposal [Static class features](https://github.com/tc39/proposal-static-class-features). The emitted AST follows [an ESTree proposal](https://github.com/estree/estree/pull/180). - -## Usage - -This module provides a plugin that can be used to extend the Acorn `Parser` class: - -```javascript -const {Parser} = require('acorn'); -const staticClassFeatures = require('acorn-static-class-features'); -Parser.extend(staticClassFeatures).parse('class X { static x = 0 }'); -``` - -## License - -This plugin is released under an [MIT License](./LICENSE). 
diff --git a/deps/acorn-plugins/acorn-static-class-features/index.js b/deps/acorn-plugins/acorn-static-class-features/index.js deleted file mode 100644 index d8954bf3275e0e..00000000000000 --- a/deps/acorn-plugins/acorn-static-class-features/index.js +++ /dev/null @@ -1,126 +0,0 @@ -"use strict" - -const skipWhiteSpace = /(?:\s|\/\/.*|\/\*[^]*?\*\/)*/g - -const acorn = require('internal/deps/acorn/acorn/dist/acorn') -const tt = acorn.tokTypes - -function maybeParseFieldValue(field) { - if (this.eat(tt.eq)) { - const oldInFieldValue = this._inStaticFieldValue - this._inStaticFieldValue = true - field.value = this.parseExpression() - this._inStaticFieldValue = oldInFieldValue - } else field.value = null -} - -const privateClassElements = require("internal/deps/acorn-plugins/acorn-private-class-elements/index") - -module.exports = function(Parser) { - const ExtendedParser = privateClassElements(Parser) - - return class extends ExtendedParser { - // Parse private fields - parseClassElement(_constructorAllowsSuper) { - if (this.eat(tt.semi)) return null - - const node = this.startNode() - - const tryContextual = (k, noLineBreak) => { - if (typeof noLineBreak == "undefined") noLineBreak = false - const start = this.start, startLoc = this.startLoc - if (!this.eatContextual(k)) return false - if (this.type !== tt.parenL && (!noLineBreak || !this.canInsertSemicolon())) return true - if (node.key) this.unexpected() - node.computed = false - node.key = this.startNodeAt(start, startLoc) - node.key.name = k - this.finishNode(node.key, "Identifier") - return false - } - - node.static = tryContextual("static") - if (!node.static) return super.parseClassElement.apply(this, arguments) - - let isGenerator = this.eat(tt.star) - let isAsync = false - if (!isGenerator) { - // Special-case for `async`, since `parseClassMember` currently looks - // for `(` to determine whether `async` is a method name - if (this.options.ecmaVersion >= 8 && this.isContextual("async")) { - 
skipWhiteSpace.lastIndex = this.pos - let skip = skipWhiteSpace.exec(this.input) - let next = this.input.charAt(this.pos + skip[0].length) - if (next === ";" || next === "=") { - node.key = this.parseIdent(true) - node.computed = false - maybeParseFieldValue.call(this, node) - this.finishNode(node, "FieldDefinition") - this.semicolon() - return node - } else if (this.options.ecmaVersion >= 8 && tryContextual("async", true)) { - isAsync = true - isGenerator = this.options.ecmaVersion >= 9 && this.eat(tt.star) - } - } else if (tryContextual("get")) { - node.kind = "get" - } else if (tryContextual("set")) { - node.kind = "set" - } - } - if (this.type === this.privateNameToken) { - this.parsePrivateClassElementName(node) - if (this.type !== tt.parenL) { - if (node.key.name === "prototype") { - this.raise(node.key.start, "Classes may not have a private static property named prototype") - } - maybeParseFieldValue.call(this, node) - this.finishNode(node, "FieldDefinition") - this.semicolon() - return node - } - } else if (!node.key) { - this.parsePropertyName(node) - if ((node.key.name || node.key.value) === "prototype" && !node.computed) { - this.raise(node.key.start, "Classes may not have a static property named prototype") - } - } - if (!node.kind) node.kind = "method" - this.parseClassMethod(node, isGenerator, isAsync) - if (!node.kind && (node.key.name || node.key.value) === "constructor" && !node.computed) { - this.raise(node.key.start, "Classes may not have a static field named constructor") - } - if (node.kind === "get" && node.value.params.length !== 0) { - this.raiseRecoverable(node.value.start, "getter should have no params") - } - if (node.kind === "set" && node.value.params.length !== 1) { - this.raiseRecoverable(node.value.start, "setter should have exactly one param") - } - if (node.kind === "set" && node.value.params[0].type === "RestElement") { - this.raiseRecoverable(node.value.params[0].start, "Setter cannot use rest params") - } - - return node - - } - 
- // Parse public static fields - parseClassMethod(method, isGenerator, isAsync, _allowsDirectSuper) { - if (isGenerator || isAsync || method.kind != "method" || !method.static || this.options.ecmaVersion < 8 || this.type == tt.parenL) { - return super.parseClassMethod.apply(this, arguments) - } - maybeParseFieldValue.call(this, method) - delete method.kind - method = this.finishNode(method, "FieldDefinition") - this.semicolon() - return method - } - - // Prohibit arguments in class field initializers - parseIdent(liberal, isBinding) { - const ident = super.parseIdent(liberal, isBinding) - if (this._inStaticFieldValue && ident.name == "arguments") this.raise(ident.start, "A static class field initializer may not contain arguments") - return ident - } - } -} diff --git a/deps/acorn-plugins/acorn-static-class-features/package.json b/deps/acorn-plugins/acorn-static-class-features/package.json deleted file mode 100644 index 066223d9965df5..00000000000000 --- a/deps/acorn-plugins/acorn-static-class-features/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "acorn-static-class-features", - "description": "Support for static class features in acorn", - "homepage": "https://github.com/acornjs/acorn-static-class-features", - "contributors": [ - "Adrian Heine " - ], - "engines": { - "node": ">=4.8.2" - }, - "repository": { - "type": "git", - "url": "https://github.com/acornjs/acorn-static-class-features" - }, - "license": "MIT", - "scripts": { - "test": "mocha", - "test:test262": "node run_test262.js", - "lint": "eslint -c .eslintrc.json ." 
- }, - "peerDependencies": { - "acorn": "^6.1.0" - }, - "version": "0.2.0", - "devDependencies": { - "acorn": "^6.1.0", - "eslint": "^5.13.0", - "eslint-plugin-node": "^8.0.1", - "mocha": "^5.2.0", - "test262": "git+https://github.com/tc39/test262.git#33a306d1026b72227eb50a918db19ada16f12b3d", - "test262-parser-runner": "^0.5.0" - }, - "dependencies": { - "acorn-private-class-elements": "^0.1.1" - } -} \ No newline at end of file diff --git a/deps/acorn/acorn-walk/CHANGELOG.md b/deps/acorn/acorn-walk/CHANGELOG.md index 204e04b33974e2..a9382d0db9abef 100644 --- a/deps/acorn/acorn-walk/CHANGELOG.md +++ b/deps/acorn/acorn-walk/CHANGELOG.md @@ -1,3 +1,21 @@ +## 8.1.0 (2021-04-24) + +### New features + +Support node types for class fields and private methods. + +## 8.0.2 (2021-01-25) + +### Bug fixes + +Adjust package.json to work with Node 12.16.0 and 13.0-13.6. + +## 8.0.0 (2021-01-05) + +### Bug fixes + +Fix a bug where `full` and `fullAncestor` would skip nodes with overridden types. + ## 8.0.0 (2020-08-12) ### New features diff --git a/deps/acorn/acorn-walk/LICENSE b/deps/acorn/acorn-walk/LICENSE index cc5272c966db45..d6be6db2cfff57 100644 --- a/deps/acorn/acorn-walk/LICENSE +++ b/deps/acorn/acorn-walk/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (C) 2012-2018 by various contributors (see AUTHORS) +Copyright (C) 2012-2020 by various contributors (see AUTHORS) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/deps/acorn/acorn-walk/dist/walk.js b/deps/acorn/acorn-walk/dist/walk.js index 17da67c61171f1..5582c907595e11 100644 --- a/deps/acorn/acorn-walk/dist/walk.js +++ b/deps/acorn/acorn-walk/dist/walk.js @@ -72,11 +72,15 @@ // A full walk triggers the callback on each node function full(node, callback, baseVisitor, state, override) { - if (!baseVisitor) { baseVisitor = base - ; }(function c(node, st, override) { + if (!baseVisitor) { baseVisitor = 
base; } + var last + ;(function c(node, st, override) { var type = override || node.type; baseVisitor[type](node, st, c); - if (!override) { callback(node, st, type); } + if (last !== node) { + callback(node, st, type); + last = node; + } })(node, state, override); } @@ -84,13 +88,16 @@ // the callback on each node function fullAncestor(node, callback, baseVisitor, state) { if (!baseVisitor) { baseVisitor = base; } - var ancestors = [] + var ancestors = [], last ;(function c(node, st, override) { var type = override || node.type; var isNew = node !== ancestors[ancestors.length - 1]; if (isNew) { ancestors.push(node); } baseVisitor[type](node, st, c); - if (!override) { callback(node, st || ancestors, ancestors, type); } + if (last !== node) { + callback(node, st || ancestors, ancestors, type); + last = node; + } if (isNew) { ancestors.pop(); } })(node, state); } @@ -168,17 +175,10 @@ return max } - // Fallback to an Object.create polyfill for older environments. - var create = Object.create || function(proto) { - function Ctor() {} - Ctor.prototype = proto; - return new Ctor - }; - // Used to create a custom walker. Will fill in all missing node // type properties with the defaults. 
function make(funcs, baseVisitor) { - var visitor = create(baseVisitor || base); + var visitor = Object.create(baseVisitor || base); for (var type in funcs) { visitor[type] = funcs[type]; } return visitor } @@ -421,7 +421,7 @@ base.ImportExpression = function (node, st, c) { c(node.source, st, "Expression"); }; - base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore; + base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore; base.TaggedTemplateExpression = function (node, st, c) { c(node.tag, st, "Expression"); @@ -441,9 +441,9 @@ c(elt, st); } }; - base.MethodDefinition = base.Property = function (node, st, c) { + base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) { if (node.computed) { c(node.key, st, "Expression"); } - c(node.value, st, "Expression"); + if (node.value) { c(node.value, st, "Expression"); } }; exports.ancestor = ancestor; diff --git a/deps/acorn/acorn-walk/dist/walk.js.map b/deps/acorn/acorn-walk/dist/walk.js.map deleted file mode 100644 index 5590a2924f2de6..00000000000000 --- a/deps/acorn/acorn-walk/dist/walk.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"walk.js","sources":["../src/index.js"],"sourcesContent":["// AST walker module for Mozilla Parser API compatible trees\n\n// A simple walk is one where you simply specify callbacks to be\n// called on specific nodes. The last two arguments are optional. A\n// simple use would be\n//\n// walk.simple(myTree, {\n// Expression: function(node) { ... }\n// });\n//\n// to do something with all expressions. 
All Parser API node types\n// can be used to identify node types, as well as Expression and\n// Statement, which denote categories of nodes.\n//\n// The base argument can be used to pass a custom (recursive)\n// walker, and state can be used to give this walked an initial\n// state.\n\nexport function simple(node, visitors, baseVisitor, state, override) {\n if (!baseVisitor) baseVisitor = base\n ;(function c(node, st, override) {\n let type = override || node.type, found = visitors[type]\n baseVisitor[type](node, st, c)\n if (found) found(node, st)\n })(node, state, override)\n}\n\n// An ancestor walk keeps an array of ancestor nodes (including the\n// current node) and passes them to the callback as third parameter\n// (and also as state parameter when no other state is present).\nexport function ancestor(node, visitors, baseVisitor, state) {\n let ancestors = []\n if (!baseVisitor) baseVisitor = base\n ;(function c(node, st, override) {\n let type = override || node.type, found = visitors[type]\n let isNew = node !== ancestors[ancestors.length - 1]\n if (isNew) ancestors.push(node)\n baseVisitor[type](node, st, c)\n if (found) found(node, st || ancestors, ancestors)\n if (isNew) ancestors.pop()\n })(node, state)\n}\n\n// A recursive walk is one where your functions override the default\n// walkers. They can modify and replace the state parameter that's\n// threaded through the walk, and can opt how and whether to walk\n// their child nodes (by calling their third argument on these\n// nodes).\nexport function recursive(node, state, funcs, baseVisitor, override) {\n let visitor = funcs ? 
make(funcs, baseVisitor || undefined) : baseVisitor\n ;(function c(node, st, override) {\n visitor[override || node.type](node, st, c)\n })(node, state, override)\n}\n\nfunction makeTest(test) {\n if (typeof test === \"string\")\n return type => type === test\n else if (!test)\n return () => true\n else\n return test\n}\n\nclass Found {\n constructor(node, state) { this.node = node; this.state = state }\n}\n\n// A full walk triggers the callback on each node\nexport function full(node, callback, baseVisitor, state, override) {\n if (!baseVisitor) baseVisitor = base\n ;(function c(node, st, override) {\n let type = override || node.type\n baseVisitor[type](node, st, c)\n if (!override) callback(node, st, type)\n })(node, state, override)\n}\n\n// An fullAncestor walk is like an ancestor walk, but triggers\n// the callback on each node\nexport function fullAncestor(node, callback, baseVisitor, state) {\n if (!baseVisitor) baseVisitor = base\n let ancestors = []\n ;(function c(node, st, override) {\n let type = override || node.type\n let isNew = node !== ancestors[ancestors.length - 1]\n if (isNew) ancestors.push(node)\n baseVisitor[type](node, st, c)\n if (!override) callback(node, st || ancestors, ancestors, type)\n if (isNew) ancestors.pop()\n })(node, state)\n}\n\n// Find a node with a given start, end, and type (all are optional,\n// null can be used as wildcard). 
Returns a {node, state} object, or\n// undefined when it doesn't find a matching node.\nexport function findNodeAt(node, start, end, test, baseVisitor, state) {\n if (!baseVisitor) baseVisitor = base\n test = makeTest(test)\n try {\n (function c(node, st, override) {\n let type = override || node.type\n if ((start == null || node.start <= start) &&\n (end == null || node.end >= end))\n baseVisitor[type](node, st, c)\n if ((start == null || node.start === start) &&\n (end == null || node.end === end) &&\n test(type, node))\n throw new Found(node, st)\n })(node, state)\n } catch (e) {\n if (e instanceof Found) return e\n throw e\n }\n}\n\n// Find the innermost node of a given type that contains the given\n// position. Interface similar to findNodeAt.\nexport function findNodeAround(node, pos, test, baseVisitor, state) {\n test = makeTest(test)\n if (!baseVisitor) baseVisitor = base\n try {\n (function c(node, st, override) {\n let type = override || node.type\n if (node.start > pos || node.end < pos) return\n baseVisitor[type](node, st, c)\n if (test(type, node)) throw new Found(node, st)\n })(node, state)\n } catch (e) {\n if (e instanceof Found) return e\n throw e\n }\n}\n\n// Find the outermost matching node after a given position.\nexport function findNodeAfter(node, pos, test, baseVisitor, state) {\n test = makeTest(test)\n if (!baseVisitor) baseVisitor = base\n try {\n (function c(node, st, override) {\n if (node.end < pos) return\n let type = override || node.type\n if (node.start >= pos && test(type, node)) throw new Found(node, st)\n baseVisitor[type](node, st, c)\n })(node, state)\n } catch (e) {\n if (e instanceof Found) return e\n throw e\n }\n}\n\n// Find the outermost matching node before a given position.\nexport function findNodeBefore(node, pos, test, baseVisitor, state) {\n test = makeTest(test)\n if (!baseVisitor) baseVisitor = base\n let max\n ;(function c(node, st, override) {\n if (node.start > pos) return\n let type = override || node.type\n if 
(node.end <= pos && (!max || max.node.end < node.end) && test(type, node))\n max = new Found(node, st)\n baseVisitor[type](node, st, c)\n })(node, state)\n return max\n}\n\n// Fallback to an Object.create polyfill for older environments.\nconst create = Object.create || function(proto) {\n function Ctor() {}\n Ctor.prototype = proto\n return new Ctor\n}\n\n// Used to create a custom walker. Will fill in all missing node\n// type properties with the defaults.\nexport function make(funcs, baseVisitor) {\n let visitor = create(baseVisitor || base)\n for (let type in funcs) visitor[type] = funcs[type]\n return visitor\n}\n\nfunction skipThrough(node, st, c) { c(node, st) }\nfunction ignore(_node, _st, _c) {}\n\n// Node walkers.\n\nexport const base = {}\n\nbase.Program = base.BlockStatement = (node, st, c) => {\n for (let stmt of node.body)\n c(stmt, st, \"Statement\")\n}\nbase.Statement = skipThrough\nbase.EmptyStatement = ignore\nbase.ExpressionStatement = base.ParenthesizedExpression =\n (node, st, c) => c(node.expression, st, \"Expression\")\nbase.IfStatement = (node, st, c) => {\n c(node.test, st, \"Expression\")\n c(node.consequent, st, \"Statement\")\n if (node.alternate) c(node.alternate, st, \"Statement\")\n}\nbase.LabeledStatement = (node, st, c) => c(node.body, st, \"Statement\")\nbase.BreakStatement = base.ContinueStatement = ignore\nbase.WithStatement = (node, st, c) => {\n c(node.object, st, \"Expression\")\n c(node.body, st, \"Statement\")\n}\nbase.SwitchStatement = (node, st, c) => {\n c(node.discriminant, st, \"Expression\")\n for (let cs of node.cases) {\n if (cs.test) c(cs.test, st, \"Expression\")\n for (let cons of cs.consequent)\n c(cons, st, \"Statement\")\n }\n}\nbase.SwitchCase = (node, st, c) => {\n if (node.test) c(node.test, st, \"Expression\")\n for (let cons of node.consequent)\n c(cons, st, \"Statement\")\n}\nbase.ReturnStatement = base.YieldExpression = base.AwaitExpression = (node, st, c) => {\n if (node.argument) c(node.argument, st, 
\"Expression\")\n}\nbase.ThrowStatement = base.SpreadElement =\n (node, st, c) => c(node.argument, st, \"Expression\")\nbase.TryStatement = (node, st, c) => {\n c(node.block, st, \"Statement\")\n if (node.handler) c(node.handler, st)\n if (node.finalizer) c(node.finalizer, st, \"Statement\")\n}\nbase.CatchClause = (node, st, c) => {\n if (node.param) c(node.param, st, \"Pattern\")\n c(node.body, st, \"Statement\")\n}\nbase.WhileStatement = base.DoWhileStatement = (node, st, c) => {\n c(node.test, st, \"Expression\")\n c(node.body, st, \"Statement\")\n}\nbase.ForStatement = (node, st, c) => {\n if (node.init) c(node.init, st, \"ForInit\")\n if (node.test) c(node.test, st, \"Expression\")\n if (node.update) c(node.update, st, \"Expression\")\n c(node.body, st, \"Statement\")\n}\nbase.ForInStatement = base.ForOfStatement = (node, st, c) => {\n c(node.left, st, \"ForInit\")\n c(node.right, st, \"Expression\")\n c(node.body, st, \"Statement\")\n}\nbase.ForInit = (node, st, c) => {\n if (node.type === \"VariableDeclaration\") c(node, st)\n else c(node, st, \"Expression\")\n}\nbase.DebuggerStatement = ignore\n\nbase.FunctionDeclaration = (node, st, c) => c(node, st, \"Function\")\nbase.VariableDeclaration = (node, st, c) => {\n for (let decl of node.declarations)\n c(decl, st)\n}\nbase.VariableDeclarator = (node, st, c) => {\n c(node.id, st, \"Pattern\")\n if (node.init) c(node.init, st, \"Expression\")\n}\n\nbase.Function = (node, st, c) => {\n if (node.id) c(node.id, st, \"Pattern\")\n for (let param of node.params)\n c(param, st, \"Pattern\")\n c(node.body, st, node.expression ? 
\"Expression\" : \"Statement\")\n}\n\nbase.Pattern = (node, st, c) => {\n if (node.type === \"Identifier\")\n c(node, st, \"VariablePattern\")\n else if (node.type === \"MemberExpression\")\n c(node, st, \"MemberPattern\")\n else\n c(node, st)\n}\nbase.VariablePattern = ignore\nbase.MemberPattern = skipThrough\nbase.RestElement = (node, st, c) => c(node.argument, st, \"Pattern\")\nbase.ArrayPattern = (node, st, c) => {\n for (let elt of node.elements) {\n if (elt) c(elt, st, \"Pattern\")\n }\n}\nbase.ObjectPattern = (node, st, c) => {\n for (let prop of node.properties) {\n if (prop.type === \"Property\") {\n if (prop.computed) c(prop.key, st, \"Expression\")\n c(prop.value, st, \"Pattern\")\n } else if (prop.type === \"RestElement\") {\n c(prop.argument, st, \"Pattern\")\n }\n }\n}\n\nbase.Expression = skipThrough\nbase.ThisExpression = base.Super = base.MetaProperty = ignore\nbase.ArrayExpression = (node, st, c) => {\n for (let elt of node.elements) {\n if (elt) c(elt, st, \"Expression\")\n }\n}\nbase.ObjectExpression = (node, st, c) => {\n for (let prop of node.properties)\n c(prop, st)\n}\nbase.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration\nbase.SequenceExpression = (node, st, c) => {\n for (let expr of node.expressions)\n c(expr, st, \"Expression\")\n}\nbase.TemplateLiteral = (node, st, c) => {\n for (let quasi of node.quasis)\n c(quasi, st)\n\n for (let expr of node.expressions)\n c(expr, st, \"Expression\")\n}\nbase.TemplateElement = ignore\nbase.UnaryExpression = base.UpdateExpression = (node, st, c) => {\n c(node.argument, st, \"Expression\")\n}\nbase.BinaryExpression = base.LogicalExpression = (node, st, c) => {\n c(node.left, st, \"Expression\")\n c(node.right, st, \"Expression\")\n}\nbase.AssignmentExpression = base.AssignmentPattern = (node, st, c) => {\n c(node.left, st, \"Pattern\")\n c(node.right, st, \"Expression\")\n}\nbase.ConditionalExpression = (node, st, c) => {\n c(node.test, st, \"Expression\")\n 
c(node.consequent, st, \"Expression\")\n c(node.alternate, st, \"Expression\")\n}\nbase.NewExpression = base.CallExpression = (node, st, c) => {\n c(node.callee, st, \"Expression\")\n if (node.arguments)\n for (let arg of node.arguments)\n c(arg, st, \"Expression\")\n}\nbase.MemberExpression = (node, st, c) => {\n c(node.object, st, \"Expression\")\n if (node.computed) c(node.property, st, \"Expression\")\n}\nbase.ExportNamedDeclaration = base.ExportDefaultDeclaration = (node, st, c) => {\n if (node.declaration)\n c(node.declaration, st, node.type === \"ExportNamedDeclaration\" || node.declaration.id ? \"Statement\" : \"Expression\")\n if (node.source) c(node.source, st, \"Expression\")\n}\nbase.ExportAllDeclaration = (node, st, c) => {\n c(node.source, st, \"Expression\")\n}\nbase.ImportDeclaration = (node, st, c) => {\n for (let spec of node.specifiers)\n c(spec, st)\n c(node.source, st, \"Expression\")\n}\nbase.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore\n\nbase.TaggedTemplateExpression = (node, st, c) => {\n c(node.tag, st, \"Expression\")\n c(node.quasi, st, \"Expression\")\n}\nbase.ClassDeclaration = base.ClassExpression = (node, st, c) => c(node, st, \"Class\")\nbase.Class = (node, st, c) => {\n if (node.id) c(node.id, st, \"Pattern\")\n if (node.superClass) c(node.superClass, st, \"Expression\")\n c(node.body, st)\n}\nbase.ClassBody = (node, st, c) => {\n for (let elt of node.body)\n c(elt, st)\n}\nbase.MethodDefinition = base.Property = (node, st, c) => {\n if (node.computed) c(node.key, st, \"Expression\")\n c(node.value, st, 
\"Expression\")\n}\n"],"names":["let","const"],"mappings":";;;;;;AAAA;;;;;;;;;;;;;;;;;;AAkBA,AAAO,SAAS,MAAM,CAAC,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,QAAQ,EAAE;EACnE,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,IAAI;GACnC,EAAA,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,IAAI,EAAE,KAAK,GAAG,QAAQ,CAAC,IAAI,EAAC;IACxD,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;IAC9B,IAAI,KAAK,EAAE,EAAA,KAAK,CAAC,IAAI,EAAE,EAAE,EAAC,EAAA;GAC3B,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAC;CAC1B;;;;;AAKD,AAAO,SAAS,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE;EAC3DA,IAAI,SAAS,GAAG,GAAE;EAClB,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,IAAI;GACnC,EAAA,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,IAAI,EAAE,KAAK,GAAG,QAAQ,CAAC,IAAI,EAAC;IACxDA,IAAI,KAAK,GAAG,IAAI,KAAK,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAC;IACpD,IAAI,KAAK,EAAE,EAAA,SAAS,CAAC,IAAI,CAAC,IAAI,EAAC,EAAA;IAC/B,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;IAC9B,IAAI,KAAK,EAAE,EAAA,KAAK,CAAC,IAAI,EAAE,EAAE,IAAI,SAAS,EAAE,SAAS,EAAC,EAAA;IAClD,IAAI,KAAK,EAAE,EAAA,SAAS,CAAC,GAAG,GAAE,EAAA;GAC3B,EAAE,IAAI,EAAE,KAAK,EAAC;CAChB;;;;;;;AAOD,AAAO,SAAS,SAAS,CAAC,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,WAAW,EAAE,QAAQ,EAAE;EACnEA,IAAI,OAAO,GAAG,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,WAAW,IAAI,SAAS,CAAC,GAAG,WAAW,CACxE,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/B,OAAO,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;GAC5C,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAC;CAC1B;;AAED,SAAS,QAAQ,CAAC,IAAI,EAAE;EACtB,IAAI,OAAO,IAAI,KAAK,QAAQ;IAC1B,EAAA,OAAO,UAAA,IAAI,EAAC,SAAG,IAAI,KAAK,IAAI,GAAA,EAAA;OACzB,IAAI,CAAC,IAAI;IACZ,EAAA,OAAO,YAAG,SAAG,IAAI,GAAA,EAAA;;IAEjB,EAAA,OAAO,IAAI,EAAA;CACd;;AAED,IAAM,KAAK,GAAC,cACC,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAK,EAAE,CAAA;;;AAInE,AAAO,SAAS,IAAI,CAAC,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,QAAQ,EAAE;EACjE,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,IAAI;GACnC,EAAA,CAAC,SA
AS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;IAChC,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;IAC9B,IAAI,CAAC,QAAQ,EAAE,EAAA,QAAQ,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,EAAC,EAAA;GACxC,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAC;CAC1B;;;;AAID,AAAO,SAAS,YAAY,CAAC,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE;EAC/D,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpCA,IAAI,SAAS,GAAG,EAAE,CACjB,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;IAChCA,IAAI,KAAK,GAAG,IAAI,KAAK,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAC;IACpD,IAAI,KAAK,EAAE,EAAA,SAAS,CAAC,IAAI,CAAC,IAAI,EAAC,EAAA;IAC/B,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;IAC9B,IAAI,CAAC,QAAQ,EAAE,EAAA,QAAQ,CAAC,IAAI,EAAE,EAAE,IAAI,SAAS,EAAE,SAAS,EAAE,IAAI,EAAC,EAAA;IAC/D,IAAI,KAAK,EAAE,EAAA,SAAS,CAAC,GAAG,GAAE,EAAA;GAC3B,EAAE,IAAI,EAAE,KAAK,EAAC;CAChB;;;;;AAKD,AAAO,SAAS,UAAU,CAAC,IAAI,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE;EACrE,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpC,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAC;EACrB,IAAI;IACF,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;MAC9BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;MAChC,IAAI,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,IAAI,KAAK;WACpC,GAAG,IAAI,IAAI,IAAI,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;QAClC,EAAA,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC,EAAA;MAChC,IAAI,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK,KAAK;WACrC,GAAG,IAAI,IAAI,IAAI,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC;UACjC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC;QAClB,EAAA,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,EAAA;KAC5B,EAAE,IAAI,EAAE,KAAK,EAAC;GAChB,CAAC,OAAO,CAAC,EAAE;IACV,IAAI,CAAC,YAAY,KAAK,EAAE,EAAA,OAAO,CAAC,EAAA;IAChC,MAAM,CAAC;GACR;CACF;;;;AAID,AAAO,SAAS,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE;EAClE,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAC;EACrB,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpC,IAAI;IACF,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;MAC9BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;MA
ChC,IAAI,IAAI,CAAC,KAAK,GAAG,GAAG,IAAI,IAAI,CAAC,GAAG,GAAG,GAAG,EAAE,EAAA,MAAM,EAAA;MAC9C,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;MAC9B,IAAI,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,EAAA,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,EAAA;KAChD,EAAE,IAAI,EAAE,KAAK,EAAC;GAChB,CAAC,OAAO,CAAC,EAAE;IACV,IAAI,CAAC,YAAY,KAAK,EAAE,EAAA,OAAO,CAAC,EAAA;IAChC,MAAM,CAAC;GACR;CACF;;;AAGD,AAAO,SAAS,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE;EACjE,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAC;EACrB,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpC,IAAI;IACF,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;MAC9B,IAAI,IAAI,CAAC,GAAG,GAAG,GAAG,EAAE,EAAA,MAAM,EAAA;MAC1BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;MAChC,IAAI,IAAI,CAAC,KAAK,IAAI,GAAG,IAAI,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,EAAA,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,EAAA;MACpE,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;KAC/B,EAAE,IAAI,EAAE,KAAK,EAAC;GAChB,CAAC,OAAO,CAAC,EAAE;IACV,IAAI,CAAC,YAAY,KAAK,EAAE,EAAA,OAAO,CAAC,EAAA;IAChC,MAAM,CAAC;GACR;CACF;;;AAGD,AAAO,SAAS,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE;EAClE,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAC;EACrB,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpCA,IAAI,GAAG,CACN,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/B,IAAI,IAAI,CAAC,KAAK,GAAG,GAAG,EAAE,EAAA,MAAM,EAAA;IAC5BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;IAChC,IAAI,IAAI,CAAC,GAAG,IAAI,GAAG,KAAK,CAAC,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC;MAC1E,EAAA,GAAG,GAAG,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE,EAAC,EAAA;IAC3B,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;GAC/B,EAAE,IAAI,EAAE,KAAK,EAAC;EACf,OAAO,GAAG;CACX;;;AAGDC,IAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,SAAS,KAAK,EAAE;EAC9C,SAAS,IAAI,GAAG,EAAE;EAClB,IAAI,CAAC,SAAS,GAAG,MAAK;EACtB,OAAO,IAAI,IAAI;EAChB;;;;AAID,AAAO,SAAS,IAAI,CAAC,KAAK,EAAE,WAAW,EAAE;EACvCD,IAAI,OAAO,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,EAAC;EACzC,KAAKA,IAAI,IAAI,IAAI,KAAK,EAAE,EAAA,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,I
AAI,EAAC,EAAA;EACnD,OAAO,OAAO;CACf;;AAED,SAAS,WAAW,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC,EAAE;AACjD,SAAS,MAAM,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE;;;;AAIlC,AAAOC,IAAM,IAAI,GAAG,GAAE;;AAEtB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,cAAc,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACjD,KAAa,kBAAI,IAAI,CAAC,IAAI,yBAAA;IAArB;IAAAD,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;GAAA;EAC3B;AACD,IAAI,CAAC,SAAS,GAAG,YAAW;AAC5B,IAAI,CAAC,cAAc,GAAG,OAAM;AAC5B,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,uBAAuB;EACrD,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,EAAE,YAAY,CAAC,IAAA;AACvD,IAAI,CAAC,WAAW,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC/B,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;EAC9B,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,EAAE,WAAW,EAAC;EACnC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,EAAE,WAAW,EAAC,EAAA;EACvD;AACD,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,CAAC,IAAA;AACtE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,iBAAiB,GAAG,OAAM;AACrD,IAAI,CAAC,aAAa,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACjC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACnC,CAAC,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,EAAE,YAAY,EAAC;EACtC,KAAW,kBAAI,IAAI,CAAC,KAAK,yBAAA,EAAE;IAAtBA,IAAI,EAAE;;IACT,IAAI,EAAE,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;IACzC,KAAa,sBAAI,EAAE,CAAC,UAAU,+BAAA;MAAzB;MAAAA,IAAI,IAAI;;MACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;KAAA;GAC3B;EACF;AACD,IAAI,CAAC,UAAU,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC9B,IAAI,IAAI,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAC7C,KAAa,kBAAI,IAAI,CAAC,UAAU,yBAAA;IAA3B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;GAAA;EAC3B;AACD,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,E
AAE;EACjF,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EACtD;AACD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,aAAa;EACtC,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,YAAY,CAAC,IAAA;AACrD,IAAI,CAAC,YAAY,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAChC,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B,IAAI,IAAI,CAAC,OAAO,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,EAAC,EAAA;EACrC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,EAAE,WAAW,EAAC,EAAA;EACvD;AACD,IAAI,CAAC,WAAW,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC/B,IAAI,IAAI,CAAC,KAAK,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;EAC5C,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC1D,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;EAC9B,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,YAAY,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAChC,IAAI,IAAI,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;EAC1C,IAAI,IAAI,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAC7C,IAAI,IAAI,CAAC,MAAM,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EACjD,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACxD,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,SAAS,EAAC;EAC3B,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;EAC/B,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,OAAO,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC3B,IAAI,IAAI,CAAC,IAAI,KAAK,qBAAqB,EAAE,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC,EAAA;OAC/C,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAC/B;AACD,IAAI,CAAC,iBAAiB,GAAG,OAAM;;AAE/B,IAAI,CAAC,mBAAmB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,UAAU,CAAC,IAAA;AACnE,IAAI,CAAC,mBAAmB,GAAG,UAAC,IAAI,EAAE,EAAE,EA
AE,CAAC,EAAE;EACvC,KAAa,kBAAI,IAAI,CAAC,YAAY,yBAAA;IAA7B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC;GAAA;EACd;AACD,IAAI,CAAC,kBAAkB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACtC,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,EAAE,SAAS,EAAC;EACzB,IAAI,IAAI,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAC9C;;AAED,IAAI,CAAC,QAAQ,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC5B,IAAI,IAAI,CAAC,EAAE,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;EACtC,KAAc,kBAAI,IAAI,CAAC,MAAM,yBAAA;IAAxB;IAAAA,IAAI,KAAK;;IACZ,CAAC,CAAC,KAAK,EAAE,EAAE,EAAE,SAAS,EAAC;GAAA;EACzB,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,CAAC,UAAU,GAAG,YAAY,GAAG,WAAW,EAAC;EAC/D;;AAED,IAAI,CAAC,OAAO,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC3B,IAAI,IAAI,CAAC,IAAI,KAAK,YAAY;IAC5B,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,iBAAiB,EAAC,EAAA;OAC3B,IAAI,IAAI,CAAC,IAAI,KAAK,kBAAkB;IACvC,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,eAAe,EAAC,EAAA;;IAE5B,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC,EAAA;EACd;AACD,IAAI,CAAC,eAAe,GAAG,OAAM;AAC7B,IAAI,CAAC,aAAa,GAAG,YAAW;AAChC,IAAI,CAAC,WAAW,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,SAAS,CAAC,IAAA;AACnE,IAAI,CAAC,YAAY,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAChC,KAAY,kBAAI,IAAI,CAAC,QAAQ,yBAAA,EAAE;IAA1BA,IAAI,GAAG;;IACV,IAAI,GAAG,EAAE,EAAA,CAAC,CAAC,GAAG,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;GAC/B;EACF;AACD,IAAI,CAAC,aAAa,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACjC,KAAa,kBAAI,IAAI,CAAC,UAAU,yBAAA,EAAE;IAA7BA,IAAI,IAAI;;IACX,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,EAAE;MAC5B,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;MAChD,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,SAAS,EAAC;KAC7B,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,aAAa,EAAE;MACtC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,SAAS,EAAC;KAChC;GACF;EACF;;AAED,IAAI,CAAC,UAAU,GAAG,YAAW;AAC7B,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,YAAY,GAAG,OAAM;AAC7D,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACnC,KAAY,kBAAI,IAAI,CAAC,QAAQ,yBAA
A,EAAE;IAA1BA,IAAI,GAAG;;IACV,IAAI,GAAG,EAAE,EAAA,CAAC,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;GAClC;EACF;AACD,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACpC,KAAa,kBAAI,IAAI,CAAC,UAAU,yBAAA;IAA3B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC;GAAA;EACd;AACD,IAAI,CAAC,kBAAkB,GAAG,IAAI,CAAC,uBAAuB,GAAG,IAAI,CAAC,oBAAmB;AACjF,IAAI,CAAC,kBAAkB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACtC,KAAa,kBAAI,IAAI,CAAC,WAAW,yBAAA;IAA5B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;GAAA;EAC5B;AACD,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACnC,KAAc,kBAAI,IAAI,CAAC,MAAM,yBAAA;IAAxB;IAAAA,IAAI,KAAK;;IACZ,CAAC,CAAC,KAAK,EAAE,EAAE,EAAC;GAAA;;EAEd,KAAa,sBAAI,IAAI,CAAC,WAAW,+BAAA;IAA5B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;GAAA;EAC5B;AACD,IAAI,CAAC,eAAe,GAAG,OAAM;AAC7B,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC3D,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,YAAY,EAAC;EACnC;AACD,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,iBAAiB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC7D,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;EAC9B,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC;AACD,IAAI,CAAC,oBAAoB,GAAG,IAAI,CAAC,iBAAiB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACjE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,SAAS,EAAC;EAC3B,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC;AACD,IAAI,CAAC,qBAAqB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACzC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;EAC9B,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,EAAE,YAAY,EAAC;EACpC,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,EAAE,YAAY,EAAC;EACpC;AACD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,cAAc,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACvD,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC,IAAI,IAAI,CAAC,SAAS;IAChB,EAAA,KAAY,kBAAI,IAAI,CAAC,SAAS,yBAAA;MAAzB;QAAAA,IAAI,GAAG;;QACV,CAAC,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC;OAAA,EAAA;EAC7B;AACD,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACpC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,
EAAE,EAAE,YAAY,EAAC;EAChC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EACtD;AACD,IAAI,CAAC,sBAAsB,GAAG,IAAI,CAAC,wBAAwB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC1E,IAAI,IAAI,CAAC,WAAW;IAClB,EAAA,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,KAAK,wBAAwB,IAAI,IAAI,CAAC,WAAW,CAAC,EAAE,GAAG,WAAW,GAAG,YAAY,EAAC,EAAA;EACrH,IAAI,IAAI,CAAC,MAAM,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAClD;AACD,IAAI,CAAC,oBAAoB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACxC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC;EACjC;AACD,IAAI,CAAC,iBAAiB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACrC,KAAa,kBAAI,IAAI,CAAC,UAAU,yBAAA;IAA3B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC;GAAA;EACb,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC;EACjC;AACD,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,sBAAsB,GAAG,IAAI,CAAC,wBAAwB,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,OAAO,GAAG,OAAM;;AAE5H,IAAI,CAAC,wBAAwB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC5C,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC;EAC7B,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC;AACD,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,IAAA;AACpF,IAAI,CAAC,KAAK,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACzB,IAAI,IAAI,CAAC,EAAE,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;EACtC,IAAI,IAAI,CAAC,UAAU,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EACzD,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAC;EACjB;AACD,IAAI,CAAC,SAAS,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC7B,KAAY,kBAAI,IAAI,CAAC,IAAI,yBAAA;IAApB;IAAAA,IAAI,GAAG;;IACV,CAAC,CAAC,GAAG,EAAE,EAAE,EAAC;GAAA;EACb;AACD,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,QAAQ,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACpD,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAChD,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;CAChC;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git 
a/deps/acorn/acorn-walk/dist/walk.mjs b/deps/acorn/acorn-walk/dist/walk.mjs index a7fcec855e12e7..ea5d4b0bc2041f 100644 --- a/deps/acorn/acorn-walk/dist/walk.mjs +++ b/deps/acorn/acorn-walk/dist/walk.mjs @@ -66,11 +66,15 @@ var Found = function Found(node, state) { this.node = node; this.state = state; // A full walk triggers the callback on each node function full(node, callback, baseVisitor, state, override) { - if (!baseVisitor) { baseVisitor = base - ; }(function c(node, st, override) { + if (!baseVisitor) { baseVisitor = base; } + var last + ;(function c(node, st, override) { var type = override || node.type; baseVisitor[type](node, st, c); - if (!override) { callback(node, st, type); } + if (last !== node) { + callback(node, st, type); + last = node; + } })(node, state, override); } @@ -78,13 +82,16 @@ function full(node, callback, baseVisitor, state, override) { // the callback on each node function fullAncestor(node, callback, baseVisitor, state) { if (!baseVisitor) { baseVisitor = base; } - var ancestors = [] + var ancestors = [], last ;(function c(node, st, override) { var type = override || node.type; var isNew = node !== ancestors[ancestors.length - 1]; if (isNew) { ancestors.push(node); } baseVisitor[type](node, st, c); - if (!override) { callback(node, st || ancestors, ancestors, type); } + if (last !== node) { + callback(node, st || ancestors, ancestors, type); + last = node; + } if (isNew) { ancestors.pop(); } })(node, state); } @@ -162,17 +169,10 @@ function findNodeBefore(node, pos, test, baseVisitor, state) { return max } -// Fallback to an Object.create polyfill for older environments. -var create = Object.create || function(proto) { - function Ctor() {} - Ctor.prototype = proto; - return new Ctor -}; - // Used to create a custom walker. Will fill in all missing node // type properties with the defaults. 
function make(funcs, baseVisitor) { - var visitor = create(baseVisitor || base); + var visitor = Object.create(baseVisitor || base); for (var type in funcs) { visitor[type] = funcs[type]; } return visitor } @@ -415,7 +415,7 @@ base.ImportDeclaration = function (node, st, c) { base.ImportExpression = function (node, st, c) { c(node.source, st, "Expression"); }; -base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore; +base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore; base.TaggedTemplateExpression = function (node, st, c) { c(node.tag, st, "Expression"); @@ -435,9 +435,9 @@ base.ClassBody = function (node, st, c) { c(elt, st); } }; -base.MethodDefinition = base.Property = function (node, st, c) { +base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) { if (node.computed) { c(node.key, st, "Expression"); } - c(node.value, st, "Expression"); + if (node.value) { c(node.value, st, "Expression"); } }; export { ancestor, base, findNodeAfter, findNodeAround, findNodeAt, findNodeBefore, full, fullAncestor, make, recursive, simple }; diff --git a/deps/acorn/acorn-walk/dist/walk.mjs.map b/deps/acorn/acorn-walk/dist/walk.mjs.map deleted file mode 100644 index 2a94219c3bada4..00000000000000 --- a/deps/acorn/acorn-walk/dist/walk.mjs.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"walk.mjs","sources":["../src/index.js"],"sourcesContent":["// AST walker module for Mozilla Parser API compatible trees\n\n// A simple walk is one where you simply specify callbacks to be\n// called on specific nodes. The last two arguments are optional. A\n// simple use would be\n//\n// walk.simple(myTree, {\n// Expression: function(node) { ... }\n// });\n//\n// to do something with all expressions. 
All Parser API node types\n// can be used to identify node types, as well as Expression and\n// Statement, which denote categories of nodes.\n//\n// The base argument can be used to pass a custom (recursive)\n// walker, and state can be used to give this walked an initial\n// state.\n\nexport function simple(node, visitors, baseVisitor, state, override) {\n if (!baseVisitor) baseVisitor = base\n ;(function c(node, st, override) {\n let type = override || node.type, found = visitors[type]\n baseVisitor[type](node, st, c)\n if (found) found(node, st)\n })(node, state, override)\n}\n\n// An ancestor walk keeps an array of ancestor nodes (including the\n// current node) and passes them to the callback as third parameter\n// (and also as state parameter when no other state is present).\nexport function ancestor(node, visitors, baseVisitor, state) {\n let ancestors = []\n if (!baseVisitor) baseVisitor = base\n ;(function c(node, st, override) {\n let type = override || node.type, found = visitors[type]\n let isNew = node !== ancestors[ancestors.length - 1]\n if (isNew) ancestors.push(node)\n baseVisitor[type](node, st, c)\n if (found) found(node, st || ancestors, ancestors)\n if (isNew) ancestors.pop()\n })(node, state)\n}\n\n// A recursive walk is one where your functions override the default\n// walkers. They can modify and replace the state parameter that's\n// threaded through the walk, and can opt how and whether to walk\n// their child nodes (by calling their third argument on these\n// nodes).\nexport function recursive(node, state, funcs, baseVisitor, override) {\n let visitor = funcs ? 
make(funcs, baseVisitor || undefined) : baseVisitor\n ;(function c(node, st, override) {\n visitor[override || node.type](node, st, c)\n })(node, state, override)\n}\n\nfunction makeTest(test) {\n if (typeof test === \"string\")\n return type => type === test\n else if (!test)\n return () => true\n else\n return test\n}\n\nclass Found {\n constructor(node, state) { this.node = node; this.state = state }\n}\n\n// A full walk triggers the callback on each node\nexport function full(node, callback, baseVisitor, state, override) {\n if (!baseVisitor) baseVisitor = base\n ;(function c(node, st, override) {\n let type = override || node.type\n baseVisitor[type](node, st, c)\n if (!override) callback(node, st, type)\n })(node, state, override)\n}\n\n// An fullAncestor walk is like an ancestor walk, but triggers\n// the callback on each node\nexport function fullAncestor(node, callback, baseVisitor, state) {\n if (!baseVisitor) baseVisitor = base\n let ancestors = []\n ;(function c(node, st, override) {\n let type = override || node.type\n let isNew = node !== ancestors[ancestors.length - 1]\n if (isNew) ancestors.push(node)\n baseVisitor[type](node, st, c)\n if (!override) callback(node, st || ancestors, ancestors, type)\n if (isNew) ancestors.pop()\n })(node, state)\n}\n\n// Find a node with a given start, end, and type (all are optional,\n// null can be used as wildcard). 
Returns a {node, state} object, or\n// undefined when it doesn't find a matching node.\nexport function findNodeAt(node, start, end, test, baseVisitor, state) {\n if (!baseVisitor) baseVisitor = base\n test = makeTest(test)\n try {\n (function c(node, st, override) {\n let type = override || node.type\n if ((start == null || node.start <= start) &&\n (end == null || node.end >= end))\n baseVisitor[type](node, st, c)\n if ((start == null || node.start === start) &&\n (end == null || node.end === end) &&\n test(type, node))\n throw new Found(node, st)\n })(node, state)\n } catch (e) {\n if (e instanceof Found) return e\n throw e\n }\n}\n\n// Find the innermost node of a given type that contains the given\n// position. Interface similar to findNodeAt.\nexport function findNodeAround(node, pos, test, baseVisitor, state) {\n test = makeTest(test)\n if (!baseVisitor) baseVisitor = base\n try {\n (function c(node, st, override) {\n let type = override || node.type\n if (node.start > pos || node.end < pos) return\n baseVisitor[type](node, st, c)\n if (test(type, node)) throw new Found(node, st)\n })(node, state)\n } catch (e) {\n if (e instanceof Found) return e\n throw e\n }\n}\n\n// Find the outermost matching node after a given position.\nexport function findNodeAfter(node, pos, test, baseVisitor, state) {\n test = makeTest(test)\n if (!baseVisitor) baseVisitor = base\n try {\n (function c(node, st, override) {\n if (node.end < pos) return\n let type = override || node.type\n if (node.start >= pos && test(type, node)) throw new Found(node, st)\n baseVisitor[type](node, st, c)\n })(node, state)\n } catch (e) {\n if (e instanceof Found) return e\n throw e\n }\n}\n\n// Find the outermost matching node before a given position.\nexport function findNodeBefore(node, pos, test, baseVisitor, state) {\n test = makeTest(test)\n if (!baseVisitor) baseVisitor = base\n let max\n ;(function c(node, st, override) {\n if (node.start > pos) return\n let type = override || node.type\n if 
(node.end <= pos && (!max || max.node.end < node.end) && test(type, node))\n max = new Found(node, st)\n baseVisitor[type](node, st, c)\n })(node, state)\n return max\n}\n\n// Fallback to an Object.create polyfill for older environments.\nconst create = Object.create || function(proto) {\n function Ctor() {}\n Ctor.prototype = proto\n return new Ctor\n}\n\n// Used to create a custom walker. Will fill in all missing node\n// type properties with the defaults.\nexport function make(funcs, baseVisitor) {\n let visitor = create(baseVisitor || base)\n for (let type in funcs) visitor[type] = funcs[type]\n return visitor\n}\n\nfunction skipThrough(node, st, c) { c(node, st) }\nfunction ignore(_node, _st, _c) {}\n\n// Node walkers.\n\nexport const base = {}\n\nbase.Program = base.BlockStatement = (node, st, c) => {\n for (let stmt of node.body)\n c(stmt, st, \"Statement\")\n}\nbase.Statement = skipThrough\nbase.EmptyStatement = ignore\nbase.ExpressionStatement = base.ParenthesizedExpression =\n (node, st, c) => c(node.expression, st, \"Expression\")\nbase.IfStatement = (node, st, c) => {\n c(node.test, st, \"Expression\")\n c(node.consequent, st, \"Statement\")\n if (node.alternate) c(node.alternate, st, \"Statement\")\n}\nbase.LabeledStatement = (node, st, c) => c(node.body, st, \"Statement\")\nbase.BreakStatement = base.ContinueStatement = ignore\nbase.WithStatement = (node, st, c) => {\n c(node.object, st, \"Expression\")\n c(node.body, st, \"Statement\")\n}\nbase.SwitchStatement = (node, st, c) => {\n c(node.discriminant, st, \"Expression\")\n for (let cs of node.cases) {\n if (cs.test) c(cs.test, st, \"Expression\")\n for (let cons of cs.consequent)\n c(cons, st, \"Statement\")\n }\n}\nbase.SwitchCase = (node, st, c) => {\n if (node.test) c(node.test, st, \"Expression\")\n for (let cons of node.consequent)\n c(cons, st, \"Statement\")\n}\nbase.ReturnStatement = base.YieldExpression = base.AwaitExpression = (node, st, c) => {\n if (node.argument) c(node.argument, st, 
\"Expression\")\n}\nbase.ThrowStatement = base.SpreadElement =\n (node, st, c) => c(node.argument, st, \"Expression\")\nbase.TryStatement = (node, st, c) => {\n c(node.block, st, \"Statement\")\n if (node.handler) c(node.handler, st)\n if (node.finalizer) c(node.finalizer, st, \"Statement\")\n}\nbase.CatchClause = (node, st, c) => {\n if (node.param) c(node.param, st, \"Pattern\")\n c(node.body, st, \"Statement\")\n}\nbase.WhileStatement = base.DoWhileStatement = (node, st, c) => {\n c(node.test, st, \"Expression\")\n c(node.body, st, \"Statement\")\n}\nbase.ForStatement = (node, st, c) => {\n if (node.init) c(node.init, st, \"ForInit\")\n if (node.test) c(node.test, st, \"Expression\")\n if (node.update) c(node.update, st, \"Expression\")\n c(node.body, st, \"Statement\")\n}\nbase.ForInStatement = base.ForOfStatement = (node, st, c) => {\n c(node.left, st, \"ForInit\")\n c(node.right, st, \"Expression\")\n c(node.body, st, \"Statement\")\n}\nbase.ForInit = (node, st, c) => {\n if (node.type === \"VariableDeclaration\") c(node, st)\n else c(node, st, \"Expression\")\n}\nbase.DebuggerStatement = ignore\n\nbase.FunctionDeclaration = (node, st, c) => c(node, st, \"Function\")\nbase.VariableDeclaration = (node, st, c) => {\n for (let decl of node.declarations)\n c(decl, st)\n}\nbase.VariableDeclarator = (node, st, c) => {\n c(node.id, st, \"Pattern\")\n if (node.init) c(node.init, st, \"Expression\")\n}\n\nbase.Function = (node, st, c) => {\n if (node.id) c(node.id, st, \"Pattern\")\n for (let param of node.params)\n c(param, st, \"Pattern\")\n c(node.body, st, node.expression ? 
\"Expression\" : \"Statement\")\n}\n\nbase.Pattern = (node, st, c) => {\n if (node.type === \"Identifier\")\n c(node, st, \"VariablePattern\")\n else if (node.type === \"MemberExpression\")\n c(node, st, \"MemberPattern\")\n else\n c(node, st)\n}\nbase.VariablePattern = ignore\nbase.MemberPattern = skipThrough\nbase.RestElement = (node, st, c) => c(node.argument, st, \"Pattern\")\nbase.ArrayPattern = (node, st, c) => {\n for (let elt of node.elements) {\n if (elt) c(elt, st, \"Pattern\")\n }\n}\nbase.ObjectPattern = (node, st, c) => {\n for (let prop of node.properties) {\n if (prop.type === \"Property\") {\n if (prop.computed) c(prop.key, st, \"Expression\")\n c(prop.value, st, \"Pattern\")\n } else if (prop.type === \"RestElement\") {\n c(prop.argument, st, \"Pattern\")\n }\n }\n}\n\nbase.Expression = skipThrough\nbase.ThisExpression = base.Super = base.MetaProperty = ignore\nbase.ArrayExpression = (node, st, c) => {\n for (let elt of node.elements) {\n if (elt) c(elt, st, \"Expression\")\n }\n}\nbase.ObjectExpression = (node, st, c) => {\n for (let prop of node.properties)\n c(prop, st)\n}\nbase.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration\nbase.SequenceExpression = (node, st, c) => {\n for (let expr of node.expressions)\n c(expr, st, \"Expression\")\n}\nbase.TemplateLiteral = (node, st, c) => {\n for (let quasi of node.quasis)\n c(quasi, st)\n\n for (let expr of node.expressions)\n c(expr, st, \"Expression\")\n}\nbase.TemplateElement = ignore\nbase.UnaryExpression = base.UpdateExpression = (node, st, c) => {\n c(node.argument, st, \"Expression\")\n}\nbase.BinaryExpression = base.LogicalExpression = (node, st, c) => {\n c(node.left, st, \"Expression\")\n c(node.right, st, \"Expression\")\n}\nbase.AssignmentExpression = base.AssignmentPattern = (node, st, c) => {\n c(node.left, st, \"Pattern\")\n c(node.right, st, \"Expression\")\n}\nbase.ConditionalExpression = (node, st, c) => {\n c(node.test, st, \"Expression\")\n 
c(node.consequent, st, \"Expression\")\n c(node.alternate, st, \"Expression\")\n}\nbase.NewExpression = base.CallExpression = (node, st, c) => {\n c(node.callee, st, \"Expression\")\n if (node.arguments)\n for (let arg of node.arguments)\n c(arg, st, \"Expression\")\n}\nbase.MemberExpression = (node, st, c) => {\n c(node.object, st, \"Expression\")\n if (node.computed) c(node.property, st, \"Expression\")\n}\nbase.ExportNamedDeclaration = base.ExportDefaultDeclaration = (node, st, c) => {\n if (node.declaration)\n c(node.declaration, st, node.type === \"ExportNamedDeclaration\" || node.declaration.id ? \"Statement\" : \"Expression\")\n if (node.source) c(node.source, st, \"Expression\")\n}\nbase.ExportAllDeclaration = (node, st, c) => {\n c(node.source, st, \"Expression\")\n}\nbase.ImportDeclaration = (node, st, c) => {\n for (let spec of node.specifiers)\n c(spec, st)\n c(node.source, st, \"Expression\")\n}\nbase.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.Literal = ignore\n\nbase.TaggedTemplateExpression = (node, st, c) => {\n c(node.tag, st, \"Expression\")\n c(node.quasi, st, \"Expression\")\n}\nbase.ClassDeclaration = base.ClassExpression = (node, st, c) => c(node, st, \"Class\")\nbase.Class = (node, st, c) => {\n if (node.id) c(node.id, st, \"Pattern\")\n if (node.superClass) c(node.superClass, st, \"Expression\")\n c(node.body, st)\n}\nbase.ClassBody = (node, st, c) => {\n for (let elt of node.body)\n c(elt, st)\n}\nbase.MethodDefinition = base.Property = (node, st, c) => {\n if (node.computed) c(node.key, st, \"Expression\")\n c(node.value, st, 
\"Expression\")\n}\n"],"names":["let","const"],"mappings":"AAAA;;;;;;;;;;;;;;;;;;AAkBA,AAAO,SAAS,MAAM,CAAC,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,QAAQ,EAAE;EACnE,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,IAAI;GACnC,EAAA,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,IAAI,EAAE,KAAK,GAAG,QAAQ,CAAC,IAAI,EAAC;IACxD,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;IAC9B,IAAI,KAAK,EAAE,EAAA,KAAK,CAAC,IAAI,EAAE,EAAE,EAAC,EAAA;GAC3B,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAC;CAC1B;;;;;AAKD,AAAO,SAAS,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE;EAC3DA,IAAI,SAAS,GAAG,GAAE;EAClB,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,IAAI;GACnC,EAAA,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,IAAI,EAAE,KAAK,GAAG,QAAQ,CAAC,IAAI,EAAC;IACxDA,IAAI,KAAK,GAAG,IAAI,KAAK,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAC;IACpD,IAAI,KAAK,EAAE,EAAA,SAAS,CAAC,IAAI,CAAC,IAAI,EAAC,EAAA;IAC/B,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;IAC9B,IAAI,KAAK,EAAE,EAAA,KAAK,CAAC,IAAI,EAAE,EAAE,IAAI,SAAS,EAAE,SAAS,EAAC,EAAA;IAClD,IAAI,KAAK,EAAE,EAAA,SAAS,CAAC,GAAG,GAAE,EAAA;GAC3B,EAAE,IAAI,EAAE,KAAK,EAAC;CAChB;;;;;;;AAOD,AAAO,SAAS,SAAS,CAAC,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,WAAW,EAAE,QAAQ,EAAE;EACnEA,IAAI,OAAO,GAAG,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,WAAW,IAAI,SAAS,CAAC,GAAG,WAAW,CACxE,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/B,OAAO,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;GAC5C,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAC;CAC1B;;AAED,SAAS,QAAQ,CAAC,IAAI,EAAE;EACtB,IAAI,OAAO,IAAI,KAAK,QAAQ;IAC1B,EAAA,OAAO,UAAA,IAAI,EAAC,SAAG,IAAI,KAAK,IAAI,GAAA,EAAA;OACzB,IAAI,CAAC,IAAI;IACZ,EAAA,OAAO,YAAG,SAAG,IAAI,GAAA,EAAA;;IAEjB,EAAA,OAAO,IAAI,EAAA;CACd;;AAED,IAAM,KAAK,GAAC,cACC,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,KAAK,GAAG,MAAK,EAAE,CAAA;;;AAInE,AAAO,SAAS,IAAI,CAAC,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE,QAAQ,EAAE;EACjE,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,IAAI;GACnC,EAAA,CAAC,SAAS,CAA
C,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;IAChC,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;IAC9B,IAAI,CAAC,QAAQ,EAAE,EAAA,QAAQ,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,EAAC,EAAA;GACxC,EAAE,IAAI,EAAE,KAAK,EAAE,QAAQ,EAAC;CAC1B;;;;AAID,AAAO,SAAS,YAAY,CAAC,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,KAAK,EAAE;EAC/D,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpCA,IAAI,SAAS,GAAG,EAAE,CACjB,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;IAChCA,IAAI,KAAK,GAAG,IAAI,KAAK,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAC;IACpD,IAAI,KAAK,EAAE,EAAA,SAAS,CAAC,IAAI,CAAC,IAAI,EAAC,EAAA;IAC/B,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;IAC9B,IAAI,CAAC,QAAQ,EAAE,EAAA,QAAQ,CAAC,IAAI,EAAE,EAAE,IAAI,SAAS,EAAE,SAAS,EAAE,IAAI,EAAC,EAAA;IAC/D,IAAI,KAAK,EAAE,EAAA,SAAS,CAAC,GAAG,GAAE,EAAA;GAC3B,EAAE,IAAI,EAAE,KAAK,EAAC;CAChB;;;;;AAKD,AAAO,SAAS,UAAU,CAAC,IAAI,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE;EACrE,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpC,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAC;EACrB,IAAI;IACF,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;MAC9BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;MAChC,IAAI,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,IAAI,KAAK;WACpC,GAAG,IAAI,IAAI,IAAI,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;QAClC,EAAA,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC,EAAA;MAChC,IAAI,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK,KAAK;WACrC,GAAG,IAAI,IAAI,IAAI,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC;UACjC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC;QAClB,EAAA,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,EAAA;KAC5B,EAAE,IAAI,EAAE,KAAK,EAAC;GAChB,CAAC,OAAO,CAAC,EAAE;IACV,IAAI,CAAC,YAAY,KAAK,EAAE,EAAA,OAAO,CAAC,EAAA;IAChC,MAAM,CAAC;GACR;CACF;;;;AAID,AAAO,SAAS,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE;EAClE,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAC;EACrB,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpC,IAAI;IACF,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;MAC9BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;MAChC,IA
AI,IAAI,CAAC,KAAK,GAAG,GAAG,IAAI,IAAI,CAAC,GAAG,GAAG,GAAG,EAAE,EAAA,MAAM,EAAA;MAC9C,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;MAC9B,IAAI,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,EAAA,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,EAAA;KAChD,EAAE,IAAI,EAAE,KAAK,EAAC;GAChB,CAAC,OAAO,CAAC,EAAE;IACV,IAAI,CAAC,YAAY,KAAK,EAAE,EAAA,OAAO,CAAC,EAAA;IAChC,MAAM,CAAC;GACR;CACF;;;AAGD,AAAO,SAAS,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE;EACjE,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAC;EACrB,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpC,IAAI;IACF,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;MAC9B,IAAI,IAAI,CAAC,GAAG,GAAG,GAAG,EAAE,EAAA,MAAM,EAAA;MAC1BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;MAChC,IAAI,IAAI,CAAC,KAAK,IAAI,GAAG,IAAI,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE,EAAA,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,EAAA;MACpE,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;KAC/B,EAAE,IAAI,EAAE,KAAK,EAAC;GAChB,CAAC,OAAO,CAAC,EAAE;IACV,IAAI,CAAC,YAAY,KAAK,EAAE,EAAA,OAAO,CAAC,EAAA;IAChC,MAAM,CAAC;GACR;CACF;;;AAGD,AAAO,SAAS,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE;EAClE,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAC;EACrB,IAAI,CAAC,WAAW,EAAE,EAAA,WAAW,GAAG,KAAI,EAAA;EACpCA,IAAI,GAAG,CACN,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE;IAC/B,IAAI,IAAI,CAAC,KAAK,GAAG,GAAG,EAAE,EAAA,MAAM,EAAA;IAC5BA,IAAI,IAAI,GAAG,QAAQ,IAAI,IAAI,CAAC,KAAI;IAChC,IAAI,IAAI,CAAC,GAAG,IAAI,GAAG,KAAK,CAAC,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC;MAC1E,EAAA,GAAG,GAAG,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE,EAAC,EAAA;IAC3B,WAAW,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAC;GAC/B,EAAE,IAAI,EAAE,KAAK,EAAC;EACf,OAAO,GAAG;CACX;;;AAGDC,IAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,SAAS,KAAK,EAAE;EAC9C,SAAS,IAAI,GAAG,EAAE;EAClB,IAAI,CAAC,SAAS,GAAG,MAAK;EACtB,OAAO,IAAI,IAAI;EAChB;;;;AAID,AAAO,SAAS,IAAI,CAAC,KAAK,EAAE,WAAW,EAAE;EACvCD,IAAI,OAAO,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,EAAC;EACzC,KAAKA,IAAI,IAAI,IAAI,KAAK,EAAE,EAAA,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,IAAI,EA
AC,EAAA;EACnD,OAAO,OAAO;CACf;;AAED,SAAS,WAAW,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC,EAAE;AACjD,SAAS,MAAM,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE;;;;AAIlC,AAAOC,IAAM,IAAI,GAAG,GAAE;;AAEtB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,cAAc,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACjD,KAAa,kBAAI,IAAI,CAAC,IAAI,yBAAA;IAArB;IAAAD,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;GAAA;EAC3B;AACD,IAAI,CAAC,SAAS,GAAG,YAAW;AAC5B,IAAI,CAAC,cAAc,GAAG,OAAM;AAC5B,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,uBAAuB;EACrD,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,EAAE,YAAY,CAAC,IAAA;AACvD,IAAI,CAAC,WAAW,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC/B,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;EAC9B,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,EAAE,WAAW,EAAC;EACnC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,EAAE,WAAW,EAAC,EAAA;EACvD;AACD,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,CAAC,IAAA;AACtE,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,iBAAiB,GAAG,OAAM;AACrD,IAAI,CAAC,aAAa,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACjC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACnC,CAAC,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,EAAE,YAAY,EAAC;EACtC,KAAW,kBAAI,IAAI,CAAC,KAAK,yBAAA,EAAE;IAAtBA,IAAI,EAAE;;IACT,IAAI,EAAE,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,EAAE,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;IACzC,KAAa,sBAAI,EAAE,CAAC,UAAU,+BAAA;MAAzB;MAAAA,IAAI,IAAI;;MACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;KAAA;GAC3B;EACF;AACD,IAAI,CAAC,UAAU,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC9B,IAAI,IAAI,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAC7C,KAAa,kBAAI,IAAI,CAAC,UAAU,yBAAA;IAA3B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;GAAA;EAC3B;AACD,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EA
CjF,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EACtD;AACD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,aAAa;EACtC,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,YAAY,CAAC,IAAA;AACrD,IAAI,CAAC,YAAY,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAChC,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B,IAAI,IAAI,CAAC,OAAO,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,EAAC,EAAA;EACrC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,EAAE,WAAW,EAAC,EAAA;EACvD;AACD,IAAI,CAAC,WAAW,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC/B,IAAI,IAAI,CAAC,KAAK,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;EAC5C,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC1D,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;EAC9B,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,YAAY,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAChC,IAAI,IAAI,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;EAC1C,IAAI,IAAI,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAC7C,IAAI,IAAI,CAAC,MAAM,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EACjD,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACxD,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,SAAS,EAAC;EAC3B,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;EAC/B,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,WAAW,EAAC;EAC9B;AACD,IAAI,CAAC,OAAO,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC3B,IAAI,IAAI,CAAC,IAAI,KAAK,qBAAqB,EAAE,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC,EAAA;OAC/C,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAC/B;AACD,IAAI,CAAC,iBAAiB,GAAG,OAAM;;AAE/B,IAAI,CAAC,mBAAmB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,UAAU,CAAC,IAAA;AACnE,IAAI,CAAC,mBAAmB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAA
C,EAAE;EACvC,KAAa,kBAAI,IAAI,CAAC,YAAY,yBAAA;IAA7B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC;GAAA;EACd;AACD,IAAI,CAAC,kBAAkB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACtC,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,EAAE,SAAS,EAAC;EACzB,IAAI,IAAI,CAAC,IAAI,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAC9C;;AAED,IAAI,CAAC,QAAQ,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC5B,IAAI,IAAI,CAAC,EAAE,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;EACtC,KAAc,kBAAI,IAAI,CAAC,MAAM,yBAAA;IAAxB;IAAAA,IAAI,KAAK;;IACZ,CAAC,CAAC,KAAK,EAAE,EAAE,EAAE,SAAS,EAAC;GAAA;EACzB,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,IAAI,CAAC,UAAU,GAAG,YAAY,GAAG,WAAW,EAAC;EAC/D;;AAED,IAAI,CAAC,OAAO,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC3B,IAAI,IAAI,CAAC,IAAI,KAAK,YAAY;IAC5B,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,iBAAiB,EAAC,EAAA;OAC3B,IAAI,IAAI,CAAC,IAAI,KAAK,kBAAkB;IACvC,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,eAAe,EAAC,EAAA;;IAE5B,EAAA,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC,EAAA;EACd;AACD,IAAI,CAAC,eAAe,GAAG,OAAM;AAC7B,IAAI,CAAC,aAAa,GAAG,YAAW;AAChC,IAAI,CAAC,WAAW,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,SAAS,CAAC,IAAA;AACnE,IAAI,CAAC,YAAY,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAChC,KAAY,kBAAI,IAAI,CAAC,QAAQ,yBAAA,EAAE;IAA1BA,IAAI,GAAG;;IACV,IAAI,GAAG,EAAE,EAAA,CAAC,CAAC,GAAG,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;GAC/B;EACF;AACD,IAAI,CAAC,aAAa,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACjC,KAAa,kBAAI,IAAI,CAAC,UAAU,yBAAA,EAAE;IAA7BA,IAAI,IAAI;;IACX,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,EAAE;MAC5B,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;MAChD,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,SAAS,EAAC;KAC7B,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,aAAa,EAAE;MACtC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,SAAS,EAAC;KAChC;GACF;EACF;;AAED,IAAI,CAAC,UAAU,GAAG,YAAW;AAC7B,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,YAAY,GAAG,OAAM;AAC7D,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACnC,KAAY,kBAAI,IAAI,CAAC,QAAQ,yBAAA,EAAE
;IAA1BA,IAAI,GAAG;;IACV,IAAI,GAAG,EAAE,EAAA,CAAC,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;GAClC;EACF;AACD,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACpC,KAAa,kBAAI,IAAI,CAAC,UAAU,yBAAA;IAA3B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC;GAAA;EACd;AACD,IAAI,CAAC,kBAAkB,GAAG,IAAI,CAAC,uBAAuB,GAAG,IAAI,CAAC,oBAAmB;AACjF,IAAI,CAAC,kBAAkB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACtC,KAAa,kBAAI,IAAI,CAAC,WAAW,yBAAA;IAA5B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;GAAA;EAC5B;AACD,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACnC,KAAc,kBAAI,IAAI,CAAC,MAAM,yBAAA;IAAxB;IAAAA,IAAI,KAAK;;IACZ,CAAC,CAAC,KAAK,EAAE,EAAE,EAAC;GAAA;;EAEd,KAAa,sBAAI,IAAI,CAAC,WAAW,+BAAA;IAA5B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;GAAA;EAC5B;AACD,IAAI,CAAC,eAAe,GAAG,OAAM;AAC7B,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC3D,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,YAAY,EAAC;EACnC;AACD,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,iBAAiB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC7D,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;EAC9B,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC;AACD,IAAI,CAAC,oBAAoB,GAAG,IAAI,CAAC,iBAAiB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACjE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,SAAS,EAAC;EAC3B,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC;AACD,IAAI,CAAC,qBAAqB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACzC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAE,YAAY,EAAC;EAC9B,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,EAAE,YAAY,EAAC;EACpC,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,EAAE,YAAY,EAAC;EACpC;AACD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,cAAc,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACvD,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC,IAAI,IAAI,CAAC,SAAS;IAChB,EAAA,KAAY,kBAAI,IAAI,CAAC,SAAS,yBAAA;MAAzB;QAAAA,IAAI,GAAG;;QACV,CAAC,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC;OAAA,EAAA;EAC7B;AACD,IAAI,CAAC,gBAAgB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACpC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,E
AAE,YAAY,EAAC;EAChC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EACtD;AACD,IAAI,CAAC,sBAAsB,GAAG,IAAI,CAAC,wBAAwB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC1E,IAAI,IAAI,CAAC,WAAW;IAClB,EAAA,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,KAAK,wBAAwB,IAAI,IAAI,CAAC,WAAW,CAAC,EAAE,GAAG,WAAW,GAAG,YAAY,EAAC,EAAA;EACrH,IAAI,IAAI,CAAC,MAAM,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAClD;AACD,IAAI,CAAC,oBAAoB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACxC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC;EACjC;AACD,IAAI,CAAC,iBAAiB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACrC,KAAa,kBAAI,IAAI,CAAC,UAAU,yBAAA;IAA3B;IAAAA,IAAI,IAAI;;IACX,CAAC,CAAC,IAAI,EAAE,EAAE,EAAC;GAAA;EACb,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,YAAY,EAAC;EACjC;AACD,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,sBAAsB,GAAG,IAAI,CAAC,wBAAwB,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,OAAO,GAAG,OAAM;;AAE5H,IAAI,CAAC,wBAAwB,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC5C,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC;EAC7B,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;EAChC;AACD,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,eAAe,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE,SAAG,CAAC,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,IAAA;AACpF,IAAI,CAAC,KAAK,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACzB,IAAI,IAAI,CAAC,EAAE,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,EAAE,SAAS,EAAC,EAAA;EACtC,IAAI,IAAI,CAAC,UAAU,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EACzD,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,EAAC;EACjB;AACD,IAAI,CAAC,SAAS,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EAC7B,KAAY,kBAAI,IAAI,CAAC,IAAI,yBAAA;IAApB;IAAAA,IAAI,GAAG;;IACV,CAAC,CAAC,GAAG,EAAE,EAAE,EAAC;GAAA;EACb;AACD,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,QAAQ,GAAG,UAAC,IAAI,EAAE,EAAE,EAAE,CAAC,EAAE;EACpD,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAA,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE,YAAY,EAAC,EAAA;EAChD,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,EAAE,EAAE,YAAY,EAAC;CAChC;;;;"} \ No newline at end of file diff --git 
a/deps/acorn/acorn-walk/package.json b/deps/acorn/acorn-walk/package.json index ccd942df896c23..118bb0be2c72f5 100644 --- a/deps/acorn/acorn-walk/package.json +++ b/deps/acorn/acorn-walk/package.json @@ -6,10 +6,17 @@ "types": "dist/walk.d.ts", "module": "dist/walk.mjs", "exports": { - "import": "./dist/walk.mjs", - "require": "./dist/walk.js" + ".": [ + { + "import": "./dist/walk.mjs", + "require": "./dist/walk.js", + "default": "./dist/walk.js" + }, + "./dist/walk.js" + ], + "./package.json": "./package.json" }, - "version": "8.0.0", + "version": "8.1.0", "engines": {"node": ">=0.4.0"}, "maintainers": [ { diff --git a/deps/acorn/acorn/CHANGELOG.md b/deps/acorn/acorn/CHANGELOG.md index 6489d0c397b379..d69ad881253118 100644 --- a/deps/acorn/acorn/CHANGELOG.md +++ b/deps/acorn/acorn/CHANGELOG.md @@ -1,3 +1,79 @@ +## 8.4.1 (2021-06-24) + +### Bug fixes + +Fix a bug where `allowAwaitOutsideFunction` would allow `await` in class field initializers, and setting `ecmaVersion` to 13 or higher would allow top-level await in non-module sources. + +## 8.4.0 (2021-06-11) + +### New features + +A new option, `allowSuperOutsideMethod`, can be used to suppress the error when `super` is used in the wrong context. + +## 8.3.0 (2021-05-31) + +### New features + +Default `allowAwaitOutsideFunction` to true for ECMAScript 2022 an higher. + +Add support for the `p` ([indices](https://github.com/tc39/proposal-regexp-match-indices)) regexp flag. + +## 8.2.4 (2021-05-04) + +### Bug fixes + +Fix spec conformity in corner case 'for await (async of ...)'. + +## 8.2.3 (2021-05-04) + +### Bug fixes + +Fix an issue where the library couldn't parse 'for (async of ...)'. + +Fix a bug in UTF-16 decoding that would read characters incorrectly in some circumstances. + +## 8.2.2 (2021-04-29) + +### Bug fixes + +Fix a bug where a class field initialized to an async arrow function wouldn't allow await inside it. Same issue existed for generator arrow functions with yield. 
+ +## 8.2.1 (2021-04-24) + +### Bug fixes + +Fix a regression introduced in 8.2.0 where static or async class methods with keyword names fail to parse. + +## 8.2.0 (2021-04-24) + +### New features + +Add support for ES2022 class fields and private methods. + +## 8.1.1 (2021-04-12) + +### Various + +Stop shipping source maps in the NPM package. + +## 8.1.0 (2021-03-09) + +### Bug fixes + +Fix a spurious error in nested destructuring arrays. + +### New features + +Expose `allowAwaitOutsideFunction` in CLI interface. + +Make `allowImportExportAnywhere` also apply to `import.meta`. + +## 8.0.5 (2021-01-25) + +### Bug fixes + +Adjust package.json to work with Node 12.16.0 and 13.0-13.6. + ## 8.0.4 (2020-10-05) ### Bug fixes diff --git a/deps/acorn/acorn/LICENSE b/deps/acorn/acorn/LICENSE index cc5272c966db45..d6be6db2cfff57 100644 --- a/deps/acorn/acorn/LICENSE +++ b/deps/acorn/acorn/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (C) 2012-2018 by various contributors (see AUTHORS) +Copyright (C) 2012-2020 by various contributors (see AUTHORS) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/deps/acorn/acorn/README.md b/deps/acorn/acorn/README.md index 029086785fa256..f27a99444b6f3f 100644 --- a/deps/acorn/acorn/README.md +++ b/deps/acorn/acorn/README.md @@ -54,9 +54,10 @@ required): - **ecmaVersion**: Indicates the ECMAScript version to parse. Must be either 3, 5, 6 (or 2015), 7 (2016), 8 (2017), 9 (2018), 10 (2019), - 11 (2020), or 12 (2021, partial support), or `"latest"` (the latest - the library supports). This influences support for strict mode, the - set of reserved words, and support for new syntax features. + 11 (2020), 12 (2021, partial support), 13 (2022, partial support) + or `"latest"` (the latest the library supports). This influences + support for strict mode, the set of reserved words, and support + for new syntax features. 
**NOTE**: Only 'stage 4' (finalized) ECMAScript features are being implemented by Acorn. Other proposed new features must be @@ -90,13 +91,19 @@ required): - **allowImportExportEverywhere**: By default, `import` and `export` declarations can only appear at a program's top level. Setting this - option to `true` allows them anywhere where a statement is allowed. - -- **allowAwaitOutsideFunction**: By default, `await` expressions can - only appear inside `async` functions. Setting this option to + option to `true` allows them anywhere where a statement is allowed, + and also allows `import.meta` expressions to appear in scripts + (when `sourceType` is not `"module"`). + +- **allowAwaitOutsideFunction**: If `false`, `await` expressions can + only appear inside `async` functions. Defaults to `true` for + `ecmaVersion` 2022 and later, `false` for lower versions. Setting this option to `true` allows to have top-level `await` expressions. They are still not allowed in non-`async` functions, though. +- **allowSuperOutsideMethod**: By default, `super` outside a method + raises an error. Set this to `true` to accept such code. + - **allowHashBang**: When this is enabled (off by default), if the code starts with the characters `#!` (as in a shellscript), the first line will be treated as a comment. @@ -250,6 +257,9 @@ options: - `--allow-hash-bang`: If the code starts with the characters #! (as in a shellscript), the first line will be treated as a comment. +- `--allow-await-outside-function`: Allows top-level `await` expressions. + See the `allowAwaitOutsideFunction` option for more information. + - `--compact`: No whitespace is used in the AST output. - `--silent`: Do not output the AST, just return the exit status. 
diff --git a/deps/acorn/acorn/dist/acorn.d.ts b/deps/acorn/acorn/dist/acorn.d.ts index f788637e88eb6e..8e8fbbcb426c3c 100644 --- a/deps/acorn/acorn/dist/acorn.d.ts +++ b/deps/acorn/acorn/dist/acorn.d.ts @@ -12,7 +12,7 @@ declare namespace acorn { } interface Options { - ecmaVersion: 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020 | 2021 | 'latest' + ecmaVersion: 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020 | 2021 | 2022 | 'latest' sourceType?: 'script' | 'module' onInsertedSemicolon?: (lastTokEnd: number, lastTokEndLoc?: Position) => void onTrailingComma?: (lastTokEnd: number, lastTokEndLoc?: Position) => void @@ -20,6 +20,7 @@ declare namespace acorn { allowReturnOutsideFunction?: boolean allowImportExportEverywhere?: boolean allowAwaitOutsideFunction?: boolean + allowSuperOutsideMethod?: boolean allowHashBang?: boolean locations?: boolean onToken?: ((token: Token) => any) | Token[] @@ -88,6 +89,7 @@ declare namespace acorn { regexp: TokenType string: TokenType name: TokenType + privateId: TokenType eof: TokenType bracketL: TokenType bracketR: TokenType diff --git a/deps/acorn/acorn/dist/acorn.js b/deps/acorn/acorn/dist/acorn.js index d9b5920e43acfe..af24e36cf16d2e 100644 --- a/deps/acorn/acorn/dist/acorn.js +++ b/deps/acorn/acorn/dist/acorn.js @@ -152,6 +152,7 @@ regexp: new TokenType("regexp", startsExpr), string: new TokenType("string", startsExpr), name: new TokenType("name", startsExpr), + privateId: new TokenType("privateId", startsExpr), eof: new TokenType("eof"), // Punctuation token types. @@ -320,9 +321,10 @@ var defaultOptions = { // `ecmaVersion` indicates the ECMAScript version to parse. Must be // either 3, 5, 6 (or 2015), 7 (2016), 8 (2017), 9 (2018), 10 - // (2019), 11 (2020), 12 (2021), or `"latest"` (the latest version - // the library supports). This influences support for strict mode, - // the set of reserved words, and support for new syntax features. 
+ // (2019), 11 (2020), 12 (2021), 13 (2022), or `"latest"` (the + // latest version the library supports). This influences support + // for strict mode, the set of reserved words, and support for + // new syntax features. ecmaVersion: null, // `sourceType` indicates the mode the code should be parsed in. // Can be either `"script"` or `"module"`. This influences global @@ -346,11 +348,16 @@ // error. allowReturnOutsideFunction: false, // When enabled, import/export statements are not constrained to - // appearing at the top of the program. + // appearing at the top of the program, and an import.meta expression + // in a script isn't considered an error. allowImportExportEverywhere: false, + // By default, await identifiers are allowed to appear at the top-level scope only if ecmaVersion >= 2022. // When enabled, await identifiers are allowed to appear at the top-level scope, // but they are still not allowed in non-async functions. - allowAwaitOutsideFunction: false, + allowAwaitOutsideFunction: null, + // When enabled, super identifiers are not constrained to + // appearing in methods and do not raise an error when they appear elsewhere. + allowSuperOutsideMethod: null, // When enabled, hashbang directive in the beginning of file // is allowed and treated as a line comment. allowHashBang: false, @@ -537,13 +544,14 @@ // Used to signify the start of a potential arrow function this.potentialArrowAt = -1; + this.potentialArrowInForAwait = false; // Positions to delayed-check that yield/await does not exist in default parameters. this.yieldPos = this.awaitPos = this.awaitIdentPos = 0; // Labels in scope. this.labels = []; // Thus-far undefined exports. - this.undefinedExports = {}; + this.undefinedExports = Object.create(null); // If enabled, skip leading hashbang line. if (this.pos === 0 && options.allowHashBang && this.input.slice(0, 2) === "#!") @@ -555,9 +563,14 @@ // For RegExp validation this.regexpState = null; + + // The stack of private names. 
+ // Each element has two properties: 'declared' and 'used'. + // When it exited from the outermost class definition, all used private names must be declared. + this.privateNameStack = []; }; - var prototypeAccessors = { inFunction: { configurable: true },inGenerator: { configurable: true },inAsync: { configurable: true },allowSuper: { configurable: true },allowDirectSuper: { configurable: true },treatFunctionsAsVar: { configurable: true },inNonArrowFunction: { configurable: true } }; + var prototypeAccessors = { inFunction: { configurable: true },inGenerator: { configurable: true },inAsync: { configurable: true },canAwait: { configurable: true },allowSuper: { configurable: true },allowDirectSuper: { configurable: true },treatFunctionsAsVar: { configurable: true },inNonArrowFunction: { configurable: true } }; Parser.prototype.parse = function parse () { var node = this.options.program || this.startNode(); @@ -566,12 +579,30 @@ }; prototypeAccessors.inFunction.get = function () { return (this.currentVarScope().flags & SCOPE_FUNCTION) > 0 }; - prototypeAccessors.inGenerator.get = function () { return (this.currentVarScope().flags & SCOPE_GENERATOR) > 0 }; - prototypeAccessors.inAsync.get = function () { return (this.currentVarScope().flags & SCOPE_ASYNC) > 0 }; - prototypeAccessors.allowSuper.get = function () { return (this.currentThisScope().flags & SCOPE_SUPER) > 0 }; + prototypeAccessors.inGenerator.get = function () { return (this.currentVarScope().flags & SCOPE_GENERATOR) > 0 && !this.currentVarScope().inClassFieldInit }; + prototypeAccessors.inAsync.get = function () { return (this.currentVarScope().flags & SCOPE_ASYNC) > 0 && !this.currentVarScope().inClassFieldInit }; + prototypeAccessors.canAwait.get = function () { + for (var i = this.scopeStack.length - 1; i >= 0; i--) { + var scope = this.scopeStack[i]; + if (scope.inClassFieldInit) { return false } + if (scope.flags & SCOPE_FUNCTION) { return (scope.flags & SCOPE_ASYNC) > 0 } + } + return (this.inModule 
&& this.options.ecmaVersion >= 13) || this.options.allowAwaitOutsideFunction + }; + prototypeAccessors.allowSuper.get = function () { + var ref = this.currentThisScope(); + var flags = ref.flags; + var inClassFieldInit = ref.inClassFieldInit; + return (flags & SCOPE_SUPER) > 0 || inClassFieldInit || this.options.allowSuperOutsideMethod + }; prototypeAccessors.allowDirectSuper.get = function () { return (this.currentThisScope().flags & SCOPE_DIRECT_SUPER) > 0 }; prototypeAccessors.treatFunctionsAsVar.get = function () { return this.treatFunctionsAsVarInScope(this.currentScope()) }; - prototypeAccessors.inNonArrowFunction.get = function () { return (this.currentThisScope().flags & SCOPE_FUNCTION) > 0 }; + prototypeAccessors.inNonArrowFunction.get = function () { + var ref = this.currentThisScope(); + var flags = ref.flags; + var inClassFieldInit = ref.inClassFieldInit; + return (flags & SCOPE_FUNCTION) > 0 || inClassFieldInit + }; Parser.extend = function extend () { var plugins = [], len = arguments.length; @@ -757,7 +788,7 @@ // to its body instead of creating a new node. pp$1.parseTopLevel = function(node) { - var exports = {}; + var exports = Object.create(null); if (!node.body) { node.body = []; } while (this.type !== types.eof) { var stmt = this.parseStatement(null, true, exports); @@ -787,13 +818,14 @@ // Statement) is allowed here. If context is not empty then only a Statement // is allowed. However, `let [` is an explicit negative lookahead for // ExpressionStatement, so special-case it first. 
- if (nextCh === 91) { return true } // '[' + if (nextCh === 91 || nextCh === 92 || nextCh > 0xd7ff && nextCh < 0xdc00) { return true } // '[', '/', astral if (context) { return false } if (nextCh === 123) { return true } // '{' if (isIdentifierStart(nextCh, true)) { var pos = next + 1; - while (isIdentifierChar(this.input.charCodeAt(pos), true)) { ++pos; } + while (isIdentifierChar(nextCh = this.input.charCodeAt(pos), true)) { ++pos; } + if (nextCh === 92 || nextCh > 0xd7ff && nextCh < 0xdc00) { return true } var ident = this.input.slice(next, pos); if (!keywordRelationalOperator.test(ident)) { return true } } @@ -809,10 +841,11 @@ skipWhiteSpace.lastIndex = this.pos; var skip = skipWhiteSpace.exec(this.input); - var next = this.pos + skip[0].length; + var next = this.pos + skip[0].length, after; return !lineBreak.test(this.input.slice(this.pos, next)) && this.input.slice(next, next + 8) === "function" && - (next + 8 === this.input.length || !isIdentifierChar(this.input.charAt(next + 8))) + (next + 8 === this.input.length || + !(isIdentifierChar(after = this.input.charCodeAt(next + 8)) || after > 0xd7ff && after < 0xdc00)) }; // Parse a single statement. @@ -952,7 +985,7 @@ pp$1.parseForStatement = function(node) { this.next(); - var awaitAt = (this.options.ecmaVersion >= 9 && (this.inAsync || (!this.inFunction && this.options.allowAwaitOutsideFunction)) && this.eatContextual("await")) ? this.lastTokStart : -1; + var awaitAt = (this.options.ecmaVersion >= 9 && this.canAwait && this.eatContextual("await")) ? this.lastTokStart : -1; this.labels.push(loopLabel); this.enterScope(0); this.expect(types.parenL); @@ -978,7 +1011,7 @@ return this.parseFor(node, init$1) } var refDestructuringErrors = new DestructuringErrors; - var init = this.parseExpression(true, refDestructuringErrors); + var init = this.parseExpression(awaitAt > -1 ? 
"await" : true, refDestructuringErrors); if (this.type === types._in || (this.options.ecmaVersion >= 6 && this.isContextual("of"))) { if (this.options.ecmaVersion >= 9) { if (this.type === types._in) { @@ -1324,6 +1357,7 @@ this.parseClassId(node, isStatement); this.parseClassSuper(node); + var privateNameMap = this.enterClassBody(); var classBody = this.startNode(); var hadConstructor = false; classBody.body = []; @@ -1335,77 +1369,154 @@ if (element.type === "MethodDefinition" && element.kind === "constructor") { if (hadConstructor) { this.raise(element.start, "Duplicate constructor in the same class"); } hadConstructor = true; + } else if (element.key.type === "PrivateIdentifier" && isPrivateNameConflicted(privateNameMap, element)) { + this.raiseRecoverable(element.key.start, ("Identifier '#" + (element.key.name) + "' has already been declared")); } } } this.strict = oldStrict; this.next(); node.body = this.finishNode(classBody, "ClassBody"); + this.exitClassBody(); return this.finishNode(node, isStatement ? 
"ClassDeclaration" : "ClassExpression") }; pp$1.parseClassElement = function(constructorAllowsSuper) { - var this$1 = this; - if (this.eat(types.semi)) { return null } - var method = this.startNode(); - var tryContextual = function (k, noLineBreak) { - if ( noLineBreak === void 0 ) noLineBreak = false; - - var start = this$1.start, startLoc = this$1.startLoc; - if (!this$1.eatContextual(k)) { return false } - if (this$1.type !== types.parenL && (!noLineBreak || !this$1.canInsertSemicolon())) { return true } - if (method.key) { this$1.unexpected(); } - method.computed = false; - method.key = this$1.startNodeAt(start, startLoc); - method.key.name = k; - this$1.finishNode(method.key, "Identifier"); - return false - }; - - method.kind = "method"; - method.static = tryContextual("static"); - var isGenerator = this.eat(types.star); + var ecmaVersion = this.options.ecmaVersion; + var node = this.startNode(); + var keyName = ""; + var isGenerator = false; var isAsync = false; - if (!isGenerator) { - if (this.options.ecmaVersion >= 8 && tryContextual("async", true)) { + var kind = "method"; + + // Parse modifiers + node.static = false; + if (this.eatContextual("static")) { + if (this.isClassElementNameStart() || this.type === types.star) { + node.static = true; + } else { + keyName = "static"; + } + } + if (!keyName && ecmaVersion >= 8 && this.eatContextual("async")) { + if ((this.isClassElementNameStart() || this.type === types.star) && !this.canInsertSemicolon()) { isAsync = true; - isGenerator = this.options.ecmaVersion >= 9 && this.eat(types.star); - } else if (tryContextual("get")) { - method.kind = "get"; - } else if (tryContextual("set")) { - method.kind = "set"; + } else { + keyName = "async"; + } + } + if (!keyName && (ecmaVersion >= 9 || !isAsync) && this.eat(types.star)) { + isGenerator = true; + } + if (!keyName && !isAsync && !isGenerator) { + var lastValue = this.value; + if (this.eatContextual("get") || this.eatContextual("set")) { + if 
(this.isClassElementNameStart()) { + kind = lastValue; + } else { + keyName = lastValue; + } } } - if (!method.key) { this.parsePropertyName(method); } + + // Parse element name + if (keyName) { + // 'async', 'get', 'set', or 'static' were not a keyword contextually. + // The last token is any of those. Make it the element name. + node.computed = false; + node.key = this.startNodeAt(this.lastTokStart, this.lastTokStartLoc); + node.key.name = keyName; + this.finishNode(node.key, "Identifier"); + } else { + this.parseClassElementName(node); + } + + // Parse element value + if (ecmaVersion < 13 || this.type === types.parenL || kind !== "method" || isGenerator || isAsync) { + var isConstructor = !node.static && checkKeyName(node, "constructor"); + var allowsDirectSuper = isConstructor && constructorAllowsSuper; + // Couldn't move this check into the 'parseClassMethod' method for backward compatibility. + if (isConstructor && kind !== "method") { this.raise(node.key.start, "Constructor can't have get/set modifier"); } + node.kind = isConstructor ? 
"constructor" : kind; + this.parseClassMethod(node, isGenerator, isAsync, allowsDirectSuper); + } else { + this.parseClassField(node); + } + + return node + }; + + pp$1.isClassElementNameStart = function() { + return ( + this.type === types.name || + this.type === types.privateId || + this.type === types.num || + this.type === types.string || + this.type === types.bracketL || + this.type.keyword + ) + }; + + pp$1.parseClassElementName = function(element) { + if (this.type === types.privateId) { + if (this.value === "constructor") { + this.raise(this.start, "Classes can't have an element named '#constructor'"); + } + element.computed = false; + element.key = this.parsePrivateIdent(); + } else { + this.parsePropertyName(element); + } + }; + + pp$1.parseClassMethod = function(method, isGenerator, isAsync, allowsDirectSuper) { + // Check key and flags var key = method.key; - var allowsDirectSuper = false; - if (!method.computed && !method.static && (key.type === "Identifier" && key.name === "constructor" || - key.type === "Literal" && key.value === "constructor")) { - if (method.kind !== "method") { this.raise(key.start, "Constructor can't have get/set modifier"); } + if (method.kind === "constructor") { if (isGenerator) { this.raise(key.start, "Constructor can't be a generator"); } if (isAsync) { this.raise(key.start, "Constructor can't be an async method"); } - method.kind = "constructor"; - allowsDirectSuper = constructorAllowsSuper; - } else if (method.static && key.type === "Identifier" && key.name === "prototype") { + } else if (method.static && checkKeyName(method, "prototype")) { this.raise(key.start, "Classes may not have a static property named prototype"); } - this.parseClassMethod(method, isGenerator, isAsync, allowsDirectSuper); - if (method.kind === "get" && method.value.params.length !== 0) - { this.raiseRecoverable(method.value.start, "getter should have no params"); } - if (method.kind === "set" && method.value.params.length !== 1) - { 
this.raiseRecoverable(method.value.start, "setter should have exactly one param"); } - if (method.kind === "set" && method.value.params[0].type === "RestElement") - { this.raiseRecoverable(method.value.params[0].start, "Setter cannot use rest params"); } - return method - }; - pp$1.parseClassMethod = function(method, isGenerator, isAsync, allowsDirectSuper) { - method.value = this.parseMethod(isGenerator, isAsync, allowsDirectSuper); + // Parse value + var value = method.value = this.parseMethod(isGenerator, isAsync, allowsDirectSuper); + + // Check value + if (method.kind === "get" && value.params.length !== 0) + { this.raiseRecoverable(value.start, "getter should have no params"); } + if (method.kind === "set" && value.params.length !== 1) + { this.raiseRecoverable(value.start, "setter should have exactly one param"); } + if (method.kind === "set" && value.params[0].type === "RestElement") + { this.raiseRecoverable(value.params[0].start, "Setter cannot use rest params"); } + return this.finishNode(method, "MethodDefinition") }; + pp$1.parseClassField = function(field) { + if (checkKeyName(field, "constructor")) { + this.raise(field.key.start, "Classes can't have a field named 'constructor'"); + } else if (field.static && checkKeyName(field, "prototype")) { + this.raise(field.key.start, "Classes can't have a static field named 'prototype'"); + } + + if (this.eat(types.eq)) { + // To raise SyntaxError if 'arguments' exists in the initializer. + var scope = this.currentThisScope(); + var inClassFieldInit = scope.inClassFieldInit; + scope.inClassFieldInit = true; + field.value = this.parseMaybeAssign(); + scope.inClassFieldInit = inClassFieldInit; + } else { + field.value = null; + } + this.semicolon(); + + return this.finishNode(field, "PropertyDefinition") + }; + pp$1.parseClassId = function(node, isStatement) { if (this.type === types.name) { node.id = this.parseIdent(); @@ -1422,6 +1533,65 @@ node.superClass = this.eat(types._extends) ? 
this.parseExprSubscripts() : null; }; + pp$1.enterClassBody = function() { + var element = {declared: Object.create(null), used: []}; + this.privateNameStack.push(element); + return element.declared + }; + + pp$1.exitClassBody = function() { + var ref = this.privateNameStack.pop(); + var declared = ref.declared; + var used = ref.used; + var len = this.privateNameStack.length; + var parent = len === 0 ? null : this.privateNameStack[len - 1]; + for (var i = 0; i < used.length; ++i) { + var id = used[i]; + if (!has(declared, id.name)) { + if (parent) { + parent.used.push(id); + } else { + this.raiseRecoverable(id.start, ("Private field '#" + (id.name) + "' must be declared in an enclosing class")); + } + } + } + }; + + function isPrivateNameConflicted(privateNameMap, element) { + var name = element.key.name; + var curr = privateNameMap[name]; + + var next = "true"; + if (element.type === "MethodDefinition" && (element.kind === "get" || element.kind === "set")) { + next = (element.static ? "s" : "i") + element.kind; + } + + // `class { get #a(){}; static set #a(_){} }` is also conflict. + if ( + curr === "iget" && next === "iset" || + curr === "iset" && next === "iget" || + curr === "sget" && next === "sset" || + curr === "sset" && next === "sget" + ) { + privateNameMap[name] = "true"; + return false + } else if (!curr) { + privateNameMap[name] = next; + return false + } else { + return true + } + } + + function checkKeyName(node, name) { + var computed = node.computed; + var key = node.key; + return !computed && ( + key.type === "Identifier" && key.name === name || + key.type === "Literal" && key.value === name + ) + } + // Parses module export declaration. pp$1.parseExport = function(node, exports) { @@ -2040,13 +2210,13 @@ // and object pattern might appear (so it's possible to raise // delayed syntax error at correct position). 
- pp$3.parseExpression = function(noIn, refDestructuringErrors) { + pp$3.parseExpression = function(forInit, refDestructuringErrors) { var startPos = this.start, startLoc = this.startLoc; - var expr = this.parseMaybeAssign(noIn, refDestructuringErrors); + var expr = this.parseMaybeAssign(forInit, refDestructuringErrors); if (this.type === types.comma) { var node = this.startNodeAt(startPos, startLoc); node.expressions = [expr]; - while (this.eat(types.comma)) { node.expressions.push(this.parseMaybeAssign(noIn, refDestructuringErrors)); } + while (this.eat(types.comma)) { node.expressions.push(this.parseMaybeAssign(forInit, refDestructuringErrors)); } return this.finishNode(node, "SequenceExpression") } return expr @@ -2055,9 +2225,9 @@ // Parse an assignment expression. This includes applications of // operators like `+=`. - pp$3.parseMaybeAssign = function(noIn, refDestructuringErrors, afterLeftParse) { + pp$3.parseMaybeAssign = function(forInit, refDestructuringErrors, afterLeftParse) { if (this.isContextual("yield")) { - if (this.inGenerator) { return this.parseYield(noIn) } + if (this.inGenerator) { return this.parseYield(forInit) } // The tokenizer will assume an expression is allowed after // `yield`, but this isn't that kind of yield else { this.exprAllowed = false; } @@ -2074,9 +2244,11 @@ } var startPos = this.start, startLoc = this.startLoc; - if (this.type === types.parenL || this.type === types.name) - { this.potentialArrowAt = this.start; } - var left = this.parseMaybeConditional(noIn, refDestructuringErrors); + if (this.type === types.parenL || this.type === types.name) { + this.potentialArrowAt = this.start; + this.potentialArrowInForAwait = forInit === "await"; + } + var left = this.parseMaybeConditional(forInit, refDestructuringErrors); if (afterLeftParse) { left = afterLeftParse.call(this, left, startPos, startLoc); } if (this.type.isAssign) { var node = this.startNodeAt(startPos, startLoc); @@ -2094,7 +2266,7 @@ { this.checkLValSimple(left); } 
node.left = left; this.next(); - node.right = this.parseMaybeAssign(noIn); + node.right = this.parseMaybeAssign(forInit); return this.finishNode(node, "AssignmentExpression") } else { if (ownDestructuringErrors) { this.checkExpressionErrors(refDestructuringErrors, true); } @@ -2106,16 +2278,16 @@ // Parse a ternary conditional (`?:`) operator. - pp$3.parseMaybeConditional = function(noIn, refDestructuringErrors) { + pp$3.parseMaybeConditional = function(forInit, refDestructuringErrors) { var startPos = this.start, startLoc = this.startLoc; - var expr = this.parseExprOps(noIn, refDestructuringErrors); + var expr = this.parseExprOps(forInit, refDestructuringErrors); if (this.checkExpressionErrors(refDestructuringErrors)) { return expr } if (this.eat(types.question)) { var node = this.startNodeAt(startPos, startLoc); node.test = expr; node.consequent = this.parseMaybeAssign(); this.expect(types.colon); - node.alternate = this.parseMaybeAssign(noIn); + node.alternate = this.parseMaybeAssign(forInit); return this.finishNode(node, "ConditionalExpression") } return expr @@ -2123,11 +2295,11 @@ // Start the precedence parser. - pp$3.parseExprOps = function(noIn, refDestructuringErrors) { + pp$3.parseExprOps = function(forInit, refDestructuringErrors) { var startPos = this.start, startLoc = this.startLoc; var expr = this.parseMaybeUnary(refDestructuringErrors, false); if (this.checkExpressionErrors(refDestructuringErrors)) { return expr } - return expr.start === startPos && expr.type === "ArrowFunctionExpression" ? expr : this.parseExprOp(expr, startPos, startLoc, -1, noIn) + return expr.start === startPos && expr.type === "ArrowFunctionExpression" ? expr : this.parseExprOp(expr, startPos, startLoc, -1, forInit) }; // Parse binary operators with the operator precedence parsing @@ -2136,9 +2308,9 @@ // defer further parser to one of its callers when it encounters an // operator that has a lower precedence than the set it is parsing. 
- pp$3.parseExprOp = function(left, leftStartPos, leftStartLoc, minPrec, noIn) { + pp$3.parseExprOp = function(left, leftStartPos, leftStartLoc, minPrec, forInit) { var prec = this.type.binop; - if (prec != null && (!noIn || this.type !== types._in)) { + if (prec != null && (!forInit || this.type !== types._in)) { if (prec > minPrec) { var logical = this.type === types.logicalOR || this.type === types.logicalAND; var coalesce = this.type === types.coalesce; @@ -2150,12 +2322,12 @@ var op = this.value; this.next(); var startPos = this.start, startLoc = this.startLoc; - var right = this.parseExprOp(this.parseMaybeUnary(null, false), startPos, startLoc, prec, noIn); + var right = this.parseExprOp(this.parseMaybeUnary(null, false), startPos, startLoc, prec, forInit); var node = this.buildBinary(leftStartPos, leftStartLoc, left, right, op, logical || coalesce); if ((logical && this.type === types.coalesce) || (coalesce && (this.type === types.logicalOR || this.type === types.logicalAND))) { this.raiseRecoverable(this.start, "Logical expressions and coalesce expressions cannot be mixed. Wrap either by parentheses"); } - return this.parseExprOp(node, leftStartPos, leftStartLoc, minPrec, noIn) + return this.parseExprOp(node, leftStartPos, leftStartLoc, minPrec, forInit) } } return left @@ -2171,9 +2343,9 @@ // Parse unary operators, both prefix and postfix. 
- pp$3.parseMaybeUnary = function(refDestructuringErrors, sawUnary) { + pp$3.parseMaybeUnary = function(refDestructuringErrors, sawUnary, incDec) { var startPos = this.start, startLoc = this.startLoc, expr; - if (this.isContextual("await") && (this.inAsync || (!this.inFunction && this.options.allowAwaitOutsideFunction))) { + if (this.isContextual("await") && this.canAwait) { expr = this.parseAwait(); sawUnary = true; } else if (this.type.prefix) { @@ -2181,12 +2353,14 @@ node.operator = this.value; node.prefix = true; this.next(); - node.argument = this.parseMaybeUnary(null, true); + node.argument = this.parseMaybeUnary(null, true, update); this.checkExpressionErrors(refDestructuringErrors, true); if (update) { this.checkLValSimple(node.argument); } else if (this.strict && node.operator === "delete" && node.argument.type === "Identifier") { this.raiseRecoverable(node.start, "Deleting local variable in strict mode"); } + else if (node.operator === "delete" && isPrivateFieldAccess(node.argument)) + { this.raiseRecoverable(node.start, "Private fields can not be deleted"); } else { sawUnary = true; } expr = this.finishNode(node, update ? "UpdateExpression" : "UnaryExpression"); } else { @@ -2203,12 +2377,23 @@ } } - if (!sawUnary && this.eat(types.starstar)) - { return this.buildBinary(startPos, startLoc, expr, this.parseMaybeUnary(null, false), "**", false) } - else - { return expr } + if (!incDec && this.eat(types.starstar)) { + if (sawUnary) + { this.unexpected(this.lastTokStart); } + else + { return this.buildBinary(startPos, startLoc, expr, this.parseMaybeUnary(null, false), "**", false) } + } else { + return expr + } }; + function isPrivateFieldAccess(node) { + return ( + node.type === "MemberExpression" && node.property.type === "PrivateIdentifier" || + node.type === "ChainExpression" && isPrivateFieldAccess(node.expression) + ) + } + // Parse call, dot, and `[]`-subscript expressions. 
pp$3.parseExprSubscripts = function(refDestructuringErrors) { @@ -2220,6 +2405,7 @@ if (refDestructuringErrors && result.type === "MemberExpression") { if (refDestructuringErrors.parenthesizedAssign >= result.start) { refDestructuringErrors.parenthesizedAssign = -1; } if (refDestructuringErrors.parenthesizedBind >= result.start) { refDestructuringErrors.parenthesizedBind = -1; } + if (refDestructuringErrors.trailingComma >= result.start) { refDestructuringErrors.trailingComma = -1; } } return result }; @@ -2256,9 +2442,15 @@ if (computed || (optional && this.type !== types.parenL && this.type !== types.backQuote) || this.eat(types.dot)) { var node = this.startNodeAt(startPos, startLoc); node.object = base; - node.property = computed ? this.parseExpression() : this.parseIdent(this.options.allowReserved !== "never"); + if (computed) { + node.property = this.parseExpression(); + this.expect(types.bracketR); + } else if (this.type === types.privateId && base.type !== "Super") { + node.property = this.parsePrivateIdent(); + } else { + node.property = this.parseIdent(this.options.allowReserved !== "never"); + } node.computed = !!computed; - if (computed) { this.expect(types.bracketR); } if (optionalSupported) { node.optional = optional; } @@ -2344,7 +2536,8 @@ if (canBeArrow && !this.canInsertSemicolon()) { if (this.eat(types.arrow)) { return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), [id], false) } - if (this.options.ecmaVersion >= 8 && id.name === "async" && this.type === types.name && !containsEsc) { + if (this.options.ecmaVersion >= 8 && id.name === "async" && this.type === types.name && !containsEsc && + (!this.potentialArrowInForAwait || this.value !== "of" || this.containsEsc)) { id = this.parseIdent(false); if (this.canInsertSemicolon() || !this.eat(types.arrow)) { this.unexpected(); } @@ -2462,7 +2655,7 @@ { this.raiseRecoverable(node.property.start, "The only valid meta property for import is 'import.meta'"); } if (containsEsc) { 
this.raiseRecoverable(node.start, "'import.meta' must not contain escaped characters"); } - if (this.options.sourceType !== "module") + if (this.options.sourceType !== "module" && !this.options.allowImportExportEverywhere) { this.raiseRecoverable(node.start, "Cannot use 'import.meta' outside a module"); } return this.finishNode(node, "MetaProperty") @@ -2883,7 +3076,7 @@ // or "arguments" and duplicate parameters. pp$3.checkParams = function(node, allowDuplicates) { - var nameHash = {}; + var nameHash = Object.create(null); for (var i = 0, list = node.params; i < list.length; i += 1) { var param = list[i]; @@ -2930,6 +3123,8 @@ { this.raiseRecoverable(start, "Cannot use 'yield' as identifier inside a generator"); } if (this.inAsync && name === "await") { this.raiseRecoverable(start, "Cannot use 'await' as identifier inside an async function"); } + if (this.currentThisScope().inClassFieldInit && name === "arguments") + { this.raiseRecoverable(start, "Cannot use 'arguments' in class field initializer"); } if (this.keywords.test(name)) { this.raise(start, ("Unexpected keyword '" + name + "'")); } if (this.options.ecmaVersion < 6 && @@ -2974,9 +3169,29 @@ return node }; + pp$3.parsePrivateIdent = function() { + var node = this.startNode(); + if (this.type === types.privateId) { + node.name = this.value; + } else { + this.unexpected(); + } + this.next(); + this.finishNode(node, "PrivateIdentifier"); + + // For validating existence + if (this.privateNameStack.length === 0) { + this.raise(node.start, ("Private field '#" + (node.name) + "' must be declared in an enclosing class")); + } else { + this.privateNameStack[this.privateNameStack.length - 1].used.push(node); + } + + return node + }; + // Parses yield expression inside generator. 
- pp$3.parseYield = function(noIn) { + pp$3.parseYield = function(forInit) { if (!this.yieldPos) { this.yieldPos = this.start; } var node = this.startNode(); @@ -2986,7 +3201,7 @@ node.argument = null; } else { node.delegate = this.eat(types.star); - node.argument = this.parseMaybeAssign(noIn); + node.argument = this.parseMaybeAssign(forInit); } return this.finishNode(node, "YieldExpression") }; @@ -3034,6 +3249,8 @@ this.lexical = []; // A list of lexically-declared FunctionDeclaration names in the current lexical scope this.functions = []; + // A switch to disallow the identifier reference 'arguments' + this.inClassFieldInit = false; }; // The functions in this module keep track of declared variables in the current scope in order to detect duplicate variable names. @@ -3365,7 +3582,7 @@ var RegExpValidationState = function RegExpValidationState(parser) { this.parser = parser; - this.validFlags = "gim" + (parser.options.ecmaVersion >= 6 ? "uy" : "") + (parser.options.ecmaVersion >= 9 ? "s" : ""); + this.validFlags = "gim" + (parser.options.ecmaVersion >= 6 ? "uy" : "") + (parser.options.ecmaVersion >= 9 ? "s" : "") + (parser.options.ecmaVersion >= 13 ? "d" : ""); this.unicodeProperties = data[parser.options.ecmaVersion >= 12 ? 12 : parser.options.ecmaVersion]; this.source = ""; this.flags = ""; @@ -4510,9 +4727,9 @@ pp$9.fullCharCodeAtPos = function() { var code = this.input.charCodeAt(this.pos); - if (code <= 0xd7ff || code >= 0xe000) { return code } + if (code <= 0xd7ff || code >= 0xdc00) { return code } var next = this.input.charCodeAt(this.pos + 1); - return (code << 10) + next - 0x35fdc00 + return next <= 0xdbff || next >= 0xe000 ? 
code : (code << 10) + next - 0x35fdc00 }; pp$9.skipBlockComment = function() { @@ -4731,6 +4948,20 @@ return this.finishOp(types.question, 1) }; + pp$9.readToken_numberSign = function() { // '#' + var ecmaVersion = this.options.ecmaVersion; + var code = 35; // '#' + if (ecmaVersion >= 13) { + ++this.pos; + code = this.fullCharCodeAtPos(); + if (isIdentifierStart(code, true) || code === 92 /* '\' */) { + return this.finishToken(types.privateId, this.readWord1()) + } + } + + this.raise(this.pos, "Unexpected character '" + codePointToString$1(code) + "'"); + }; + pp$9.getTokenFromCode = function(code) { switch (code) { // The interpretation of a dot depends on whether it is followed @@ -4802,6 +5033,9 @@ case 126: // '~' return this.finishOp(types.prefix, 1) + + case 35: // '#' + return this.readToken_numberSign() } this.raise(this.pos, "Unexpected character '" + codePointToString$1(code) + "'"); @@ -5213,7 +5447,7 @@ // Acorn is a tiny, fast JavaScript parser written in JavaScript. - var version = "8.0.4"; + var version = "8.4.1"; Parser.acorn = { Parser: Parser, @@ -5289,4 +5523,3 @@ Object.defineProperty(exports, '__esModule', { value: true }); }))); -//# sourceMappingURL=acorn.js.map diff --git a/deps/acorn/acorn/dist/acorn.js.map b/deps/acorn/acorn/dist/acorn.js.map deleted file mode 100644 index 7781620d64592d..00000000000000 --- a/deps/acorn/acorn/dist/acorn.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"acorn.js","sources":["../src/identifier.js","../src/tokentype.js","../src/whitespace.js","../src/util.js","../src/locutil.js","../src/options.js","../src/scopeflags.js","../src/state.js","../src/parseutil.js","../src/statement.js","../src/lval.js","../src/expression.js","../src/location.js","../src/scope.js","../src/node.js","../src/tokencontext.js","../src/unicode-property-data.js","../src/regexp.js","../src/tokenize.js","../src/index.js"],"sourcesContent":["// Reserved word lists for various dialects of the language\n\nexport const reservedWords = 
{\n 3: \"abstract boolean byte char class double enum export extends final float goto implements import int interface long native package private protected public short static super synchronized throws transient volatile\",\n 5: \"class enum extends super const export import\",\n 6: \"enum\",\n strict: \"implements interface let package private protected public static yield\",\n strictBind: \"eval arguments\"\n}\n\n// And the keywords\n\nconst ecma5AndLessKeywords = \"break case catch continue debugger default do else finally for function if return switch throw try var while with null true false instanceof typeof void delete new in this\"\n\nexport const keywords = {\n 5: ecma5AndLessKeywords,\n \"5module\": ecma5AndLessKeywords + \" export import\",\n 6: ecma5AndLessKeywords + \" const class extends export import super\"\n}\n\nexport const keywordRelationalOperator = /^in(stanceof)?$/\n\n// ## Character categories\n\n// Big ugly regular expressions that match characters in the\n// whitespace, identifier, and identifier-start categories. 
These\n// are only applied when a character is found to actually have a\n// code point above 128.\n// Generated by `bin/generate-identifier-regex.js`.\nlet nonASCIIidentifierStartChars = \"\\xaa\\xb5\\xba\\xc0-\\xd6\\xd8-\\xf6\\xf8-\\u02c1\\u02c6-\\u02d1\\u02e0-\\u02e4\\u02ec\\u02ee\\u0370-\\u0374\\u0376\\u0377\\u037a-\\u037d\\u037f\\u0386\\u0388-\\u038a\\u038c\\u038e-\\u03a1\\u03a3-\\u03f5\\u03f7-\\u0481\\u048a-\\u052f\\u0531-\\u0556\\u0559\\u0560-\\u0588\\u05d0-\\u05ea\\u05ef-\\u05f2\\u0620-\\u064a\\u066e\\u066f\\u0671-\\u06d3\\u06d5\\u06e5\\u06e6\\u06ee\\u06ef\\u06fa-\\u06fc\\u06ff\\u0710\\u0712-\\u072f\\u074d-\\u07a5\\u07b1\\u07ca-\\u07ea\\u07f4\\u07f5\\u07fa\\u0800-\\u0815\\u081a\\u0824\\u0828\\u0840-\\u0858\\u0860-\\u086a\\u08a0-\\u08b4\\u08b6-\\u08c7\\u0904-\\u0939\\u093d\\u0950\\u0958-\\u0961\\u0971-\\u0980\\u0985-\\u098c\\u098f\\u0990\\u0993-\\u09a8\\u09aa-\\u09b0\\u09b2\\u09b6-\\u09b9\\u09bd\\u09ce\\u09dc\\u09dd\\u09df-\\u09e1\\u09f0\\u09f1\\u09fc\\u0a05-\\u0a0a\\u0a0f\\u0a10\\u0a13-\\u0a28\\u0a2a-\\u0a30\\u0a32\\u0a33\\u0a35\\u0a36\\u0a38\\u0a39\\u0a59-\\u0a5c\\u0a5e\\u0a72-\\u0a74\\u0a85-\\u0a8d\\u0a8f-\\u0a91\\u0a93-\\u0aa8\\u0aaa-\\u0ab0\\u0ab2\\u0ab3\\u0ab5-\\u0ab9\\u0abd\\u0ad0\\u0ae0\\u0ae1\\u0af9\\u0b05-\\u0b0c\\u0b0f\\u0b10\\u0b13-\\u0b28\\u0b2a-\\u0b30\\u0b32\\u0b33\\u0b35-\\u0b39\\u0b3d\\u0b5c\\u0b5d\\u0b5f-\\u0b61\\u0b71\\u0b83\\u0b85-\\u0b8a\\u0b8e-\\u0b90\\u0b92-\\u0b95\\u0b99\\u0b9a\\u0b9c\\u0b9e\\u0b9f\\u0ba3\\u0ba4\\u0ba8-\\u0baa\\u0bae-\\u0bb9\\u0bd0\\u0c05-\\u0c0c\\u0c0e-\\u0c10\\u0c12-\\u0c28\\u0c2a-\\u0c39\\u0c3d\\u0c58-\\u0c5a\\u0c60\\u0c61\\u0c80\\u0c85-\\u0c8c\\u0c8e-\\u0c90\\u0c92-\\u0ca8\\u0caa-\\u0cb3\\u0cb5-\\u0cb9\\u0cbd\\u0cde\\u0ce0\\u0ce1\\u0cf1\\u0cf2\\u0d04-\\u0d0c\\u0d0e-\\u0d10\\u0d12-\\u0d3a\\u0d3d\\u0d4e\\u0d54-\\u0d56\\u0d5f-\\u0d61\\u0d7a-\\u0d7f\\u0d85-\\u0d96\\u0d9a-\\u0db1\\u0db3-\\u0dbb\\u0dbd\\u0dc0-\\u0dc6\\u0e01-\\u0e30\\u0e32\\u0e33\\u0e40-\\u0e46\\u0e81\\u0e82\\u0e84\\u0e86-\\u0e8a\\u0e8c-\\u0ea3\\u0ea5\\u0e
a7-\\u0eb0\\u0eb2\\u0eb3\\u0ebd\\u0ec0-\\u0ec4\\u0ec6\\u0edc-\\u0edf\\u0f00\\u0f40-\\u0f47\\u0f49-\\u0f6c\\u0f88-\\u0f8c\\u1000-\\u102a\\u103f\\u1050-\\u1055\\u105a-\\u105d\\u1061\\u1065\\u1066\\u106e-\\u1070\\u1075-\\u1081\\u108e\\u10a0-\\u10c5\\u10c7\\u10cd\\u10d0-\\u10fa\\u10fc-\\u1248\\u124a-\\u124d\\u1250-\\u1256\\u1258\\u125a-\\u125d\\u1260-\\u1288\\u128a-\\u128d\\u1290-\\u12b0\\u12b2-\\u12b5\\u12b8-\\u12be\\u12c0\\u12c2-\\u12c5\\u12c8-\\u12d6\\u12d8-\\u1310\\u1312-\\u1315\\u1318-\\u135a\\u1380-\\u138f\\u13a0-\\u13f5\\u13f8-\\u13fd\\u1401-\\u166c\\u166f-\\u167f\\u1681-\\u169a\\u16a0-\\u16ea\\u16ee-\\u16f8\\u1700-\\u170c\\u170e-\\u1711\\u1720-\\u1731\\u1740-\\u1751\\u1760-\\u176c\\u176e-\\u1770\\u1780-\\u17b3\\u17d7\\u17dc\\u1820-\\u1878\\u1880-\\u18a8\\u18aa\\u18b0-\\u18f5\\u1900-\\u191e\\u1950-\\u196d\\u1970-\\u1974\\u1980-\\u19ab\\u19b0-\\u19c9\\u1a00-\\u1a16\\u1a20-\\u1a54\\u1aa7\\u1b05-\\u1b33\\u1b45-\\u1b4b\\u1b83-\\u1ba0\\u1bae\\u1baf\\u1bba-\\u1be5\\u1c00-\\u1c23\\u1c4d-\\u1c4f\\u1c5a-\\u1c7d\\u1c80-\\u1c88\\u1c90-\\u1cba\\u1cbd-\\u1cbf\\u1ce9-\\u1cec\\u1cee-\\u1cf3\\u1cf5\\u1cf6\\u1cfa\\u1d00-\\u1dbf\\u1e00-\\u1f15\\u1f18-\\u1f1d\\u1f20-\\u1f45\\u1f48-\\u1f4d\\u1f50-\\u1f57\\u1f59\\u1f5b\\u1f5d\\u1f5f-\\u1f7d\\u1f80-\\u1fb4\\u1fb6-\\u1fbc\\u1fbe\\u1fc2-\\u1fc4\\u1fc6-\\u1fcc\\u1fd0-\\u1fd3\\u1fd6-\\u1fdb\\u1fe0-\\u1fec\\u1ff2-\\u1ff4\\u1ff6-\\u1ffc\\u2071\\u207f\\u2090-\\u209c\\u2102\\u2107\\u210a-\\u2113\\u2115\\u2118-\\u211d\\u2124\\u2126\\u2128\\u212a-\\u2139\\u213c-\\u213f\\u2145-\\u2149\\u214e\\u2160-\\u2188\\u2c00-\\u2c2e\\u2c30-\\u2c5e\\u2c60-\\u2ce4\\u2ceb-\\u2cee\\u2cf2\\u2cf3\\u2d00-\\u2d25\\u2d27\\u2d2d\\u2d30-\\u2d67\\u2d6f\\u2d80-\\u2d96\\u2da0-\\u2da6\\u2da8-\\u2dae\\u2db0-\\u2db6\\u2db8-\\u2dbe\\u2dc0-\\u2dc6\\u2dc8-\\u2dce\\u2dd0-\\u2dd6\\u2dd8-\\u2dde\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303c\\u3041-\\u3096\\u309b-\\u309f\\u30a1-\\u30fa\\u30fc-\\u30ff\\u3105-\\u312f\\u3131-\\u318e\\u31a0-\\u31bf\\u31f0-\\u31ff\\u3400-
\\u4dbf\\u4e00-\\u9ffc\\ua000-\\ua48c\\ua4d0-\\ua4fd\\ua500-\\ua60c\\ua610-\\ua61f\\ua62a\\ua62b\\ua640-\\ua66e\\ua67f-\\ua69d\\ua6a0-\\ua6ef\\ua717-\\ua71f\\ua722-\\ua788\\ua78b-\\ua7bf\\ua7c2-\\ua7ca\\ua7f5-\\ua801\\ua803-\\ua805\\ua807-\\ua80a\\ua80c-\\ua822\\ua840-\\ua873\\ua882-\\ua8b3\\ua8f2-\\ua8f7\\ua8fb\\ua8fd\\ua8fe\\ua90a-\\ua925\\ua930-\\ua946\\ua960-\\ua97c\\ua984-\\ua9b2\\ua9cf\\ua9e0-\\ua9e4\\ua9e6-\\ua9ef\\ua9fa-\\ua9fe\\uaa00-\\uaa28\\uaa40-\\uaa42\\uaa44-\\uaa4b\\uaa60-\\uaa76\\uaa7a\\uaa7e-\\uaaaf\\uaab1\\uaab5\\uaab6\\uaab9-\\uaabd\\uaac0\\uaac2\\uaadb-\\uaadd\\uaae0-\\uaaea\\uaaf2-\\uaaf4\\uab01-\\uab06\\uab09-\\uab0e\\uab11-\\uab16\\uab20-\\uab26\\uab28-\\uab2e\\uab30-\\uab5a\\uab5c-\\uab69\\uab70-\\uabe2\\uac00-\\ud7a3\\ud7b0-\\ud7c6\\ud7cb-\\ud7fb\\uf900-\\ufa6d\\ufa70-\\ufad9\\ufb00-\\ufb06\\ufb13-\\ufb17\\ufb1d\\ufb1f-\\ufb28\\ufb2a-\\ufb36\\ufb38-\\ufb3c\\ufb3e\\ufb40\\ufb41\\ufb43\\ufb44\\ufb46-\\ufbb1\\ufbd3-\\ufd3d\\ufd50-\\ufd8f\\ufd92-\\ufdc7\\ufdf0-\\ufdfb\\ufe70-\\ufe74\\ufe76-\\ufefc\\uff21-\\uff3a\\uff41-\\uff5a\\uff66-\\uffbe\\uffc2-\\uffc7\\uffca-\\uffcf\\uffd2-\\uffd7\\uffda-\\uffdc\"\nlet nonASCIIidentifierChars = 
\"\\u200c\\u200d\\xb7\\u0300-\\u036f\\u0387\\u0483-\\u0487\\u0591-\\u05bd\\u05bf\\u05c1\\u05c2\\u05c4\\u05c5\\u05c7\\u0610-\\u061a\\u064b-\\u0669\\u0670\\u06d6-\\u06dc\\u06df-\\u06e4\\u06e7\\u06e8\\u06ea-\\u06ed\\u06f0-\\u06f9\\u0711\\u0730-\\u074a\\u07a6-\\u07b0\\u07c0-\\u07c9\\u07eb-\\u07f3\\u07fd\\u0816-\\u0819\\u081b-\\u0823\\u0825-\\u0827\\u0829-\\u082d\\u0859-\\u085b\\u08d3-\\u08e1\\u08e3-\\u0903\\u093a-\\u093c\\u093e-\\u094f\\u0951-\\u0957\\u0962\\u0963\\u0966-\\u096f\\u0981-\\u0983\\u09bc\\u09be-\\u09c4\\u09c7\\u09c8\\u09cb-\\u09cd\\u09d7\\u09e2\\u09e3\\u09e6-\\u09ef\\u09fe\\u0a01-\\u0a03\\u0a3c\\u0a3e-\\u0a42\\u0a47\\u0a48\\u0a4b-\\u0a4d\\u0a51\\u0a66-\\u0a71\\u0a75\\u0a81-\\u0a83\\u0abc\\u0abe-\\u0ac5\\u0ac7-\\u0ac9\\u0acb-\\u0acd\\u0ae2\\u0ae3\\u0ae6-\\u0aef\\u0afa-\\u0aff\\u0b01-\\u0b03\\u0b3c\\u0b3e-\\u0b44\\u0b47\\u0b48\\u0b4b-\\u0b4d\\u0b55-\\u0b57\\u0b62\\u0b63\\u0b66-\\u0b6f\\u0b82\\u0bbe-\\u0bc2\\u0bc6-\\u0bc8\\u0bca-\\u0bcd\\u0bd7\\u0be6-\\u0bef\\u0c00-\\u0c04\\u0c3e-\\u0c44\\u0c46-\\u0c48\\u0c4a-\\u0c4d\\u0c55\\u0c56\\u0c62\\u0c63\\u0c66-\\u0c6f\\u0c81-\\u0c83\\u0cbc\\u0cbe-\\u0cc4\\u0cc6-\\u0cc8\\u0cca-\\u0ccd\\u0cd5\\u0cd6\\u0ce2\\u0ce3\\u0ce6-\\u0cef\\u0d00-\\u0d03\\u0d3b\\u0d3c\\u0d3e-\\u0d44\\u0d46-\\u0d48\\u0d4a-\\u0d4d\\u0d57\\u0d62\\u0d63\\u0d66-\\u0d6f\\u0d81-\\u0d83\\u0dca\\u0dcf-\\u0dd4\\u0dd6\\u0dd8-\\u0ddf\\u0de6-\\u0def\\u0df2\\u0df3\\u0e31\\u0e34-\\u0e3a\\u0e47-\\u0e4e\\u0e50-\\u0e59\\u0eb1\\u0eb4-\\u0ebc\\u0ec8-\\u0ecd\\u0ed0-\\u0ed9\\u0f18\\u0f19\\u0f20-\\u0f29\\u0f35\\u0f37\\u0f39\\u0f3e\\u0f3f\\u0f71-\\u0f84\\u0f86\\u0f87\\u0f8d-\\u0f97\\u0f99-\\u0fbc\\u0fc6\\u102b-\\u103e\\u1040-\\u1049\\u1056-\\u1059\\u105e-\\u1060\\u1062-\\u1064\\u1067-\\u106d\\u1071-\\u1074\\u1082-\\u108d\\u108f-\\u109d\\u135d-\\u135f\\u1369-\\u1371\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17b4-\\u17d3\\u17dd\\u17e0-\\u17e9\\u180b-\\u180d\\u1810-\\u1819\\u18a9\\u1920-\\u192b\\u1930-\\u193b\\u1946-\\u194f\\u19d0-\\u19da\\u1a17-\\u1a1b\\u1a
55-\\u1a5e\\u1a60-\\u1a7c\\u1a7f-\\u1a89\\u1a90-\\u1a99\\u1ab0-\\u1abd\\u1abf\\u1ac0\\u1b00-\\u1b04\\u1b34-\\u1b44\\u1b50-\\u1b59\\u1b6b-\\u1b73\\u1b80-\\u1b82\\u1ba1-\\u1bad\\u1bb0-\\u1bb9\\u1be6-\\u1bf3\\u1c24-\\u1c37\\u1c40-\\u1c49\\u1c50-\\u1c59\\u1cd0-\\u1cd2\\u1cd4-\\u1ce8\\u1ced\\u1cf4\\u1cf7-\\u1cf9\\u1dc0-\\u1df9\\u1dfb-\\u1dff\\u203f\\u2040\\u2054\\u20d0-\\u20dc\\u20e1\\u20e5-\\u20f0\\u2cef-\\u2cf1\\u2d7f\\u2de0-\\u2dff\\u302a-\\u302f\\u3099\\u309a\\ua620-\\ua629\\ua66f\\ua674-\\ua67d\\ua69e\\ua69f\\ua6f0\\ua6f1\\ua802\\ua806\\ua80b\\ua823-\\ua827\\ua82c\\ua880\\ua881\\ua8b4-\\ua8c5\\ua8d0-\\ua8d9\\ua8e0-\\ua8f1\\ua8ff-\\ua909\\ua926-\\ua92d\\ua947-\\ua953\\ua980-\\ua983\\ua9b3-\\ua9c0\\ua9d0-\\ua9d9\\ua9e5\\ua9f0-\\ua9f9\\uaa29-\\uaa36\\uaa43\\uaa4c\\uaa4d\\uaa50-\\uaa59\\uaa7b-\\uaa7d\\uaab0\\uaab2-\\uaab4\\uaab7\\uaab8\\uaabe\\uaabf\\uaac1\\uaaeb-\\uaaef\\uaaf5\\uaaf6\\uabe3-\\uabea\\uabec\\uabed\\uabf0-\\uabf9\\ufb1e\\ufe00-\\ufe0f\\ufe20-\\ufe2f\\ufe33\\ufe34\\ufe4d-\\ufe4f\\uff10-\\uff19\\uff3f\"\n\nconst nonASCIIidentifierStart = new RegExp(\"[\" + nonASCIIidentifierStartChars + \"]\")\nconst nonASCIIidentifier = new RegExp(\"[\" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + \"]\")\n\nnonASCIIidentifierStartChars = nonASCIIidentifierChars = null\n\n// These are a run-length and offset encoded representation of the\n// >0xffff code points that are a valid part of identifiers. The\n// offset starts at 0x10000, and each pair of numbers represents an\n// offset to the next range, and then a size of the range. 
They were\n// generated by bin/generate-identifier-regex.js\n\n// eslint-disable-next-line comma-spacing\nconst astralIdentifierStartCodes = [0,11,2,25,2,18,2,1,2,14,3,13,35,122,70,52,268,28,4,48,48,31,14,29,6,37,11,29,3,35,5,7,2,4,43,157,19,35,5,35,5,39,9,51,157,310,10,21,11,7,153,5,3,0,2,43,2,1,4,0,3,22,11,22,10,30,66,18,2,1,11,21,11,25,71,55,7,1,65,0,16,3,2,2,2,28,43,28,4,28,36,7,2,27,28,53,11,21,11,18,14,17,111,72,56,50,14,50,14,35,349,41,7,1,79,28,11,0,9,21,107,20,28,22,13,52,76,44,33,24,27,35,30,0,3,0,9,34,4,0,13,47,15,3,22,0,2,0,36,17,2,24,85,6,2,0,2,3,2,14,2,9,8,46,39,7,3,1,3,21,2,6,2,1,2,4,4,0,19,0,13,4,159,52,19,3,21,2,31,47,21,1,2,0,185,46,42,3,37,47,21,0,60,42,14,0,72,26,230,43,117,63,32,7,3,0,3,7,2,1,2,23,16,0,2,0,95,7,3,38,17,0,2,0,29,0,11,39,8,0,22,0,12,45,20,0,35,56,264,8,2,36,18,0,50,29,113,6,2,1,2,37,22,0,26,5,2,1,2,31,15,0,328,18,190,0,80,921,103,110,18,195,2749,1070,4050,582,8634,568,8,30,114,29,19,47,17,3,32,20,6,18,689,63,129,74,6,0,67,12,65,1,2,0,29,6135,9,1237,43,8,8952,286,50,2,18,3,9,395,2309,106,6,12,4,8,8,9,5991,84,2,70,2,1,3,0,3,1,3,3,2,11,2,0,2,6,2,64,2,3,3,7,2,6,2,27,2,3,2,4,2,0,4,6,2,339,3,24,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,7,2357,44,11,6,17,0,370,43,1301,196,60,67,8,0,1205,3,2,26,2,1,2,0,3,0,2,9,2,3,2,0,2,0,7,0,5,0,2,0,2,0,2,2,2,1,2,0,3,0,2,0,2,0,2,0,2,0,2,1,2,0,3,3,2,6,2,3,2,3,2,0,2,9,2,16,6,2,2,4,2,16,4421,42717,35,4148,12,221,3,5761,15,7472,3104,541,1507,4938]\n\n// eslint-disable-next-line comma-spacing\nconst astralIdentifierCodes = 
[509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,166,1,574,3,9,9,370,1,154,10,176,2,54,14,32,9,16,3,46,10,54,9,7,2,37,13,2,9,6,1,45,0,13,2,49,13,9,3,2,11,83,11,7,0,161,11,6,9,7,3,56,1,2,6,3,1,3,2,10,0,11,1,3,6,4,4,193,17,10,9,5,0,82,19,13,9,214,6,3,8,28,1,83,16,16,9,82,12,9,9,84,14,5,9,243,14,166,9,71,5,2,1,3,3,2,0,2,1,13,9,120,6,3,6,4,0,29,9,41,6,2,3,9,0,10,10,47,15,406,7,2,7,17,9,57,21,2,13,123,5,4,0,2,1,2,6,2,0,9,9,49,4,2,1,2,4,9,9,330,3,19306,9,135,4,60,6,26,9,1014,0,2,54,8,3,82,0,12,1,19628,1,5319,4,4,5,9,7,3,6,31,3,149,2,1418,49,513,54,5,49,9,0,15,0,23,4,2,14,1361,6,2,16,3,6,2,1,2,4,262,6,10,9,419,13,1495,6,110,6,6,9,4759,9,787719,239]\n\n// This has a complexity linear to the value of the code. The\n// assumption is that looking up astral identifier characters is\n// rare.\nfunction isInAstralSet(code, set) {\n let pos = 0x10000\n for (let i = 0; i < set.length; i += 2) {\n pos += set[i]\n if (pos > code) return false\n pos += set[i + 1]\n if (pos >= code) return true\n }\n}\n\n// Test whether a given character code starts an identifier.\n\nexport function isIdentifierStart(code, astral) {\n if (code < 65) return code === 36\n if (code < 91) return true\n if (code < 97) return code === 95\n if (code < 123) return true\n if (code <= 0xffff) return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code))\n if (astral === false) return false\n return isInAstralSet(code, astralIdentifierStartCodes)\n}\n\n// Test whether a given character is part of an identifier.\n\nexport function isIdentifierChar(code, astral) {\n if (code < 48) return code === 36\n if (code < 58) return true\n if (code < 65) return false\n if (code < 91) return true\n if (code < 97) return code === 95\n if (code < 123) return true\n if (code <= 0xffff) return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code))\n if (astral === false) return false\n return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes)\n}\n","// ## 
Token types\n\n// The assignment of fine-grained, information-carrying type objects\n// allows the tokenizer to store the information it has about a\n// token in a way that is very cheap for the parser to look up.\n\n// All token type variables start with an underscore, to make them\n// easy to recognize.\n\n// The `beforeExpr` property is used to disambiguate between regular\n// expressions and divisions. It is set on all token types that can\n// be followed by an expression (thus, a slash after them would be a\n// regular expression).\n//\n// The `startsExpr` property is used to check if the token ends a\n// `yield` expression. It is set on all token types that either can\n// directly start an expression (like a quotation mark) or can\n// continue an expression (like the body of a string).\n//\n// `isLoop` marks a keyword as starting a loop, which is important\n// to know when parsing a label, in order to allow or disallow\n// continue jumps to that label.\n\nexport class TokenType {\n constructor(label, conf = {}) {\n this.label = label\n this.keyword = conf.keyword\n this.beforeExpr = !!conf.beforeExpr\n this.startsExpr = !!conf.startsExpr\n this.isLoop = !!conf.isLoop\n this.isAssign = !!conf.isAssign\n this.prefix = !!conf.prefix\n this.postfix = !!conf.postfix\n this.binop = conf.binop || null\n this.updateContext = null\n }\n}\n\nfunction binop(name, prec) {\n return new TokenType(name, {beforeExpr: true, binop: prec})\n}\nconst beforeExpr = {beforeExpr: true}, startsExpr = {startsExpr: true}\n\n// Map keyword names to token types.\n\nexport const keywords = {}\n\n// Succinct definitions of keyword token types\nfunction kw(name, options = {}) {\n options.keyword = name\n return keywords[name] = new TokenType(name, options)\n}\n\nexport const types = {\n num: new TokenType(\"num\", startsExpr),\n regexp: new TokenType(\"regexp\", startsExpr),\n string: new TokenType(\"string\", startsExpr),\n name: new TokenType(\"name\", startsExpr),\n eof: new 
TokenType(\"eof\"),\n\n // Punctuation token types.\n bracketL: new TokenType(\"[\", {beforeExpr: true, startsExpr: true}),\n bracketR: new TokenType(\"]\"),\n braceL: new TokenType(\"{\", {beforeExpr: true, startsExpr: true}),\n braceR: new TokenType(\"}\"),\n parenL: new TokenType(\"(\", {beforeExpr: true, startsExpr: true}),\n parenR: new TokenType(\")\"),\n comma: new TokenType(\",\", beforeExpr),\n semi: new TokenType(\";\", beforeExpr),\n colon: new TokenType(\":\", beforeExpr),\n dot: new TokenType(\".\"),\n question: new TokenType(\"?\", beforeExpr),\n questionDot: new TokenType(\"?.\"),\n arrow: new TokenType(\"=>\", beforeExpr),\n template: new TokenType(\"template\"),\n invalidTemplate: new TokenType(\"invalidTemplate\"),\n ellipsis: new TokenType(\"...\", beforeExpr),\n backQuote: new TokenType(\"`\", startsExpr),\n dollarBraceL: new TokenType(\"${\", {beforeExpr: true, startsExpr: true}),\n\n // Operators. These carry several kinds of properties to help the\n // parser use them properly (the presence of these properties is\n // what categorizes them as operators).\n //\n // `binop`, when present, specifies that this operator is a binary\n // operator, and will refer to its precedence.\n //\n // `prefix` and `postfix` mark the operator as a prefix or postfix\n // unary operator.\n //\n // `isAssign` marks all of `=`, `+=`, `-=` etcetera, which act as\n // binary operators with a very low precedence, that should result\n // in AssignmentExpression nodes.\n\n eq: new TokenType(\"=\", {beforeExpr: true, isAssign: true}),\n assign: new TokenType(\"_=\", {beforeExpr: true, isAssign: true}),\n incDec: new TokenType(\"++/--\", {prefix: true, postfix: true, startsExpr: true}),\n prefix: new TokenType(\"!/~\", {beforeExpr: true, prefix: true, startsExpr: true}),\n logicalOR: binop(\"||\", 1),\n logicalAND: binop(\"&&\", 2),\n bitwiseOR: binop(\"|\", 3),\n bitwiseXOR: binop(\"^\", 4),\n bitwiseAND: binop(\"&\", 5),\n equality: binop(\"==/!=/===/!==\", 6),\n 
relational: binop(\"/<=/>=\", 7),\n bitShift: binop(\"<>/>>>\", 8),\n plusMin: new TokenType(\"+/-\", {beforeExpr: true, binop: 9, prefix: true, startsExpr: true}),\n modulo: binop(\"%\", 10),\n star: binop(\"*\", 10),\n slash: binop(\"/\", 10),\n starstar: new TokenType(\"**\", {beforeExpr: true}),\n coalesce: binop(\"??\", 1),\n\n // Keyword token types.\n _break: kw(\"break\"),\n _case: kw(\"case\", beforeExpr),\n _catch: kw(\"catch\"),\n _continue: kw(\"continue\"),\n _debugger: kw(\"debugger\"),\n _default: kw(\"default\", beforeExpr),\n _do: kw(\"do\", {isLoop: true, beforeExpr: true}),\n _else: kw(\"else\", beforeExpr),\n _finally: kw(\"finally\"),\n _for: kw(\"for\", {isLoop: true}),\n _function: kw(\"function\", startsExpr),\n _if: kw(\"if\"),\n _return: kw(\"return\", beforeExpr),\n _switch: kw(\"switch\"),\n _throw: kw(\"throw\", beforeExpr),\n _try: kw(\"try\"),\n _var: kw(\"var\"),\n _const: kw(\"const\"),\n _while: kw(\"while\", {isLoop: true}),\n _with: kw(\"with\"),\n _new: kw(\"new\", {beforeExpr: true, startsExpr: true}),\n _this: kw(\"this\", startsExpr),\n _super: kw(\"super\", startsExpr),\n _class: kw(\"class\", startsExpr),\n _extends: kw(\"extends\", beforeExpr),\n _export: kw(\"export\"),\n _import: kw(\"import\", startsExpr),\n _null: kw(\"null\", startsExpr),\n _true: kw(\"true\", startsExpr),\n _false: kw(\"false\", startsExpr),\n _in: kw(\"in\", {beforeExpr: true, binop: 7}),\n _instanceof: kw(\"instanceof\", {beforeExpr: true, binop: 7}),\n _typeof: kw(\"typeof\", {beforeExpr: true, prefix: true, startsExpr: true}),\n _void: kw(\"void\", {beforeExpr: true, prefix: true, startsExpr: true}),\n _delete: kw(\"delete\", {beforeExpr: true, prefix: true, startsExpr: true})\n}\n","// Matches a whole line break (where CRLF is considered a single\n// line break). 
Used to count lines.\n\nexport const lineBreak = /\\r\\n?|\\n|\\u2028|\\u2029/\nexport const lineBreakG = new RegExp(lineBreak.source, \"g\")\n\nexport function isNewLine(code, ecma2019String) {\n return code === 10 || code === 13 || (!ecma2019String && (code === 0x2028 || code === 0x2029))\n}\n\nexport const nonASCIIwhitespace = /[\\u1680\\u2000-\\u200a\\u202f\\u205f\\u3000\\ufeff]/\n\nexport const skipWhiteSpace = /(?:\\s|\\/\\/.*|\\/\\*[^]*?\\*\\/)*/g\n","const {hasOwnProperty, toString} = Object.prototype\n\n// Checks if an object has a property.\n\nexport function has(obj, propName) {\n return hasOwnProperty.call(obj, propName)\n}\n\nexport const isArray = Array.isArray || ((obj) => (\n toString.call(obj) === \"[object Array]\"\n))\n\nexport function wordsRegexp(words) {\n return new RegExp(\"^(?:\" + words.replace(/ /g, \"|\") + \")$\")\n}\n","import {lineBreakG} from \"./whitespace.js\"\n\n// These are used when `options.locations` is on, for the\n// `startLoc` and `endLoc` properties.\n\nexport class Position {\n constructor(line, col) {\n this.line = line\n this.column = col\n }\n\n offset(n) {\n return new Position(this.line, this.column + n)\n }\n}\n\nexport class SourceLocation {\n constructor(p, start, end) {\n this.start = start\n this.end = end\n if (p.sourceFile !== null) this.source = p.sourceFile\n }\n}\n\n// The `getLineInfo` function is mostly useful when the\n// `locations` option is off (for performance reasons) and you\n// want to find the line/column position for a given character\n// offset. 
`input` should be the code string that the offset refers\n// into.\n\nexport function getLineInfo(input, offset) {\n for (let line = 1, cur = 0;;) {\n lineBreakG.lastIndex = cur\n let match = lineBreakG.exec(input)\n if (match && match.index < offset) {\n ++line\n cur = match.index + match[0].length\n } else {\n return new Position(line, offset - cur)\n }\n }\n}\n","import {has, isArray} from \"./util.js\"\nimport {SourceLocation} from \"./locutil.js\"\n\n// A second argument must be given to configure the parser process.\n// These options are recognized (only `ecmaVersion` is required):\n\nexport const defaultOptions = {\n // `ecmaVersion` indicates the ECMAScript version to parse. Must be\n // either 3, 5, 6 (or 2015), 7 (2016), 8 (2017), 9 (2018), 10\n // (2019), 11 (2020), 12 (2021), or `\"latest\"` (the latest version\n // the library supports). This influences support for strict mode,\n // the set of reserved words, and support for new syntax features.\n ecmaVersion: null,\n // `sourceType` indicates the mode the code should be parsed in.\n // Can be either `\"script\"` or `\"module\"`. This influences global\n // strict mode and parsing of `import` and `export` declarations.\n sourceType: \"script\",\n // `onInsertedSemicolon` can be a callback that will be called\n // when a semicolon is automatically inserted. It will be passed\n // the position of the comma as an offset, and if `locations` is\n // enabled, it is given the location as a `{line, column}` object\n // as second argument.\n onInsertedSemicolon: null,\n // `onTrailingComma` is similar to `onInsertedSemicolon`, but for\n // trailing commas.\n onTrailingComma: null,\n // By default, reserved words are only enforced if ecmaVersion >= 5.\n // Set `allowReserved` to a boolean value to explicitly turn this on\n // an off. 
When this option has the value \"never\", reserved words\n // and keywords can also not be used as property names.\n allowReserved: null,\n // When enabled, a return at the top level is not considered an\n // error.\n allowReturnOutsideFunction: false,\n // When enabled, import/export statements are not constrained to\n // appearing at the top of the program.\n allowImportExportEverywhere: false,\n // When enabled, await identifiers are allowed to appear at the top-level scope,\n // but they are still not allowed in non-async functions.\n allowAwaitOutsideFunction: false,\n // When enabled, hashbang directive in the beginning of file\n // is allowed and treated as a line comment.\n allowHashBang: false,\n // When `locations` is on, `loc` properties holding objects with\n // `start` and `end` properties in `{line, column}` form (with\n // line being 1-based and column 0-based) will be attached to the\n // nodes.\n locations: false,\n // A function can be passed as `onToken` option, which will\n // cause Acorn to call that function with object in the same\n // format as tokens returned from `tokenizer().getToken()`. Note\n // that you are not allowed to call the parser from the\n // callback—that will corrupt its internal state.\n onToken: null,\n // A function can be passed as `onComment` option, which will\n // cause Acorn to call that function with `(block, text, start,\n // end)` parameters whenever a comment is skipped. `block` is a\n // boolean indicating whether this is a block (`/* */`) comment,\n // `text` is the content of the comment, and `start` and `end` are\n // character offsets that denote the start and end of the comment.\n // When the `locations` option is on, two more parameters are\n // passed, the full `{line, column}` locations of the start and\n // end of the comments. 
Note that you are not allowed to call the\n // parser from the callback—that will corrupt its internal state.\n onComment: null,\n // Nodes have their start and end characters offsets recorded in\n // `start` and `end` properties (directly on the node, rather than\n // the `loc` object, which holds line/column data. To also add a\n // [semi-standardized][range] `range` property holding a `[start,\n // end]` array with the same numbers, set the `ranges` option to\n // `true`.\n //\n // [range]: https://bugzilla.mozilla.org/show_bug.cgi?id=745678\n ranges: false,\n // It is possible to parse multiple files into a single AST by\n // passing the tree produced by parsing the first file as\n // `program` option in subsequent parses. This will add the\n // toplevel forms of the parsed file to the `Program` (top) node\n // of an existing parse tree.\n program: null,\n // When `locations` is on, you can pass this to record the source\n // file in every node's `loc` object.\n sourceFile: null,\n // This value, if given, is stored in every node, whether\n // `locations` is on or off.\n directSourceFile: null,\n // When enabled, parenthesized expressions are represented by\n // (non-standard) ParenthesizedExpression nodes\n preserveParens: false\n}\n\n// Interpret and default an options object\n\nlet warnedAboutEcmaVersion = false\n\nexport function getOptions(opts) {\n let options = {}\n\n for (let opt in defaultOptions)\n options[opt] = opts && has(opts, opt) ? 
opts[opt] : defaultOptions[opt]\n\n if (options.ecmaVersion === \"latest\") {\n options.ecmaVersion = 1e8\n } else if (options.ecmaVersion == null) {\n if (!warnedAboutEcmaVersion && typeof console === \"object\" && console.warn) {\n warnedAboutEcmaVersion = true\n console.warn(\"Since Acorn 8.0.0, options.ecmaVersion is required.\\nDefaulting to 2020, but this will stop working in the future.\")\n }\n options.ecmaVersion = 11\n } else if (options.ecmaVersion >= 2015) {\n options.ecmaVersion -= 2009\n }\n\n if (options.allowReserved == null)\n options.allowReserved = options.ecmaVersion < 5\n\n if (isArray(options.onToken)) {\n let tokens = options.onToken\n options.onToken = (token) => tokens.push(token)\n }\n if (isArray(options.onComment))\n options.onComment = pushComment(options, options.onComment)\n\n return options\n}\n\nfunction pushComment(options, array) {\n return function(block, text, start, end, startLoc, endLoc) {\n let comment = {\n type: block ? \"Block\" : \"Line\",\n value: text,\n start: start,\n end: end\n }\n if (options.locations)\n comment.loc = new SourceLocation(this, startLoc, endLoc)\n if (options.ranges)\n comment.range = [start, end]\n array.push(comment)\n }\n}\n","// Each scope gets a bitset that may contain these flags\nexport const\n SCOPE_TOP = 1,\n SCOPE_FUNCTION = 2,\n SCOPE_VAR = SCOPE_TOP | SCOPE_FUNCTION,\n SCOPE_ASYNC = 4,\n SCOPE_GENERATOR = 8,\n SCOPE_ARROW = 16,\n SCOPE_SIMPLE_CATCH = 32,\n SCOPE_SUPER = 64,\n SCOPE_DIRECT_SUPER = 128\n\nexport function functionFlags(async, generator) {\n return SCOPE_FUNCTION | (async ? SCOPE_ASYNC : 0) | (generator ? 
SCOPE_GENERATOR : 0)\n}\n\n// Used in checkLVal* and declareName to determine the type of a binding\nexport const\n BIND_NONE = 0, // Not a binding\n BIND_VAR = 1, // Var-style binding\n BIND_LEXICAL = 2, // Let- or const-style binding\n BIND_FUNCTION = 3, // Function declaration\n BIND_SIMPLE_CATCH = 4, // Simple (identifier pattern) catch binding\n BIND_OUTSIDE = 5 // Special case for function names as bound inside the function\n","import {reservedWords, keywords} from \"./identifier.js\"\nimport {types as tt} from \"./tokentype.js\"\nimport {lineBreak} from \"./whitespace.js\"\nimport {getOptions} from \"./options.js\"\nimport {wordsRegexp} from \"./util.js\"\nimport {SCOPE_TOP, SCOPE_FUNCTION, SCOPE_ASYNC, SCOPE_GENERATOR, SCOPE_SUPER, SCOPE_DIRECT_SUPER} from \"./scopeflags.js\"\n\nexport class Parser {\n constructor(options, input, startPos) {\n this.options = options = getOptions(options)\n this.sourceFile = options.sourceFile\n this.keywords = wordsRegexp(keywords[options.ecmaVersion >= 6 ? 6 : options.sourceType === \"module\" ? \"5module\" : 5])\n let reserved = \"\"\n if (options.allowReserved !== true) {\n reserved = reservedWords[options.ecmaVersion >= 6 ? 6 : options.ecmaVersion === 5 ? 5 : 3]\n if (options.sourceType === \"module\") reserved += \" await\"\n }\n this.reservedWords = wordsRegexp(reserved)\n let reservedStrict = (reserved ? reserved + \" \" : \"\") + reservedWords.strict\n this.reservedWordsStrict = wordsRegexp(reservedStrict)\n this.reservedWordsStrictBind = wordsRegexp(reservedStrict + \" \" + reservedWords.strictBind)\n this.input = String(input)\n\n // Used to signal to callers of `readWord1` whether the word\n // contained any escape sequences. 
This is needed because words with\n // escape sequences must not be interpreted as keywords.\n this.containsEsc = false\n\n // Set up token state\n\n // The current position of the tokenizer in the input.\n if (startPos) {\n this.pos = startPos\n this.lineStart = this.input.lastIndexOf(\"\\n\", startPos - 1) + 1\n this.curLine = this.input.slice(0, this.lineStart).split(lineBreak).length\n } else {\n this.pos = this.lineStart = 0\n this.curLine = 1\n }\n\n // Properties of the current token:\n // Its type\n this.type = tt.eof\n // For tokens that include more information than their type, the value\n this.value = null\n // Its start and end offset\n this.start = this.end = this.pos\n // And, if locations are used, the {line, column} object\n // corresponding to those offsets\n this.startLoc = this.endLoc = this.curPosition()\n\n // Position information for the previous token\n this.lastTokEndLoc = this.lastTokStartLoc = null\n this.lastTokStart = this.lastTokEnd = this.pos\n\n // The context stack is used to superficially track syntactic\n // context to predict whether a regular expression is allowed in a\n // given position.\n this.context = this.initialContext()\n this.exprAllowed = true\n\n // Figure out if it's a module code.\n this.inModule = options.sourceType === \"module\"\n this.strict = this.inModule || this.strictDirective(this.pos)\n\n // Used to signify the start of a potential arrow function\n this.potentialArrowAt = -1\n\n // Positions to delayed-check that yield/await does not exist in default parameters.\n this.yieldPos = this.awaitPos = this.awaitIdentPos = 0\n // Labels in scope.\n this.labels = []\n // Thus-far undefined exports.\n this.undefinedExports = {}\n\n // If enabled, skip leading hashbang line.\n if (this.pos === 0 && options.allowHashBang && this.input.slice(0, 2) === \"#!\")\n this.skipLineComment(2)\n\n // Scope tracking for duplicate variable names (see scope.js)\n this.scopeStack = []\n this.enterScope(SCOPE_TOP)\n\n // For RegExp 
validation\n this.regexpState = null\n }\n\n parse() {\n let node = this.options.program || this.startNode()\n this.nextToken()\n return this.parseTopLevel(node)\n }\n\n get inFunction() { return (this.currentVarScope().flags & SCOPE_FUNCTION) > 0 }\n get inGenerator() { return (this.currentVarScope().flags & SCOPE_GENERATOR) > 0 }\n get inAsync() { return (this.currentVarScope().flags & SCOPE_ASYNC) > 0 }\n get allowSuper() { return (this.currentThisScope().flags & SCOPE_SUPER) > 0 }\n get allowDirectSuper() { return (this.currentThisScope().flags & SCOPE_DIRECT_SUPER) > 0 }\n get treatFunctionsAsVar() { return this.treatFunctionsAsVarInScope(this.currentScope()) }\n get inNonArrowFunction() { return (this.currentThisScope().flags & SCOPE_FUNCTION) > 0 }\n\n static extend(...plugins) {\n let cls = this\n for (let i = 0; i < plugins.length; i++) cls = plugins[i](cls)\n return cls\n }\n\n static parse(input, options) {\n return new this(options, input).parse()\n }\n\n static parseExpressionAt(input, pos, options) {\n let parser = new this(options, input, pos)\n parser.nextToken()\n return parser.parseExpression()\n }\n\n static tokenizer(input, options) {\n return new this(options, input)\n }\n}\n","import {types as tt} from \"./tokentype.js\"\nimport {Parser} from \"./state.js\"\nimport {lineBreak, skipWhiteSpace} from \"./whitespace.js\"\n\nconst pp = Parser.prototype\n\n// ## Parser utilities\n\nconst literal = /^(?:'((?:\\\\.|[^'\\\\])*?)'|\"((?:\\\\.|[^\"\\\\])*?)\")/\npp.strictDirective = function(start) {\n for (;;) {\n // Try to find string literal.\n skipWhiteSpace.lastIndex = start\n start += skipWhiteSpace.exec(this.input)[0].length\n let match = literal.exec(this.input.slice(start))\n if (!match) return false\n if ((match[1] || match[2]) === \"use strict\") {\n skipWhiteSpace.lastIndex = start + match[0].length\n let spaceAfter = skipWhiteSpace.exec(this.input), end = spaceAfter.index + spaceAfter[0].length\n let next = this.input.charAt(end)\n return 
next === \";\" || next === \"}\" ||\n (lineBreak.test(spaceAfter[0]) &&\n !(/[(`.[+\\-/*%<>=,?^&]/.test(next) || next === \"!\" && this.input.charAt(end + 1) === \"=\"))\n }\n start += match[0].length\n\n // Skip semicolon, if any.\n skipWhiteSpace.lastIndex = start\n start += skipWhiteSpace.exec(this.input)[0].length\n if (this.input[start] === \";\")\n start++\n }\n}\n\n// Predicate that tests whether the next token is of the given\n// type, and if yes, consumes it as a side effect.\n\npp.eat = function(type) {\n if (this.type === type) {\n this.next()\n return true\n } else {\n return false\n }\n}\n\n// Tests whether parsed token is a contextual keyword.\n\npp.isContextual = function(name) {\n return this.type === tt.name && this.value === name && !this.containsEsc\n}\n\n// Consumes contextual keyword if possible.\n\npp.eatContextual = function(name) {\n if (!this.isContextual(name)) return false\n this.next()\n return true\n}\n\n// Asserts that following token is given contextual keyword.\n\npp.expectContextual = function(name) {\n if (!this.eatContextual(name)) this.unexpected()\n}\n\n// Test whether a semicolon can be inserted at the current position.\n\npp.canInsertSemicolon = function() {\n return this.type === tt.eof ||\n this.type === tt.braceR ||\n lineBreak.test(this.input.slice(this.lastTokEnd, this.start))\n}\n\npp.insertSemicolon = function() {\n if (this.canInsertSemicolon()) {\n if (this.options.onInsertedSemicolon)\n this.options.onInsertedSemicolon(this.lastTokEnd, this.lastTokEndLoc)\n return true\n }\n}\n\n// Consume a semicolon, or, failing that, see if we are allowed to\n// pretend that there is a semicolon at this position.\n\npp.semicolon = function() {\n if (!this.eat(tt.semi) && !this.insertSemicolon()) this.unexpected()\n}\n\npp.afterTrailingComma = function(tokType, notNext) {\n if (this.type === tokType) {\n if (this.options.onTrailingComma)\n this.options.onTrailingComma(this.lastTokStart, this.lastTokStartLoc)\n if (!notNext)\n 
this.next()\n return true\n }\n}\n\n// Expect a token of a given type. If found, consume it, otherwise,\n// raise an unexpected token error.\n\npp.expect = function(type) {\n this.eat(type) || this.unexpected()\n}\n\n// Raise an unexpected token error.\n\npp.unexpected = function(pos) {\n this.raise(pos != null ? pos : this.start, \"Unexpected token\")\n}\n\nexport function DestructuringErrors() {\n this.shorthandAssign =\n this.trailingComma =\n this.parenthesizedAssign =\n this.parenthesizedBind =\n this.doubleProto =\n -1\n}\n\npp.checkPatternErrors = function(refDestructuringErrors, isAssign) {\n if (!refDestructuringErrors) return\n if (refDestructuringErrors.trailingComma > -1)\n this.raiseRecoverable(refDestructuringErrors.trailingComma, \"Comma is not permitted after the rest element\")\n let parens = isAssign ? refDestructuringErrors.parenthesizedAssign : refDestructuringErrors.parenthesizedBind\n if (parens > -1) this.raiseRecoverable(parens, \"Parenthesized pattern\")\n}\n\npp.checkExpressionErrors = function(refDestructuringErrors, andThrow) {\n if (!refDestructuringErrors) return false\n let {shorthandAssign, doubleProto} = refDestructuringErrors\n if (!andThrow) return shorthandAssign >= 0 || doubleProto >= 0\n if (shorthandAssign >= 0)\n this.raise(shorthandAssign, \"Shorthand property assignments are valid only in destructuring patterns\")\n if (doubleProto >= 0)\n this.raiseRecoverable(doubleProto, \"Redefinition of __proto__ property\")\n}\n\npp.checkYieldAwaitInDefaultParams = function() {\n if (this.yieldPos && (!this.awaitPos || this.yieldPos < this.awaitPos))\n this.raise(this.yieldPos, \"Yield expression cannot be a default value\")\n if (this.awaitPos)\n this.raise(this.awaitPos, \"Await expression cannot be a default value\")\n}\n\npp.isSimpleAssignTarget = function(expr) {\n if (expr.type === \"ParenthesizedExpression\")\n return this.isSimpleAssignTarget(expr.expression)\n return expr.type === \"Identifier\" || expr.type === 
\"MemberExpression\"\n}\n","import {types as tt} from \"./tokentype.js\"\nimport {Parser} from \"./state.js\"\nimport {lineBreak, skipWhiteSpace} from \"./whitespace.js\"\nimport {isIdentifierStart, isIdentifierChar, keywordRelationalOperator} from \"./identifier.js\"\nimport {has} from \"./util.js\"\nimport {DestructuringErrors} from \"./parseutil.js\"\nimport {functionFlags, SCOPE_SIMPLE_CATCH, BIND_SIMPLE_CATCH, BIND_LEXICAL, BIND_VAR, BIND_FUNCTION} from \"./scopeflags.js\"\n\nconst pp = Parser.prototype\n\n// ### Statement parsing\n\n// Parse a program. Initializes the parser, reads any number of\n// statements, and wraps them in a Program node. Optionally takes a\n// `program` argument. If present, the statements will be appended\n// to its body instead of creating a new node.\n\npp.parseTopLevel = function(node) {\n let exports = {}\n if (!node.body) node.body = []\n while (this.type !== tt.eof) {\n let stmt = this.parseStatement(null, true, exports)\n node.body.push(stmt)\n }\n if (this.inModule)\n for (let name of Object.keys(this.undefinedExports))\n this.raiseRecoverable(this.undefinedExports[name].start, `Export '${name}' is not defined`)\n this.adaptDirectivePrologue(node.body)\n this.next()\n node.sourceType = this.options.sourceType\n return this.finishNode(node, \"Program\")\n}\n\nconst loopLabel = {kind: \"loop\"}, switchLabel = {kind: \"switch\"}\n\npp.isLet = function(context) {\n if (this.options.ecmaVersion < 6 || !this.isContextual(\"let\")) return false\n skipWhiteSpace.lastIndex = this.pos\n let skip = skipWhiteSpace.exec(this.input)\n let next = this.pos + skip[0].length, nextCh = this.input.charCodeAt(next)\n // For ambiguous cases, determine if a LexicalDeclaration (or only a\n // Statement) is allowed here. If context is not empty then only a Statement\n // is allowed. 
However, `let [` is an explicit negative lookahead for\n // ExpressionStatement, so special-case it first.\n if (nextCh === 91) return true // '['\n if (context) return false\n\n if (nextCh === 123) return true // '{'\n if (isIdentifierStart(nextCh, true)) {\n let pos = next + 1\n while (isIdentifierChar(this.input.charCodeAt(pos), true)) ++pos\n let ident = this.input.slice(next, pos)\n if (!keywordRelationalOperator.test(ident)) return true\n }\n return false\n}\n\n// check 'async [no LineTerminator here] function'\n// - 'async /*foo*/ function' is OK.\n// - 'async /*\\n*/ function' is invalid.\npp.isAsyncFunction = function() {\n if (this.options.ecmaVersion < 8 || !this.isContextual(\"async\"))\n return false\n\n skipWhiteSpace.lastIndex = this.pos\n let skip = skipWhiteSpace.exec(this.input)\n let next = this.pos + skip[0].length\n return !lineBreak.test(this.input.slice(this.pos, next)) &&\n this.input.slice(next, next + 8) === \"function\" &&\n (next + 8 === this.input.length || !isIdentifierChar(this.input.charAt(next + 8)))\n}\n\n// Parse a single statement.\n//\n// If expecting a statement and finding a slash operator, parse a\n// regular expression literal. This is to handle cases like\n// `if (foo) /blah/.exec(foo)`, where looking at the previous token\n// does not help.\n\npp.parseStatement = function(context, topLevel, exports) {\n let starttype = this.type, node = this.startNode(), kind\n\n if (this.isLet(context)) {\n starttype = tt._var\n kind = \"let\"\n }\n\n // Most types of statements are recognized by the keyword they\n // start with. 
Many are trivial to parse, some require a bit of\n // complexity.\n\n switch (starttype) {\n case tt._break: case tt._continue: return this.parseBreakContinueStatement(node, starttype.keyword)\n case tt._debugger: return this.parseDebuggerStatement(node)\n case tt._do: return this.parseDoStatement(node)\n case tt._for: return this.parseForStatement(node)\n case tt._function:\n // Function as sole body of either an if statement or a labeled statement\n // works, but not when it is part of a labeled statement that is the sole\n // body of an if statement.\n if ((context && (this.strict || context !== \"if\" && context !== \"label\")) && this.options.ecmaVersion >= 6) this.unexpected()\n return this.parseFunctionStatement(node, false, !context)\n case tt._class:\n if (context) this.unexpected()\n return this.parseClass(node, true)\n case tt._if: return this.parseIfStatement(node)\n case tt._return: return this.parseReturnStatement(node)\n case tt._switch: return this.parseSwitchStatement(node)\n case tt._throw: return this.parseThrowStatement(node)\n case tt._try: return this.parseTryStatement(node)\n case tt._const: case tt._var:\n kind = kind || this.value\n if (context && kind !== \"var\") this.unexpected()\n return this.parseVarStatement(node, kind)\n case tt._while: return this.parseWhileStatement(node)\n case tt._with: return this.parseWithStatement(node)\n case tt.braceL: return this.parseBlock(true, node)\n case tt.semi: return this.parseEmptyStatement(node)\n case tt._export:\n case tt._import:\n if (this.options.ecmaVersion > 10 && starttype === tt._import) {\n skipWhiteSpace.lastIndex = this.pos\n let skip = skipWhiteSpace.exec(this.input)\n let next = this.pos + skip[0].length, nextCh = this.input.charCodeAt(next)\n if (nextCh === 40 || nextCh === 46) // '(' or '.'\n return this.parseExpressionStatement(node, this.parseExpression())\n }\n\n if (!this.options.allowImportExportEverywhere) {\n if (!topLevel)\n this.raise(this.start, \"'import' and 'export' 
may only appear at the top level\")\n if (!this.inModule)\n this.raise(this.start, \"'import' and 'export' may appear only with 'sourceType: module'\")\n }\n return starttype === tt._import ? this.parseImport(node) : this.parseExport(node, exports)\n\n // If the statement does not start with a statement keyword or a\n // brace, it's an ExpressionStatement or LabeledStatement. We\n // simply start parsing an expression, and afterwards, if the\n // next token is a colon and the expression was a simple\n // Identifier node, we switch to interpreting it as a label.\n default:\n if (this.isAsyncFunction()) {\n if (context) this.unexpected()\n this.next()\n return this.parseFunctionStatement(node, true, !context)\n }\n\n let maybeName = this.value, expr = this.parseExpression()\n if (starttype === tt.name && expr.type === \"Identifier\" && this.eat(tt.colon))\n return this.parseLabeledStatement(node, maybeName, expr, context)\n else return this.parseExpressionStatement(node, expr)\n }\n}\n\npp.parseBreakContinueStatement = function(node, keyword) {\n let isBreak = keyword === \"break\"\n this.next()\n if (this.eat(tt.semi) || this.insertSemicolon()) node.label = null\n else if (this.type !== tt.name) this.unexpected()\n else {\n node.label = this.parseIdent()\n this.semicolon()\n }\n\n // Verify that there is an actual destination to break or\n // continue to.\n let i = 0\n for (; i < this.labels.length; ++i) {\n let lab = this.labels[i]\n if (node.label == null || lab.name === node.label.name) {\n if (lab.kind != null && (isBreak || lab.kind === \"loop\")) break\n if (node.label && isBreak) break\n }\n }\n if (i === this.labels.length) this.raise(node.start, \"Unsyntactic \" + keyword)\n return this.finishNode(node, isBreak ? 
\"BreakStatement\" : \"ContinueStatement\")\n}\n\npp.parseDebuggerStatement = function(node) {\n this.next()\n this.semicolon()\n return this.finishNode(node, \"DebuggerStatement\")\n}\n\npp.parseDoStatement = function(node) {\n this.next()\n this.labels.push(loopLabel)\n node.body = this.parseStatement(\"do\")\n this.labels.pop()\n this.expect(tt._while)\n node.test = this.parseParenExpression()\n if (this.options.ecmaVersion >= 6)\n this.eat(tt.semi)\n else\n this.semicolon()\n return this.finishNode(node, \"DoWhileStatement\")\n}\n\n// Disambiguating between a `for` and a `for`/`in` or `for`/`of`\n// loop is non-trivial. Basically, we have to parse the init `var`\n// statement or expression, disallowing the `in` operator (see\n// the second parameter to `parseExpression`), and then check\n// whether the next token is `in` or `of`. When there is no init\n// part (semicolon immediately after the opening parenthesis), it\n// is a regular `for` loop.\n\npp.parseForStatement = function(node) {\n this.next()\n let awaitAt = (this.options.ecmaVersion >= 9 && (this.inAsync || (!this.inFunction && this.options.allowAwaitOutsideFunction)) && this.eatContextual(\"await\")) ? this.lastTokStart : -1\n this.labels.push(loopLabel)\n this.enterScope(0)\n this.expect(tt.parenL)\n if (this.type === tt.semi) {\n if (awaitAt > -1) this.unexpected(awaitAt)\n return this.parseFor(node, null)\n }\n let isLet = this.isLet()\n if (this.type === tt._var || this.type === tt._const || isLet) {\n let init = this.startNode(), kind = isLet ? 
\"let\" : this.value\n this.next()\n this.parseVar(init, true, kind)\n this.finishNode(init, \"VariableDeclaration\")\n if ((this.type === tt._in || (this.options.ecmaVersion >= 6 && this.isContextual(\"of\"))) && init.declarations.length === 1) {\n if (this.options.ecmaVersion >= 9) {\n if (this.type === tt._in) {\n if (awaitAt > -1) this.unexpected(awaitAt)\n } else node.await = awaitAt > -1\n }\n return this.parseForIn(node, init)\n }\n if (awaitAt > -1) this.unexpected(awaitAt)\n return this.parseFor(node, init)\n }\n let refDestructuringErrors = new DestructuringErrors\n let init = this.parseExpression(true, refDestructuringErrors)\n if (this.type === tt._in || (this.options.ecmaVersion >= 6 && this.isContextual(\"of\"))) {\n if (this.options.ecmaVersion >= 9) {\n if (this.type === tt._in) {\n if (awaitAt > -1) this.unexpected(awaitAt)\n } else node.await = awaitAt > -1\n }\n this.toAssignable(init, false, refDestructuringErrors)\n this.checkLValPattern(init)\n return this.parseForIn(node, init)\n } else {\n this.checkExpressionErrors(refDestructuringErrors, true)\n }\n if (awaitAt > -1) this.unexpected(awaitAt)\n return this.parseFor(node, init)\n}\n\npp.parseFunctionStatement = function(node, isAsync, declarationPosition) {\n this.next()\n return this.parseFunction(node, FUNC_STATEMENT | (declarationPosition ? 0 : FUNC_HANGING_STATEMENT), false, isAsync)\n}\n\npp.parseIfStatement = function(node) {\n this.next()\n node.test = this.parseParenExpression()\n // allow function declarations in branches, but only in non-strict mode\n node.consequent = this.parseStatement(\"if\")\n node.alternate = this.eat(tt._else) ? 
this.parseStatement(\"if\") : null\n return this.finishNode(node, \"IfStatement\")\n}\n\npp.parseReturnStatement = function(node) {\n if (!this.inFunction && !this.options.allowReturnOutsideFunction)\n this.raise(this.start, \"'return' outside of function\")\n this.next()\n\n // In `return` (and `break`/`continue`), the keywords with\n // optional arguments, we eagerly look for a semicolon or the\n // possibility to insert one.\n\n if (this.eat(tt.semi) || this.insertSemicolon()) node.argument = null\n else { node.argument = this.parseExpression(); this.semicolon() }\n return this.finishNode(node, \"ReturnStatement\")\n}\n\npp.parseSwitchStatement = function(node) {\n this.next()\n node.discriminant = this.parseParenExpression()\n node.cases = []\n this.expect(tt.braceL)\n this.labels.push(switchLabel)\n this.enterScope(0)\n\n // Statements under must be grouped (by label) in SwitchCase\n // nodes. `cur` is used to keep the node that we are currently\n // adding statements to.\n\n let cur\n for (let sawDefault = false; this.type !== tt.braceR;) {\n if (this.type === tt._case || this.type === tt._default) {\n let isCase = this.type === tt._case\n if (cur) this.finishNode(cur, \"SwitchCase\")\n node.cases.push(cur = this.startNode())\n cur.consequent = []\n this.next()\n if (isCase) {\n cur.test = this.parseExpression()\n } else {\n if (sawDefault) this.raiseRecoverable(this.lastTokStart, \"Multiple default clauses\")\n sawDefault = true\n cur.test = null\n }\n this.expect(tt.colon)\n } else {\n if (!cur) this.unexpected()\n cur.consequent.push(this.parseStatement(null))\n }\n }\n this.exitScope()\n if (cur) this.finishNode(cur, \"SwitchCase\")\n this.next() // Closing brace\n this.labels.pop()\n return this.finishNode(node, \"SwitchStatement\")\n}\n\npp.parseThrowStatement = function(node) {\n this.next()\n if (lineBreak.test(this.input.slice(this.lastTokEnd, this.start)))\n this.raise(this.lastTokEnd, \"Illegal newline after throw\")\n node.argument = 
this.parseExpression()\n this.semicolon()\n return this.finishNode(node, \"ThrowStatement\")\n}\n\n// Reused empty array added for node fields that are always empty.\n\nconst empty = []\n\npp.parseTryStatement = function(node) {\n this.next()\n node.block = this.parseBlock()\n node.handler = null\n if (this.type === tt._catch) {\n let clause = this.startNode()\n this.next()\n if (this.eat(tt.parenL)) {\n clause.param = this.parseBindingAtom()\n let simple = clause.param.type === \"Identifier\"\n this.enterScope(simple ? SCOPE_SIMPLE_CATCH : 0)\n this.checkLValPattern(clause.param, simple ? BIND_SIMPLE_CATCH : BIND_LEXICAL)\n this.expect(tt.parenR)\n } else {\n if (this.options.ecmaVersion < 10) this.unexpected()\n clause.param = null\n this.enterScope(0)\n }\n clause.body = this.parseBlock(false)\n this.exitScope()\n node.handler = this.finishNode(clause, \"CatchClause\")\n }\n node.finalizer = this.eat(tt._finally) ? this.parseBlock() : null\n if (!node.handler && !node.finalizer)\n this.raise(node.start, \"Missing catch or finally clause\")\n return this.finishNode(node, \"TryStatement\")\n}\n\npp.parseVarStatement = function(node, kind) {\n this.next()\n this.parseVar(node, false, kind)\n this.semicolon()\n return this.finishNode(node, \"VariableDeclaration\")\n}\n\npp.parseWhileStatement = function(node) {\n this.next()\n node.test = this.parseParenExpression()\n this.labels.push(loopLabel)\n node.body = this.parseStatement(\"while\")\n this.labels.pop()\n return this.finishNode(node, \"WhileStatement\")\n}\n\npp.parseWithStatement = function(node) {\n if (this.strict) this.raise(this.start, \"'with' in strict mode\")\n this.next()\n node.object = this.parseParenExpression()\n node.body = this.parseStatement(\"with\")\n return this.finishNode(node, \"WithStatement\")\n}\n\npp.parseEmptyStatement = function(node) {\n this.next()\n return this.finishNode(node, \"EmptyStatement\")\n}\n\npp.parseLabeledStatement = function(node, maybeName, expr, context) {\n for 
(let label of this.labels)\n if (label.name === maybeName)\n this.raise(expr.start, \"Label '\" + maybeName + \"' is already declared\")\n let kind = this.type.isLoop ? \"loop\" : this.type === tt._switch ? \"switch\" : null\n for (let i = this.labels.length - 1; i >= 0; i--) {\n let label = this.labels[i]\n if (label.statementStart === node.start) {\n // Update information about previous labels on this node\n label.statementStart = this.start\n label.kind = kind\n } else break\n }\n this.labels.push({name: maybeName, kind, statementStart: this.start})\n node.body = this.parseStatement(context ? context.indexOf(\"label\") === -1 ? context + \"label\" : context : \"label\")\n this.labels.pop()\n node.label = expr\n return this.finishNode(node, \"LabeledStatement\")\n}\n\npp.parseExpressionStatement = function(node, expr) {\n node.expression = expr\n this.semicolon()\n return this.finishNode(node, \"ExpressionStatement\")\n}\n\n// Parse a semicolon-enclosed block of statements, handling `\"use\n// strict\"` declarations when `allowStrict` is true (used for\n// function bodies).\n\npp.parseBlock = function(createNewLexicalScope = true, node = this.startNode(), exitStrict) {\n node.body = []\n this.expect(tt.braceL)\n if (createNewLexicalScope) this.enterScope(0)\n while (this.type !== tt.braceR) {\n let stmt = this.parseStatement(null)\n node.body.push(stmt)\n }\n if (exitStrict) this.strict = false\n this.next()\n if (createNewLexicalScope) this.exitScope()\n return this.finishNode(node, \"BlockStatement\")\n}\n\n// Parse a regular `for` loop. The disambiguation code in\n// `parseStatement` will already have parsed the init statement or\n// expression.\n\npp.parseFor = function(node, init) {\n node.init = init\n this.expect(tt.semi)\n node.test = this.type === tt.semi ? null : this.parseExpression()\n this.expect(tt.semi)\n node.update = this.type === tt.parenR ? 
null : this.parseExpression()\n this.expect(tt.parenR)\n node.body = this.parseStatement(\"for\")\n this.exitScope()\n this.labels.pop()\n return this.finishNode(node, \"ForStatement\")\n}\n\n// Parse a `for`/`in` and `for`/`of` loop, which are almost\n// same from parser's perspective.\n\npp.parseForIn = function(node, init) {\n const isForIn = this.type === tt._in\n this.next()\n\n if (\n init.type === \"VariableDeclaration\" &&\n init.declarations[0].init != null &&\n (\n !isForIn ||\n this.options.ecmaVersion < 8 ||\n this.strict ||\n init.kind !== \"var\" ||\n init.declarations[0].id.type !== \"Identifier\"\n )\n ) {\n this.raise(\n init.start,\n `${\n isForIn ? \"for-in\" : \"for-of\"\n } loop variable declaration may not have an initializer`\n )\n }\n node.left = init\n node.right = isForIn ? this.parseExpression() : this.parseMaybeAssign()\n this.expect(tt.parenR)\n node.body = this.parseStatement(\"for\")\n this.exitScope()\n this.labels.pop()\n return this.finishNode(node, isForIn ? \"ForInStatement\" : \"ForOfStatement\")\n}\n\n// Parse a list of variable declarations.\n\npp.parseVar = function(node, isFor, kind) {\n node.declarations = []\n node.kind = kind\n for (;;) {\n let decl = this.startNode()\n this.parseVarId(decl, kind)\n if (this.eat(tt.eq)) {\n decl.init = this.parseMaybeAssign(isFor)\n } else if (kind === \"const\" && !(this.type === tt._in || (this.options.ecmaVersion >= 6 && this.isContextual(\"of\")))) {\n this.unexpected()\n } else if (decl.id.type !== \"Identifier\" && !(isFor && (this.type === tt._in || this.isContextual(\"of\")))) {\n this.raise(this.lastTokEnd, \"Complex binding patterns require an initialization value\")\n } else {\n decl.init = null\n }\n node.declarations.push(this.finishNode(decl, \"VariableDeclarator\"))\n if (!this.eat(tt.comma)) break\n }\n return node\n}\n\npp.parseVarId = function(decl, kind) {\n decl.id = this.parseBindingAtom()\n this.checkLValPattern(decl.id, kind === \"var\" ? 
BIND_VAR : BIND_LEXICAL, false)\n}\n\nconst FUNC_STATEMENT = 1, FUNC_HANGING_STATEMENT = 2, FUNC_NULLABLE_ID = 4\n\n// Parse a function declaration or literal (depending on the\n// `statement & FUNC_STATEMENT`).\n\n// Remove `allowExpressionBody` for 7.0.0, as it is only called with false\npp.parseFunction = function(node, statement, allowExpressionBody, isAsync) {\n this.initFunction(node)\n if (this.options.ecmaVersion >= 9 || this.options.ecmaVersion >= 6 && !isAsync) {\n if (this.type === tt.star && (statement & FUNC_HANGING_STATEMENT))\n this.unexpected()\n node.generator = this.eat(tt.star)\n }\n if (this.options.ecmaVersion >= 8)\n node.async = !!isAsync\n\n if (statement & FUNC_STATEMENT) {\n node.id = (statement & FUNC_NULLABLE_ID) && this.type !== tt.name ? null : this.parseIdent()\n if (node.id && !(statement & FUNC_HANGING_STATEMENT))\n // If it is a regular function declaration in sloppy mode, then it is\n // subject to Annex B semantics (BIND_FUNCTION). Otherwise, the binding\n // mode depends on properties of the current scope (see\n // treatFunctionsAsVar).\n this.checkLValSimple(node.id, (this.strict || node.generator || node.async) ? this.treatFunctionsAsVar ? BIND_VAR : BIND_LEXICAL : BIND_FUNCTION)\n }\n\n let oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, oldAwaitIdentPos = this.awaitIdentPos\n this.yieldPos = 0\n this.awaitPos = 0\n this.awaitIdentPos = 0\n this.enterScope(functionFlags(node.async, node.generator))\n\n if (!(statement & FUNC_STATEMENT))\n node.id = this.type === tt.name ? this.parseIdent() : null\n\n this.parseFunctionParams(node)\n this.parseFunctionBody(node, allowExpressionBody, false)\n\n this.yieldPos = oldYieldPos\n this.awaitPos = oldAwaitPos\n this.awaitIdentPos = oldAwaitIdentPos\n return this.finishNode(node, (statement & FUNC_STATEMENT) ? 
\"FunctionDeclaration\" : \"FunctionExpression\")\n}\n\npp.parseFunctionParams = function(node) {\n this.expect(tt.parenL)\n node.params = this.parseBindingList(tt.parenR, false, this.options.ecmaVersion >= 8)\n this.checkYieldAwaitInDefaultParams()\n}\n\n// Parse a class declaration or literal (depending on the\n// `isStatement` parameter).\n\npp.parseClass = function(node, isStatement) {\n this.next()\n\n // ecma-262 14.6 Class Definitions\n // A class definition is always strict mode code.\n const oldStrict = this.strict\n this.strict = true\n\n this.parseClassId(node, isStatement)\n this.parseClassSuper(node)\n let classBody = this.startNode()\n let hadConstructor = false\n classBody.body = []\n this.expect(tt.braceL)\n while (this.type !== tt.braceR) {\n const element = this.parseClassElement(node.superClass !== null)\n if (element) {\n classBody.body.push(element)\n if (element.type === \"MethodDefinition\" && element.kind === \"constructor\") {\n if (hadConstructor) this.raise(element.start, \"Duplicate constructor in the same class\")\n hadConstructor = true\n }\n }\n }\n this.strict = oldStrict\n this.next()\n node.body = this.finishNode(classBody, \"ClassBody\")\n return this.finishNode(node, isStatement ? 
\"ClassDeclaration\" : \"ClassExpression\")\n}\n\npp.parseClassElement = function(constructorAllowsSuper) {\n if (this.eat(tt.semi)) return null\n\n let method = this.startNode()\n const tryContextual = (k, noLineBreak = false) => {\n const start = this.start, startLoc = this.startLoc\n if (!this.eatContextual(k)) return false\n if (this.type !== tt.parenL && (!noLineBreak || !this.canInsertSemicolon())) return true\n if (method.key) this.unexpected()\n method.computed = false\n method.key = this.startNodeAt(start, startLoc)\n method.key.name = k\n this.finishNode(method.key, \"Identifier\")\n return false\n }\n\n method.kind = \"method\"\n method.static = tryContextual(\"static\")\n let isGenerator = this.eat(tt.star)\n let isAsync = false\n if (!isGenerator) {\n if (this.options.ecmaVersion >= 8 && tryContextual(\"async\", true)) {\n isAsync = true\n isGenerator = this.options.ecmaVersion >= 9 && this.eat(tt.star)\n } else if (tryContextual(\"get\")) {\n method.kind = \"get\"\n } else if (tryContextual(\"set\")) {\n method.kind = \"set\"\n }\n }\n if (!method.key) this.parsePropertyName(method)\n let {key} = method\n let allowsDirectSuper = false\n if (!method.computed && !method.static && (key.type === \"Identifier\" && key.name === \"constructor\" ||\n key.type === \"Literal\" && key.value === \"constructor\")) {\n if (method.kind !== \"method\") this.raise(key.start, \"Constructor can't have get/set modifier\")\n if (isGenerator) this.raise(key.start, \"Constructor can't be a generator\")\n if (isAsync) this.raise(key.start, \"Constructor can't be an async method\")\n method.kind = \"constructor\"\n allowsDirectSuper = constructorAllowsSuper\n } else if (method.static && key.type === \"Identifier\" && key.name === \"prototype\") {\n this.raise(key.start, \"Classes may not have a static property named prototype\")\n }\n this.parseClassMethod(method, isGenerator, isAsync, allowsDirectSuper)\n if (method.kind === \"get\" && method.value.params.length !== 0)\n 
this.raiseRecoverable(method.value.start, \"getter should have no params\")\n if (method.kind === \"set\" && method.value.params.length !== 1)\n this.raiseRecoverable(method.value.start, \"setter should have exactly one param\")\n if (method.kind === \"set\" && method.value.params[0].type === \"RestElement\")\n this.raiseRecoverable(method.value.params[0].start, \"Setter cannot use rest params\")\n return method\n}\n\npp.parseClassMethod = function(method, isGenerator, isAsync, allowsDirectSuper) {\n method.value = this.parseMethod(isGenerator, isAsync, allowsDirectSuper)\n return this.finishNode(method, \"MethodDefinition\")\n}\n\npp.parseClassId = function(node, isStatement) {\n if (this.type === tt.name) {\n node.id = this.parseIdent()\n if (isStatement)\n this.checkLValSimple(node.id, BIND_LEXICAL, false)\n } else {\n if (isStatement === true)\n this.unexpected()\n node.id = null\n }\n}\n\npp.parseClassSuper = function(node) {\n node.superClass = this.eat(tt._extends) ? this.parseExprSubscripts() : null\n}\n\n// Parses module export declaration.\n\npp.parseExport = function(node, exports) {\n this.next()\n // export * from '...'\n if (this.eat(tt.star)) {\n if (this.options.ecmaVersion >= 11) {\n if (this.eatContextual(\"as\")) {\n node.exported = this.parseIdent(true)\n this.checkExport(exports, node.exported.name, this.lastTokStart)\n } else {\n node.exported = null\n }\n }\n this.expectContextual(\"from\")\n if (this.type !== tt.string) this.unexpected()\n node.source = this.parseExprAtom()\n this.semicolon()\n return this.finishNode(node, \"ExportAllDeclaration\")\n }\n if (this.eat(tt._default)) { // export default ...\n this.checkExport(exports, \"default\", this.lastTokStart)\n let isAsync\n if (this.type === tt._function || (isAsync = this.isAsyncFunction())) {\n let fNode = this.startNode()\n this.next()\n if (isAsync) this.next()\n node.declaration = this.parseFunction(fNode, FUNC_STATEMENT | FUNC_NULLABLE_ID, false, isAsync)\n } else if (this.type 
=== tt._class) {\n let cNode = this.startNode()\n node.declaration = this.parseClass(cNode, \"nullableID\")\n } else {\n node.declaration = this.parseMaybeAssign()\n this.semicolon()\n }\n return this.finishNode(node, \"ExportDefaultDeclaration\")\n }\n // export var|const|let|function|class ...\n if (this.shouldParseExportStatement()) {\n node.declaration = this.parseStatement(null)\n if (node.declaration.type === \"VariableDeclaration\")\n this.checkVariableExport(exports, node.declaration.declarations)\n else\n this.checkExport(exports, node.declaration.id.name, node.declaration.id.start)\n node.specifiers = []\n node.source = null\n } else { // export { x, y as z } [from '...']\n node.declaration = null\n node.specifiers = this.parseExportSpecifiers(exports)\n if (this.eatContextual(\"from\")) {\n if (this.type !== tt.string) this.unexpected()\n node.source = this.parseExprAtom()\n } else {\n for (let spec of node.specifiers) {\n // check for keywords used as local names\n this.checkUnreserved(spec.local)\n // check if export is defined\n this.checkLocalExport(spec.local)\n }\n\n node.source = null\n }\n this.semicolon()\n }\n return this.finishNode(node, \"ExportNamedDeclaration\")\n}\n\npp.checkExport = function(exports, name, pos) {\n if (!exports) return\n if (has(exports, name))\n this.raiseRecoverable(pos, \"Duplicate export '\" + name + \"'\")\n exports[name] = true\n}\n\npp.checkPatternExport = function(exports, pat) {\n let type = pat.type\n if (type === \"Identifier\")\n this.checkExport(exports, pat.name, pat.start)\n else if (type === \"ObjectPattern\")\n for (let prop of pat.properties)\n this.checkPatternExport(exports, prop)\n else if (type === \"ArrayPattern\")\n for (let elt of pat.elements) {\n if (elt) this.checkPatternExport(exports, elt)\n }\n else if (type === \"Property\")\n this.checkPatternExport(exports, pat.value)\n else if (type === \"AssignmentPattern\")\n this.checkPatternExport(exports, pat.left)\n else if (type === 
\"RestElement\")\n this.checkPatternExport(exports, pat.argument)\n else if (type === \"ParenthesizedExpression\")\n this.checkPatternExport(exports, pat.expression)\n}\n\npp.checkVariableExport = function(exports, decls) {\n if (!exports) return\n for (let decl of decls)\n this.checkPatternExport(exports, decl.id)\n}\n\npp.shouldParseExportStatement = function() {\n return this.type.keyword === \"var\" ||\n this.type.keyword === \"const\" ||\n this.type.keyword === \"class\" ||\n this.type.keyword === \"function\" ||\n this.isLet() ||\n this.isAsyncFunction()\n}\n\n// Parses a comma-separated list of module exports.\n\npp.parseExportSpecifiers = function(exports) {\n let nodes = [], first = true\n // export { x, y as z } [from '...']\n this.expect(tt.braceL)\n while (!this.eat(tt.braceR)) {\n if (!first) {\n this.expect(tt.comma)\n if (this.afterTrailingComma(tt.braceR)) break\n } else first = false\n\n let node = this.startNode()\n node.local = this.parseIdent(true)\n node.exported = this.eatContextual(\"as\") ? this.parseIdent(true) : node.local\n this.checkExport(exports, node.exported.name, node.exported.start)\n nodes.push(this.finishNode(node, \"ExportSpecifier\"))\n }\n return nodes\n}\n\n// Parses import declaration.\n\npp.parseImport = function(node) {\n this.next()\n // import '...'\n if (this.type === tt.string) {\n node.specifiers = empty\n node.source = this.parseExprAtom()\n } else {\n node.specifiers = this.parseImportSpecifiers()\n this.expectContextual(\"from\")\n node.source = this.type === tt.string ? 
this.parseExprAtom() : this.unexpected()\n }\n this.semicolon()\n return this.finishNode(node, \"ImportDeclaration\")\n}\n\n// Parses a comma-separated list of module imports.\n\npp.parseImportSpecifiers = function() {\n let nodes = [], first = true\n if (this.type === tt.name) {\n // import defaultObj, { x, y as z } from '...'\n let node = this.startNode()\n node.local = this.parseIdent()\n this.checkLValSimple(node.local, BIND_LEXICAL)\n nodes.push(this.finishNode(node, \"ImportDefaultSpecifier\"))\n if (!this.eat(tt.comma)) return nodes\n }\n if (this.type === tt.star) {\n let node = this.startNode()\n this.next()\n this.expectContextual(\"as\")\n node.local = this.parseIdent()\n this.checkLValSimple(node.local, BIND_LEXICAL)\n nodes.push(this.finishNode(node, \"ImportNamespaceSpecifier\"))\n return nodes\n }\n this.expect(tt.braceL)\n while (!this.eat(tt.braceR)) {\n if (!first) {\n this.expect(tt.comma)\n if (this.afterTrailingComma(tt.braceR)) break\n } else first = false\n\n let node = this.startNode()\n node.imported = this.parseIdent(true)\n if (this.eatContextual(\"as\")) {\n node.local = this.parseIdent()\n } else {\n this.checkUnreserved(node.imported)\n node.local = node.imported\n }\n this.checkLValSimple(node.local, BIND_LEXICAL)\n nodes.push(this.finishNode(node, \"ImportSpecifier\"))\n }\n return nodes\n}\n\n// Set `ExpressionStatement#directive` property for directive prologues.\npp.adaptDirectivePrologue = function(statements) {\n for (let i = 0; i < statements.length && this.isDirectiveCandidate(statements[i]); ++i) {\n statements[i].directive = statements[i].expression.raw.slice(1, -1)\n }\n}\npp.isDirectiveCandidate = function(statement) {\n return (\n statement.type === \"ExpressionStatement\" &&\n statement.expression.type === \"Literal\" &&\n typeof statement.expression.value === \"string\" &&\n // Reject parenthesized strings.\n (this.input[statement.start] === \"\\\"\" || this.input[statement.start] === \"'\")\n )\n}\n","import {types as 
tt} from \"./tokentype.js\"\nimport {Parser} from \"./state.js\"\nimport {has} from \"./util.js\"\nimport {BIND_NONE, BIND_OUTSIDE, BIND_LEXICAL} from \"./scopeflags.js\"\n\nconst pp = Parser.prototype\n\n// Convert existing expression atom to assignable pattern\n// if possible.\n\npp.toAssignable = function(node, isBinding, refDestructuringErrors) {\n if (this.options.ecmaVersion >= 6 && node) {\n switch (node.type) {\n case \"Identifier\":\n if (this.inAsync && node.name === \"await\")\n this.raise(node.start, \"Cannot use 'await' as identifier inside an async function\")\n break\n\n case \"ObjectPattern\":\n case \"ArrayPattern\":\n case \"AssignmentPattern\":\n case \"RestElement\":\n break\n\n case \"ObjectExpression\":\n node.type = \"ObjectPattern\"\n if (refDestructuringErrors) this.checkPatternErrors(refDestructuringErrors, true)\n for (let prop of node.properties) {\n this.toAssignable(prop, isBinding)\n // Early error:\n // AssignmentRestProperty[Yield, Await] :\n // `...` DestructuringAssignmentTarget[Yield, Await]\n //\n // It is a Syntax Error if |DestructuringAssignmentTarget| is an |ArrayLiteral| or an |ObjectLiteral|.\n if (\n prop.type === \"RestElement\" &&\n (prop.argument.type === \"ArrayPattern\" || prop.argument.type === \"ObjectPattern\")\n ) {\n this.raise(prop.argument.start, \"Unexpected token\")\n }\n }\n break\n\n case \"Property\":\n // AssignmentProperty has type === \"Property\"\n if (node.kind !== \"init\") this.raise(node.key.start, \"Object pattern can't contain getter or setter\")\n this.toAssignable(node.value, isBinding)\n break\n\n case \"ArrayExpression\":\n node.type = \"ArrayPattern\"\n if (refDestructuringErrors) this.checkPatternErrors(refDestructuringErrors, true)\n this.toAssignableList(node.elements, isBinding)\n break\n\n case \"SpreadElement\":\n node.type = \"RestElement\"\n this.toAssignable(node.argument, isBinding)\n if (node.argument.type === \"AssignmentPattern\")\n this.raise(node.argument.start, \"Rest 
elements cannot have a default value\")\n break\n\n case \"AssignmentExpression\":\n if (node.operator !== \"=\") this.raise(node.left.end, \"Only '=' operator can be used for specifying default value.\")\n node.type = \"AssignmentPattern\"\n delete node.operator\n this.toAssignable(node.left, isBinding)\n break\n\n case \"ParenthesizedExpression\":\n this.toAssignable(node.expression, isBinding, refDestructuringErrors)\n break\n\n case \"ChainExpression\":\n this.raiseRecoverable(node.start, \"Optional chaining cannot appear in left-hand side\")\n break\n\n case \"MemberExpression\":\n if (!isBinding) break\n\n default:\n this.raise(node.start, \"Assigning to rvalue\")\n }\n } else if (refDestructuringErrors) this.checkPatternErrors(refDestructuringErrors, true)\n return node\n}\n\n// Convert list of expression atoms to binding list.\n\npp.toAssignableList = function(exprList, isBinding) {\n let end = exprList.length\n for (let i = 0; i < end; i++) {\n let elt = exprList[i]\n if (elt) this.toAssignable(elt, isBinding)\n }\n if (end) {\n let last = exprList[end - 1]\n if (this.options.ecmaVersion === 6 && isBinding && last && last.type === \"RestElement\" && last.argument.type !== \"Identifier\")\n this.unexpected(last.argument.start)\n }\n return exprList\n}\n\n// Parses spread element.\n\npp.parseSpread = function(refDestructuringErrors) {\n let node = this.startNode()\n this.next()\n node.argument = this.parseMaybeAssign(false, refDestructuringErrors)\n return this.finishNode(node, \"SpreadElement\")\n}\n\npp.parseRestBinding = function() {\n let node = this.startNode()\n this.next()\n\n // RestElement inside of a function parameter must be an identifier\n if (this.options.ecmaVersion === 6 && this.type !== tt.name)\n this.unexpected()\n\n node.argument = this.parseBindingAtom()\n\n return this.finishNode(node, \"RestElement\")\n}\n\n// Parses lvalue (assignable) atom.\n\npp.parseBindingAtom = function() {\n if (this.options.ecmaVersion >= 6) {\n switch 
(this.type) {\n case tt.bracketL:\n let node = this.startNode()\n this.next()\n node.elements = this.parseBindingList(tt.bracketR, true, true)\n return this.finishNode(node, \"ArrayPattern\")\n\n case tt.braceL:\n return this.parseObj(true)\n }\n }\n return this.parseIdent()\n}\n\npp.parseBindingList = function(close, allowEmpty, allowTrailingComma) {\n let elts = [], first = true\n while (!this.eat(close)) {\n if (first) first = false\n else this.expect(tt.comma)\n if (allowEmpty && this.type === tt.comma) {\n elts.push(null)\n } else if (allowTrailingComma && this.afterTrailingComma(close)) {\n break\n } else if (this.type === tt.ellipsis) {\n let rest = this.parseRestBinding()\n this.parseBindingListItem(rest)\n elts.push(rest)\n if (this.type === tt.comma) this.raise(this.start, \"Comma is not permitted after the rest element\")\n this.expect(close)\n break\n } else {\n let elem = this.parseMaybeDefault(this.start, this.startLoc)\n this.parseBindingListItem(elem)\n elts.push(elem)\n }\n }\n return elts\n}\n\npp.parseBindingListItem = function(param) {\n return param\n}\n\n// Parses assignment pattern around given atom if possible.\n\npp.parseMaybeDefault = function(startPos, startLoc, left) {\n left = left || this.parseBindingAtom()\n if (this.options.ecmaVersion < 6 || !this.eat(tt.eq)) return left\n let node = this.startNodeAt(startPos, startLoc)\n node.left = left\n node.right = this.parseMaybeAssign()\n return this.finishNode(node, \"AssignmentPattern\")\n}\n\n// The following three functions all verify that a node is an lvalue —\n// something that can be bound, or assigned to. 
In order to do so, they perform\n// a variety of checks:\n//\n// - Check that none of the bound/assigned-to identifiers are reserved words.\n// - Record name declarations for bindings in the appropriate scope.\n// - Check duplicate argument names, if checkClashes is set.\n//\n// If a complex binding pattern is encountered (e.g., object and array\n// destructuring), the entire pattern is recursively checked.\n//\n// There are three versions of checkLVal*() appropriate for different\n// circumstances:\n//\n// - checkLValSimple() shall be used if the syntactic construct supports\n// nothing other than identifiers and member expressions. Parenthesized\n// expressions are also correctly handled. This is generally appropriate for\n// constructs for which the spec says\n//\n// > It is a Syntax Error if AssignmentTargetType of [the production] is not\n// > simple.\n//\n// It is also appropriate for checking if an identifier is valid and not\n// defined elsewhere, like import declarations or function/class identifiers.\n//\n// Examples where this is used include:\n// a += …;\n// import a from '…';\n// where a is the node to be checked.\n//\n// - checkLValPattern() shall be used if the syntactic construct supports\n// anything checkLValSimple() supports, as well as object and array\n// destructuring patterns. 
This is generally appropriate for constructs for\n// which the spec says\n//\n// > It is a Syntax Error if [the production] is neither an ObjectLiteral nor\n// > an ArrayLiteral and AssignmentTargetType of [the production] is not\n// > simple.\n//\n// Examples where this is used include:\n// (a = …);\n// const a = …;\n// try { … } catch (a) { … }\n// where a is the node to be checked.\n//\n// - checkLValInnerPattern() shall be used if the syntactic construct supports\n// anything checkLValPattern() supports, as well as default assignment\n// patterns, rest elements, and other constructs that may appear within an\n// object or array destructuring pattern.\n//\n// As a special case, function parameters also use checkLValInnerPattern(),\n// as they also support defaults and rest constructs.\n//\n// These functions deliberately support both assignment and binding constructs,\n// as the logic for both is exceedingly similar. If the node is the target of\n// an assignment, then bindingType should be set to BIND_NONE. Otherwise, it\n// should be set to the appropriate BIND_* constant, like BIND_VAR or\n// BIND_LEXICAL.\n//\n// If the function is called with a non-BIND_NONE bindingType, then\n// additionally a checkClashes object may be specified to allow checking for\n// duplicate argument names. checkClashes is ignored if the provided construct\n// is an assignment (i.e., bindingType is BIND_NONE).\n\npp.checkLValSimple = function(expr, bindingType = BIND_NONE, checkClashes) {\n const isBind = bindingType !== BIND_NONE\n\n switch (expr.type) {\n case \"Identifier\":\n if (this.strict && this.reservedWordsStrictBind.test(expr.name))\n this.raiseRecoverable(expr.start, (isBind ? 
\"Binding \" : \"Assigning to \") + expr.name + \" in strict mode\")\n if (isBind) {\n if (bindingType === BIND_LEXICAL && expr.name === \"let\")\n this.raiseRecoverable(expr.start, \"let is disallowed as a lexically bound name\")\n if (checkClashes) {\n if (has(checkClashes, expr.name))\n this.raiseRecoverable(expr.start, \"Argument name clash\")\n checkClashes[expr.name] = true\n }\n if (bindingType !== BIND_OUTSIDE) this.declareName(expr.name, bindingType, expr.start)\n }\n break\n\n case \"ChainExpression\":\n this.raiseRecoverable(expr.start, \"Optional chaining cannot appear in left-hand side\")\n break\n\n case \"MemberExpression\":\n if (isBind) this.raiseRecoverable(expr.start, \"Binding member expression\")\n break\n\n case \"ParenthesizedExpression\":\n if (isBind) this.raiseRecoverable(expr.start, \"Binding parenthesized expression\")\n return this.checkLValSimple(expr.expression, bindingType, checkClashes)\n\n default:\n this.raise(expr.start, (isBind ? \"Binding\" : \"Assigning to\") + \" rvalue\")\n }\n}\n\npp.checkLValPattern = function(expr, bindingType = BIND_NONE, checkClashes) {\n switch (expr.type) {\n case \"ObjectPattern\":\n for (let prop of expr.properties) {\n this.checkLValInnerPattern(prop, bindingType, checkClashes)\n }\n break\n\n case \"ArrayPattern\":\n for (let elem of expr.elements) {\n if (elem) this.checkLValInnerPattern(elem, bindingType, checkClashes)\n }\n break\n\n default:\n this.checkLValSimple(expr, bindingType, checkClashes)\n }\n}\n\npp.checkLValInnerPattern = function(expr, bindingType = BIND_NONE, checkClashes) {\n switch (expr.type) {\n case \"Property\":\n // AssignmentProperty has type === \"Property\"\n this.checkLValInnerPattern(expr.value, bindingType, checkClashes)\n break\n\n case \"AssignmentPattern\":\n this.checkLValPattern(expr.left, bindingType, checkClashes)\n break\n\n case \"RestElement\":\n this.checkLValPattern(expr.argument, bindingType, checkClashes)\n break\n\n default:\n 
this.checkLValPattern(expr, bindingType, checkClashes)\n }\n}\n","// A recursive descent parser operates by defining functions for all\n// syntactic elements, and recursively calling those, each function\n// advancing the input stream and returning an AST node. Precedence\n// of constructs (for example, the fact that `!x[1]` means `!(x[1])`\n// instead of `(!x)[1]` is handled by the fact that the parser\n// function that parses unary prefix operators is called first, and\n// in turn calls the function that parses `[]` subscripts — that\n// way, it'll receive the node for `x[1]` already parsed, and wraps\n// *that* in the unary operator node.\n//\n// Acorn uses an [operator precedence parser][opp] to handle binary\n// operator precedence, because it is much more compact than using\n// the technique outlined above, which uses different, nesting\n// functions to specify precedence, for all of the ten binary\n// precedence levels that JavaScript defines.\n//\n// [opp]: http://en.wikipedia.org/wiki/Operator-precedence_parser\n\nimport {types as tt} from \"./tokentype.js\"\nimport {Parser} from \"./state.js\"\nimport {DestructuringErrors} from \"./parseutil.js\"\nimport {lineBreak} from \"./whitespace.js\"\nimport {functionFlags, SCOPE_ARROW, SCOPE_SUPER, SCOPE_DIRECT_SUPER, BIND_OUTSIDE, BIND_VAR} from \"./scopeflags.js\"\n\nconst pp = Parser.prototype\n\n// Check if property name clashes with already added.\n// Object/class getters and setters are not allowed to clash —\n// either with each other or with an init property — and in\n// strict mode, init properties are also not allowed to be repeated.\n\npp.checkPropClash = function(prop, propHash, refDestructuringErrors) {\n if (this.options.ecmaVersion >= 9 && prop.type === \"SpreadElement\")\n return\n if (this.options.ecmaVersion >= 6 && (prop.computed || prop.method || prop.shorthand))\n return\n let {key} = prop, name\n switch (key.type) {\n case \"Identifier\": name = key.name; break\n case \"Literal\": name = 
String(key.value); break\n default: return\n }\n let {kind} = prop\n if (this.options.ecmaVersion >= 6) {\n if (name === \"__proto__\" && kind === \"init\") {\n if (propHash.proto) {\n if (refDestructuringErrors) {\n if (refDestructuringErrors.doubleProto < 0)\n refDestructuringErrors.doubleProto = key.start\n // Backwards-compat kludge. Can be removed in version 6.0\n } else this.raiseRecoverable(key.start, \"Redefinition of __proto__ property\")\n }\n propHash.proto = true\n }\n return\n }\n name = \"$\" + name\n let other = propHash[name]\n if (other) {\n let redefinition\n if (kind === \"init\") {\n redefinition = this.strict && other.init || other.get || other.set\n } else {\n redefinition = other.init || other[kind]\n }\n if (redefinition)\n this.raiseRecoverable(key.start, \"Redefinition of property\")\n } else {\n other = propHash[name] = {\n init: false,\n get: false,\n set: false\n }\n }\n other[kind] = true\n}\n\n// ### Expression parsing\n\n// These nest, from the most general expression type at the top to\n// 'atomic', nondivisible expression types at the bottom. Most of\n// the functions will simply let the function(s) below them parse,\n// and, *if* the syntactic construct they handle is present, wrap\n// the AST node that the inner parser gave them in another node.\n\n// Parse a full expression. 
The optional arguments are used to\n// forbid the `in` operator (in for loops initalization expressions)\n// and provide reference for storing '=' operator inside shorthand\n// property assignment in contexts where both object expression\n// and object pattern might appear (so it's possible to raise\n// delayed syntax error at correct position).\n\npp.parseExpression = function(noIn, refDestructuringErrors) {\n let startPos = this.start, startLoc = this.startLoc\n let expr = this.parseMaybeAssign(noIn, refDestructuringErrors)\n if (this.type === tt.comma) {\n let node = this.startNodeAt(startPos, startLoc)\n node.expressions = [expr]\n while (this.eat(tt.comma)) node.expressions.push(this.parseMaybeAssign(noIn, refDestructuringErrors))\n return this.finishNode(node, \"SequenceExpression\")\n }\n return expr\n}\n\n// Parse an assignment expression. This includes applications of\n// operators like `+=`.\n\npp.parseMaybeAssign = function(noIn, refDestructuringErrors, afterLeftParse) {\n if (this.isContextual(\"yield\")) {\n if (this.inGenerator) return this.parseYield(noIn)\n // The tokenizer will assume an expression is allowed after\n // `yield`, but this isn't that kind of yield\n else this.exprAllowed = false\n }\n\n let ownDestructuringErrors = false, oldParenAssign = -1, oldTrailingComma = -1\n if (refDestructuringErrors) {\n oldParenAssign = refDestructuringErrors.parenthesizedAssign\n oldTrailingComma = refDestructuringErrors.trailingComma\n refDestructuringErrors.parenthesizedAssign = refDestructuringErrors.trailingComma = -1\n } else {\n refDestructuringErrors = new DestructuringErrors\n ownDestructuringErrors = true\n }\n\n let startPos = this.start, startLoc = this.startLoc\n if (this.type === tt.parenL || this.type === tt.name)\n this.potentialArrowAt = this.start\n let left = this.parseMaybeConditional(noIn, refDestructuringErrors)\n if (afterLeftParse) left = afterLeftParse.call(this, left, startPos, startLoc)\n if (this.type.isAssign) {\n let node = 
this.startNodeAt(startPos, startLoc)\n node.operator = this.value\n if (this.type === tt.eq)\n left = this.toAssignable(left, false, refDestructuringErrors)\n if (!ownDestructuringErrors) {\n refDestructuringErrors.parenthesizedAssign = refDestructuringErrors.trailingComma = refDestructuringErrors.doubleProto = -1\n }\n if (refDestructuringErrors.shorthandAssign >= left.start)\n refDestructuringErrors.shorthandAssign = -1 // reset because shorthand default was used correctly\n if (this.type === tt.eq)\n this.checkLValPattern(left)\n else\n this.checkLValSimple(left)\n node.left = left\n this.next()\n node.right = this.parseMaybeAssign(noIn)\n return this.finishNode(node, \"AssignmentExpression\")\n } else {\n if (ownDestructuringErrors) this.checkExpressionErrors(refDestructuringErrors, true)\n }\n if (oldParenAssign > -1) refDestructuringErrors.parenthesizedAssign = oldParenAssign\n if (oldTrailingComma > -1) refDestructuringErrors.trailingComma = oldTrailingComma\n return left\n}\n\n// Parse a ternary conditional (`?:`) operator.\n\npp.parseMaybeConditional = function(noIn, refDestructuringErrors) {\n let startPos = this.start, startLoc = this.startLoc\n let expr = this.parseExprOps(noIn, refDestructuringErrors)\n if (this.checkExpressionErrors(refDestructuringErrors)) return expr\n if (this.eat(tt.question)) {\n let node = this.startNodeAt(startPos, startLoc)\n node.test = expr\n node.consequent = this.parseMaybeAssign()\n this.expect(tt.colon)\n node.alternate = this.parseMaybeAssign(noIn)\n return this.finishNode(node, \"ConditionalExpression\")\n }\n return expr\n}\n\n// Start the precedence parser.\n\npp.parseExprOps = function(noIn, refDestructuringErrors) {\n let startPos = this.start, startLoc = this.startLoc\n let expr = this.parseMaybeUnary(refDestructuringErrors, false)\n if (this.checkExpressionErrors(refDestructuringErrors)) return expr\n return expr.start === startPos && expr.type === \"ArrowFunctionExpression\" ? 
expr : this.parseExprOp(expr, startPos, startLoc, -1, noIn)\n}\n\n// Parse binary operators with the operator precedence parsing\n// algorithm. `left` is the left-hand side of the operator.\n// `minPrec` provides context that allows the function to stop and\n// defer further parser to one of its callers when it encounters an\n// operator that has a lower precedence than the set it is parsing.\n\npp.parseExprOp = function(left, leftStartPos, leftStartLoc, minPrec, noIn) {\n let prec = this.type.binop\n if (prec != null && (!noIn || this.type !== tt._in)) {\n if (prec > minPrec) {\n let logical = this.type === tt.logicalOR || this.type === tt.logicalAND\n let coalesce = this.type === tt.coalesce\n if (coalesce) {\n // Handle the precedence of `tt.coalesce` as equal to the range of logical expressions.\n // In other words, `node.right` shouldn't contain logical expressions in order to check the mixed error.\n prec = tt.logicalAND.binop\n }\n let op = this.value\n this.next()\n let startPos = this.start, startLoc = this.startLoc\n let right = this.parseExprOp(this.parseMaybeUnary(null, false), startPos, startLoc, prec, noIn)\n let node = this.buildBinary(leftStartPos, leftStartLoc, left, right, op, logical || coalesce)\n if ((logical && this.type === tt.coalesce) || (coalesce && (this.type === tt.logicalOR || this.type === tt.logicalAND))) {\n this.raiseRecoverable(this.start, \"Logical expressions and coalesce expressions cannot be mixed. Wrap either by parentheses\")\n }\n return this.parseExprOp(node, leftStartPos, leftStartLoc, minPrec, noIn)\n }\n }\n return left\n}\n\npp.buildBinary = function(startPos, startLoc, left, right, op, logical) {\n let node = this.startNodeAt(startPos, startLoc)\n node.left = left\n node.operator = op\n node.right = right\n return this.finishNode(node, logical ? 
\"LogicalExpression\" : \"BinaryExpression\")\n}\n\n// Parse unary operators, both prefix and postfix.\n\npp.parseMaybeUnary = function(refDestructuringErrors, sawUnary) {\n let startPos = this.start, startLoc = this.startLoc, expr\n if (this.isContextual(\"await\") && (this.inAsync || (!this.inFunction && this.options.allowAwaitOutsideFunction))) {\n expr = this.parseAwait()\n sawUnary = true\n } else if (this.type.prefix) {\n let node = this.startNode(), update = this.type === tt.incDec\n node.operator = this.value\n node.prefix = true\n this.next()\n node.argument = this.parseMaybeUnary(null, true)\n this.checkExpressionErrors(refDestructuringErrors, true)\n if (update) this.checkLValSimple(node.argument)\n else if (this.strict && node.operator === \"delete\" &&\n node.argument.type === \"Identifier\")\n this.raiseRecoverable(node.start, \"Deleting local variable in strict mode\")\n else sawUnary = true\n expr = this.finishNode(node, update ? \"UpdateExpression\" : \"UnaryExpression\")\n } else {\n expr = this.parseExprSubscripts(refDestructuringErrors)\n if (this.checkExpressionErrors(refDestructuringErrors)) return expr\n while (this.type.postfix && !this.canInsertSemicolon()) {\n let node = this.startNodeAt(startPos, startLoc)\n node.operator = this.value\n node.prefix = false\n node.argument = expr\n this.checkLValSimple(expr)\n this.next()\n expr = this.finishNode(node, \"UpdateExpression\")\n }\n }\n\n if (!sawUnary && this.eat(tt.starstar))\n return this.buildBinary(startPos, startLoc, expr, this.parseMaybeUnary(null, false), \"**\", false)\n else\n return expr\n}\n\n// Parse call, dot, and `[]`-subscript expressions.\n\npp.parseExprSubscripts = function(refDestructuringErrors) {\n let startPos = this.start, startLoc = this.startLoc\n let expr = this.parseExprAtom(refDestructuringErrors)\n if (expr.type === \"ArrowFunctionExpression\" && this.input.slice(this.lastTokStart, this.lastTokEnd) !== \")\")\n return expr\n let result = this.parseSubscripts(expr, 
startPos, startLoc)\n if (refDestructuringErrors && result.type === \"MemberExpression\") {\n if (refDestructuringErrors.parenthesizedAssign >= result.start) refDestructuringErrors.parenthesizedAssign = -1\n if (refDestructuringErrors.parenthesizedBind >= result.start) refDestructuringErrors.parenthesizedBind = -1\n }\n return result\n}\n\npp.parseSubscripts = function(base, startPos, startLoc, noCalls) {\n let maybeAsyncArrow = this.options.ecmaVersion >= 8 && base.type === \"Identifier\" && base.name === \"async\" &&\n this.lastTokEnd === base.end && !this.canInsertSemicolon() && base.end - base.start === 5 &&\n this.potentialArrowAt === base.start\n let optionalChained = false\n\n while (true) {\n let element = this.parseSubscript(base, startPos, startLoc, noCalls, maybeAsyncArrow, optionalChained)\n\n if (element.optional) optionalChained = true\n if (element === base || element.type === \"ArrowFunctionExpression\") {\n if (optionalChained) {\n const chainNode = this.startNodeAt(startPos, startLoc)\n chainNode.expression = element\n element = this.finishNode(chainNode, \"ChainExpression\")\n }\n return element\n }\n\n base = element\n }\n}\n\npp.parseSubscript = function(base, startPos, startLoc, noCalls, maybeAsyncArrow, optionalChained) {\n let optionalSupported = this.options.ecmaVersion >= 11\n let optional = optionalSupported && this.eat(tt.questionDot)\n if (noCalls && optional) this.raise(this.lastTokStart, \"Optional chaining cannot appear in the callee of new expressions\")\n\n let computed = this.eat(tt.bracketL)\n if (computed || (optional && this.type !== tt.parenL && this.type !== tt.backQuote) || this.eat(tt.dot)) {\n let node = this.startNodeAt(startPos, startLoc)\n node.object = base\n node.property = computed ? 
this.parseExpression() : this.parseIdent(this.options.allowReserved !== \"never\")\n node.computed = !!computed\n if (computed) this.expect(tt.bracketR)\n if (optionalSupported) {\n node.optional = optional\n }\n base = this.finishNode(node, \"MemberExpression\")\n } else if (!noCalls && this.eat(tt.parenL)) {\n let refDestructuringErrors = new DestructuringErrors, oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, oldAwaitIdentPos = this.awaitIdentPos\n this.yieldPos = 0\n this.awaitPos = 0\n this.awaitIdentPos = 0\n let exprList = this.parseExprList(tt.parenR, this.options.ecmaVersion >= 8, false, refDestructuringErrors)\n if (maybeAsyncArrow && !optional && !this.canInsertSemicolon() && this.eat(tt.arrow)) {\n this.checkPatternErrors(refDestructuringErrors, false)\n this.checkYieldAwaitInDefaultParams()\n if (this.awaitIdentPos > 0)\n this.raise(this.awaitIdentPos, \"Cannot use 'await' as identifier inside an async function\")\n this.yieldPos = oldYieldPos\n this.awaitPos = oldAwaitPos\n this.awaitIdentPos = oldAwaitIdentPos\n return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), exprList, true)\n }\n this.checkExpressionErrors(refDestructuringErrors, true)\n this.yieldPos = oldYieldPos || this.yieldPos\n this.awaitPos = oldAwaitPos || this.awaitPos\n this.awaitIdentPos = oldAwaitIdentPos || this.awaitIdentPos\n let node = this.startNodeAt(startPos, startLoc)\n node.callee = base\n node.arguments = exprList\n if (optionalSupported) {\n node.optional = optional\n }\n base = this.finishNode(node, \"CallExpression\")\n } else if (this.type === tt.backQuote) {\n if (optional || optionalChained) {\n this.raise(this.start, \"Optional chaining cannot appear in the tag of tagged template expressions\")\n }\n let node = this.startNodeAt(startPos, startLoc)\n node.tag = base\n node.quasi = this.parseTemplate({isTagged: true})\n base = this.finishNode(node, \"TaggedTemplateExpression\")\n }\n return base\n}\n\n// Parse an atomic expression — either 
a single token that is an\n// expression, an expression started by a keyword like `function` or\n// `new`, or an expression wrapped in punctuation like `()`, `[]`,\n// or `{}`.\n\npp.parseExprAtom = function(refDestructuringErrors) {\n // If a division operator appears in an expression position, the\n // tokenizer got confused, and we force it to read a regexp instead.\n if (this.type === tt.slash) this.readRegexp()\n\n let node, canBeArrow = this.potentialArrowAt === this.start\n switch (this.type) {\n case tt._super:\n if (!this.allowSuper)\n this.raise(this.start, \"'super' keyword outside a method\")\n node = this.startNode()\n this.next()\n if (this.type === tt.parenL && !this.allowDirectSuper)\n this.raise(node.start, \"super() call outside constructor of a subclass\")\n // The `super` keyword can appear at below:\n // SuperProperty:\n // super [ Expression ]\n // super . IdentifierName\n // SuperCall:\n // super ( Arguments )\n if (this.type !== tt.dot && this.type !== tt.bracketL && this.type !== tt.parenL)\n this.unexpected()\n return this.finishNode(node, \"Super\")\n\n case tt._this:\n node = this.startNode()\n this.next()\n return this.finishNode(node, \"ThisExpression\")\n\n case tt.name:\n let startPos = this.start, startLoc = this.startLoc, containsEsc = this.containsEsc\n let id = this.parseIdent(false)\n if (this.options.ecmaVersion >= 8 && !containsEsc && id.name === \"async\" && !this.canInsertSemicolon() && this.eat(tt._function))\n return this.parseFunction(this.startNodeAt(startPos, startLoc), 0, false, true)\n if (canBeArrow && !this.canInsertSemicolon()) {\n if (this.eat(tt.arrow))\n return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), [id], false)\n if (this.options.ecmaVersion >= 8 && id.name === \"async\" && this.type === tt.name && !containsEsc) {\n id = this.parseIdent(false)\n if (this.canInsertSemicolon() || !this.eat(tt.arrow))\n this.unexpected()\n return this.parseArrowExpression(this.startNodeAt(startPos, 
startLoc), [id], true)\n }\n }\n return id\n\n case tt.regexp:\n let value = this.value\n node = this.parseLiteral(value.value)\n node.regex = {pattern: value.pattern, flags: value.flags}\n return node\n\n case tt.num: case tt.string:\n return this.parseLiteral(this.value)\n\n case tt._null: case tt._true: case tt._false:\n node = this.startNode()\n node.value = this.type === tt._null ? null : this.type === tt._true\n node.raw = this.type.keyword\n this.next()\n return this.finishNode(node, \"Literal\")\n\n case tt.parenL:\n let start = this.start, expr = this.parseParenAndDistinguishExpression(canBeArrow)\n if (refDestructuringErrors) {\n if (refDestructuringErrors.parenthesizedAssign < 0 && !this.isSimpleAssignTarget(expr))\n refDestructuringErrors.parenthesizedAssign = start\n if (refDestructuringErrors.parenthesizedBind < 0)\n refDestructuringErrors.parenthesizedBind = start\n }\n return expr\n\n case tt.bracketL:\n node = this.startNode()\n this.next()\n node.elements = this.parseExprList(tt.bracketR, true, true, refDestructuringErrors)\n return this.finishNode(node, \"ArrayExpression\")\n\n case tt.braceL:\n return this.parseObj(false, refDestructuringErrors)\n\n case tt._function:\n node = this.startNode()\n this.next()\n return this.parseFunction(node, 0)\n\n case tt._class:\n return this.parseClass(this.startNode(), false)\n\n case tt._new:\n return this.parseNew()\n\n case tt.backQuote:\n return this.parseTemplate()\n\n case tt._import:\n if (this.options.ecmaVersion >= 11) {\n return this.parseExprImport()\n } else {\n return this.unexpected()\n }\n\n default:\n this.unexpected()\n }\n}\n\npp.parseExprImport = function() {\n const node = this.startNode()\n\n // Consume `import` as an identifier for `import.meta`.\n // Because `this.parseIdent(true)` doesn't check escape sequences, it needs the check of `this.containsEsc`.\n if (this.containsEsc) this.raiseRecoverable(this.start, \"Escape sequence in keyword import\")\n const meta = 
this.parseIdent(true)\n\n switch (this.type) {\n case tt.parenL:\n return this.parseDynamicImport(node)\n case tt.dot:\n node.meta = meta\n return this.parseImportMeta(node)\n default:\n this.unexpected()\n }\n}\n\npp.parseDynamicImport = function(node) {\n this.next() // skip `(`\n\n // Parse node.source.\n node.source = this.parseMaybeAssign()\n\n // Verify ending.\n if (!this.eat(tt.parenR)) {\n const errorPos = this.start\n if (this.eat(tt.comma) && this.eat(tt.parenR)) {\n this.raiseRecoverable(errorPos, \"Trailing comma is not allowed in import()\")\n } else {\n this.unexpected(errorPos)\n }\n }\n\n return this.finishNode(node, \"ImportExpression\")\n}\n\npp.parseImportMeta = function(node) {\n this.next() // skip `.`\n\n const containsEsc = this.containsEsc\n node.property = this.parseIdent(true)\n\n if (node.property.name !== \"meta\")\n this.raiseRecoverable(node.property.start, \"The only valid meta property for import is 'import.meta'\")\n if (containsEsc)\n this.raiseRecoverable(node.start, \"'import.meta' must not contain escaped characters\")\n if (this.options.sourceType !== \"module\")\n this.raiseRecoverable(node.start, \"Cannot use 'import.meta' outside a module\")\n\n return this.finishNode(node, \"MetaProperty\")\n}\n\npp.parseLiteral = function(value) {\n let node = this.startNode()\n node.value = value\n node.raw = this.input.slice(this.start, this.end)\n if (node.raw.charCodeAt(node.raw.length - 1) === 110) node.bigint = node.raw.slice(0, -1).replace(/_/g, \"\")\n this.next()\n return this.finishNode(node, \"Literal\")\n}\n\npp.parseParenExpression = function() {\n this.expect(tt.parenL)\n let val = this.parseExpression()\n this.expect(tt.parenR)\n return val\n}\n\npp.parseParenAndDistinguishExpression = function(canBeArrow) {\n let startPos = this.start, startLoc = this.startLoc, val, allowTrailingComma = this.options.ecmaVersion >= 8\n if (this.options.ecmaVersion >= 6) {\n this.next()\n\n let innerStartPos = this.start, innerStartLoc = 
this.startLoc\n let exprList = [], first = true, lastIsComma = false\n let refDestructuringErrors = new DestructuringErrors, oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, spreadStart\n this.yieldPos = 0\n this.awaitPos = 0\n // Do not save awaitIdentPos to allow checking awaits nested in parameters\n while (this.type !== tt.parenR) {\n first ? first = false : this.expect(tt.comma)\n if (allowTrailingComma && this.afterTrailingComma(tt.parenR, true)) {\n lastIsComma = true\n break\n } else if (this.type === tt.ellipsis) {\n spreadStart = this.start\n exprList.push(this.parseParenItem(this.parseRestBinding()))\n if (this.type === tt.comma) this.raise(this.start, \"Comma is not permitted after the rest element\")\n break\n } else {\n exprList.push(this.parseMaybeAssign(false, refDestructuringErrors, this.parseParenItem))\n }\n }\n let innerEndPos = this.start, innerEndLoc = this.startLoc\n this.expect(tt.parenR)\n\n if (canBeArrow && !this.canInsertSemicolon() && this.eat(tt.arrow)) {\n this.checkPatternErrors(refDestructuringErrors, false)\n this.checkYieldAwaitInDefaultParams()\n this.yieldPos = oldYieldPos\n this.awaitPos = oldAwaitPos\n return this.parseParenArrowList(startPos, startLoc, exprList)\n }\n\n if (!exprList.length || lastIsComma) this.unexpected(this.lastTokStart)\n if (spreadStart) this.unexpected(spreadStart)\n this.checkExpressionErrors(refDestructuringErrors, true)\n this.yieldPos = oldYieldPos || this.yieldPos\n this.awaitPos = oldAwaitPos || this.awaitPos\n\n if (exprList.length > 1) {\n val = this.startNodeAt(innerStartPos, innerStartLoc)\n val.expressions = exprList\n this.finishNodeAt(val, \"SequenceExpression\", innerEndPos, innerEndLoc)\n } else {\n val = exprList[0]\n }\n } else {\n val = this.parseParenExpression()\n }\n\n if (this.options.preserveParens) {\n let par = this.startNodeAt(startPos, startLoc)\n par.expression = val\n return this.finishNode(par, \"ParenthesizedExpression\")\n } else {\n return val\n 
}\n}\n\npp.parseParenItem = function(item) {\n return item\n}\n\npp.parseParenArrowList = function(startPos, startLoc, exprList) {\n return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), exprList)\n}\n\n// New's precedence is slightly tricky. It must allow its argument to\n// be a `[]` or dot subscript expression, but not a call — at least,\n// not without wrapping it in parentheses. Thus, it uses the noCalls\n// argument to parseSubscripts to prevent it from consuming the\n// argument list.\n\nconst empty = []\n\npp.parseNew = function() {\n if (this.containsEsc) this.raiseRecoverable(this.start, \"Escape sequence in keyword new\")\n let node = this.startNode()\n let meta = this.parseIdent(true)\n if (this.options.ecmaVersion >= 6 && this.eat(tt.dot)) {\n node.meta = meta\n let containsEsc = this.containsEsc\n node.property = this.parseIdent(true)\n if (node.property.name !== \"target\")\n this.raiseRecoverable(node.property.start, \"The only valid meta property for new is 'new.target'\")\n if (containsEsc)\n this.raiseRecoverable(node.start, \"'new.target' must not contain escaped characters\")\n if (!this.inNonArrowFunction)\n this.raiseRecoverable(node.start, \"'new.target' can only be used in functions\")\n return this.finishNode(node, \"MetaProperty\")\n }\n let startPos = this.start, startLoc = this.startLoc, isImport = this.type === tt._import\n node.callee = this.parseSubscripts(this.parseExprAtom(), startPos, startLoc, true)\n if (isImport && node.callee.type === \"ImportExpression\") {\n this.raise(startPos, \"Cannot use new with import()\")\n }\n if (this.eat(tt.parenL)) node.arguments = this.parseExprList(tt.parenR, this.options.ecmaVersion >= 8, false)\n else node.arguments = empty\n return this.finishNode(node, \"NewExpression\")\n}\n\n// Parse template expression.\n\npp.parseTemplateElement = function({isTagged}) {\n let elem = this.startNode()\n if (this.type === tt.invalidTemplate) {\n if (!isTagged) {\n 
this.raiseRecoverable(this.start, \"Bad escape sequence in untagged template literal\")\n }\n elem.value = {\n raw: this.value,\n cooked: null\n }\n } else {\n elem.value = {\n raw: this.input.slice(this.start, this.end).replace(/\\r\\n?/g, \"\\n\"),\n cooked: this.value\n }\n }\n this.next()\n elem.tail = this.type === tt.backQuote\n return this.finishNode(elem, \"TemplateElement\")\n}\n\npp.parseTemplate = function({isTagged = false} = {}) {\n let node = this.startNode()\n this.next()\n node.expressions = []\n let curElt = this.parseTemplateElement({isTagged})\n node.quasis = [curElt]\n while (!curElt.tail) {\n if (this.type === tt.eof) this.raise(this.pos, \"Unterminated template literal\")\n this.expect(tt.dollarBraceL)\n node.expressions.push(this.parseExpression())\n this.expect(tt.braceR)\n node.quasis.push(curElt = this.parseTemplateElement({isTagged}))\n }\n this.next()\n return this.finishNode(node, \"TemplateLiteral\")\n}\n\npp.isAsyncProp = function(prop) {\n return !prop.computed && prop.key.type === \"Identifier\" && prop.key.name === \"async\" &&\n (this.type === tt.name || this.type === tt.num || this.type === tt.string || this.type === tt.bracketL || this.type.keyword || (this.options.ecmaVersion >= 9 && this.type === tt.star)) &&\n !lineBreak.test(this.input.slice(this.lastTokEnd, this.start))\n}\n\n// Parse an object literal or binding pattern.\n\npp.parseObj = function(isPattern, refDestructuringErrors) {\n let node = this.startNode(), first = true, propHash = {}\n node.properties = []\n this.next()\n while (!this.eat(tt.braceR)) {\n if (!first) {\n this.expect(tt.comma)\n if (this.options.ecmaVersion >= 5 && this.afterTrailingComma(tt.braceR)) break\n } else first = false\n\n const prop = this.parseProperty(isPattern, refDestructuringErrors)\n if (!isPattern) this.checkPropClash(prop, propHash, refDestructuringErrors)\n node.properties.push(prop)\n }\n return this.finishNode(node, isPattern ? 
\"ObjectPattern\" : \"ObjectExpression\")\n}\n\npp.parseProperty = function(isPattern, refDestructuringErrors) {\n let prop = this.startNode(), isGenerator, isAsync, startPos, startLoc\n if (this.options.ecmaVersion >= 9 && this.eat(tt.ellipsis)) {\n if (isPattern) {\n prop.argument = this.parseIdent(false)\n if (this.type === tt.comma) {\n this.raise(this.start, \"Comma is not permitted after the rest element\")\n }\n return this.finishNode(prop, \"RestElement\")\n }\n // To disallow parenthesized identifier via `this.toAssignable()`.\n if (this.type === tt.parenL && refDestructuringErrors) {\n if (refDestructuringErrors.parenthesizedAssign < 0) {\n refDestructuringErrors.parenthesizedAssign = this.start\n }\n if (refDestructuringErrors.parenthesizedBind < 0) {\n refDestructuringErrors.parenthesizedBind = this.start\n }\n }\n // Parse argument.\n prop.argument = this.parseMaybeAssign(false, refDestructuringErrors)\n // To disallow trailing comma via `this.toAssignable()`.\n if (this.type === tt.comma && refDestructuringErrors && refDestructuringErrors.trailingComma < 0) {\n refDestructuringErrors.trailingComma = this.start\n }\n // Finish\n return this.finishNode(prop, \"SpreadElement\")\n }\n if (this.options.ecmaVersion >= 6) {\n prop.method = false\n prop.shorthand = false\n if (isPattern || refDestructuringErrors) {\n startPos = this.start\n startLoc = this.startLoc\n }\n if (!isPattern)\n isGenerator = this.eat(tt.star)\n }\n let containsEsc = this.containsEsc\n this.parsePropertyName(prop)\n if (!isPattern && !containsEsc && this.options.ecmaVersion >= 8 && !isGenerator && this.isAsyncProp(prop)) {\n isAsync = true\n isGenerator = this.options.ecmaVersion >= 9 && this.eat(tt.star)\n this.parsePropertyName(prop, refDestructuringErrors)\n } else {\n isAsync = false\n }\n this.parsePropertyValue(prop, isPattern, isGenerator, isAsync, startPos, startLoc, refDestructuringErrors, containsEsc)\n return this.finishNode(prop, \"Property\")\n}\n\npp.parsePropertyValue 
= function(prop, isPattern, isGenerator, isAsync, startPos, startLoc, refDestructuringErrors, containsEsc) {\n if ((isGenerator || isAsync) && this.type === tt.colon)\n this.unexpected()\n\n if (this.eat(tt.colon)) {\n prop.value = isPattern ? this.parseMaybeDefault(this.start, this.startLoc) : this.parseMaybeAssign(false, refDestructuringErrors)\n prop.kind = \"init\"\n } else if (this.options.ecmaVersion >= 6 && this.type === tt.parenL) {\n if (isPattern) this.unexpected()\n prop.kind = \"init\"\n prop.method = true\n prop.value = this.parseMethod(isGenerator, isAsync)\n } else if (!isPattern && !containsEsc &&\n this.options.ecmaVersion >= 5 && !prop.computed && prop.key.type === \"Identifier\" &&\n (prop.key.name === \"get\" || prop.key.name === \"set\") &&\n (this.type !== tt.comma && this.type !== tt.braceR && this.type !== tt.eq)) {\n if (isGenerator || isAsync) this.unexpected()\n prop.kind = prop.key.name\n this.parsePropertyName(prop)\n prop.value = this.parseMethod(false)\n let paramCount = prop.kind === \"get\" ? 
0 : 1\n if (prop.value.params.length !== paramCount) {\n let start = prop.value.start\n if (prop.kind === \"get\")\n this.raiseRecoverable(start, \"getter should have no params\")\n else\n this.raiseRecoverable(start, \"setter should have exactly one param\")\n } else {\n if (prop.kind === \"set\" && prop.value.params[0].type === \"RestElement\")\n this.raiseRecoverable(prop.value.params[0].start, \"Setter cannot use rest params\")\n }\n } else if (this.options.ecmaVersion >= 6 && !prop.computed && prop.key.type === \"Identifier\") {\n if (isGenerator || isAsync) this.unexpected()\n this.checkUnreserved(prop.key)\n if (prop.key.name === \"await\" && !this.awaitIdentPos)\n this.awaitIdentPos = startPos\n prop.kind = \"init\"\n if (isPattern) {\n prop.value = this.parseMaybeDefault(startPos, startLoc, this.copyNode(prop.key))\n } else if (this.type === tt.eq && refDestructuringErrors) {\n if (refDestructuringErrors.shorthandAssign < 0)\n refDestructuringErrors.shorthandAssign = this.start\n prop.value = this.parseMaybeDefault(startPos, startLoc, this.copyNode(prop.key))\n } else {\n prop.value = this.copyNode(prop.key)\n }\n prop.shorthand = true\n } else this.unexpected()\n}\n\npp.parsePropertyName = function(prop) {\n if (this.options.ecmaVersion >= 6) {\n if (this.eat(tt.bracketL)) {\n prop.computed = true\n prop.key = this.parseMaybeAssign()\n this.expect(tt.bracketR)\n return prop.key\n } else {\n prop.computed = false\n }\n }\n return prop.key = this.type === tt.num || this.type === tt.string ? 
this.parseExprAtom() : this.parseIdent(this.options.allowReserved !== \"never\")\n}\n\n// Initialize empty function node.\n\npp.initFunction = function(node) {\n node.id = null\n if (this.options.ecmaVersion >= 6) node.generator = node.expression = false\n if (this.options.ecmaVersion >= 8) node.async = false\n}\n\n// Parse object or class method.\n\npp.parseMethod = function(isGenerator, isAsync, allowDirectSuper) {\n let node = this.startNode(), oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, oldAwaitIdentPos = this.awaitIdentPos\n\n this.initFunction(node)\n if (this.options.ecmaVersion >= 6)\n node.generator = isGenerator\n if (this.options.ecmaVersion >= 8)\n node.async = !!isAsync\n\n this.yieldPos = 0\n this.awaitPos = 0\n this.awaitIdentPos = 0\n this.enterScope(functionFlags(isAsync, node.generator) | SCOPE_SUPER | (allowDirectSuper ? SCOPE_DIRECT_SUPER : 0))\n\n this.expect(tt.parenL)\n node.params = this.parseBindingList(tt.parenR, false, this.options.ecmaVersion >= 8)\n this.checkYieldAwaitInDefaultParams()\n this.parseFunctionBody(node, false, true)\n\n this.yieldPos = oldYieldPos\n this.awaitPos = oldAwaitPos\n this.awaitIdentPos = oldAwaitIdentPos\n return this.finishNode(node, \"FunctionExpression\")\n}\n\n// Parse arrow function expression with given parameters.\n\npp.parseArrowExpression = function(node, params, isAsync) {\n let oldYieldPos = this.yieldPos, oldAwaitPos = this.awaitPos, oldAwaitIdentPos = this.awaitIdentPos\n\n this.enterScope(functionFlags(isAsync, false) | SCOPE_ARROW)\n this.initFunction(node)\n if (this.options.ecmaVersion >= 8) node.async = !!isAsync\n\n this.yieldPos = 0\n this.awaitPos = 0\n this.awaitIdentPos = 0\n\n node.params = this.toAssignableList(params, true)\n this.parseFunctionBody(node, true, false)\n\n this.yieldPos = oldYieldPos\n this.awaitPos = oldAwaitPos\n this.awaitIdentPos = oldAwaitIdentPos\n return this.finishNode(node, \"ArrowFunctionExpression\")\n}\n\n// Parse function body and check 
parameters.\n\npp.parseFunctionBody = function(node, isArrowFunction, isMethod) {\n let isExpression = isArrowFunction && this.type !== tt.braceL\n let oldStrict = this.strict, useStrict = false\n\n if (isExpression) {\n node.body = this.parseMaybeAssign()\n node.expression = true\n this.checkParams(node, false)\n } else {\n let nonSimple = this.options.ecmaVersion >= 7 && !this.isSimpleParamList(node.params)\n if (!oldStrict || nonSimple) {\n useStrict = this.strictDirective(this.end)\n // If this is a strict mode function, verify that argument names\n // are not repeated, and it does not try to bind the words `eval`\n // or `arguments`.\n if (useStrict && nonSimple)\n this.raiseRecoverable(node.start, \"Illegal 'use strict' directive in function with non-simple parameter list\")\n }\n // Start a new scope with regard to labels and the `inFunction`\n // flag (restore them to their old value afterwards).\n let oldLabels = this.labels\n this.labels = []\n if (useStrict) this.strict = true\n\n // Add the params to varDeclaredNames to ensure that an error is thrown\n // if a let/const declaration in the function clashes with one of the params.\n this.checkParams(node, !oldStrict && !useStrict && !isArrowFunction && !isMethod && this.isSimpleParamList(node.params))\n // Ensure the function name isn't a forbidden identifier in strict mode, e.g. 
'eval'\n if (this.strict && node.id) this.checkLValSimple(node.id, BIND_OUTSIDE)\n node.body = this.parseBlock(false, undefined, useStrict && !oldStrict)\n node.expression = false\n this.adaptDirectivePrologue(node.body.body)\n this.labels = oldLabels\n }\n this.exitScope()\n}\n\npp.isSimpleParamList = function(params) {\n for (let param of params)\n if (param.type !== \"Identifier\") return false\n return true\n}\n\n// Checks function params for various disallowed patterns such as using \"eval\"\n// or \"arguments\" and duplicate parameters.\n\npp.checkParams = function(node, allowDuplicates) {\n let nameHash = {}\n for (let param of node.params)\n this.checkLValInnerPattern(param, BIND_VAR, allowDuplicates ? null : nameHash)\n}\n\n// Parses a comma-separated list of expressions, and returns them as\n// an array. `close` is the token type that ends the list, and\n// `allowEmpty` can be turned on to allow subsequent commas with\n// nothing in between them to be parsed as `null` (which is needed\n// for array literals).\n\npp.parseExprList = function(close, allowTrailingComma, allowEmpty, refDestructuringErrors) {\n let elts = [], first = true\n while (!this.eat(close)) {\n if (!first) {\n this.expect(tt.comma)\n if (allowTrailingComma && this.afterTrailingComma(close)) break\n } else first = false\n\n let elt\n if (allowEmpty && this.type === tt.comma)\n elt = null\n else if (this.type === tt.ellipsis) {\n elt = this.parseSpread(refDestructuringErrors)\n if (refDestructuringErrors && this.type === tt.comma && refDestructuringErrors.trailingComma < 0)\n refDestructuringErrors.trailingComma = this.start\n } else {\n elt = this.parseMaybeAssign(false, refDestructuringErrors)\n }\n elts.push(elt)\n }\n return elts\n}\n\npp.checkUnreserved = function({start, end, name}) {\n if (this.inGenerator && name === \"yield\")\n this.raiseRecoverable(start, \"Cannot use 'yield' as identifier inside a generator\")\n if (this.inAsync && name === \"await\")\n 
this.raiseRecoverable(start, \"Cannot use 'await' as identifier inside an async function\")\n if (this.keywords.test(name))\n this.raise(start, `Unexpected keyword '${name}'`)\n if (this.options.ecmaVersion < 6 &&\n this.input.slice(start, end).indexOf(\"\\\\\") !== -1) return\n const re = this.strict ? this.reservedWordsStrict : this.reservedWords\n if (re.test(name)) {\n if (!this.inAsync && name === \"await\")\n this.raiseRecoverable(start, \"Cannot use keyword 'await' outside an async function\")\n this.raiseRecoverable(start, `The keyword '${name}' is reserved`)\n }\n}\n\n// Parse the next token as an identifier. If `liberal` is true (used\n// when parsing properties), it will also convert keywords into\n// identifiers.\n\npp.parseIdent = function(liberal, isBinding) {\n let node = this.startNode()\n if (this.type === tt.name) {\n node.name = this.value\n } else if (this.type.keyword) {\n node.name = this.type.keyword\n\n // To fix https://github.com/acornjs/acorn/issues/575\n // `class` and `function` keywords push new context into this.context.\n // But there is no chance to pop the context if the keyword is consumed as an identifier such as a property name.\n // If the previous token is a dot, this does not apply because the context-managing code already ignored the keyword\n if ((node.name === \"class\" || node.name === \"function\") &&\n (this.lastTokEnd !== this.lastTokStart + 1 || this.input.charCodeAt(this.lastTokStart) !== 46)) {\n this.context.pop()\n }\n } else {\n this.unexpected()\n }\n this.next(!!liberal)\n this.finishNode(node, \"Identifier\")\n if (!liberal) {\n this.checkUnreserved(node)\n if (node.name === \"await\" && !this.awaitIdentPos)\n this.awaitIdentPos = node.start\n }\n return node\n}\n\n// Parses yield expression inside generator.\n\npp.parseYield = function(noIn) {\n if (!this.yieldPos) this.yieldPos = this.start\n\n let node = this.startNode()\n this.next()\n if (this.type === tt.semi || this.canInsertSemicolon() || (this.type 
!== tt.star && !this.type.startsExpr)) {\n node.delegate = false\n node.argument = null\n } else {\n node.delegate = this.eat(tt.star)\n node.argument = this.parseMaybeAssign(noIn)\n }\n return this.finishNode(node, \"YieldExpression\")\n}\n\npp.parseAwait = function() {\n if (!this.awaitPos) this.awaitPos = this.start\n\n let node = this.startNode()\n this.next()\n node.argument = this.parseMaybeUnary(null, true)\n return this.finishNode(node, \"AwaitExpression\")\n}\n","import {Parser} from \"./state.js\"\nimport {Position, getLineInfo} from \"./locutil.js\"\n\nconst pp = Parser.prototype\n\n// This function is used to raise exceptions on parse errors. It\n// takes an offset integer (into the current `input`) to indicate\n// the location of the error, attaches the position to the end\n// of the error message, and then raises a `SyntaxError` with that\n// message.\n\npp.raise = function(pos, message) {\n let loc = getLineInfo(this.input, pos)\n message += \" (\" + loc.line + \":\" + loc.column + \")\"\n let err = new SyntaxError(message)\n err.pos = pos; err.loc = loc; err.raisedAt = this.pos\n throw err\n}\n\npp.raiseRecoverable = pp.raise\n\npp.curPosition = function() {\n if (this.options.locations) {\n return new Position(this.curLine, this.pos - this.lineStart)\n }\n}\n","import {Parser} from \"./state.js\"\nimport {SCOPE_VAR, SCOPE_FUNCTION, SCOPE_TOP, SCOPE_ARROW, SCOPE_SIMPLE_CATCH, BIND_LEXICAL, BIND_SIMPLE_CATCH, BIND_FUNCTION} from \"./scopeflags.js\"\n\nconst pp = Parser.prototype\n\nclass Scope {\n constructor(flags) {\n this.flags = flags\n // A list of var-declared names in the current lexical scope\n this.var = []\n // A list of lexically-declared names in the current lexical scope\n this.lexical = []\n // A list of lexically-declared FunctionDeclaration names in the current lexical scope\n this.functions = []\n }\n}\n\n// The functions in this module keep track of declared variables in the current scope in order to detect duplicate variable 
names.\n\npp.enterScope = function(flags) {\n this.scopeStack.push(new Scope(flags))\n}\n\npp.exitScope = function() {\n this.scopeStack.pop()\n}\n\n// The spec says:\n// > At the top level of a function, or script, function declarations are\n// > treated like var declarations rather than like lexical declarations.\npp.treatFunctionsAsVarInScope = function(scope) {\n return (scope.flags & SCOPE_FUNCTION) || !this.inModule && (scope.flags & SCOPE_TOP)\n}\n\npp.declareName = function(name, bindingType, pos) {\n let redeclared = false\n if (bindingType === BIND_LEXICAL) {\n const scope = this.currentScope()\n redeclared = scope.lexical.indexOf(name) > -1 || scope.functions.indexOf(name) > -1 || scope.var.indexOf(name) > -1\n scope.lexical.push(name)\n if (this.inModule && (scope.flags & SCOPE_TOP))\n delete this.undefinedExports[name]\n } else if (bindingType === BIND_SIMPLE_CATCH) {\n const scope = this.currentScope()\n scope.lexical.push(name)\n } else if (bindingType === BIND_FUNCTION) {\n const scope = this.currentScope()\n if (this.treatFunctionsAsVar)\n redeclared = scope.lexical.indexOf(name) > -1\n else\n redeclared = scope.lexical.indexOf(name) > -1 || scope.var.indexOf(name) > -1\n scope.functions.push(name)\n } else {\n for (let i = this.scopeStack.length - 1; i >= 0; --i) {\n const scope = this.scopeStack[i]\n if (scope.lexical.indexOf(name) > -1 && !((scope.flags & SCOPE_SIMPLE_CATCH) && scope.lexical[0] === name) ||\n !this.treatFunctionsAsVarInScope(scope) && scope.functions.indexOf(name) > -1) {\n redeclared = true\n break\n }\n scope.var.push(name)\n if (this.inModule && (scope.flags & SCOPE_TOP))\n delete this.undefinedExports[name]\n if (scope.flags & SCOPE_VAR) break\n }\n }\n if (redeclared) this.raiseRecoverable(pos, `Identifier '${name}' has already been declared`)\n}\n\npp.checkLocalExport = function(id) {\n // scope.functions must be empty as Module code is always strict.\n if (this.scopeStack[0].lexical.indexOf(id.name) === -1 &&\n 
this.scopeStack[0].var.indexOf(id.name) === -1) {\n this.undefinedExports[id.name] = id\n }\n}\n\npp.currentScope = function() {\n return this.scopeStack[this.scopeStack.length - 1]\n}\n\npp.currentVarScope = function() {\n for (let i = this.scopeStack.length - 1;; i--) {\n let scope = this.scopeStack[i]\n if (scope.flags & SCOPE_VAR) return scope\n }\n}\n\n// Could be useful for `this`, `new.target`, `super()`, `super.property`, and `super[property]`.\npp.currentThisScope = function() {\n for (let i = this.scopeStack.length - 1;; i--) {\n let scope = this.scopeStack[i]\n if (scope.flags & SCOPE_VAR && !(scope.flags & SCOPE_ARROW)) return scope\n }\n}\n","import {Parser} from \"./state.js\"\nimport {SourceLocation} from \"./locutil.js\"\n\nexport class Node {\n constructor(parser, pos, loc) {\n this.type = \"\"\n this.start = pos\n this.end = 0\n if (parser.options.locations)\n this.loc = new SourceLocation(parser, loc)\n if (parser.options.directSourceFile)\n this.sourceFile = parser.options.directSourceFile\n if (parser.options.ranges)\n this.range = [pos, 0]\n }\n}\n\n// Start an AST node, attaching a start offset.\n\nconst pp = Parser.prototype\n\npp.startNode = function() {\n return new Node(this, this.start, this.startLoc)\n}\n\npp.startNodeAt = function(pos, loc) {\n return new Node(this, pos, loc)\n}\n\n// Finish an AST node, adding `type` and `end` properties.\n\nfunction finishNodeAt(node, type, pos, loc) {\n node.type = type\n node.end = pos\n if (this.options.locations)\n node.loc.end = loc\n if (this.options.ranges)\n node.range[1] = pos\n return node\n}\n\npp.finishNode = function(node, type) {\n return finishNodeAt.call(this, node, type, this.lastTokEnd, this.lastTokEndLoc)\n}\n\n// Finish node at given position\n\npp.finishNodeAt = function(node, type, pos, loc) {\n return finishNodeAt.call(this, node, type, pos, loc)\n}\n\npp.copyNode = function(node) {\n let newNode = new Node(this, node.start, this.startLoc)\n for (let prop in node) newNode[prop] 
= node[prop]\n return newNode\n}\n","// The algorithm used to determine whether a regexp can appear at a\n// given point in the program is loosely based on sweet.js' approach.\n// See https://github.com/mozilla/sweet.js/wiki/design\n\nimport {Parser} from \"./state.js\"\nimport {types as tt} from \"./tokentype.js\"\nimport {lineBreak} from \"./whitespace.js\"\n\nexport class TokContext {\n constructor(token, isExpr, preserveSpace, override, generator) {\n this.token = token\n this.isExpr = !!isExpr\n this.preserveSpace = !!preserveSpace\n this.override = override\n this.generator = !!generator\n }\n}\n\nexport const types = {\n b_stat: new TokContext(\"{\", false),\n b_expr: new TokContext(\"{\", true),\n b_tmpl: new TokContext(\"${\", false),\n p_stat: new TokContext(\"(\", false),\n p_expr: new TokContext(\"(\", true),\n q_tmpl: new TokContext(\"`\", true, true, p => p.tryReadTemplateToken()),\n f_stat: new TokContext(\"function\", false),\n f_expr: new TokContext(\"function\", true),\n f_expr_gen: new TokContext(\"function\", true, false, null, true),\n f_gen: new TokContext(\"function\", false, false, null, true)\n}\n\nconst pp = Parser.prototype\n\npp.initialContext = function() {\n return [types.b_stat]\n}\n\npp.braceIsBlock = function(prevType) {\n let parent = this.curContext()\n if (parent === types.f_expr || parent === types.f_stat)\n return true\n if (prevType === tt.colon && (parent === types.b_stat || parent === types.b_expr))\n return !parent.isExpr\n\n // The check for `tt.name && exprAllowed` detects whether we are\n // after a `yield` or `of` construct. 
See the `updateContext` for\n // `tt.name`.\n if (prevType === tt._return || prevType === tt.name && this.exprAllowed)\n return lineBreak.test(this.input.slice(this.lastTokEnd, this.start))\n if (prevType === tt._else || prevType === tt.semi || prevType === tt.eof || prevType === tt.parenR || prevType === tt.arrow)\n return true\n if (prevType === tt.braceL)\n return parent === types.b_stat\n if (prevType === tt._var || prevType === tt._const || prevType === tt.name)\n return false\n return !this.exprAllowed\n}\n\npp.inGeneratorContext = function() {\n for (let i = this.context.length - 1; i >= 1; i--) {\n let context = this.context[i]\n if (context.token === \"function\")\n return context.generator\n }\n return false\n}\n\npp.updateContext = function(prevType) {\n let update, type = this.type\n if (type.keyword && prevType === tt.dot)\n this.exprAllowed = false\n else if (update = type.updateContext)\n update.call(this, prevType)\n else\n this.exprAllowed = type.beforeExpr\n}\n\n// Token-specific context update code\n\ntt.parenR.updateContext = tt.braceR.updateContext = function() {\n if (this.context.length === 1) {\n this.exprAllowed = true\n return\n }\n let out = this.context.pop()\n if (out === types.b_stat && this.curContext().token === \"function\") {\n out = this.context.pop()\n }\n this.exprAllowed = !out.isExpr\n}\n\ntt.braceL.updateContext = function(prevType) {\n this.context.push(this.braceIsBlock(prevType) ? types.b_stat : types.b_expr)\n this.exprAllowed = true\n}\n\ntt.dollarBraceL.updateContext = function() {\n this.context.push(types.b_tmpl)\n this.exprAllowed = true\n}\n\ntt.parenL.updateContext = function(prevType) {\n let statementParens = prevType === tt._if || prevType === tt._for || prevType === tt._with || prevType === tt._while\n this.context.push(statementParens ? 
types.p_stat : types.p_expr)\n this.exprAllowed = true\n}\n\ntt.incDec.updateContext = function() {\n // tokExprAllowed stays unchanged\n}\n\ntt._function.updateContext = tt._class.updateContext = function(prevType) {\n if (prevType.beforeExpr && prevType !== tt._else &&\n !(prevType === tt.semi && this.curContext() !== types.p_stat) &&\n !(prevType === tt._return && lineBreak.test(this.input.slice(this.lastTokEnd, this.start))) &&\n !((prevType === tt.colon || prevType === tt.braceL) && this.curContext() === types.b_stat))\n this.context.push(types.f_expr)\n else\n this.context.push(types.f_stat)\n this.exprAllowed = false\n}\n\ntt.backQuote.updateContext = function() {\n if (this.curContext() === types.q_tmpl)\n this.context.pop()\n else\n this.context.push(types.q_tmpl)\n this.exprAllowed = false\n}\n\ntt.star.updateContext = function(prevType) {\n if (prevType === tt._function) {\n let index = this.context.length - 1\n if (this.context[index] === types.f_expr)\n this.context[index] = types.f_expr_gen\n else\n this.context[index] = types.f_gen\n }\n this.exprAllowed = true\n}\n\ntt.name.updateContext = function(prevType) {\n let allowed = false\n if (this.options.ecmaVersion >= 6 && prevType !== tt.dot) {\n if (this.value === \"of\" && !this.exprAllowed ||\n this.value === \"yield\" && this.inGeneratorContext())\n allowed = true\n }\n this.exprAllowed = allowed\n}\n","import {wordsRegexp} from \"./util.js\"\n\n// This file contains Unicode properties extracted from the ECMAScript\n// specification. 
The lists are extracted like so:\n// $$('#table-binary-unicode-properties > figure > table > tbody > tr > td:nth-child(1) code').map(el => el.innerText)\n\n// #table-binary-unicode-properties\nconst ecma9BinaryProperties = \"ASCII ASCII_Hex_Digit AHex Alphabetic Alpha Any Assigned Bidi_Control Bidi_C Bidi_Mirrored Bidi_M Case_Ignorable CI Cased Changes_When_Casefolded CWCF Changes_When_Casemapped CWCM Changes_When_Lowercased CWL Changes_When_NFKC_Casefolded CWKCF Changes_When_Titlecased CWT Changes_When_Uppercased CWU Dash Default_Ignorable_Code_Point DI Deprecated Dep Diacritic Dia Emoji Emoji_Component Emoji_Modifier Emoji_Modifier_Base Emoji_Presentation Extender Ext Grapheme_Base Gr_Base Grapheme_Extend Gr_Ext Hex_Digit Hex IDS_Binary_Operator IDSB IDS_Trinary_Operator IDST ID_Continue IDC ID_Start IDS Ideographic Ideo Join_Control Join_C Logical_Order_Exception LOE Lowercase Lower Math Noncharacter_Code_Point NChar Pattern_Syntax Pat_Syn Pattern_White_Space Pat_WS Quotation_Mark QMark Radical Regional_Indicator RI Sentence_Terminal STerm Soft_Dotted SD Terminal_Punctuation Term Unified_Ideograph UIdeo Uppercase Upper Variation_Selector VS White_Space space XID_Continue XIDC XID_Start XIDS\"\nconst ecma10BinaryProperties = ecma9BinaryProperties + \" Extended_Pictographic\"\nconst ecma11BinaryProperties = ecma10BinaryProperties\nconst ecma12BinaryProperties = ecma11BinaryProperties + \" EBase EComp EMod EPres ExtPict\"\nconst unicodeBinaryProperties = {\n 9: ecma9BinaryProperties,\n 10: ecma10BinaryProperties,\n 11: ecma11BinaryProperties,\n 12: ecma12BinaryProperties\n}\n\n// #table-unicode-general-category-values\nconst unicodeGeneralCategoryValues = \"Cased_Letter LC Close_Punctuation Pe Connector_Punctuation Pc Control Cc cntrl Currency_Symbol Sc Dash_Punctuation Pd Decimal_Number Nd digit Enclosing_Mark Me Final_Punctuation Pf Format Cf Initial_Punctuation Pi Letter L Letter_Number Nl Line_Separator Zl Lowercase_Letter Ll Mark M Combining_Mark Math_Symbol 
Sm Modifier_Letter Lm Modifier_Symbol Sk Nonspacing_Mark Mn Number N Open_Punctuation Ps Other C Other_Letter Lo Other_Number No Other_Punctuation Po Other_Symbol So Paragraph_Separator Zp Private_Use Co Punctuation P punct Separator Z Space_Separator Zs Spacing_Mark Mc Surrogate Cs Symbol S Titlecase_Letter Lt Unassigned Cn Uppercase_Letter Lu\"\n\n// #table-unicode-script-values\nconst ecma9ScriptValues = \"Adlam Adlm Ahom Ahom Anatolian_Hieroglyphs Hluw Arabic Arab Armenian Armn Avestan Avst Balinese Bali Bamum Bamu Bassa_Vah Bass Batak Batk Bengali Beng Bhaiksuki Bhks Bopomofo Bopo Brahmi Brah Braille Brai Buginese Bugi Buhid Buhd Canadian_Aboriginal Cans Carian Cari Caucasian_Albanian Aghb Chakma Cakm Cham Cham Cherokee Cher Common Zyyy Coptic Copt Qaac Cuneiform Xsux Cypriot Cprt Cyrillic Cyrl Deseret Dsrt Devanagari Deva Duployan Dupl Egyptian_Hieroglyphs Egyp Elbasan Elba Ethiopic Ethi Georgian Geor Glagolitic Glag Gothic Goth Grantha Gran Greek Grek Gujarati Gujr Gurmukhi Guru Han Hani Hangul Hang Hanunoo Hano Hatran Hatr Hebrew Hebr Hiragana Hira Imperial_Aramaic Armi Inherited Zinh Qaai Inscriptional_Pahlavi Phli Inscriptional_Parthian Prti Javanese Java Kaithi Kthi Kannada Knda Katakana Kana Kayah_Li Kali Kharoshthi Khar Khmer Khmr Khojki Khoj Khudawadi Sind Lao Laoo Latin Latn Lepcha Lepc Limbu Limb Linear_A Lina Linear_B Linb Lisu Lisu Lycian Lyci Lydian Lydi Mahajani Mahj Malayalam Mlym Mandaic Mand Manichaean Mani Marchen Marc Masaram_Gondi Gonm Meetei_Mayek Mtei Mende_Kikakui Mend Meroitic_Cursive Merc Meroitic_Hieroglyphs Mero Miao Plrd Modi Modi Mongolian Mong Mro Mroo Multani Mult Myanmar Mymr Nabataean Nbat New_Tai_Lue Talu Newa Newa Nko Nkoo Nushu Nshu Ogham Ogam Ol_Chiki Olck Old_Hungarian Hung Old_Italic Ital Old_North_Arabian Narb Old_Permic Perm Old_Persian Xpeo Old_South_Arabian Sarb Old_Turkic Orkh Oriya Orya Osage Osge Osmanya Osma Pahawh_Hmong Hmng Palmyrene Palm Pau_Cin_Hau Pauc Phags_Pa Phag Phoenician Phnx Psalter_Pahlavi Phlp 
Rejang Rjng Runic Runr Samaritan Samr Saurashtra Saur Sharada Shrd Shavian Shaw Siddham Sidd SignWriting Sgnw Sinhala Sinh Sora_Sompeng Sora Soyombo Soyo Sundanese Sund Syloti_Nagri Sylo Syriac Syrc Tagalog Tglg Tagbanwa Tagb Tai_Le Tale Tai_Tham Lana Tai_Viet Tavt Takri Takr Tamil Taml Tangut Tang Telugu Telu Thaana Thaa Thai Thai Tibetan Tibt Tifinagh Tfng Tirhuta Tirh Ugaritic Ugar Vai Vaii Warang_Citi Wara Yi Yiii Zanabazar_Square Zanb\"\nconst ecma10ScriptValues = ecma9ScriptValues + \" Dogra Dogr Gunjala_Gondi Gong Hanifi_Rohingya Rohg Makasar Maka Medefaidrin Medf Old_Sogdian Sogo Sogdian Sogd\"\nconst ecma11ScriptValues = ecma10ScriptValues + \" Elymaic Elym Nandinagari Nand Nyiakeng_Puachue_Hmong Hmnp Wancho Wcho\"\nconst ecma12ScriptValues = ecma11ScriptValues + \" Chorasmian Chrs Diak Dives_Akuru Khitan_Small_Script Kits Yezi Yezidi\"\nconst unicodeScriptValues = {\n 9: ecma9ScriptValues,\n 10: ecma10ScriptValues,\n 11: ecma11ScriptValues,\n 12: ecma12ScriptValues\n}\n\nconst data = {}\nfunction buildUnicodeData(ecmaVersion) {\n let d = data[ecmaVersion] = {\n binary: wordsRegexp(unicodeBinaryProperties[ecmaVersion] + \" \" + unicodeGeneralCategoryValues),\n nonBinary: {\n General_Category: wordsRegexp(unicodeGeneralCategoryValues),\n Script: wordsRegexp(unicodeScriptValues[ecmaVersion])\n }\n }\n d.nonBinary.Script_Extensions = d.nonBinary.Script\n\n d.nonBinary.gc = d.nonBinary.General_Category\n d.nonBinary.sc = d.nonBinary.Script\n d.nonBinary.scx = d.nonBinary.Script_Extensions\n}\nbuildUnicodeData(9)\nbuildUnicodeData(10)\nbuildUnicodeData(11)\nbuildUnicodeData(12)\n\nexport default data\n","import {isIdentifierStart, isIdentifierChar} from \"./identifier.js\"\nimport {Parser} from \"./state.js\"\nimport UNICODE_PROPERTY_VALUES from \"./unicode-property-data.js\"\nimport {has} from \"./util.js\"\n\nconst pp = Parser.prototype\n\nexport class RegExpValidationState {\n constructor(parser) {\n this.parser = parser\n this.validFlags = 
`gim${parser.options.ecmaVersion >= 6 ? \"uy\" : \"\"}${parser.options.ecmaVersion >= 9 ? \"s\" : \"\"}`\n this.unicodeProperties = UNICODE_PROPERTY_VALUES[parser.options.ecmaVersion >= 12 ? 12 : parser.options.ecmaVersion]\n this.source = \"\"\n this.flags = \"\"\n this.start = 0\n this.switchU = false\n this.switchN = false\n this.pos = 0\n this.lastIntValue = 0\n this.lastStringValue = \"\"\n this.lastAssertionIsQuantifiable = false\n this.numCapturingParens = 0\n this.maxBackReference = 0\n this.groupNames = []\n this.backReferenceNames = []\n }\n\n reset(start, pattern, flags) {\n const unicode = flags.indexOf(\"u\") !== -1\n this.start = start | 0\n this.source = pattern + \"\"\n this.flags = flags\n this.switchU = unicode && this.parser.options.ecmaVersion >= 6\n this.switchN = unicode && this.parser.options.ecmaVersion >= 9\n }\n\n raise(message) {\n this.parser.raiseRecoverable(this.start, `Invalid regular expression: /${this.source}/: ${message}`)\n }\n\n // If u flag is given, this returns the code point at the index (it combines a surrogate pair).\n // Otherwise, this returns the code unit of the index (can be a part of a surrogate pair).\n at(i, forceU = false) {\n const s = this.source\n const l = s.length\n if (i >= l) {\n return -1\n }\n const c = s.charCodeAt(i)\n if (!(forceU || this.switchU) || c <= 0xD7FF || c >= 0xE000 || i + 1 >= l) {\n return c\n }\n const next = s.charCodeAt(i + 1)\n return next >= 0xDC00 && next <= 0xDFFF ? 
(c << 10) + next - 0x35FDC00 : c\n }\n\n nextIndex(i, forceU = false) {\n const s = this.source\n const l = s.length\n if (i >= l) {\n return l\n }\n let c = s.charCodeAt(i), next\n if (!(forceU || this.switchU) || c <= 0xD7FF || c >= 0xE000 || i + 1 >= l ||\n (next = s.charCodeAt(i + 1)) < 0xDC00 || next > 0xDFFF) {\n return i + 1\n }\n return i + 2\n }\n\n current(forceU = false) {\n return this.at(this.pos, forceU)\n }\n\n lookahead(forceU = false) {\n return this.at(this.nextIndex(this.pos, forceU), forceU)\n }\n\n advance(forceU = false) {\n this.pos = this.nextIndex(this.pos, forceU)\n }\n\n eat(ch, forceU = false) {\n if (this.current(forceU) === ch) {\n this.advance(forceU)\n return true\n }\n return false\n }\n}\n\nfunction codePointToString(ch) {\n if (ch <= 0xFFFF) return String.fromCharCode(ch)\n ch -= 0x10000\n return String.fromCharCode((ch >> 10) + 0xD800, (ch & 0x03FF) + 0xDC00)\n}\n\n/**\n * Validate the flags part of a given RegExpLiteral.\n *\n * @param {RegExpValidationState} state The state to validate RegExp.\n * @returns {void}\n */\npp.validateRegExpFlags = function(state) {\n const validFlags = state.validFlags\n const flags = state.flags\n\n for (let i = 0; i < flags.length; i++) {\n const flag = flags.charAt(i)\n if (validFlags.indexOf(flag) === -1) {\n this.raise(state.start, \"Invalid regular expression flag\")\n }\n if (flags.indexOf(flag, i + 1) > -1) {\n this.raise(state.start, \"Duplicate regular expression flag\")\n }\n }\n}\n\n/**\n * Validate the pattern part of a given RegExpLiteral.\n *\n * @param {RegExpValidationState} state The state to validate RegExp.\n * @returns {void}\n */\npp.validateRegExpPattern = function(state) {\n this.regexp_pattern(state)\n\n // The goal symbol for the parse is |Pattern[~U, ~N]|. If the result of\n // parsing contains a |GroupName|, reparse with the goal symbol\n // |Pattern[~U, +N]| and use this result instead. 
Throw a *SyntaxError*\n // exception if _P_ did not conform to the grammar, if any elements of _P_\n // were not matched by the parse, or if any Early Error conditions exist.\n if (!state.switchN && this.options.ecmaVersion >= 9 && state.groupNames.length > 0) {\n state.switchN = true\n this.regexp_pattern(state)\n }\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-Pattern\npp.regexp_pattern = function(state) {\n state.pos = 0\n state.lastIntValue = 0\n state.lastStringValue = \"\"\n state.lastAssertionIsQuantifiable = false\n state.numCapturingParens = 0\n state.maxBackReference = 0\n state.groupNames.length = 0\n state.backReferenceNames.length = 0\n\n this.regexp_disjunction(state)\n\n if (state.pos !== state.source.length) {\n // Make the same messages as V8.\n if (state.eat(0x29 /* ) */)) {\n state.raise(\"Unmatched ')'\")\n }\n if (state.eat(0x5D /* ] */) || state.eat(0x7D /* } */)) {\n state.raise(\"Lone quantifier brackets\")\n }\n }\n if (state.maxBackReference > state.numCapturingParens) {\n state.raise(\"Invalid escape\")\n }\n for (const name of state.backReferenceNames) {\n if (state.groupNames.indexOf(name) === -1) {\n state.raise(\"Invalid named capture referenced\")\n }\n }\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-Disjunction\npp.regexp_disjunction = function(state) {\n this.regexp_alternative(state)\n while (state.eat(0x7C /* | */)) {\n this.regexp_alternative(state)\n }\n\n // Make the same message as V8.\n if (this.regexp_eatQuantifier(state, true)) {\n state.raise(\"Nothing to repeat\")\n }\n if (state.eat(0x7B /* { */)) {\n state.raise(\"Lone quantifier brackets\")\n }\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-Alternative\npp.regexp_alternative = function(state) {\n while (state.pos < state.source.length && this.regexp_eatTerm(state))\n ;\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-Term\npp.regexp_eatTerm = function(state) {\n if (this.regexp_eatAssertion(state)) 
{\n // Handle `QuantifiableAssertion Quantifier` alternative.\n // `state.lastAssertionIsQuantifiable` is true if the last eaten Assertion\n // is a QuantifiableAssertion.\n if (state.lastAssertionIsQuantifiable && this.regexp_eatQuantifier(state)) {\n // Make the same message as V8.\n if (state.switchU) {\n state.raise(\"Invalid quantifier\")\n }\n }\n return true\n }\n\n if (state.switchU ? this.regexp_eatAtom(state) : this.regexp_eatExtendedAtom(state)) {\n this.regexp_eatQuantifier(state)\n return true\n }\n\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-Assertion\npp.regexp_eatAssertion = function(state) {\n const start = state.pos\n state.lastAssertionIsQuantifiable = false\n\n // ^, $\n if (state.eat(0x5E /* ^ */) || state.eat(0x24 /* $ */)) {\n return true\n }\n\n // \\b \\B\n if (state.eat(0x5C /* \\ */)) {\n if (state.eat(0x42 /* B */) || state.eat(0x62 /* b */)) {\n return true\n }\n state.pos = start\n }\n\n // Lookahead / Lookbehind\n if (state.eat(0x28 /* ( */) && state.eat(0x3F /* ? */)) {\n let lookbehind = false\n if (this.options.ecmaVersion >= 9) {\n lookbehind = state.eat(0x3C /* < */)\n }\n if (state.eat(0x3D /* = */) || state.eat(0x21 /* ! */)) {\n this.regexp_disjunction(state)\n if (!state.eat(0x29 /* ) */)) {\n state.raise(\"Unterminated group\")\n }\n state.lastAssertionIsQuantifiable = !lookbehind\n return true\n }\n }\n\n state.pos = start\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-Quantifier\npp.regexp_eatQuantifier = function(state, noError = false) {\n if (this.regexp_eatQuantifierPrefix(state, noError)) {\n state.eat(0x3F /* ? */)\n return true\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-QuantifierPrefix\npp.regexp_eatQuantifierPrefix = function(state, noError) {\n return (\n state.eat(0x2A /* * */) ||\n state.eat(0x2B /* + */) ||\n state.eat(0x3F /* ? 
*/) ||\n this.regexp_eatBracedQuantifier(state, noError)\n )\n}\npp.regexp_eatBracedQuantifier = function(state, noError) {\n const start = state.pos\n if (state.eat(0x7B /* { */)) {\n let min = 0, max = -1\n if (this.regexp_eatDecimalDigits(state)) {\n min = state.lastIntValue\n if (state.eat(0x2C /* , */) && this.regexp_eatDecimalDigits(state)) {\n max = state.lastIntValue\n }\n if (state.eat(0x7D /* } */)) {\n // SyntaxError in https://www.ecma-international.org/ecma-262/8.0/#sec-term\n if (max !== -1 && max < min && !noError) {\n state.raise(\"numbers out of order in {} quantifier\")\n }\n return true\n }\n }\n if (state.switchU && !noError) {\n state.raise(\"Incomplete quantifier\")\n }\n state.pos = start\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-Atom\npp.regexp_eatAtom = function(state) {\n return (\n this.regexp_eatPatternCharacters(state) ||\n state.eat(0x2E /* . */) ||\n this.regexp_eatReverseSolidusAtomEscape(state) ||\n this.regexp_eatCharacterClass(state) ||\n this.regexp_eatUncapturingGroup(state) ||\n this.regexp_eatCapturingGroup(state)\n )\n}\npp.regexp_eatReverseSolidusAtomEscape = function(state) {\n const start = state.pos\n if (state.eat(0x5C /* \\ */)) {\n if (this.regexp_eatAtomEscape(state)) {\n return true\n }\n state.pos = start\n }\n return false\n}\npp.regexp_eatUncapturingGroup = function(state) {\n const start = state.pos\n if (state.eat(0x28 /* ( */)) {\n if (state.eat(0x3F /* ? */) && state.eat(0x3A /* : */)) {\n this.regexp_disjunction(state)\n if (state.eat(0x29 /* ) */)) {\n return true\n }\n state.raise(\"Unterminated group\")\n }\n state.pos = start\n }\n return false\n}\npp.regexp_eatCapturingGroup = function(state) {\n if (state.eat(0x28 /* ( */)) {\n if (this.options.ecmaVersion >= 9) {\n this.regexp_groupSpecifier(state)\n } else if (state.current() === 0x3F /* ? 
*/) {\n state.raise(\"Invalid group\")\n }\n this.regexp_disjunction(state)\n if (state.eat(0x29 /* ) */)) {\n state.numCapturingParens += 1\n return true\n }\n state.raise(\"Unterminated group\")\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-ExtendedAtom\npp.regexp_eatExtendedAtom = function(state) {\n return (\n state.eat(0x2E /* . */) ||\n this.regexp_eatReverseSolidusAtomEscape(state) ||\n this.regexp_eatCharacterClass(state) ||\n this.regexp_eatUncapturingGroup(state) ||\n this.regexp_eatCapturingGroup(state) ||\n this.regexp_eatInvalidBracedQuantifier(state) ||\n this.regexp_eatExtendedPatternCharacter(state)\n )\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-InvalidBracedQuantifier\npp.regexp_eatInvalidBracedQuantifier = function(state) {\n if (this.regexp_eatBracedQuantifier(state, true)) {\n state.raise(\"Nothing to repeat\")\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-SyntaxCharacter\npp.regexp_eatSyntaxCharacter = function(state) {\n const ch = state.current()\n if (isSyntaxCharacter(ch)) {\n state.lastIntValue = ch\n state.advance()\n return true\n }\n return false\n}\nfunction isSyntaxCharacter(ch) {\n return (\n ch === 0x24 /* $ */ ||\n ch >= 0x28 /* ( */ && ch <= 0x2B /* + */ ||\n ch === 0x2E /* . */ ||\n ch === 0x3F /* ? 
*/ ||\n ch >= 0x5B /* [ */ && ch <= 0x5E /* ^ */ ||\n ch >= 0x7B /* { */ && ch <= 0x7D /* } */\n )\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-PatternCharacter\n// But eat eager.\npp.regexp_eatPatternCharacters = function(state) {\n const start = state.pos\n let ch = 0\n while ((ch = state.current()) !== -1 && !isSyntaxCharacter(ch)) {\n state.advance()\n }\n return state.pos !== start\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-ExtendedPatternCharacter\npp.regexp_eatExtendedPatternCharacter = function(state) {\n const ch = state.current()\n if (\n ch !== -1 &&\n ch !== 0x24 /* $ */ &&\n !(ch >= 0x28 /* ( */ && ch <= 0x2B /* + */) &&\n ch !== 0x2E /* . */ &&\n ch !== 0x3F /* ? */ &&\n ch !== 0x5B /* [ */ &&\n ch !== 0x5E /* ^ */ &&\n ch !== 0x7C /* | */\n ) {\n state.advance()\n return true\n }\n return false\n}\n\n// GroupSpecifier ::\n// [empty]\n// `?` GroupName\npp.regexp_groupSpecifier = function(state) {\n if (state.eat(0x3F /* ? */)) {\n if (this.regexp_eatGroupName(state)) {\n if (state.groupNames.indexOf(state.lastStringValue) !== -1) {\n state.raise(\"Duplicate capture group name\")\n }\n state.groupNames.push(state.lastStringValue)\n return\n }\n state.raise(\"Invalid group\")\n }\n}\n\n// GroupName ::\n// `<` RegExpIdentifierName `>`\n// Note: this updates `state.lastStringValue` property with the eaten name.\npp.regexp_eatGroupName = function(state) {\n state.lastStringValue = \"\"\n if (state.eat(0x3C /* < */)) {\n if (this.regexp_eatRegExpIdentifierName(state) && state.eat(0x3E /* > */)) {\n return true\n }\n state.raise(\"Invalid capture group name\")\n }\n return false\n}\n\n// RegExpIdentifierName ::\n// RegExpIdentifierStart\n// RegExpIdentifierName RegExpIdentifierPart\n// Note: this updates `state.lastStringValue` property with the eaten name.\npp.regexp_eatRegExpIdentifierName = function(state) {\n state.lastStringValue = \"\"\n if (this.regexp_eatRegExpIdentifierStart(state)) {\n 
state.lastStringValue += codePointToString(state.lastIntValue)\n while (this.regexp_eatRegExpIdentifierPart(state)) {\n state.lastStringValue += codePointToString(state.lastIntValue)\n }\n return true\n }\n return false\n}\n\n// RegExpIdentifierStart ::\n// UnicodeIDStart\n// `$`\n// `_`\n// `\\` RegExpUnicodeEscapeSequence[+U]\npp.regexp_eatRegExpIdentifierStart = function(state) {\n const start = state.pos\n const forceU = this.options.ecmaVersion >= 11\n let ch = state.current(forceU)\n state.advance(forceU)\n\n if (ch === 0x5C /* \\ */ && this.regexp_eatRegExpUnicodeEscapeSequence(state, forceU)) {\n ch = state.lastIntValue\n }\n if (isRegExpIdentifierStart(ch)) {\n state.lastIntValue = ch\n return true\n }\n\n state.pos = start\n return false\n}\nfunction isRegExpIdentifierStart(ch) {\n return isIdentifierStart(ch, true) || ch === 0x24 /* $ */ || ch === 0x5F /* _ */\n}\n\n// RegExpIdentifierPart ::\n// UnicodeIDContinue\n// `$`\n// `_`\n// `\\` RegExpUnicodeEscapeSequence[+U]\n// \n// \npp.regexp_eatRegExpIdentifierPart = function(state) {\n const start = state.pos\n const forceU = this.options.ecmaVersion >= 11\n let ch = state.current(forceU)\n state.advance(forceU)\n\n if (ch === 0x5C /* \\ */ && this.regexp_eatRegExpUnicodeEscapeSequence(state, forceU)) {\n ch = state.lastIntValue\n }\n if (isRegExpIdentifierPart(ch)) {\n state.lastIntValue = ch\n return true\n }\n\n state.pos = start\n return false\n}\nfunction isRegExpIdentifierPart(ch) {\n return isIdentifierChar(ch, true) || ch === 0x24 /* $ */ || ch === 0x5F /* _ */ || ch === 0x200C /* */ || ch === 0x200D /* */\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-AtomEscape\npp.regexp_eatAtomEscape = function(state) {\n if (\n this.regexp_eatBackReference(state) ||\n this.regexp_eatCharacterClassEscape(state) ||\n this.regexp_eatCharacterEscape(state) ||\n (state.switchN && this.regexp_eatKGroupName(state))\n ) {\n return true\n }\n if (state.switchU) {\n // Make the same message as 
V8.\n if (state.current() === 0x63 /* c */) {\n state.raise(\"Invalid unicode escape\")\n }\n state.raise(\"Invalid escape\")\n }\n return false\n}\npp.regexp_eatBackReference = function(state) {\n const start = state.pos\n if (this.regexp_eatDecimalEscape(state)) {\n const n = state.lastIntValue\n if (state.switchU) {\n // For SyntaxError in https://www.ecma-international.org/ecma-262/8.0/#sec-atomescape\n if (n > state.maxBackReference) {\n state.maxBackReference = n\n }\n return true\n }\n if (n <= state.numCapturingParens) {\n return true\n }\n state.pos = start\n }\n return false\n}\npp.regexp_eatKGroupName = function(state) {\n if (state.eat(0x6B /* k */)) {\n if (this.regexp_eatGroupName(state)) {\n state.backReferenceNames.push(state.lastStringValue)\n return true\n }\n state.raise(\"Invalid named reference\")\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-CharacterEscape\npp.regexp_eatCharacterEscape = function(state) {\n return (\n this.regexp_eatControlEscape(state) ||\n this.regexp_eatCControlLetter(state) ||\n this.regexp_eatZero(state) ||\n this.regexp_eatHexEscapeSequence(state) ||\n this.regexp_eatRegExpUnicodeEscapeSequence(state, false) ||\n (!state.switchU && this.regexp_eatLegacyOctalEscapeSequence(state)) ||\n this.regexp_eatIdentityEscape(state)\n )\n}\npp.regexp_eatCControlLetter = function(state) {\n const start = state.pos\n if (state.eat(0x63 /* c */)) {\n if (this.regexp_eatControlLetter(state)) {\n return true\n }\n state.pos = start\n }\n return false\n}\npp.regexp_eatZero = function(state) {\n if (state.current() === 0x30 /* 0 */ && !isDecimalDigit(state.lookahead())) {\n state.lastIntValue = 0\n state.advance()\n return true\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-ControlEscape\npp.regexp_eatControlEscape = function(state) {\n const ch = state.current()\n if (ch === 0x74 /* t */) {\n state.lastIntValue = 0x09 /* \\t */\n state.advance()\n return true\n 
}\n if (ch === 0x6E /* n */) {\n state.lastIntValue = 0x0A /* \\n */\n state.advance()\n return true\n }\n if (ch === 0x76 /* v */) {\n state.lastIntValue = 0x0B /* \\v */\n state.advance()\n return true\n }\n if (ch === 0x66 /* f */) {\n state.lastIntValue = 0x0C /* \\f */\n state.advance()\n return true\n }\n if (ch === 0x72 /* r */) {\n state.lastIntValue = 0x0D /* \\r */\n state.advance()\n return true\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-ControlLetter\npp.regexp_eatControlLetter = function(state) {\n const ch = state.current()\n if (isControlLetter(ch)) {\n state.lastIntValue = ch % 0x20\n state.advance()\n return true\n }\n return false\n}\nfunction isControlLetter(ch) {\n return (\n (ch >= 0x41 /* A */ && ch <= 0x5A /* Z */) ||\n (ch >= 0x61 /* a */ && ch <= 0x7A /* z */)\n )\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-RegExpUnicodeEscapeSequence\npp.regexp_eatRegExpUnicodeEscapeSequence = function(state, forceU = false) {\n const start = state.pos\n const switchU = forceU || state.switchU\n\n if (state.eat(0x75 /* u */)) {\n if (this.regexp_eatFixedHexDigits(state, 4)) {\n const lead = state.lastIntValue\n if (switchU && lead >= 0xD800 && lead <= 0xDBFF) {\n const leadSurrogateEnd = state.pos\n if (state.eat(0x5C /* \\ */) && state.eat(0x75 /* u */) && this.regexp_eatFixedHexDigits(state, 4)) {\n const trail = state.lastIntValue\n if (trail >= 0xDC00 && trail <= 0xDFFF) {\n state.lastIntValue = (lead - 0xD800) * 0x400 + (trail - 0xDC00) + 0x10000\n return true\n }\n }\n state.pos = leadSurrogateEnd\n state.lastIntValue = lead\n }\n return true\n }\n if (\n switchU &&\n state.eat(0x7B /* { */) &&\n this.regexp_eatHexDigits(state) &&\n state.eat(0x7D /* } */) &&\n isValidUnicode(state.lastIntValue)\n ) {\n return true\n }\n if (switchU) {\n state.raise(\"Invalid unicode escape\")\n }\n state.pos = start\n }\n\n return false\n}\nfunction isValidUnicode(ch) {\n return ch >= 0 && ch <= 
0x10FFFF\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-IdentityEscape\npp.regexp_eatIdentityEscape = function(state) {\n if (state.switchU) {\n if (this.regexp_eatSyntaxCharacter(state)) {\n return true\n }\n if (state.eat(0x2F /* / */)) {\n state.lastIntValue = 0x2F /* / */\n return true\n }\n return false\n }\n\n const ch = state.current()\n if (ch !== 0x63 /* c */ && (!state.switchN || ch !== 0x6B /* k */)) {\n state.lastIntValue = ch\n state.advance()\n return true\n }\n\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-DecimalEscape\npp.regexp_eatDecimalEscape = function(state) {\n state.lastIntValue = 0\n let ch = state.current()\n if (ch >= 0x31 /* 1 */ && ch <= 0x39 /* 9 */) {\n do {\n state.lastIntValue = 10 * state.lastIntValue + (ch - 0x30 /* 0 */)\n state.advance()\n } while ((ch = state.current()) >= 0x30 /* 0 */ && ch <= 0x39 /* 9 */)\n return true\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-CharacterClassEscape\npp.regexp_eatCharacterClassEscape = function(state) {\n const ch = state.current()\n\n if (isCharacterClassEscape(ch)) {\n state.lastIntValue = -1\n state.advance()\n return true\n }\n\n if (\n state.switchU &&\n this.options.ecmaVersion >= 9 &&\n (ch === 0x50 /* P */ || ch === 0x70 /* p */)\n ) {\n state.lastIntValue = -1\n state.advance()\n if (\n state.eat(0x7B /* { */) &&\n this.regexp_eatUnicodePropertyValueExpression(state) &&\n state.eat(0x7D /* } */)\n ) {\n return true\n }\n state.raise(\"Invalid property name\")\n }\n\n return false\n}\nfunction isCharacterClassEscape(ch) {\n return (\n ch === 0x64 /* d */ ||\n ch === 0x44 /* D */ ||\n ch === 0x73 /* s */ ||\n ch === 0x53 /* S */ ||\n ch === 0x77 /* w */ ||\n ch === 0x57 /* W */\n )\n}\n\n// UnicodePropertyValueExpression ::\n// UnicodePropertyName `=` UnicodePropertyValue\n// LoneUnicodePropertyNameOrValue\npp.regexp_eatUnicodePropertyValueExpression = function(state) {\n const start = 
state.pos\n\n // UnicodePropertyName `=` UnicodePropertyValue\n if (this.regexp_eatUnicodePropertyName(state) && state.eat(0x3D /* = */)) {\n const name = state.lastStringValue\n if (this.regexp_eatUnicodePropertyValue(state)) {\n const value = state.lastStringValue\n this.regexp_validateUnicodePropertyNameAndValue(state, name, value)\n return true\n }\n }\n state.pos = start\n\n // LoneUnicodePropertyNameOrValue\n if (this.regexp_eatLoneUnicodePropertyNameOrValue(state)) {\n const nameOrValue = state.lastStringValue\n this.regexp_validateUnicodePropertyNameOrValue(state, nameOrValue)\n return true\n }\n return false\n}\npp.regexp_validateUnicodePropertyNameAndValue = function(state, name, value) {\n if (!has(state.unicodeProperties.nonBinary, name))\n state.raise(\"Invalid property name\")\n if (!state.unicodeProperties.nonBinary[name].test(value))\n state.raise(\"Invalid property value\")\n}\npp.regexp_validateUnicodePropertyNameOrValue = function(state, nameOrValue) {\n if (!state.unicodeProperties.binary.test(nameOrValue))\n state.raise(\"Invalid property name\")\n}\n\n// UnicodePropertyName ::\n// UnicodePropertyNameCharacters\npp.regexp_eatUnicodePropertyName = function(state) {\n let ch = 0\n state.lastStringValue = \"\"\n while (isUnicodePropertyNameCharacter(ch = state.current())) {\n state.lastStringValue += codePointToString(ch)\n state.advance()\n }\n return state.lastStringValue !== \"\"\n}\nfunction isUnicodePropertyNameCharacter(ch) {\n return isControlLetter(ch) || ch === 0x5F /* _ */\n}\n\n// UnicodePropertyValue ::\n// UnicodePropertyValueCharacters\npp.regexp_eatUnicodePropertyValue = function(state) {\n let ch = 0\n state.lastStringValue = \"\"\n while (isUnicodePropertyValueCharacter(ch = state.current())) {\n state.lastStringValue += codePointToString(ch)\n state.advance()\n }\n return state.lastStringValue !== \"\"\n}\nfunction isUnicodePropertyValueCharacter(ch) {\n return isUnicodePropertyNameCharacter(ch) || isDecimalDigit(ch)\n}\n\n// 
LoneUnicodePropertyNameOrValue ::\n// UnicodePropertyValueCharacters\npp.regexp_eatLoneUnicodePropertyNameOrValue = function(state) {\n return this.regexp_eatUnicodePropertyValue(state)\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-CharacterClass\npp.regexp_eatCharacterClass = function(state) {\n if (state.eat(0x5B /* [ */)) {\n state.eat(0x5E /* ^ */)\n this.regexp_classRanges(state)\n if (state.eat(0x5D /* ] */)) {\n return true\n }\n // Unreachable since it threw \"unterminated regular expression\" error before.\n state.raise(\"Unterminated character class\")\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-ClassRanges\n// https://www.ecma-international.org/ecma-262/8.0/#prod-NonemptyClassRanges\n// https://www.ecma-international.org/ecma-262/8.0/#prod-NonemptyClassRangesNoDash\npp.regexp_classRanges = function(state) {\n while (this.regexp_eatClassAtom(state)) {\n const left = state.lastIntValue\n if (state.eat(0x2D /* - */) && this.regexp_eatClassAtom(state)) {\n const right = state.lastIntValue\n if (state.switchU && (left === -1 || right === -1)) {\n state.raise(\"Invalid character class\")\n }\n if (left !== -1 && right !== -1 && left > right) {\n state.raise(\"Range out of order in character class\")\n }\n }\n }\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-ClassAtom\n// https://www.ecma-international.org/ecma-262/8.0/#prod-ClassAtomNoDash\npp.regexp_eatClassAtom = function(state) {\n const start = state.pos\n\n if (state.eat(0x5C /* \\ */)) {\n if (this.regexp_eatClassEscape(state)) {\n return true\n }\n if (state.switchU) {\n // Make the same message as V8.\n const ch = state.current()\n if (ch === 0x63 /* c */ || isOctalDigit(ch)) {\n state.raise(\"Invalid class escape\")\n }\n state.raise(\"Invalid escape\")\n }\n state.pos = start\n }\n\n const ch = state.current()\n if (ch !== 0x5D /* ] */) {\n state.lastIntValue = ch\n state.advance()\n return true\n }\n\n return false\n}\n\n// 
https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-ClassEscape\npp.regexp_eatClassEscape = function(state) {\n const start = state.pos\n\n if (state.eat(0x62 /* b */)) {\n state.lastIntValue = 0x08 /* */\n return true\n }\n\n if (state.switchU && state.eat(0x2D /* - */)) {\n state.lastIntValue = 0x2D /* - */\n return true\n }\n\n if (!state.switchU && state.eat(0x63 /* c */)) {\n if (this.regexp_eatClassControlLetter(state)) {\n return true\n }\n state.pos = start\n }\n\n return (\n this.regexp_eatCharacterClassEscape(state) ||\n this.regexp_eatCharacterEscape(state)\n )\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-ClassControlLetter\npp.regexp_eatClassControlLetter = function(state) {\n const ch = state.current()\n if (isDecimalDigit(ch) || ch === 0x5F /* _ */) {\n state.lastIntValue = ch % 0x20\n state.advance()\n return true\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-HexEscapeSequence\npp.regexp_eatHexEscapeSequence = function(state) {\n const start = state.pos\n if (state.eat(0x78 /* x */)) {\n if (this.regexp_eatFixedHexDigits(state, 2)) {\n return true\n }\n if (state.switchU) {\n state.raise(\"Invalid escape\")\n }\n state.pos = start\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-DecimalDigits\npp.regexp_eatDecimalDigits = function(state) {\n const start = state.pos\n let ch = 0\n state.lastIntValue = 0\n while (isDecimalDigit(ch = state.current())) {\n state.lastIntValue = 10 * state.lastIntValue + (ch - 0x30 /* 0 */)\n state.advance()\n }\n return state.pos !== start\n}\nfunction isDecimalDigit(ch) {\n return ch >= 0x30 /* 0 */ && ch <= 0x39 /* 9 */\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-HexDigits\npp.regexp_eatHexDigits = function(state) {\n const start = state.pos\n let ch = 0\n state.lastIntValue = 0\n while (isHexDigit(ch = state.current())) {\n state.lastIntValue = 16 * state.lastIntValue + hexToInt(ch)\n 
state.advance()\n }\n return state.pos !== start\n}\nfunction isHexDigit(ch) {\n return (\n (ch >= 0x30 /* 0 */ && ch <= 0x39 /* 9 */) ||\n (ch >= 0x41 /* A */ && ch <= 0x46 /* F */) ||\n (ch >= 0x61 /* a */ && ch <= 0x66 /* f */)\n )\n}\nfunction hexToInt(ch) {\n if (ch >= 0x41 /* A */ && ch <= 0x46 /* F */) {\n return 10 + (ch - 0x41 /* A */)\n }\n if (ch >= 0x61 /* a */ && ch <= 0x66 /* f */) {\n return 10 + (ch - 0x61 /* a */)\n }\n return ch - 0x30 /* 0 */\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-annexB-LegacyOctalEscapeSequence\n// Allows only 0-377(octal) i.e. 0-255(decimal).\npp.regexp_eatLegacyOctalEscapeSequence = function(state) {\n if (this.regexp_eatOctalDigit(state)) {\n const n1 = state.lastIntValue\n if (this.regexp_eatOctalDigit(state)) {\n const n2 = state.lastIntValue\n if (n1 <= 3 && this.regexp_eatOctalDigit(state)) {\n state.lastIntValue = n1 * 64 + n2 * 8 + state.lastIntValue\n } else {\n state.lastIntValue = n1 * 8 + n2\n }\n } else {\n state.lastIntValue = n1\n }\n return true\n }\n return false\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-OctalDigit\npp.regexp_eatOctalDigit = function(state) {\n const ch = state.current()\n if (isOctalDigit(ch)) {\n state.lastIntValue = ch - 0x30 /* 0 */\n state.advance()\n return true\n }\n state.lastIntValue = 0\n return false\n}\nfunction isOctalDigit(ch) {\n return ch >= 0x30 /* 0 */ && ch <= 0x37 /* 7 */\n}\n\n// https://www.ecma-international.org/ecma-262/8.0/#prod-Hex4Digits\n// https://www.ecma-international.org/ecma-262/8.0/#prod-HexDigit\n// And HexDigit HexDigit in https://www.ecma-international.org/ecma-262/8.0/#prod-HexEscapeSequence\npp.regexp_eatFixedHexDigits = function(state, length) {\n const start = state.pos\n state.lastIntValue = 0\n for (let i = 0; i < length; ++i) {\n const ch = state.current()\n if (!isHexDigit(ch)) {\n state.pos = start\n return false\n }\n state.lastIntValue = 16 * state.lastIntValue + hexToInt(ch)\n state.advance()\n }\n 
return true\n}\n","import {isIdentifierStart, isIdentifierChar} from \"./identifier.js\"\nimport {types as tt, keywords as keywordTypes} from \"./tokentype.js\"\nimport {Parser} from \"./state.js\"\nimport {SourceLocation} from \"./locutil.js\"\nimport {RegExpValidationState} from \"./regexp.js\"\nimport {lineBreak, lineBreakG, isNewLine, nonASCIIwhitespace} from \"./whitespace.js\"\n\n// Object type used to represent tokens. Note that normally, tokens\n// simply exist as properties on the parser object. This is only\n// used for the onToken callback and the external tokenizer.\n\nexport class Token {\n constructor(p) {\n this.type = p.type\n this.value = p.value\n this.start = p.start\n this.end = p.end\n if (p.options.locations)\n this.loc = new SourceLocation(p, p.startLoc, p.endLoc)\n if (p.options.ranges)\n this.range = [p.start, p.end]\n }\n}\n\n// ## Tokenizer\n\nconst pp = Parser.prototype\n\n// Move to the next token\n\npp.next = function(ignoreEscapeSequenceInKeyword) {\n if (!ignoreEscapeSequenceInKeyword && this.type.keyword && this.containsEsc)\n this.raiseRecoverable(this.start, \"Escape sequence in keyword \" + this.type.keyword)\n if (this.options.onToken)\n this.options.onToken(new Token(this))\n\n this.lastTokEnd = this.end\n this.lastTokStart = this.start\n this.lastTokEndLoc = this.endLoc\n this.lastTokStartLoc = this.startLoc\n this.nextToken()\n}\n\npp.getToken = function() {\n this.next()\n return new Token(this)\n}\n\n// If we're in an ES6 environment, make parsers iterable\nif (typeof Symbol !== \"undefined\")\n pp[Symbol.iterator] = function() {\n return {\n next: () => {\n let token = this.getToken()\n return {\n done: token.type === tt.eof,\n value: token\n }\n }\n }\n }\n\n// Toggle strict mode. 
Re-reads the next number or string to please\n// pedantic tests (`\"use strict\"; 010;` should fail).\n\npp.curContext = function() {\n return this.context[this.context.length - 1]\n}\n\n// Read a single token, updating the parser object's token-related\n// properties.\n\npp.nextToken = function() {\n let curContext = this.curContext()\n if (!curContext || !curContext.preserveSpace) this.skipSpace()\n\n this.start = this.pos\n if (this.options.locations) this.startLoc = this.curPosition()\n if (this.pos >= this.input.length) return this.finishToken(tt.eof)\n\n if (curContext.override) return curContext.override(this)\n else this.readToken(this.fullCharCodeAtPos())\n}\n\npp.readToken = function(code) {\n // Identifier or keyword. '\\uXXXX' sequences are allowed in\n // identifiers, so '\\' also dispatches to that.\n if (isIdentifierStart(code, this.options.ecmaVersion >= 6) || code === 92 /* '\\' */)\n return this.readWord()\n\n return this.getTokenFromCode(code)\n}\n\npp.fullCharCodeAtPos = function() {\n let code = this.input.charCodeAt(this.pos)\n if (code <= 0xd7ff || code >= 0xe000) return code\n let next = this.input.charCodeAt(this.pos + 1)\n return (code << 10) + next - 0x35fdc00\n}\n\npp.skipBlockComment = function() {\n let startLoc = this.options.onComment && this.curPosition()\n let start = this.pos, end = this.input.indexOf(\"*/\", this.pos += 2)\n if (end === -1) this.raise(this.pos - 2, \"Unterminated comment\")\n this.pos = end + 2\n if (this.options.locations) {\n lineBreakG.lastIndex = start\n let match\n while ((match = lineBreakG.exec(this.input)) && match.index < this.pos) {\n ++this.curLine\n this.lineStart = match.index + match[0].length\n }\n }\n if (this.options.onComment)\n this.options.onComment(true, this.input.slice(start + 2, end), start, this.pos,\n startLoc, this.curPosition())\n}\n\npp.skipLineComment = function(startSkip) {\n let start = this.pos\n let startLoc = this.options.onComment && this.curPosition()\n let ch = 
this.input.charCodeAt(this.pos += startSkip)\n while (this.pos < this.input.length && !isNewLine(ch)) {\n ch = this.input.charCodeAt(++this.pos)\n }\n if (this.options.onComment)\n this.options.onComment(false, this.input.slice(start + startSkip, this.pos), start, this.pos,\n startLoc, this.curPosition())\n}\n\n// Called at the start of the parse and after every token. Skips\n// whitespace and comments, and.\n\npp.skipSpace = function() {\n loop: while (this.pos < this.input.length) {\n let ch = this.input.charCodeAt(this.pos)\n switch (ch) {\n case 32: case 160: // ' '\n ++this.pos\n break\n case 13:\n if (this.input.charCodeAt(this.pos + 1) === 10) {\n ++this.pos\n }\n case 10: case 8232: case 8233:\n ++this.pos\n if (this.options.locations) {\n ++this.curLine\n this.lineStart = this.pos\n }\n break\n case 47: // '/'\n switch (this.input.charCodeAt(this.pos + 1)) {\n case 42: // '*'\n this.skipBlockComment()\n break\n case 47:\n this.skipLineComment(2)\n break\n default:\n break loop\n }\n break\n default:\n if (ch > 8 && ch < 14 || ch >= 5760 && nonASCIIwhitespace.test(String.fromCharCode(ch))) {\n ++this.pos\n } else {\n break loop\n }\n }\n }\n}\n\n// Called at the end of every token. Sets `end`, `val`, and\n// maintains `context` and `exprAllowed`, and skips the space after\n// the token, so that the next one's `start` will point at the\n// right position.\n\npp.finishToken = function(type, val) {\n this.end = this.pos\n if (this.options.locations) this.endLoc = this.curPosition()\n let prevType = this.type\n this.type = type\n this.value = val\n\n this.updateContext(prevType)\n}\n\n// ### Token reading\n\n// This is the function that is called to fetch the next token. 
It\n// is somewhat obscure, because it works in character codes rather\n// than characters, and because operator parsing has been inlined\n// into it.\n//\n// All in the name of speed.\n//\npp.readToken_dot = function() {\n let next = this.input.charCodeAt(this.pos + 1)\n if (next >= 48 && next <= 57) return this.readNumber(true)\n let next2 = this.input.charCodeAt(this.pos + 2)\n if (this.options.ecmaVersion >= 6 && next === 46 && next2 === 46) { // 46 = dot '.'\n this.pos += 3\n return this.finishToken(tt.ellipsis)\n } else {\n ++this.pos\n return this.finishToken(tt.dot)\n }\n}\n\npp.readToken_slash = function() { // '/'\n let next = this.input.charCodeAt(this.pos + 1)\n if (this.exprAllowed) { ++this.pos; return this.readRegexp() }\n if (next === 61) return this.finishOp(tt.assign, 2)\n return this.finishOp(tt.slash, 1)\n}\n\npp.readToken_mult_modulo_exp = function(code) { // '%*'\n let next = this.input.charCodeAt(this.pos + 1)\n let size = 1\n let tokentype = code === 42 ? tt.star : tt.modulo\n\n // exponentiation operator ** and **=\n if (this.options.ecmaVersion >= 7 && code === 42 && next === 42) {\n ++size\n tokentype = tt.starstar\n next = this.input.charCodeAt(this.pos + 2)\n }\n\n if (next === 61) return this.finishOp(tt.assign, size + 1)\n return this.finishOp(tokentype, size)\n}\n\npp.readToken_pipe_amp = function(code) { // '|&'\n let next = this.input.charCodeAt(this.pos + 1)\n if (next === code) {\n if (this.options.ecmaVersion >= 12) {\n let next2 = this.input.charCodeAt(this.pos + 2)\n if (next2 === 61) return this.finishOp(tt.assign, 3)\n }\n return this.finishOp(code === 124 ? tt.logicalOR : tt.logicalAND, 2)\n }\n if (next === 61) return this.finishOp(tt.assign, 2)\n return this.finishOp(code === 124 ? 
tt.bitwiseOR : tt.bitwiseAND, 1)\n}\n\npp.readToken_caret = function() { // '^'\n let next = this.input.charCodeAt(this.pos + 1)\n if (next === 61) return this.finishOp(tt.assign, 2)\n return this.finishOp(tt.bitwiseXOR, 1)\n}\n\npp.readToken_plus_min = function(code) { // '+-'\n let next = this.input.charCodeAt(this.pos + 1)\n if (next === code) {\n if (next === 45 && !this.inModule && this.input.charCodeAt(this.pos + 2) === 62 &&\n (this.lastTokEnd === 0 || lineBreak.test(this.input.slice(this.lastTokEnd, this.pos)))) {\n // A `-->` line comment\n this.skipLineComment(3)\n this.skipSpace()\n return this.nextToken()\n }\n return this.finishOp(tt.incDec, 2)\n }\n if (next === 61) return this.finishOp(tt.assign, 2)\n return this.finishOp(tt.plusMin, 1)\n}\n\npp.readToken_lt_gt = function(code) { // '<>'\n let next = this.input.charCodeAt(this.pos + 1)\n let size = 1\n if (next === code) {\n size = code === 62 && this.input.charCodeAt(this.pos + 2) === 62 ? 3 : 2\n if (this.input.charCodeAt(this.pos + size) === 61) return this.finishOp(tt.assign, size + 1)\n return this.finishOp(tt.bitShift, size)\n }\n if (next === 33 && code === 60 && !this.inModule && this.input.charCodeAt(this.pos + 2) === 45 &&\n this.input.charCodeAt(this.pos + 3) === 45) {\n // `` line comment\n this.skipLineComment(3)\n this.skipSpace()\n return this.nextToken()\n }\n return this.finishOp(tt.incDec, 2)\n }\n if (next === 61) return this.finishOp(tt.assign, 2)\n return this.finishOp(tt.plusMin, 1)\n}\n\npp.readToken_lt_gt = function(code) { // '<>'\n let next = this.input.charCodeAt(this.pos + 1)\n let size = 1\n if (next === code) {\n size = code === 62 && this.input.charCodeAt(this.pos + 2) === 62 ? 
3 : 2\n if (this.input.charCodeAt(this.pos + size) === 61) return this.finishOp(tt.assign, size + 1)\n return this.finishOp(tt.bitShift, size)\n }\n if (next === 33 && code === 60 && !this.inModule && this.input.charCodeAt(this.pos + 2) === 45 &&\n this.input.charCodeAt(this.pos + 3) === 45) {\n // ` -Specify the maximum size, in bytes, of HTTP headers. Defaults to 16KB. +Specify the maximum size, in bytes, of HTTP headers. Defaults to 16 KB. ### `--napi-modules` -**This module is pending deprecation**. Once a replacement API has been +**This module is pending deprecation.** Once a replacement API has been finalized, this module will be fully deprecated. Most developers should **not** have cause to use this module. Users who absolutely must have the functionality that domains provide may rely on it for the time being diff --git a/doc/api/errors.md b/doc/api/errors.md index 04cd3dc6a680e7..48c44253d63ce3 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1424,6 +1424,11 @@ is set for the `Http2Stream`. `http2.connect()` was passed a URL that uses any protocol other than `http:` or `https:`. + +### `ERR_ILLEGAL_CONSTRUCTOR` + +An attempt was made to construct an object using a non-public constructor. + ### `ERR_INCOMPATIBLE_OPTION_PAIR` @@ -2447,11 +2452,11 @@ changes: - v10.15.0 commit: 186035243fad247e3955f pr-url: https://github.com/nodejs-private/node-private/pull/143 - description: Max header size in `http_parser` was set to 8KB. + description: Max header size in `http_parser` was set to 8 KB. --> Too much HTTP header data was received. In order to protect against malicious or -malconfigured clients, if more than 8KB of HTTP header data is received then +malconfigured clients, if more than 8 KB of HTTP header data is received then HTTP parsing will abort without a request or response object being created, and an `Error` with this code will be emitted. @@ -2825,7 +2830,7 @@ The native call from `process.cpuUsage` could not be processed. 
[`new URLSearchParams(iterable)`]: url.md#url_new_urlsearchparams_iterable [`package.json`]: packages.md#packages_node_js_package_json_field_definitions [`postMessage()`]: worker_threads.md#worker_threads_port_postmessage_value_transferlist -[`process.on('exit')`]: process.md#Event:-`'exit'` +[`process.on('exit')`]: process.md#process_event_exit [`process.send()`]: process.md#process_process_send_message_sendhandle_options_callback [`process.setUncaughtExceptionCaptureCallback()`]: process.md#process_process_setuncaughtexceptioncapturecallback_fn [`readable._read()`]: stream.md#stream_readable_read_size_1 diff --git a/doc/api/esm.md b/doc/api/esm.md index 691c5d47d5a9bb..f3827b5663966c 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -246,6 +246,7 @@ readFile('./foo.txt', (err, source) => { ```js import fs, { readFileSync } from 'fs'; import { syncBuiltinESMExports } from 'module'; +import { Buffer } from 'buffer'; fs.readFileSync = () => Buffer.from('Hello, ESM'); syncBuiltinESMExports(); @@ -818,8 +819,9 @@ globalThis.someInjectedProperty = 42; console.log('I just set some globals!'); const { createRequire } = getBuiltin('module'); +const { cwd } = getBuiltin('process'); -const require = createRequire(process.cwd() + '/'); +const require = createRequire(cwd() + '/'); // [...] `; } @@ -920,8 +922,9 @@ purposes. // coffeescript-loader.mjs import { URL, pathToFileURL } from 'url'; import CoffeeScript from 'coffeescript'; +import { cwd } from 'process'; -const baseURL = pathToFileURL(`${process.cwd()}/`).href; +const baseURL = pathToFileURL(`${cwd()}/`).href; // CoffeeScript files end in .coffee, .litcoffee or .coffee.md. const extensionsRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/; @@ -1335,15 +1338,15 @@ success! 
[`data:` URLs]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs [`export`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/export [`import()`]: #esm_import_expressions -[`import.meta.url`]: #esm_import_meta_url [`import.meta.resolve`]: #esm_import_meta_resolve_specifier_parent +[`import.meta.url`]: #esm_import_meta_url [`import`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import [`module.createRequire()`]: module.md#module_module_createrequire_filename [`module.syncBuiltinESMExports()`]: module.md#module_module_syncbuiltinesmexports [`package.json`]: packages.md#packages_node_js_package_json_field_definitions [`process.dlopen`]: process.md#process_process_dlopen_module_filename_flags -[`transformSource` hook]: #esm_transformsource_source_context_defaulttransformsource [`string`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String +[`transformSource` hook]: #esm_transformsource_source_context_defaulttransformsource [`util.TextDecoder`]: util.md#util_class_util_textdecoder [cjs-module-lexer]: https://github.com/guybedford/cjs-module-lexer/tree/1.2.1 [custom https loader]: #esm_https_loader diff --git a/doc/api/events.md b/doc/api/events.md index ca2ae7e6c84af9..c9c2e43353d7ba 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -1484,11 +1484,11 @@ target.removeEventListener('foo', handler, { capture: true }); added: v14.5.0 --> -* `event` {Object|Event} +* `event` {Event} +* Returns: {boolean} `true` if either event’s `cancelable` attribute value is + false or its `preventDefault()` method was not invoked, otherwise `false`. -Dispatches the `event` to the list of handlers for `event.type`. The `event` -may be an `Event` object or any object with a `type` property whose value is -a `string`. +Dispatches the `event` to the list of handlers for `event.type`. 
The registered event listeners is synchronously invoked in the order they were registered. @@ -1636,8 +1636,8 @@ to the `EventTarget`. [`fs.ReadStream`]: fs.md#fs_class_fs_readstream [`net.Server`]: net.md#net_class_net_server [`process.on('warning')`]: process.md#process_event_warning -[stream]: stream.md [capturerejections]: #events_capture_rejections_of_promises +[error]: #events_error_events [rejection]: #events_emitter_symbol_for_nodejs_rejection_err_eventname_args [rejectionsymbol]: #events_events_capturerejectionsymbol -[error]: #events_error_events +[stream]: stream.md diff --git a/doc/api/fs.md b/doc/api/fs.md index 0a032426d5b4f0..a4945718bb7ee1 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -14,24 +14,20 @@ way modeled on standard POSIX functions. To use the promise-based APIs: ```mjs -// Using ESM Module syntax: import * as fs from 'fs/promises'; ``` ```cjs -// Using CommonJS syntax: const fs = require('fs/promises'); ``` To use the callback and sync APIs: ```mjs -// Using ESM Module syntax: import * as fs from 'fs'; ``` ```cjs -// Using CommonJS syntax: const fs = require('fs'); ``` @@ -44,7 +40,6 @@ Promise-based operations return a promise that is fulfilled when the asynchronous operation is complete. ```mjs -// Using ESM Module syntax: import { unlink } from 'fs/promises'; try { @@ -56,7 +51,6 @@ try { ``` ```cjs -// Using CommonJS syntax const { unlink } = require('fs/promises'); (async function(path) { @@ -78,7 +72,6 @@ reserved for an exception. If the operation is completed successfully, then the first argument is `null` or `undefined`. ```mjs -// Using ESM syntax import { unlink } from 'fs'; unlink('/tmp/hello', (err) => { @@ -88,7 +81,6 @@ unlink('/tmp/hello', (err) => { ``` ```cjs -// Using CommonJS syntax const { unlink } = require('fs'); unlink('/tmp/hello', (err) => { @@ -108,7 +100,6 @@ execution until the operation is complete. Exceptions are thrown immediately and can be handled using `try…catch`, or can be allowed to bubble up. 
```mjs -// Using ESM syntax import { unlinkSync } from 'fs'; try { @@ -120,7 +111,6 @@ try { ``` ```cjs -// Using CommonJS syntax const { unlinkSync } = require('fs'); try { @@ -450,6 +440,9 @@ changes: Write `buffer` to the file. +If `buffer` is a plain object, it must have an own (not inherited) `toString` +function property. + The promise is resolved with an object containing two properties: * `bytesWritten` {integer} the number of bytes written @@ -802,6 +795,10 @@ rejection only when `recursive` is false. ### `fsPromises.mkdtemp(prefix[, options])` * `prefix` {string} @@ -1093,7 +1090,7 @@ changes: option is not `true`. **Default:** `0`. * `recursive` {boolean} If `true`, perform a recursive directory removal. In recursive mode, operations are retried on failure. **Default:** `false`. - **Deprecated**. + **Deprecated.** * `retryDelay` {integer} The amount of time in milliseconds to wait between retries. This option is ignored if the `recursive` option is not `true`. **Default:** `100`. @@ -1290,8 +1287,8 @@ changes: * Returns: {Promise} Fulfills with `undefined` upon success. Asynchronously writes data to a file, replacing the file if it already exists. -`data` can be a string, a {Buffer}, or an object with an own `toString` function -property. +`data` can be a string, a {Buffer}, or, an object with an own (not inherited) +`toString` function property. The `encoding` option is ignored if `data` is a buffer. @@ -1313,6 +1310,7 @@ to be written. ```mjs import { writeFile } from 'fs/promises'; +import { Buffer } from 'buffer'; try { const controller = new AbortController(); @@ -1398,7 +1396,7 @@ access(file, constants.W_OK, (err) => { }); // Check if the file exists in the current directory, and if it is writable. -access(file, constants.F_OK | fs.constants.W_OK, (err) => { +access(file, constants.F_OK | constants.W_OK, (err) => { if (err) { console.error( `${file} ${err.code === 'ENOENT' ? 
'does not exist' : 'is read-only'}`); @@ -2571,6 +2569,9 @@ See the POSIX mkdir(2) documentation for more details. * `prefix` {string} @@ -4849,7 +4859,7 @@ changes: option is not `true`. **Default:** `0`. * `recursive` {boolean} If `true`, perform a recursive directory removal. In recursive mode, operations are retried on failure. **Default:** `false`. - **Deprecated**. + **Deprecated.** * `retryDelay` {integer} The amount of time in milliseconds to wait between retries. This option is ignored if the `recursive` option is not `true`. **Default:** `100`. @@ -5026,6 +5036,9 @@ changes: Returns `undefined`. +If `data` is a plain object, it must have an own (not inherited) `toString` +function property. + For detailed information, see the documentation of the asynchronous version of this API: [`fs.writeFile()`][]. @@ -5060,6 +5073,9 @@ changes: * `position` {integer} * Returns: {number} The number of bytes written. +If `buffer` is a plain object, it must have an own (not inherited) `toString` +function property. + For detailed information, see the documentation of the asynchronous version of this API: [`fs.write(fd, buffer...)`][]. @@ -5086,6 +5102,9 @@ changes: * `encoding` {string} * Returns: {number} The number of bytes written. +If `string` is a plain object, it must have an own (not inherited) `toString` +function property. + For detailed information, see the documentation of the asynchronous version of this API: [`fs.write(fd, string...)`][]. 
@@ -6301,7 +6320,6 @@ It is important to correctly order the operations by awaiting the results of one before invoking the other: ```mjs -// Using ESM syntax import { rename, stat } from 'fs/promises'; const from = '/tmp/hello'; @@ -6317,7 +6335,6 @@ try { ``` ```cjs -// Using CommonJS syntax const { rename, stat } = require('fs/promises'); (async function(from, to) { @@ -6499,6 +6516,7 @@ Example using an absolute path on POSIX: ```mjs import { open } from 'fs/promises'; +import { Buffer } from 'buffer'; let fd; try { @@ -6690,7 +6708,6 @@ the file contents. [Naming Files, Paths, and Namespaces]: https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file [Readable Stream]: stream.md#stream_class_stream_readable [Writable Stream]: stream.md#stream_class_stream_writable -[caveats]: #fs_caveats [`AHAFS`]: https://developer.ibm.com/articles/au-aix_event_infrastructure/ [`Buffer.byteLength`]: buffer.md#buffer_static_method_buffer_bytelength_string_encoding [`FSEvents`]: https://developer.apple.com/documentation/coreservices/file_system_events @@ -6705,7 +6722,7 @@ the file contents. [`fs.copyFile()`]: #fs_fs_copyfile_src_dest_mode_callback [`fs.createReadStream()`]: #fs_fs_createreadstream_path_options [`fs.createWriteStream()`]: #fs_fs_createwritestream_path_options -[`fs.exists()`]: fs.md#fs_fs_exists_path_callback +[`fs.exists()`]: #fs_fs_exists_path_callback [`fs.fstat()`]: #fs_fs_fstat_fd_options_callback [`fs.ftruncate()`]: #fs_fs_ftruncate_fd_len_callback [`fs.futimes()`]: #fs_fs_futimes_fd_atime_mtime_callback @@ -6743,6 +6760,7 @@ the file contents. 
[`kqueue(2)`]: https://www.freebsd.org/cgi/man.cgi?query=kqueue&sektion=2 [`util.promisify()`]: util.md#util_util_promisify_original [bigints]: https://tc39.github.io/proposal-bigint +[caveats]: #fs_caveats [chcp]: https://ss64.com/nt/chcp.html [inode]: https://en.wikipedia.org/wiki/Inode [support of file system `flags`]: #fs_file_system_flags diff --git a/doc/api/http.md b/doc/api/http.md index 9c4807c53afcec..fdb77648b9c56e 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -2677,7 +2677,7 @@ changes: * `maxHeaderSize` {number} Optionally overrides the value of [`--max-http-header-size`][] for requests received by this server, i.e. the maximum length of request headers in bytes. - **Default:** 16384 (16KB). + **Default:** 16384 (16 KB). * `requestListener` {Function} * Returns: {http.Server} @@ -2787,7 +2787,8 @@ added: * {number} Read-only property specifying the maximum allowed size of HTTP headers in bytes. -Defaults to 8KB. Configurable using the [`--max-http-header-size`][] CLI option. +Defaults to 8 KB. Configurable using the [`--max-http-header-size`][] CLI +option. This can be overridden for servers and client requests by passing the `maxHeaderSize` option. @@ -2853,7 +2854,7 @@ changes: * `maxHeaderSize` {number} Optionally overrides the value of [`--max-http-header-size`][] for requests received from the server, i.e. the maximum length of response headers in bytes. - **Default:** 16384 (16KB). + **Default:** 16384 (16 KB). * `method` {string} A string specifying the HTTP request method. **Default:** `'GET'`. * `path` {string} Request path. Should include query string if any. 
@@ -3140,13 +3141,13 @@ try { } ``` -[`--insecure-http-parser`]: cli.md#cli_insecure_http_parser -[`--max-http-header-size`]: cli.md#cli_max_http_header_size_size [`'checkContinue'`]: #http_event_checkcontinue [`'finish'`]: #http_event_finish [`'request'`]: #http_event_request [`'response'`]: #http_event_response [`'upgrade'`]: #http_event_upgrade +[`--insecure-http-parser`]: cli.md#cli_insecure_http_parser +[`--max-http-header-size`]: cli.md#cli_max_http_header_size_size [`Agent`]: #http_class_http_agent [`Buffer.byteLength()`]: buffer.md#buffer_static_method_buffer_bytelength_string_encoding [`Duplex`]: stream.md#stream_class_stream_duplex @@ -3162,37 +3163,37 @@ try { [`http.Agent`]: #http_class_http_agent [`http.ClientRequest`]: #http_class_http_clientrequest [`http.IncomingMessage`]: #http_class_http_incomingmessage -[`http.Server`]: #http_class_http_server [`http.ServerResponse`]: #http_class_http_serverresponse +[`http.Server`]: #http_class_http_server [`http.get()`]: #http_http_get_options_callback [`http.globalAgent`]: #http_http_globalagent [`http.request()`]: #http_http_request_options_callback [`message.headers`]: #http_message_headers +[`message.socket`]: #http_message_socket [`net.Server.close()`]: net.md#net_server_close_callback [`net.Server`]: net.md#net_class_net_server [`net.Socket`]: net.md#net_class_net_socket [`net.createConnection()`]: net.md#net_net_createconnection_options_connectlistener [`new URL()`]: url.md#url_new_url_input_base -[`message.socket`]: #http_message_socket [`outgoingMessage.socket`]: #http_outgoingMessage.socket [`removeHeader(name)`]: #http_request_removeheader_name -[`request.end()`]: #http_request_end_data_encoding_callback [`request.destroy()`]: #http_request_destroy_error +[`request.end()`]: #http_request_end_data_encoding_callback [`request.flushHeaders()`]: #http_request_flushheaders [`request.getHeader()`]: #http_request_getheader_name [`request.setHeader()`]: #http_request_setheader_name_value 
[`request.setTimeout()`]: #http_request_settimeout_timeout_callback [`request.socket.getPeerCertificate()`]: tls.md#tls_tlssocket_getpeercertificate_detailed [`request.socket`]: #http_request_socket -[`request.writableFinished`]: #http_request_writablefinished [`request.writableEnded`]: #http_request_writableended +[`request.writableFinished`]: #http_request_writablefinished [`request.write(data, encoding)`]: #http_request_write_chunk_encoding_callback [`response.end()`]: #http_response_end_data_encoding_callback [`response.getHeader()`]: #http_response_getheader_name [`response.setHeader()`]: #http_response_setheader_name_value [`response.socket`]: #http_response_socket -[`response.writableFinished`]: #http_response_writablefinished [`response.writableEnded`]: #http_response_writableended +[`response.writableFinished`]: #http_response_writablefinished [`response.write()`]: #http_response_write_chunk_encoding_callback [`response.write(data, encoding)`]: #http_response_write_chunk_encoding_callback [`response.writeContinue()`]: #http_response_writecontinue diff --git a/doc/api/http2.md b/doc/api/http2.md index 13d187321c6780..58081253229a83 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ -3852,6 +3852,7 @@ you need to implement any fall-back behavior yourself. [RFC 7838]: https://tools.ietf.org/html/rfc7838 [RFC 8336]: https://tools.ietf.org/html/rfc8336 [RFC 8441]: https://tools.ietf.org/html/rfc8441 +[Sensitive headers]: #http2_sensitive_headers [`'checkContinue'`]: #http2_event_checkcontinue [`'connect'`]: #http2_event_connect [`'request'`]: #http2_event_request @@ -3881,8 +3882,8 @@ you need to implement any fall-back behavior yourself. 
[`net.connect()`]: net.md#net_net_connect [`net.createServer()`]: net.md#net_net_createserver_options_connectionlistener [`request.authority`]: #http2_request_authority -[`request.socket`]: #http2_request_socket [`request.socket.getPeerCertificate()`]: tls.md#tls_tlssocket_getpeercertificate_detailed +[`request.socket`]: #http2_request_socket [`response.end()`]: #http2_response_end_data_encoding_callback [`response.setHeader()`]: #http2_response_setheader_name_value [`response.socket`]: #http2_response_socket @@ -3897,4 +3898,3 @@ you need to implement any fall-back behavior yourself. [`tls.createServer()`]: tls.md#tls_tls_createserver_options_secureconnectionlistener [`writable.writableFinished`]: stream.md#stream_writable_writablefinished [error code]: #http2_error_codes_for_rst_stream_and_goaway -[Sensitive headers]: #http2_sensitive_headers diff --git a/doc/api/index.md b/doc/api/index.md index 71c415afaa673a..448f6d599fc8f5 100644 --- a/doc/api/index.md +++ b/doc/api/index.md @@ -64,6 +64,7 @@ * [VM](vm.md) * [WASI](wasi.md) * [Web Crypto API](webcrypto.md) +* [Web Streams API](webstreams.md) * [Worker threads](worker_threads.md) * [Zlib](zlib.md) diff --git a/doc/api/modules.md b/doc/api/modules.md index a28be94e536e49..7ddfeab3a7b229 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -1009,9 +1009,9 @@ This section was moved to [`Error`]: errors.md#errors_class_error [`__dirname`]: #modules_dirname [`__filename`]: #modules_filename -[`module` object]: #modules_the_module_object -[`module.id`]: #modules_module_id [`module.children`]: #modules_module_children +[`module.id`]: #modules_module_id +[`module` object]: #modules_the_module_object [`package.json`]: packages.md#packages_node_js_package_json_field_definitions [`path.dirname()`]: path.md#path_path_dirname_path [`require.main`]: #modules_require_main diff --git a/doc/api/n-api.md b/doc/api/n-api.md index dca306b83cdede..d06cbd1ba70481 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -326,7 
+326,7 @@ listed as supporting a later version. v14.0.0 v14.0.0 v14.12.0 - + v14.17.0 v15.x @@ -1025,7 +1025,7 @@ clear the exception. On success, result will contain the handle to the last JavaScript `Object` thrown. If it is determined, after retrieving the exception, the exception cannot be handled after all it can be re-thrown it with [`napi_throw`][] where error is the -JavaScript `Error` object to be thrown. +JavaScript value to be thrown. The following utility functions are also available in case native code needs to throw an exception or determine if a `napi_value` is an instance @@ -1178,7 +1178,7 @@ NAPI_EXTERN napi_status napi_create_error(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] code`: Optional `napi_value` with the string for the error code to be associated with the error. -* `[in] msg`: `napi_value` that references a JavaScript `String` to be used as +* `[in] msg`: `napi_value` that references a JavaScript `string` to be used as the message for the `Error`. * `[out] result`: `napi_value` representing the error created. @@ -1202,7 +1202,7 @@ NAPI_EXTERN napi_status napi_create_type_error(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] code`: Optional `napi_value` with the string for the error code to be associated with the error. -* `[in] msg`: `napi_value` that references a JavaScript `String` to be used as +* `[in] msg`: `napi_value` that references a JavaScript `string` to be used as the message for the `Error`. * `[out] result`: `napi_value` representing the error created. @@ -1226,7 +1226,7 @@ NAPI_EXTERN napi_status napi_create_range_error(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] code`: Optional `napi_value` with the string for the error code to be associated with the error. 
-* `[in] msg`: `napi_value` that references a JavaScript `String` to be used as +* `[in] msg`: `napi_value` that references a JavaScript `string` to be used as the message for the `Error`. * `[out] result`: `napi_value` representing the error created. @@ -2357,14 +2357,14 @@ napi_status napi_create_symbol(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] description`: Optional `napi_value` which refers to a JavaScript - `String` to be set as the description for the symbol. -* `[out] result`: A `napi_value` representing a JavaScript `Symbol`. + `string` to be set as the description for the symbol. +* `[out] result`: A `napi_value` representing a JavaScript `symbol`. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript `Symbol` object from a UTF8-encoded C string. +This API creates a JavaScript `symbol` value from a UTF8-encoded C string. -The JavaScript `Symbol` type is described in [Section 19.4][] +The JavaScript `symbol` type is described in [Section 19.4][] of the ECMAScript Language Specification. #### napi_create_typedarray @@ -2451,14 +2451,14 @@ napi_status napi_create_int32(napi_env env, int32_t value, napi_value* result) * `[in] env`: The environment that the API is invoked under. * `[in] value`: Integer value to be represented in JavaScript. -* `[out] result`: A `napi_value` representing a JavaScript `Number`. +* `[out] result`: A `napi_value` representing a JavaScript `number`. Returns `napi_ok` if the API succeeded. This API is used to convert from the C `int32_t` type to the JavaScript -`Number` type. +`number` type. -The JavaScript `Number` type is described in +The JavaScript `number` type is described in [Section 6.1.6][] of the ECMAScript Language Specification. #### napi_create_uint32 @@ -2473,14 +2473,14 @@ napi_status napi_create_uint32(napi_env env, uint32_t value, napi_value* result) * `[in] env`: The environment that the API is invoked under. 
* `[in] value`: Unsigned integer value to be represented in JavaScript. -* `[out] result`: A `napi_value` representing a JavaScript `Number`. +* `[out] result`: A `napi_value` representing a JavaScript `number`. Returns `napi_ok` if the API succeeded. This API is used to convert from the C `uint32_t` type to the JavaScript -`Number` type. +`number` type. -The JavaScript `Number` type is described in +The JavaScript `number` type is described in [Section 6.1.6][] of the ECMAScript Language Specification. #### napi_create_int64 @@ -2495,14 +2495,14 @@ napi_status napi_create_int64(napi_env env, int64_t value, napi_value* result) * `[in] env`: The environment that the API is invoked under. * `[in] value`: Integer value to be represented in JavaScript. -* `[out] result`: A `napi_value` representing a JavaScript `Number`. +* `[out] result`: A `napi_value` representing a JavaScript `number`. Returns `napi_ok` if the API succeeded. This API is used to convert from the C `int64_t` type to the JavaScript -`Number` type. +`number` type. -The JavaScript `Number` type is described in [Section 6.1.6][] +The JavaScript `number` type is described in [Section 6.1.6][] of the ECMAScript Language Specification. Note the complete range of `int64_t` cannot be represented with full precision in JavaScript. Integer values outside the range of [`Number.MIN_SAFE_INTEGER`][] `-(2**53 - 1)` - @@ -2520,14 +2520,14 @@ napi_status napi_create_double(napi_env env, double value, napi_value* result) * `[in] env`: The environment that the API is invoked under. * `[in] value`: Double-precision value to be represented in JavaScript. -* `[out] result`: A `napi_value` representing a JavaScript `Number`. +* `[out] result`: A `napi_value` representing a JavaScript `number`. Returns `napi_ok` if the API succeeded. This API is used to convert from the C `double` type to the JavaScript -`Number` type. +`number` type. 
-The JavaScript `Number` type is described in +The JavaScript `number` type is described in [Section 6.1.6][] of the ECMAScript Language Specification. #### napi_create_bigint_int64 @@ -2616,14 +2616,14 @@ napi_status napi_create_string_latin1(napi_env env, * `[in] str`: Character buffer representing an ISO-8859-1-encoded string. * `[in] length`: The length of the string in bytes, or `NAPI_AUTO_LENGTH` if it is null-terminated. -* `[out] result`: A `napi_value` representing a JavaScript `String`. +* `[out] result`: A `napi_value` representing a JavaScript `string`. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript `String` object from an ISO-8859-1-encoded C +This API creates a JavaScript `string` value from an ISO-8859-1-encoded C string. The native string is copied. -The JavaScript `String` type is described in +The JavaScript `string` type is described in [Section 6.1.4][] of the ECMAScript Language Specification. #### napi_create_string_utf16 @@ -2643,14 +2643,14 @@ napi_status napi_create_string_utf16(napi_env env, * `[in] str`: Character buffer representing a UTF16-LE-encoded string. * `[in] length`: The length of the string in two-byte code units, or `NAPI_AUTO_LENGTH` if it is null-terminated. -* `[out] result`: A `napi_value` representing a JavaScript `String`. +* `[out] result`: A `napi_value` representing a JavaScript `string`. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript `String` object from a UTF16-LE-encoded C string. +This API creates a JavaScript `string` value from a UTF16-LE-encoded C string. The native string is copied. -The JavaScript `String` type is described in +The JavaScript `string` type is described in [Section 6.1.4][] of the ECMAScript Language Specification. #### napi_create_string_utf8 @@ -2670,14 +2670,14 @@ napi_status napi_create_string_utf8(napi_env env, * `[in] str`: Character buffer representing a UTF8-encoded string. 
* `[in] length`: The length of the string in bytes, or `NAPI_AUTO_LENGTH` if it is null-terminated. -* `[out] result`: A `napi_value` representing a JavaScript `String`. +* `[out] result`: A `napi_value` representing a JavaScript `string`. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript `String` object from a UTF8-encoded C string. +This API creates a JavaScript `string` value from a UTF8-encoded C string. The native string is copied. -The JavaScript `String` type is described in +The JavaScript `string` type is described in [Section 6.1.4][] of the ECMAScript Language Specification. ### Functions to convert from Node-API to C types @@ -2841,7 +2841,7 @@ napi_status napi_get_dataview_info(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] dataview`: `napi_value` representing the `DataView` whose properties to query. -* `[out] byte_length`: `Number` of bytes in the `DataView`. +* `[out] byte_length`: Number of bytes in the `DataView`. * `[out] data`: The data buffer underlying the `DataView`. If byte_length is `0`, this may be `NULL` or any other pointer value. * `[out] arraybuffer`: `ArrayBuffer` underlying the `DataView`. @@ -2914,15 +2914,15 @@ napi_status napi_get_value_double(napi_env env, ``` * `[in] env`: The environment that the API is invoked under. -* `[in] value`: `napi_value` representing JavaScript `Number`. +* `[in] value`: `napi_value` representing JavaScript `number`. * `[out] result`: C double primitive equivalent of the given JavaScript - `Number`. + `number`. Returns `napi_ok` if the API succeeded. If a non-number `napi_value` is passed in it returns `napi_number_expected`. This API returns the C double primitive equivalent of the given JavaScript -`Number`. +`number`. #### napi_get_value_bigint_int64 -The `process` object is a `global` that provides information about, and control -over, the current Node.js process. 
As a global, it is always available to -Node.js applications without using `require()`. It can also be explicitly -accessed using `require()`: +The `process` object provides information about, and control over, the current +Node.js process. While it is available as a global, it is recommended to +explicitly access it via require or import: -```js +```mjs +import process from 'process'; +``` + +```cjs const process = require('process'); ``` @@ -38,7 +41,28 @@ termination, such as calling [`process.exit()`][] or uncaught exceptions. The `'beforeExit'` should *not* be used as an alternative to the `'exit'` event unless the intention is to schedule additional work. -```js +```mjs +import process from 'process'; + +process.on('beforeExit', (code) => { + console.log('Process beforeExit event with code: ', code); +}); + +process.on('exit', (code) => { + console.log('Process exit event with code: ', code); +}); + +console.log('This message is displayed first.'); + +// Prints: +// This message is displayed first. +// Process beforeExit event with code: 0 +// Process exit event with code: 0 +``` + +```cjs +const process = require('process'); + process.on('beforeExit', (code) => { console.log('Process beforeExit event with code: ', code); }); @@ -84,7 +108,17 @@ The listener callback function is invoked with the exit code specified either by the [`process.exitCode`][] property, or the `exitCode` argument passed to the [`process.exit()`][] method. -```js +```mjs +import process from 'process'; + +process.on('exit', (code) => { + console.log(`About to exit with code: ${code}`); +}); +``` + +```cjs +const process = require('process'); + process.on('exit', (code) => { console.log(`About to exit with code: ${code}`); }); @@ -95,7 +129,19 @@ process will exit immediately after calling the `'exit'` event listeners causing any additional work still queued in the event loop to be abandoned. 
In the following example, for instance, the timeout will never occur: -```js +```mjs +import process from 'process'; + +process.on('exit', (code) => { + setTimeout(() => { + console.log('This will not run'); + }, 0); +}); +``` + +```cjs +const process = require('process'); + process.on('exit', (code) => { setTimeout(() => { console.log('This will not run'); @@ -148,7 +194,38 @@ This is useful for tracking potential errors in an application while using the the occurrence of this event does not necessarily indicate an error. For example, [`Promise.race()`][] can trigger a `'multipleResolves'` event. -```js +```mjs +import process from 'process'; + +process.on('multipleResolves', (type, promise, reason) => { + console.error(type, promise, reason); + setImmediate(() => process.exit(1)); +}); + +async function main() { + try { + return await new Promise((resolve, reject) => { + resolve('First call'); + resolve('Swallowed resolve'); + reject(new Error('Swallowed reject')); + }); + } catch { + throw new Error('Failed'); + } +} + +main().then(console.log); +// resolve: Promise { 'First call' } 'Swallowed resolve' +// reject: Promise { 'First call' } Error: Swallowed reject +// at Promise (*) +// at new Promise () +// at main (*) +// First call +``` + +```cjs +const process = require('process'); + process.on('multipleResolves', (type, promise, reason) => { console.error(type, promise, reason); setImmediate(() => process.exit(1)); @@ -206,7 +283,21 @@ In asynchronous code, the `'unhandledRejection'` event is emitted when the list of unhandled rejections grows, and the `'rejectionHandled'` event is emitted when the list of unhandled rejections shrinks. 
-```js +```mjs +import process from 'process'; + +const unhandledRejections = new Map(); +process.on('unhandledRejection', (reason, promise) => { + unhandledRejections.set(promise, reason); +}); +process.on('rejectionHandled', (promise) => { + unhandledRejections.delete(promise); +}); +``` + +```cjs +const process = require('process'); + const unhandledRejections = new Map(); process.on('unhandledRejection', (reason, promise) => { unhandledRejections.set(promise, reason); @@ -250,7 +341,29 @@ behavior. Alternatively, change the [`process.exitCode`][] in the provided exit code. Otherwise, in the presence of such handler the process will exit with 0. -```js +```mjs +import process from 'process'; + +process.on('uncaughtException', (err, origin) => { + fs.writeSync( + process.stderr.fd, + `Caught exception: ${err}\n` + + `Exception origin: ${origin}` + ); +}); + +setTimeout(() => { + console.log('This will still run.'); +}, 500); + +// Intentionally cause an exception, but don't catch it. +nonexistentFunc(); +console.log('This will not run.'); +``` + +```cjs +const process = require('process'); + process.on('uncaughtException', (err, origin) => { fs.writeSync( process.stderr.fd, @@ -321,7 +434,21 @@ Installing an `'uncaughtExceptionMonitor'` listener does not change the behavior once an `'uncaughtException'` event is emitted. The process will still crash if no `'uncaughtException'` listener is installed. -```js +```mjs +import process from 'process'; + +process.on('uncaughtExceptionMonitor', (err, origin) => { + MyMonitoringTool.logSync(err, origin); +}); + +// Intentionally cause an exception, but don't catch it. +nonexistentFunc(); +// Still crashes Node.js +``` + +```cjs +const process = require('process'); + process.on('uncaughtExceptionMonitor', (err, origin) => { MyMonitoringTool.logSync(err, origin); }); @@ -356,7 +483,22 @@ are propagated through a `Promise` chain. 
The `'unhandledRejection'` event is useful for detecting and keeping track of promises that were rejected whose rejections have not yet been handled. -```js +```mjs +import process from 'process'; + +process.on('unhandledRejection', (reason, promise) => { + console.log('Unhandled Rejection at:', promise, 'reason:', reason); + // Application specific logging, throwing an error, or other logic here +}); + +somePromise.then((res) => { + return reportToUser(JSON.pasre(res)); // Note the typo (`pasre`) +}); // No `.catch()` or `.then()` +``` + +```cjs +const process = require('process'); + process.on('unhandledRejection', (reason, promise) => { console.log('Unhandled Rejection at:', promise, 'reason:', reason); // Application specific logging, throwing an error, or other logic here @@ -370,7 +512,21 @@ somePromise.then((res) => { The following will also trigger the `'unhandledRejection'` event to be emitted: -```js +```mjs +import process from 'process'; + +function SomeResource() { + // Initially set the loaded status to a rejected promise + this.loaded = Promise.reject(new Error('Resource not yet loaded!')); +} + +const resource = new SomeResource(); +// no .catch or .then on resource.loaded for at least a turn +``` + +```cjs +const process = require('process'); + function SomeResource() { // Initially set the loaded status to a rejected promise this.loaded = Promise.reject(new Error('Resource not yet loaded!')); @@ -406,7 +562,19 @@ are not part of the normal Node.js and JavaScript error handling flow. Node.js can emit warnings whenever it detects bad coding practices that could lead to sub-optimal application performance, bugs, or security vulnerabilities. 
-```js +```mjs +import process from 'process'; + +process.on('warning', (warning) => { + console.warn(warning.name); // Print the warning name + console.warn(warning.message); // Print the warning message + console.warn(warning.stack); // Print the stack trace +}); +``` + +```cjs +const process = require('process'); + process.on('warning', (warning) => { console.warn(warning.name); // Print the warning name console.warn(warning.message); // Print the warning message @@ -511,7 +679,28 @@ The signal handler will receive the signal's name (`'SIGINT'`, The name of each event will be the uppercase common name for the signal (e.g. `'SIGINT'` for `SIGINT` signals). -```js +```mjs +import process from 'process'; + +// Begin reading from stdin so the process does not exit. +process.stdin.resume(); + +process.on('SIGINT', () => { + console.log('Received SIGINT. Press Control-D to exit.'); +}); + +// Using a single function to handle multiple signals +function handle(signal) { + console.log(`Received ${signal}`); +} + +process.on('SIGINT', handle); +process.on('SIGTERM', handle); +``` + +```cjs +const process = require('process'); + // Begin reading from stdin so the process does not exit. process.stdin.resume(); @@ -613,8 +802,21 @@ appear only *once*; each will begin with one or more dashes. Flags passed through to V8 will contain underscores instead of non-leading dashes: -```js -process.allowedNodeEnvironmentFlags.forEach((flag) => { +```mjs +import { allowedNodeEnvironmentFlags } from 'process'; + +allowedNodeEnvironmentFlags.forEach((flag) => { + // -r + // --inspect-brk + // --abort_on_uncaught_exception + // ... +}); +``` + +```cjs +const { allowedNodeEnvironmentFlags } = require('process'); + +allowedNodeEnvironmentFlags.forEach((flag) => { // -r // --inspect-brk // --abort_on_uncaught_exception @@ -641,7 +843,15 @@ The operating system CPU architecture for which the Node.js binary was compiled. 
Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'mips'`,`'mipsel'`, `'ppc'`, `'ppc64'`, `'s390'`, `'s390x'`, `'x32'`, and `'x64'`. -```js +```mjs +import { arch } from 'process'; + +console.log(`This processor architecture is ${arch}`); +``` + +```cjs +const { arch } = require('process'); + console.log(`This processor architecture is ${process.arch}`); ``` @@ -661,9 +871,20 @@ arguments. For example, assuming the following script for `process-args.js`: -```js +```mjs +import { argv } from 'process'; + +// print process.argv +argv.forEach((val, index) => { + console.log(`${index}: ${val}`); +}); +``` + +```cjs +const { argv } = require('process'); + // print process.argv -process.argv.forEach((val, index) => { +argv.forEach((val, index) => { console.log(`${index}: ${val}`); }); ``` @@ -753,11 +974,25 @@ The `process.chdir()` method changes the current working directory of the Node.js process or throws an exception if doing so fails (for instance, if the specified `directory` does not exist). -```js -console.log(`Starting directory: ${process.cwd()}`); +```mjs +import { chdir, cwd } from 'process'; + +console.log(`Starting directory: ${cwd()}`); try { - process.chdir('/tmp'); - console.log(`New directory: ${process.cwd()}`); + chdir('/tmp'); + console.log(`New directory: ${cwd()}`); +} catch (err) { + console.error(`chdir: ${err}`); +} +``` + +```cjs +const { chdir, cwd } = require('process'); + +console.log(`Starting directory: ${cwd()}`); +try { + chdir('/tmp'); + console.log(`New directory: ${cwd()}`); } catch (err) { console.error(`chdir: ${err}`); } @@ -855,15 +1090,31 @@ actual elapsed time if multiple CPU cores are performing work for this process. The result of a previous call to `process.cpuUsage()` can be passed as the argument to the function, to get a diff reading. 
-```js -const startUsage = process.cpuUsage(); +```mjs +import { cpuUsage } from 'process'; + +const startUsage = cpuUsage(); // { user: 38579, system: 6986 } // spin the CPU for 500 milliseconds const now = Date.now(); while (Date.now() - now < 500); -console.log(process.cpuUsage(startUsage)); +console.log(cpuUsage(startUsage)); +// { user: 514883, system: 11226 } +``` + +```cjs +const { cpuUsage } = require('process'); + +const startUsage = cpuUsage(); +// { user: 38579, system: 6986 } + +// spin the CPU for 500 milliseconds +const now = Date.now(); +while (Date.now() - now < 500); + +console.log(cpuUsage(startUsage)); // { user: 514883, system: 11226 } ``` @@ -877,8 +1128,16 @@ added: v0.1.8 The `process.cwd()` method returns the current working directory of the Node.js process. -```js -console.log(`Current directory: ${process.cwd()}`); +```mjs +import { cwd } from 'process'; + +console.log(`Current directory: ${cwd()}`); +``` + +```cjs +const { cwd } = require('process'); + +console.log(`Current directory: ${cwd()}`); ``` ## `process.debugPort` @@ -890,7 +1149,15 @@ added: v0.7.2 The port used by the Node.js debugger when enabled. -```js +```mjs +import process from 'process'; + +process.debugPort = 5858; +``` + +```cjs +const process = require('process'); + process.debugPort = 5858; ``` @@ -941,12 +1208,24 @@ that exports a `foo` function. All the symbols are loaded before the call returns, by passing the `RTLD_NOW` constant. In this example the constant is assumed to be available. 
-```js -const os = require('os'); -const path = require('path'); +```mjs +import { dlopen } from 'process'; +import { constants } from 'os'; +import { fileURLToPath } from 'url'; + +const module = { exports: {} }; +dlopen(module, fileURLToPath(new URL('local.node', import.meta.url)), + constants.dlopen.RTLD_NOW); +module.exports.foo(); +``` + +```cjs +const { dlopen } = require('process'); +const { constants } = require('os'); +const { join } = require('path'); + const module = { exports: {} }; -process.dlopen(module, path.join(__dirname, 'local.node'), - os.constants.dlopen.RTLD_NOW); +dlopen(module, join(__dirname, 'local.node'), constants.dlopen.RTLD_NOW); module.exports.foo(); ``` @@ -969,9 +1248,24 @@ The `process.emitWarning()` method can be used to emit custom or application specific process warnings. These can be listened for by adding a handler to the [`'warning'`][process_warning] event. -```js +```mjs +import { emitWarning } from 'process'; + +// Emit a warning with a code and additional detail. +emitWarning('Something happened!', { + code: 'MY_WARNING', + detail: 'This is some additional information' +}); +// Emits: +// (node:56338) [MY_WARNING] Warning: Something happened! +// This is some additional information +``` + +```cjs +const { emitWarning } = require('process'); + // Emit a warning with a code and additional detail. -process.emitWarning('Something happened!', { +emitWarning('Something happened!', { code: 'MY_WARNING', detail: 'This is some additional information' }); @@ -984,7 +1278,21 @@ In this example, an `Error` object is generated internally by `process.emitWarning()` and passed through to the [`'warning'`][process_warning] handler. -```js +```mjs +import process from 'process'; + +process.on('warning', (warning) => { + console.warn(warning.name); // 'Warning' + console.warn(warning.message); // 'Something happened!' 
+ console.warn(warning.code); // 'MY_WARNING' + console.warn(warning.stack); // Stack trace + console.warn(warning.detail); // 'This is some additional information' +}); +``` + +```cjs +const process = require('process'); + process.on('warning', (warning) => { console.warn(warning.name); // 'Warning' console.warn(warning.message); // 'Something happened!' @@ -1013,19 +1321,48 @@ The `process.emitWarning()` method can be used to emit custom or application specific process warnings. These can be listened for by adding a handler to the [`'warning'`][process_warning] event. -```js +```mjs +import { emitWarning } from 'process'; + // Emit a warning using a string. -process.emitWarning('Something happened!'); +emitWarning('Something happened!'); // Emits: (node: 56338) Warning: Something happened! ``` -```js +```cjs +const { emitWarning } = require('process'); + +// Emit a warning using a string. +emitWarning('Something happened!'); +// Emits: (node: 56338) Warning: Something happened! +``` + +```mjs +import { emitWarning } from 'process'; + // Emit a warning using a string and a type. -process.emitWarning('Something Happened!', 'CustomWarning'); +emitWarning('Something Happened!', 'CustomWarning'); // Emits: (node:56338) CustomWarning: Something Happened! ``` -```js +```cjs +const { emitWarning } = require('process'); + +// Emit a warning using a string and a type. +emitWarning('Something Happened!', 'CustomWarning'); +// Emits: (node:56338) CustomWarning: Something Happened! +``` + +```mjs +import { emitWarning } from 'process'; + +emitWarning('Something happened!', 'CustomWarning', 'WARN001'); +// Emits: (node:56338) [WARN001] CustomWarning: Something happened! +``` + +```cjs +const { emitWarning } = require('process'); + -process.emitWarning('Something happened!', 'CustomWarning', 'WARN001'); +emitWarning('Something happened!', 'CustomWarning', 'WARN001'); // Emits: (node:56338) [WARN001] CustomWarning: Something happened!
``` @@ -1034,7 +1371,20 @@ In each of the previous examples, an `Error` object is generated internally by `process.emitWarning()` and passed through to the [`'warning'`][process_warning] handler. -```js +```mjs +import process from 'process'; + +process.on('warning', (warning) => { + console.warn(warning.name); + console.warn(warning.message); + console.warn(warning.code); + console.warn(warning.stack); +}); +``` + +```cjs +const process = require('process'); + process.on('warning', (warning) => { console.warn(warning.name); console.warn(warning.message); @@ -1047,14 +1397,29 @@ If `warning` is passed as an `Error` object, it will be passed through to the `'warning'` event handler unmodified (and the optional `type`, `code` and `ctor` arguments will be ignored): -```js +```mjs +import { emitWarning } from 'process'; + // Emit a warning using an Error object. const myWarning = new Error('Something happened!'); // Use the Error name property to specify the type name myWarning.name = 'CustomWarning'; myWarning.code = 'WARN001'; -process.emitWarning(myWarning); +emitWarning(myWarning); +// Emits: (node:56338) [WARN001] CustomWarning: Something happened! +``` + +```cjs +const { emitWarning } = require('process'); + +// Emit a warning using an Error object. +const myWarning = new Error('Something happened!'); +// Use the Error name property to specify the type name +myWarning.name = 'CustomWarning'; +myWarning.code = 'WARN001'; + +emitWarning(myWarning); // Emits: (node:56338) [WARN001] CustomWarning: Something happened! ``` @@ -1080,11 +1445,28 @@ As a best practice, warnings should be emitted only once per process. 
To do so, it is recommended to place the `emitWarning()` behind a simple boolean flag as illustrated in the example below: -```js +```mjs +import { emitWarning } from 'process'; + function emitMyWarning() { if (!emitMyWarning.warned) { emitMyWarning.warned = true; - process.emitWarning('Only warn once!'); + emitWarning('Only warn once!'); + } +} +emitMyWarning(); +// Emits: (node: 56339) Warning: Only warn once! +emitMyWarning(); +// Emits nothing +``` + +```cjs +const { emitWarning } = require('process'); + +function emitMyWarning() { + if (!emitMyWarning.warned) { + emitMyWarning.warned = true; + emitWarning('Only warn once!'); } } emitMyWarning(); @@ -1141,38 +1523,81 @@ $ node -e 'process.env.foo = "bar"' && echo $foo While the following will: -```js -process.env.foo = 'bar'; -console.log(process.env.foo); +```mjs +import { env } from 'process'; + +env.foo = 'bar'; +console.log(env.foo); +``` + +```cjs +const { env } = require('process'); + +env.foo = 'bar'; +console.log(env.foo); ``` Assigning a property on `process.env` will implicitly convert the value to a string. **This behavior is deprecated.** Future versions of Node.js may throw an error when the value is not a string, number, or boolean. -```js -process.env.test = null; -console.log(process.env.test); +```mjs +import { env } from 'process'; + +env.test = null; +console.log(env.test); // => 'null' -process.env.test = undefined; -console.log(process.env.test); +env.test = undefined; +console.log(env.test); +// => 'undefined' +``` + +```cjs +const { env } = require('process'); + +env.test = null; +console.log(env.test); +// => 'null' +env.test = undefined; +console.log(env.test); // => 'undefined' ``` Use `delete` to delete a property from `process.env`. 
-```js -process.env.TEST = 1; -delete process.env.TEST; -console.log(process.env.TEST); +```mjs +import { env } from 'process'; + +env.TEST = 1; +delete env.TEST; +console.log(env.TEST); +// => undefined +``` + +```cjs +const { env } = require('process'); + +env.TEST = 1; +delete env.TEST; +console.log(env.TEST); // => undefined ``` On Windows operating systems, environment variables are case-insensitive. -```js -process.env.TEST = 1; -console.log(process.env.test); +```mjs +import { env } from 'process'; + +env.TEST = 1; +console.log(env.test); +// => 1 +``` + +```cjs +const { env } = require('process'); + +env.TEST = 1; +console.log(env.test); // => 1 ``` @@ -1248,8 +1673,16 @@ called. To exit with a 'failure' code: -```js -process.exit(1); +```mjs +import { exit } from 'process'; + +exit(1); +``` + +```cjs +const { exit } = require('process'); + +exit(1); ``` The shell that executed Node.js should see the exit code as `1`. @@ -1268,11 +1701,23 @@ For instance, the following example illustrates a *misuse* of the `process.exit()` method that could lead to data printed to stdout being truncated and lost: -```js +```mjs +import { exit } from 'process'; + // This is an example of what *not* to do: if (someConditionNotMet()) { printUsageToStdout(); - process.exit(1); + exit(1); +} +``` + +```cjs +const { exit } = require('process'); + +// This is an example of what *not* to do: +if (someConditionNotMet()) { + printUsageToStdout(); + exit(1); } ``` @@ -1285,7 +1730,20 @@ Rather than calling `process.exit()` directly, the code *should* set the `process.exitCode` and allow the process to exit naturally by avoiding scheduling any additional work for the event loop: -```js +```mjs +import process from 'process'; + +// How to properly set the exit code while letting +// the process exit gracefully. 
+if (someConditionNotMet()) { + printUsageToStdout(); + process.exitCode = 1; +} +``` + +```cjs +const process = require('process'); + // How to properly set the exit code while letting // the process exit gracefully. if (someConditionNotMet()) { @@ -1323,7 +1781,17 @@ added: v2.0.0 The `process.getegid()` method returns the numerical effective group identity of the Node.js process. (See getegid(2).) -```js +```mjs +import process from 'process'; + +if (process.getegid) { + console.log(`Current gid: ${process.getegid()}`); +} +``` + +```cjs +const process = require('process'); + if (process.getegid) { console.log(`Current gid: ${process.getegid()}`); } @@ -1342,7 +1810,17 @@ added: v2.0.0 The `process.geteuid()` method returns the numerical effective user identity of the process. (See geteuid(2).) -```js +```mjs +import process from 'process'; + +if (process.geteuid) { + console.log(`Current uid: ${process.geteuid()}`); +} +``` + +```cjs +const process = require('process'); + if (process.geteuid) { console.log(`Current uid: ${process.geteuid()}`); } @@ -1361,7 +1839,17 @@ added: v0.1.31 The `process.getgid()` method returns the numerical group identity of the process. (See getgid(2).) -```js +```mjs +import process from 'process'; + +if (process.getgid) { + console.log(`Current gid: ${process.getgid()}`); +} +``` + +```cjs +const process = require('process'); + if (process.getgid) { console.log(`Current gid: ${process.getgid()}`); } @@ -1381,7 +1869,17 @@ The `process.getgroups()` method returns an array with the supplementary group IDs. POSIX leaves it unspecified if the effective group ID is included but Node.js ensures it always is. 
-```js +```mjs +import process from 'process'; + +if (process.getgroups) { + console.log(process.getgroups()); // [ 16, 21, 297 ] +} +``` + +```cjs +const process = require('process'); + if (process.getgroups) { console.log(process.getgroups()); // [ 16, 21, 297 ] } @@ -1400,7 +1898,17 @@ added: v0.1.28 The `process.getuid()` method returns the numeric user identity of the process. (See getuid(2).) -```js +```mjs +import process from 'process'; + +if (process.getuid) { + console.log(`Current uid: ${process.getuid()}`); +} +``` + +```cjs +const process = require('process'); + if (process.getuid) { console.log(`Current uid: ${process.getuid()}`); } @@ -1446,13 +1954,31 @@ These times are relative to an arbitrary time in the past, and not related to the time of day and therefore not subject to clock drift. The primary use is for measuring performance between intervals: -```js +```mjs +import { hrtime } from 'process'; + +const NS_PER_SEC = 1e9; +const time = hrtime(); +// [ 1800216, 25 ] + +setTimeout(() => { + const diff = hrtime(time); + // [ 1, 552 ] + + console.log(`Benchmark took ${diff[0] * NS_PER_SEC + diff[1]} nanoseconds`); + // Benchmark took 1000000552 nanoseconds +}, 1000); +``` + +```cjs +const { hrtime } = require('process'); + const NS_PER_SEC = 1e9; -const time = process.hrtime(); +const time = hrtime(); // [ 1800216, 25 ] setTimeout(() => { - const diff = process.hrtime(time); + const diff = hrtime(time); // [ 1, 552 ] console.log(`Benchmark took ${diff[0] * NS_PER_SEC + diff[1]} nanoseconds`); @@ -1474,12 +2000,29 @@ Unlike [`process.hrtime()`][], it does not support an additional `time` argument since the difference can just be computed directly by subtraction of the two `bigint`s. 
-```js -const start = process.hrtime.bigint(); +```mjs +import { hrtime } from 'process'; + +const start = hrtime.bigint(); // 191051479007711n setTimeout(() => { - const end = process.hrtime.bigint(); + const end = hrtime.bigint(); + // 191052633396993n + + console.log(`Benchmark took ${end - start} nanoseconds`); + // Benchmark took 1154389282 nanoseconds +}, 1000); +``` + +```cjs +const { hrtime } = require('process'); + +const start = hrtime.bigint(); +// 191051479007711n + +setTimeout(() => { + const end = hrtime.bigint(); // 191052633396993n console.log(`Benchmark took ${end - start} nanoseconds`); @@ -1502,12 +2045,24 @@ access or the `CAP_SETGID` capability. Use care when dropping privileges: -```js -console.log(process.getgroups()); // [ 0 ] -process.initgroups('nodeuser', 1000); // switch user -console.log(process.getgroups()); // [ 27, 30, 46, 1000, 0 ] -process.setgid(1000); // drop root gid -console.log(process.getgroups()); // [ 27, 30, 46, 1000 ] +```mjs +import { getgroups, initgroups, setgid } from 'process'; + +console.log(getgroups()); // [ 0 ] +initgroups('nodeuser', 1000); // switch user +console.log(getgroups()); // [ 27, 30, 46, 1000, 0 ] +setgid(1000); // drop root gid +console.log(getgroups()); // [ 27, 30, 46, 1000 ] +``` + +```cjs +const { getgroups, initgroups, setgid } = require('process'); + +console.log(getgroups()); // [ 0 ] +initgroups('nodeuser', 1000); // switch user +console.log(getgroups()); // [ 27, 30, 46, 1000, 0 ] +setgid(1000); // drop root gid +console.log(getgroups()); // [ 27, 30, 46, 1000 ] ``` This function is only available on POSIX platforms (i.e. not Windows or @@ -1538,7 +2093,24 @@ Even though the name of this function is `process.kill()`, it is really just a signal sender, like the `kill` system call. The signal sent may do something other than kill the target process. 
-```js +```mjs +import process, { kill } from 'process'; + +process.on('SIGHUP', () => { + console.log('Got SIGHUP signal.'); +}); + +setTimeout(() => { + console.log('Exiting.'); + process.exit(0); +}, 100); + +kill(process.pid, 'SIGHUP'); +``` + +```cjs +const process = require('process'); + process.on('SIGHUP', () => { console.log('Got SIGHUP signal.'); }); @@ -1594,11 +2166,27 @@ changes: * `external` {integer} * `arrayBuffers` {integer} -Returns an object describing the memory usage of the Node.js process measured in -bytes. +Returns an object describing the memory usage of the Node.js process measured in +bytes. + +```mjs +import { memoryUsage } from 'process'; + +console.log(memoryUsage()); +// Prints: +// { +// rss: 4935680, +// heapTotal: 1826816, +// heapUsed: 650472, +// external: 49879, +// arrayBuffers: 9386 +// } +``` + +```cjs +const { memoryUsage } = require('process'); -```js -console.log(process.memoryUsage()); +console.log(memoryUsage()); // Prints: // { // rss: 4935680, // heapTotal: 1826816, // heapUsed: 650472, // external: 49879, // arrayBuffers: 9386 // } ``` @@ -1645,8 +2233,17 @@ process, including all C++ and JavaScript objects and code. This is the same value as the `rss` property provided by `process.memoryUsage()` but `process.memoryUsage.rss()` is faster. -```js -console.log(process.memoryUsage.rss()); +```mjs +import { memoryUsage } from 'process'; + +console.log(memoryUsage.rss()); +// 35655680 +``` + +```cjs +const { memoryUsage } = require('process'); + +console.log(memoryUsage.rss()); +// 35655680 ``` @@ -1668,9 +2265,25 @@ completion and before the event loop is allowed to continue. It's possible to create an infinite loop if one were to recursively call `process.nextTick()`. See the [Event Loop][] guide for more background.
-```js +```mjs +import { nextTick } from 'process'; + +console.log('start'); +nextTick(() => { + console.log('nextTick callback'); +}); +console.log('scheduled'); +// Output: +// start +// scheduled +// nextTick callback +``` + +```cjs +const { nextTick } = require('process'); + console.log('start'); -process.nextTick(() => { +nextTick(() => { console.log('nextTick callback'); }); console.log('scheduled'); @@ -1684,11 +2297,30 @@ This is important when developing APIs in order to give users the opportunity to assign event handlers *after* an object has been constructed but before any I/O has occurred: -```js +```mjs +import { nextTick } from 'process'; + +function MyThing(options) { + this.setupOptions(options); + + nextTick(() => { + this.startDoingStuff(); + }); +} + +const thing = new MyThing(); +thing.getReadyForStuff(); + +// thing.startDoingStuff() gets called now, not before. +``` + +```cjs +const { nextTick } = require('process'); + function MyThing(options) { this.setupOptions(options); - process.nextTick(() => { + nextTick(() => { this.startDoingStuff(); }); } @@ -1730,10 +2362,25 @@ It is not clear whether `foo()` or `bar()` will be called first. The following approach is much better: -```js +```mjs +import { nextTick } from 'process'; + +function definitelyAsync(arg, cb) { + if (arg) { + nextTick(cb); + return; + } + + fs.stat('file', cb); +} +``` + +```cjs +const { nextTick } = require('process'); + function definitelyAsync(arg, cb) { if (arg) { - process.nextTick(cb); + nextTick(cb); return; } @@ -1749,10 +2396,24 @@ execute the then, catch, and finally handlers of resolved promises. Within Node.js, every time the "next tick queue" is drained, the microtask queue is drained immediately after. 
-```js +```mjs +import { nextTick } from 'process'; + +Promise.resolve().then(() => console.log(2)); +queueMicrotask(() => console.log(3)); +nextTick(() => console.log(1)); +// Output: +// 1 +// 2 +// 3 +``` + +```cjs +const { nextTick } = require('process'); + Promise.resolve().then(() => console.log(2)); queueMicrotask(() => console.log(3)); -process.nextTick(() => console.log(1)); +nextTick(() => console.log(1)); // Output: // 1 // 2 @@ -1827,8 +2488,16 @@ added: v0.1.15 The `process.pid` property returns the PID of the process. -```js -console.log(`This process is pid ${process.pid}`); +```mjs +import { pid } from 'process'; + +console.log(`This process is pid ${pid}`); +``` + +```cjs +const { pid } = require('process'); + +console.log(`This process is pid ${pid}`); ``` ## `process.platform` @@ -1851,8 +2520,16 @@ Currently possible values are: * `'sunos'` * `'win32'` -```js -console.log(`This platform is ${process.platform}`); +```mjs +import { platform } from 'process'; + +console.log(`This platform is ${platform}`); +``` + +```cjs +const { platform } = require('process'); + +console.log(`This platform is ${platform}`); ``` The value `'android'` may also be returned if the Node.js is built on the @@ -1872,8 +2549,16 @@ added: The `process.ppid` property returns the PID of the parent of the current process. -```js -console.log(`The parent process is pid ${process.ppid}`); +```mjs +import { ppid } from 'process'; + +console.log(`The parent process is pid ${ppid}`); +``` + +```cjs +const { ppid } = require('process'); + +console.log(`The parent process is pid ${ppid}`); ``` ## `process.release` @@ -1958,8 +2643,16 @@ Write reports in a compact format, single-line JSON, more easily consumable by log processing systems than the default multi-line format designed for human consumption. -```js -console.log(`Reports are compact? ${process.report.compact}`); +```mjs +import { report } from 'process'; + +console.log(`Reports are compact? 
${report.compact}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Reports are compact? ${report.compact}`); ``` ### `process.report.directory` @@ -1979,8 +2672,16 @@ Directory where the report is written. The default value is the empty string, indicating that reports are written to the current working directory of the Node.js process. -```js -console.log(`Report directory is ${process.report.directory}`); +```mjs +import { report } from 'process'; + +console.log(`Report directory is ${report.directory}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report directory is ${report.directory}`); ``` ### `process.report.filename` @@ -2000,8 +2701,16 @@ Filename where the report is written. If set to the empty string, the output filename will be comprised of a timestamp, PID, and sequence number. The default value is the empty string. -```js -console.log(`Report filename is ${process.report.filename}`); +```mjs +import { report } from 'process'; + +console.log(`Report filename is ${report.filename}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report filename is ${report.filename}`); ``` ### `process.report.getReport([err])` @@ -2022,8 +2731,21 @@ Returns a JavaScript Object representation of a diagnostic report for the running process. The report's JavaScript stack trace is taken from `err`, if present. 
-```js -const data = process.report.getReport(); +```mjs +import { report } from 'process'; + +const data = report.getReport(); +console.log(data.header.nodejsVersion); + +// Similar to process.report.writeReport() +import fs from 'fs'; +fs.writeFileSync('my-report.log', util.inspect(data), 'utf8'); +``` + +```cjs +const { report } = require('process'); + +const data = report.getReport(); console.log(data.header.nodejsVersion); // Similar to process.report.writeReport() @@ -2048,8 +2770,16 @@ changes: If `true`, a diagnostic report is generated on fatal errors, such as out of memory errors or failed C++ assertions. -```js -console.log(`Report on fatal error: ${process.report.reportOnFatalError}`); +```mjs +import { report } from 'process'; + +console.log(`Report on fatal error: ${report.reportOnFatalError}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report on fatal error: ${report.reportOnFatalError}`); ``` ### `process.report.reportOnSignal` @@ -2068,8 +2798,16 @@ changes: If `true`, a diagnostic report is generated when the process receives the signal specified by `process.report.signal`. -```js -console.log(`Report on signal: ${process.report.reportOnSignal}`); +```mjs +import { report } from 'process'; + +console.log(`Report on signal: ${report.reportOnSignal}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report on signal: ${report.reportOnSignal}`); ``` ### `process.report.reportOnUncaughtException` @@ -2087,8 +2825,16 @@ changes: If `true`, a diagnostic report is generated on uncaught exception. 
-```js -console.log(`Report on exception: ${process.report.reportOnUncaughtException}`); +```mjs +import { report } from 'process'; + +console.log(`Report on exception: ${report.reportOnUncaughtException}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report on exception: ${report.reportOnUncaughtException}`); ``` ### `process.report.signal` @@ -2107,8 +2853,16 @@ changes: The signal used to trigger the creation of a diagnostic report. Defaults to `'SIGUSR2'`. -```js -console.log(`Report signal: ${process.report.signal}`); +```mjs +import { report } from 'process'; + +console.log(`Report signal: ${report.signal}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report signal: ${report.signal}`); ``` ### `process.report.writeReport([filename][, err])` @@ -2134,8 +2888,16 @@ Writes a diagnostic report to a file. If `filename` is not provided, the default filename includes the date, time, PID, and a sequence number. The report's JavaScript stack trace is taken from `err`, if present. -```js -process.report.writeReport(); +```mjs +import { report } from 'process'; + +report.writeReport(); +``` + +```cjs +const { report } = require('process'); + +report.writeReport(); ``` Additional documentation is available in the [report documentation][]. @@ -2188,8 +2950,37 @@ added: v12.6.0 process becoming runnable or because the current process exceeded its time slice. This field is not supported on Windows. 
-```js -console.log(process.resourceUsage()); +```mjs +import { resourceUsage } from 'process'; + +console.log(resourceUsage()); +/* + Will output: + { + userCPUTime: 82872, + systemCPUTime: 4143, + maxRSS: 33164, + sharedMemorySize: 0, + unsharedDataSize: 0, + unsharedStackSize: 0, + minorPageFault: 2469, + majorPageFault: 0, + swappedOut: 0, + fsRead: 0, + fsWrite: 8, + ipcSent: 0, + ipcReceived: 0, + signalsCount: 0, + voluntaryContextSwitches: 79, + involuntaryContextSwitches: 1 + } +*/ +``` + +```cjs +const { resourceUsage } = require('process'); + +console.log(resourceUsage()); /* Will output: { @@ -2250,7 +3041,23 @@ The `process.setegid()` method sets the effective group identity of the process. name string. If a group name is specified, this method blocks while resolving the associated a numeric ID. -```js +```mjs +import process from 'process'; + +if (process.getegid && process.setegid) { + console.log(`Current gid: ${process.getegid()}`); + try { + process.setegid(501); + console.log(`New gid: ${process.getegid()}`); + } catch (err) { + console.log(`Failed to set gid: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.getegid && process.setegid) { console.log(`Current gid: ${process.getegid()}`); try { @@ -2278,7 +3085,23 @@ The `process.seteuid()` method sets the effective user identity of the process. string. If a username is specified, the method blocks while resolving the associated numeric ID. -```js +```mjs +import process from 'process'; + +if (process.geteuid && process.seteuid) { + console.log(`Current uid: ${process.geteuid()}`); + try { + process.seteuid(501); + console.log(`New uid: ${process.geteuid()}`); + } catch (err) { + console.log(`Failed to set uid: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.geteuid && process.seteuid) { console.log(`Current uid: ${process.geteuid()}`); try { @@ -2306,7 +3129,23 @@ setgid(2).) 
The `id` can be passed as either a numeric ID or a group name string. If a group name is specified, this method blocks while resolving the associated numeric ID. -```js +```mjs +import process from 'process'; + +if (process.getgid && process.setgid) { + console.log(`Current gid: ${process.getgid()}`); + try { + process.setgid(501); + console.log(`New gid: ${process.getgid()}`); + } catch (err) { + console.log(`Failed to set gid: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.getgid && process.setgid) { console.log(`Current gid: ${process.getgid()}`); try { @@ -2335,7 +3174,22 @@ process to have `root` or the `CAP_SETGID` capability. The `groups` array can contain numeric group IDs, group names, or both. -```js +```mjs +import process from 'process'; + +if (process.getgroups && process.setgroups) { + try { + process.setgroups([501]); + console.log(process.getgroups()); // new groups + } catch (err) { + console.log(`Failed to set groups: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.getgroups && process.setgroups) { try { process.setgroups([501]); @@ -2362,7 +3216,23 @@ setuid(2).) The `id` can be passed as either a numeric ID or a username string. If a username is specified, the method blocks while resolving the associated numeric ID. -```js +```mjs +import process from 'process'; + +if (process.getuid && process.setuid) { + console.log(`Current uid: ${process.getuid()}`); + try { + process.setuid(501); + console.log(`New uid: ${process.getuid()}`); + } catch (err) { + console.log(`Failed to set uid: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.getuid && process.setuid) { console.log(`Current uid: ${process.getuid()}`); try { @@ -2461,8 +3331,16 @@ a [Writable][] stream. 
For example, to copy `process.stdin` to `process.stdout`: -```js -process.stdin.pipe(process.stdout); +```mjs +import { stdin, stdout } from 'process'; + +stdin.pipe(stdout); +``` + +```cjs +const { stdin, stdout } = require('process'); + +stdin.pipe(stdout); ``` `process.stdout` differs from other Node.js streams in important ways. See @@ -2621,9 +3499,21 @@ added: v0.1.19 `process.umask(mask)` sets the Node.js process's file mode creation mask. Child processes inherit the mask from the parent process. Returns the previous mask. -```js +```mjs +import { umask } from 'process'; + +const newmask = 0o022; +const oldmask = umask(newmask); +console.log( + `Changed umask from ${oldmask.toString(8)} to ${newmask.toString(8)}` +); +``` + +```cjs +const { umask } = require('process'); + const newmask = 0o022; -const oldmask = process.umask(newmask); +const oldmask = umask(newmask); console.log( `Changed umask from ${oldmask.toString(8)} to ${newmask.toString(8)}` ); @@ -2653,8 +3543,17 @@ added: v0.1.3 The `process.version` property contains the Node.js version string. -```js -console.log(`Version: ${process.version}`); +```mjs +import { version } from 'process'; + +console.log(`Version: ${version}`); +// Version: v14.8.0 +``` + +```cjs +const { version } = require('process'); + +console.log(`Version: ${version}`); // Version: v14.8.0 ``` @@ -2680,8 +3579,16 @@ Node.js and its dependencies. `process.versions.modules` indicates the current ABI version, which is increased whenever a C++ API changes. Node.js will refuse to load modules that were compiled against a different module ABI version. 
-```js -console.log(process.versions); +```mjs +import { versions } from 'process'; + +console.log(versions); +``` + +```cjs +const { versions } = require('process'); + +console.log(versions); ``` Will generate an object similar to: @@ -2793,7 +3700,7 @@ cases: [`process.hrtime()`]: #process_process_hrtime_time [`process.hrtime.bigint()`]: #process_process_hrtime_bigint [`process.kill()`]: #process_process_kill_pid_signal -[`process.setUncaughtExceptionCaptureCallback()`]: process.md#process_process_setuncaughtexceptioncapturecallback_fn +[`process.setUncaughtExceptionCaptureCallback()`]: #process_process_setuncaughtexceptioncapturecallback_fn [`promise.catch()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/catch [`queueMicrotask()`]: globals.md#globals_queuemicrotask_callback [`readable.read()`]: stream.md#stream_readable_read_size @@ -2803,7 +3710,7 @@ cases: [`v8.setFlagsFromString()`]: v8.md#v8_v8_setflagsfromstring_flags [debugger]: debugger.md [deprecation code]: deprecations.md -[note on process I/O]: process.md#process_a_note_on_process_i_o +[note on process I/O]: #process_a_note_on_process_i_o [process.cpuUsage]: #process_process_cpuusage_previousvalue [process_emit_warning]: #process_process_emitwarning_warning_type_code_ctor [process_warning]: #process_event_warning diff --git a/doc/api/punycode.md b/doc/api/punycode.md index c9c20ce6350c7e..7f4e5fcf003e75 100644 --- a/doc/api/punycode.md +++ b/doc/api/punycode.md @@ -9,7 +9,7 @@ deprecated: v7.0.0 -**The version of the punycode module bundled in Node.js is being deprecated**. +**The version of the punycode module bundled in Node.js is being deprecated.** In a future major version of Node.js this module will be removed. Users currently depending on the `punycode` module should switch to using the userland-provided [Punycode.js][] module instead. 
For punycode-based URL diff --git a/doc/api/readline.md b/doc/api/readline.md index 58c7209b7619f1..72265f803eca8e 100644 --- a/doc/api/readline.md +++ b/doc/api/readline.md @@ -971,8 +971,8 @@ const { createInterface } = require('readline'); [TTY]: tty.md [TTY keybindings]: #readline_tty_keybindings [Writable]: stream.md#stream_writable_streams -[`'SIGCONT'`]: readline.md#readline_event_sigcont -[`'SIGTSTP'`]: readline.md#readline_event_sigtstp +[`'SIGCONT'`]: #readline_event_sigcont +[`'SIGTSTP'`]: #readline_event_sigtstp [`'line'`]: #readline_event_line [`fs.ReadStream`]: fs.md#fs_class_fs_readstream [`process.stdin`]: process.md#process_process_stdin diff --git a/doc/api/stream.md b/doc/api/stream.md index 57c72c45a176f6..8ff36423cb246e 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -1509,7 +1509,7 @@ If the loop terminates with a `break`, `return`, or a `throw`, the stream will be destroyed. In other terms, iterating over a stream will consume the stream fully. The stream will be read in chunks of size equal to the `highWaterMark` option. In the code example above, data will be in a single chunk if the file -has less then 64KB of data because no `highWaterMark` option is provided to +has less then 64 KB of data because no `highWaterMark` option is provided to [`fs.createReadStream()`][]. ##### `readable.iterator([options])` @@ -2053,7 +2053,7 @@ changes: * `options` {Object} * `highWaterMark` {number} Buffer level when [`stream.write()`][stream-write] starts returning `false`. **Default:** - `16384` (16KB), or `16` for `objectMode` streams. + `16384` (16 KB), or `16` for `objectMode` streams. 
* `decodeStrings` {boolean} Whether to encode `string`s passed to [`stream.write()`][stream-write] to `Buffer`s (with the encoding specified in the [`stream.write()`][stream-write] call) before passing @@ -2416,7 +2416,7 @@ changes: * `options` {Object} * `highWaterMark` {number} The maximum [number of bytes][hwm-gotcha] to store in the internal buffer before ceasing to read from the underlying resource. - **Default:** `16384` (16KB), or `16` for `objectMode` streams. + **Default:** `16384` (16 KB), or `16` for `objectMode` streams. * `encoding` {string} If specified, then buffers will be decoded to strings using the specified encoding. **Default:** `null`. * `objectMode` {boolean} Whether this stream should behave @@ -3293,6 +3293,7 @@ contain multi-byte characters. [HTTP requests, on the client]: http.md#http_class_http_clientrequest [HTTP responses, on the server]: http.md#http_class_http_serverresponse [TCP sockets]: net.md#net_class_net_socket +[Three states]: #stream_three_states [`'data'`]: #stream_event_data [`'drain'`]: #stream_event_drain [`'end'`]: #stream_event_end @@ -3314,11 +3315,11 @@ contain multi-byte characters. [`readable.push('')`]: #stream_readable_push [`readable.setEncoding()`]: #stream_readable_setencoding_encoding [`stream.Readable.from()`]: #stream_stream_readable_from_iterable_options +[`stream.addAbortSignal()`]: #stream_stream_addabortsignal_signal_stream [`stream.cork()`]: #stream_writable_cork [`stream.finished()`]: #stream_stream_finished_stream_options_callback [`stream.pipe()`]: #stream_readable_pipe_destination_options [`stream.pipeline()`]: #stream_stream_pipeline_source_transforms_destination_callback -[`stream.addAbortSignal()`]: #stream_stream_addabortsignal_signal_stream [`stream.uncork()`]: #stream_writable_uncork [`stream.unpipe()`]: #stream_readable_unpipe_destination [`stream.wrap()`]: #stream_readable_wrap_stream @@ -3354,7 +3355,6 @@ contain multi-byte characters. 
[stream-resume]: #stream_readable_resume [stream-uncork]: #stream_writable_uncork [stream-write]: #stream_writable_write_chunk_encoding_callback -[Three states]: #stream_three_states [writable-_construct]: #stream_writable_construct_callback [writable-_destroy]: #stream_writable_destroy_err_callback [writable-destroy]: #stream_writable_destroy_error diff --git a/doc/api/timers.md b/doc/api/timers.md index 820e6a282aeb01..cfbc8f1ead4b71 100644 --- a/doc/api/timers.md +++ b/doc/api/timers.md @@ -170,25 +170,7 @@ next event loop iteration. If `callback` is not a function, a [`TypeError`][] will be thrown. This method has a custom variant for promises that is available using -[`util.promisify()`][]: - -```js -const util = require('util'); -const setImmediatePromise = util.promisify(setImmediate); - -setImmediatePromise('foobar').then((value) => { - // value === 'foobar' (passing values is optional) - // This is executed after all I/O callbacks. -}); - -// Or with async function -async function timerExample() { - console.log('Before I/O callbacks'); - await setImmediatePromise(); - console.log('After I/O callbacks'); -} -timerExample(); -``` +[`timersPromises.setImmediate()`][]. ### `setInterval(callback[, delay[, ...args]])` + +The `v8.stopCoverage()` method allows the user to stop the coverage collection +started by [`NODE_V8_COVERAGE`][], so that V8 can release the execution count +records and optimize code. This can be used in conjunction with +[`v8.takeCoverage()`][] if the user wants to collect the coverage on demand. + ## `v8.takeCoverage()` - -The `v8.stopCoverage()` method allows the user to stop the coverage collection -started by [`NODE_V8_COVERAGE`][], so that V8 can release the execution count -records and optimize code. This can be used in conjunction with -[`v8.takeCoverage()`][] if the user wants to collect the coverage on demand. 
- ## `v8.writeHeapSnapshot([filename])` - -> Stability: 1 - Experimental - -Measure the memory known to V8 and used by all contexts known to the -current V8 isolate, or the main context. - -* `options` {Object} Optional. - * `mode` {string} Either `'summary'` or `'detailed'`. In summary mode, - only the memory measured for the main context will be returned. In - detailed mode, the measure measured for all contexts known to the - current V8 isolate will be returned. - **Default:** `'summary'` - * `execution` {string} Either `'default'` or `'eager'`. With default - execution, the promise will not resolve until after the next scheduled - garbage collection starts, which may take a while (or never if the program - exits before the next GC). With eager execution, the GC will be started - right away to measure the memory. - **Default:** `'default'` -* Returns: {Promise} If the memory is successfully measured the promise will - resolve with an object containing information about the memory usage. - -The format of the object that the returned Promise may resolve with is -specific to the V8 engine and may change from one version of V8 to the next. - -The returned result is different from the statistics returned by -`v8.getHeapSpaceStatistics()` in that `vm.measureMemory()` measure the -memory reachable by each V8 specific contexts in the current instance of -the V8 engine, while the result of `v8.getHeapSpaceStatistics()` measure -the memory occupied by each heap space in the current V8 instance. - -```js -const vm = require('vm'); -// Measure the memory used by the main context. 
-vm.measureMemory({ mode: 'summary' }) - // This is the same as vm.measureMemory() - .then((result) => { - // The current format is: - // { - // total: { - // jsMemoryEstimate: 2418479, jsMemoryRange: [ 2418479, 2745799 ] - // } - // } - console.log(result); - }); - -const context = vm.createContext({ a: 1 }); -vm.measureMemory({ mode: 'detailed', execution: 'eager' }) - .then((result) => { - // Reference the context here so that it won't be GC'ed - // until the measurement is complete. - console.log(context.a); - // { - // total: { - // jsMemoryEstimate: 2574732, - // jsMemoryRange: [ 2574732, 2904372 ] - // }, - // current: { - // jsMemoryEstimate: 2438996, - // jsMemoryRange: [ 2438996, 2768636 ] - // }, - // other: [ - // { - // jsMemoryEstimate: 135736, - // jsMemoryRange: [ 135736, 465376 ] - // } - // ] - // } - console.log(result); - }); -``` - ## Class: `vm.Module` + +> Stability: 1 - Experimental + +Measure the memory known to V8 and used by all contexts known to the +current V8 isolate, or the main context. + +* `options` {Object} Optional. + * `mode` {string} Either `'summary'` or `'detailed'`. In summary mode, + only the memory measured for the main context will be returned. In + detailed mode, the measure measured for all contexts known to the + current V8 isolate will be returned. + **Default:** `'summary'` + * `execution` {string} Either `'default'` or `'eager'`. With default + execution, the promise will not resolve until after the next scheduled + garbage collection starts, which may take a while (or never if the program + exits before the next GC). With eager execution, the GC will be started + right away to measure the memory. + **Default:** `'default'` +* Returns: {Promise} If the memory is successfully measured the promise will + resolve with an object containing information about the memory usage. 
+ +The format of the object that the returned Promise may resolve with is +specific to the V8 engine and may change from one version of V8 to the next. + +The returned result is different from the statistics returned by +`v8.getHeapSpaceStatistics()` in that `vm.measureMemory()` measure the +memory reachable by each V8 specific contexts in the current instance of +the V8 engine, while the result of `v8.getHeapSpaceStatistics()` measure +the memory occupied by each heap space in the current V8 instance. + +```js +const vm = require('vm'); +// Measure the memory used by the main context. +vm.measureMemory({ mode: 'summary' }) + // This is the same as vm.measureMemory() + .then((result) => { + // The current format is: + // { + // total: { + // jsMemoryEstimate: 2418479, jsMemoryRange: [ 2418479, 2745799 ] + // } + // } + console.log(result); + }); + +const context = vm.createContext({ a: 1 }); +vm.measureMemory({ mode: 'detailed', execution: 'eager' }) + .then((result) => { + // Reference the context here so that it won't be GC'ed + // until the measurement is complete. + console.log(context.a); + // { + // total: { + // jsMemoryEstimate: 2574732, + // jsMemoryRange: [ 2574732, 2904372 ] + // }, + // current: { + // jsMemoryEstimate: 2438996, + // jsMemoryRange: [ 2438996, 2768636 ] + // }, + // other: [ + // { + // jsMemoryEstimate: 135736, + // jsMemoryRange: [ 135736, 465376 ] + // } + // ] + // } + console.log(result); + }); +``` + ## `vm.runInContext(code, contextifiedObject[, options])` + +#### `new ReadableStream([underlyingSource [, strategy]])` + + + +* `underlyingSource` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `ReadableStream` is created. + * `controller` {ReadableStreamDefaultController|ReadableByteStreamController} + * Returns: `undefined` or a promise fulfilled with `undefined`. 
+ * `pull` {Function} A user-defined function that is called repeatedly when the + `ReadableStream` internal queue is not full. The operation may be sync or + async. If async, the function will not be called again until the previously + returned promise is fulfilled. + * `controller` {ReadableStreamDefaultController|ReadableByteStreamController} + * Returns: A promise fulfilled with `undefined`. + * `cancel` {Function} A user-defined function that is called when the + `ReadableStream` is canceled. + * `reason` {any} + * Returns: A promise fulfilled with `undefined`. + * `type` {string} Must be `'bytes'` or `undefined`. + * `autoAllocateChunkSize` {number} Used only when `type` is equal to + `'bytes'`. +* `strategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} + + +#### `readableStream.locked` + + +* Type: {boolean} Set to `true` if there is an active reader for this + {ReadableStream}. + +The `readableStream.locked` property is `false` by default, and is +switched to `true` while there is an active reader consuming the +stream's data. + +#### `readableStream.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined` once cancelation has + been completed. + +#### `readableStream.getReader([options])` + + +* `options` {Object} + * `mode` {string} `'byob'` or `undefined` +* Returns: {ReadableStreamDefaultReader|ReadableStreamBYOBReader} + +```mjs +import { ReadableStream } from 'node:stream/web'; + +const stream = new ReadableStream(); + +const reader = stream.getReader(); + +console.log(await reader.read()); +``` + +```cjs +const { ReadableStream } = require('stream/web'); + +const stream = new ReadableStream(); + +const reader = stream.getReader(); + +reader.read().then(console.log); +``` + +Causes the `readableStream.locked` to be `true`.
+ +#### `readableStream.pipeThrough(transform[, options])` + + +* `transform` {Object} + * `readable` {ReadableStream} The `ReadableStream` to which + `transform.writable` will push the potentially modified data + it receives from this `ReadableStream`. + * `writable` {WritableStream} The `WritableStream` to which this + `ReadableStream`'s data will be written. +* `options` {Object} + * `preventAbort` {boolean} When `true`, errors in this `ReadableStream` + will not cause `transform.writable` to be aborted. + * `preventCancel` {boolean} When `true`, errors in the destination + `transform.writable` will not cause this `ReadableStream` to be + canceled. + * `preventClose` {boolean} When `true`, closing this `ReadableStream` + will not cause `transform.writable` to be closed. + * `signal` {AbortSignal} Allows the transfer of data to be canceled + using an {AbortController}. +* Returns: {ReadableStream} From `transform.readable`. + +Connects this {ReadableStream} to the pair of {ReadableStream} and +{WritableStream} provided in the `transform` argument such that the +data from this {ReadableStream} is written into `transform.writable`, +possibly transformed, then pushed to `transform.readable`. Once the +pipeline is configured, `transform.readable` is returned. + +Causes the `readableStream.locked` to be `true` while the pipe operation +is active.
+ +```mjs +import { + ReadableStream, + TransformStream, +} from 'node:stream/web'; + +const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, +}); + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +const transformedStream = stream.pipeThrough(transform); + +for await (const chunk of transformedStream) + console.log(chunk); +``` + +```cjs +const { + ReadableStream, + TransformStream, +} = require('stream/web'); + +const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, +}); + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +const transformedStream = stream.pipeThrough(transform); + +(async () => { + for await (const chunk of transformedStream) + console.log(chunk); +})(); +``` + +#### `readableStream.pipeTo(destination, options)` + + +* `destination` {WritableStream} A {WritableStream} to which this + `ReadableStream`'s data will be written. +* `options` {Object} + * `preventAbort` {boolean} When `true`, errors in this `ReadableStream` + will not cause `transform.writable` to be aborted. + * `preventCancel` {boolean} When `true`, errors in the destination + `transform.writable` will not cause this `ReadableStream` to be + canceled. + * `preventClose` {boolean} When `true`, closing this `ReadableStream` + will not cause `transform.writable` to be closed. + * `signal` {AbortSignal} Allows the transfer of data to be canceled + using an {AbortController}. +* Returns: A promise fulfilled with `undefined` + +Causes the `readableStream.locked` to be `true` while the pipe operation +is active. + +#### `readableStream.tee()` + + +* Returns: {ReadableStream[]} + +Returns a pair of new {ReadableStream} instances to which this +`ReadableStream`'s data will be forwarded. Each will receive the +same data. + +Causes the `readableStream.locked` to be `true`.
+ +#### `readableStream.values([options])` + + +* `options` {Object} + * `preventCancel` {boolean} When `true`, prevents the {ReadableStream} + from being closed when the async iterator abruptly terminates. + **Default:** `false` + +Creates and returns an async iterator usable for consuming this +`ReadableStream`'s data. + +Causes the `readableStream.locked` to be `true` while the async iterator +is active. + +```mjs +import { Buffer } from 'node:buffer'; + +const stream = new ReadableStream(getSomeSource()); + +for await (const chunk of stream.values({ preventCancel: true })) + console.log(Buffer.from(chunk).toString()); +``` + +#### Async Iteration + +The {ReadableStream} object supports the async iterator protocol using +`for await` syntax. + +```mjs +import { Buffer } from 'node:buffer'; + +const stream = new ReadableStream(getSomeSource()); + +for await (const chunk of stream) + console.log(Buffer.from(chunk).toString()); +``` + +The async iterator will consume the {ReadableStream} until it terminates. + +By default, if the async iterator exits early (via either a `break`, +`return`, or a `throw`), the {ReadableStream} will be closed. To prevent +automatic closing of the {ReadableStream}, use the `readableStream.values()` +method to acquire the async iterator and set the `preventCancel` option to +`true`. + +The {ReadableStream} must not be locked (that is, it must not have an existing +active reader). During the async iteration, the {ReadableStream} will be locked. + +#### Transferring with `postMessage()` + +A {ReadableStream} instance can be transferred using a {MessagePort}.
+ +```js +const stream = new ReadableStream(getReadableSourceSomehow()); + +const { port1, port2 } = new MessageChannel(); + +port1.onmessage = ({ data }) => { + data.getReader().read().then((chunk) => { + console.log(chunk); + }); +}; + +port2.postMessage(stream, [stream]); +``` + +### Class: `ReadableStreamDefaultReader` + + +By default, calling `readableStream.getReader()` with no arguments +will return an instance of `ReadableStreamDefaultReader`. The default +reader treats the chunks of data passed through the stream as opaque +values, which allows the {ReadableStream} to work with generally any +JavaScript value. + +#### `new ReadableStreamDefaultReader(stream)` + + +* `stream` {ReadableStream} + +Creates a new {ReadableStreamDefaultReader} that is locked to the +given {ReadableStream}. + +#### `readableStreamDefaultReader.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Cancels the {ReadableStream} and returns a promise that is fulfilled +when the underlying stream has been canceled. + +#### `readableStreamDefaultReader.closed` + + +* Type: {Promise} Fulfilled with `undefined` when the associated + {ReadableStream} is closed or this reader's lock is released. + +#### `readableStreamDefaultReader.read()` + + +* Returns: A promise fulfilled with an object: + * `value` {ArrayBuffer} + * `done` {boolean} + +Requests the next chunk of data from the underlying {ReadableStream} +and returns a promise that is fulfilled with the data once it is +available. + +#### `readableStreamDefaultReader.releaseLock()` + + +Releases this reader's lock on the underlying {ReadableStream}. + +### Class: `ReadableStreamBYOBReader` + + +The `ReadableStreamBYOBReader` is an alternative consumer for +byte-oriented {ReadableStream}s (those that are created with +`underlyingSource.type` set equal to `'bytes'` when the +`ReadableStream` was created). + +The `BYOB` is short for "bring your own buffer".
This is a +pattern that allows for more efficient reading of byte-oriented +data that avoids extraneous copying. + +```mjs +import { + open +} from 'node:fs/promises'; + +import { + ReadableStream +} from 'node:stream/web'; + +import { Buffer } from 'node:buffer'; + +class Source { + type = 'bytes'; + autoAllocateChunkSize = 1024; + + async start(controller) { + this.file = await open(new URL(import.meta.url)); + this.controller = controller; + } + + async pull(controller) { + const view = controller.byobRequest?.view; + const { + bytesRead, + } = await this.file.read({ + buffer: view, + offset: view.byteOffset, + length: view.byteLength + }); + + if (bytesRead === 0) { + await this.file.close(); + this.controller.close(); + } + controller.byobRequest.respond(bytesRead); + } +} + +const stream = new ReadableStream(new Source()); + +async function read(stream) { + const reader = stream.getReader({ mode: 'byob' }); + + const chunks = []; + let result; + do { + result = await reader.read(Buffer.alloc(100)); + if (result.value !== undefined) + chunks.push(Buffer.from(result.value)); + } while (!result.done); + + return Buffer.concat(chunks); +} + +const data = await read(stream); +console.log(Buffer.from(data).toString()); +``` + +#### `new ReadableStreamBYOBReader(stream)` + + +* `stream` {ReadableStream} + +Creates a new `ReadableStreamBYOBReader` that is locked to the +given {ReadableStream}. + +#### `readableStreamBYOBReader.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Cancels the {ReadableStream} and returns a promise that is fulfilled +when the underlying stream has been canceled. + +#### `readableStreamBYOBReader.closed` + + +* Type: {Promise} Fulfilled with `undefined` when the associated + {ReadableStream} is closed or this reader's lock is released. 
+ +#### `readableStreamBYOBReader.read(view)` + + +* `view` {Buffer|TypedArray|DataView} +* Returns: A promise fulfilled with an object: + * `value` {ArrayBuffer} + * `done` {boolean} + +Requests the next chunk of data from the underlying {ReadableStream} +and returns a promise that is fulfilled with the data once it is +available. + +Do not pass a pooled {Buffer} object instance in to this method. +Pooled `Buffer` objects are created using `Buffer.allocUnsafe()`, +or `Buffer.from()`, or are often returned by various `fs` module +callbacks. These types of `Buffer`s use a shared underlying +{ArrayBuffer} object that contains all of the data from all of +the pooled `Buffer` instances. When a `Buffer`, {TypedArray}, +or {DataView} is passed in to `readableStreamBYOBReader.read()`, +the view's underlying `ArrayBuffer` is *detached*, invalidating +all existing views that may exist on that `ArrayBuffer`. This +can have disastrous consequences for your application. + +#### `readableStreamBYOBReader.releaseLock()` + + +Releases this reader's lock on the underlying {ReadableStream}. + +### Class: `ReadableStreamDefaultController` + + +Every {ReadableStream} has a controller that is responsible for +the internal state and management of the stream's queue. The +`ReadableStreamDefaultController` is the default controller +implementation for `ReadableStream`s that are not byte-oriented. + +#### `readableStreamDefaultController.close()` + + +Closes the {ReadableStream} to which this controller is associated. + +#### `readableStreamDefaultController.desiredSize` + + +* Type: {number} + +Returns the amount of data remaining to fill the {ReadableStream}'s +queue. + +#### `readableStreamDefaultController.enqueue(chunk)` + + +* `chunk` {any} + +Appends a new chunk of data to the {ReadableStream}'s queue. + +#### `readableStreamDefaultController.error(error)` + + +* `error` {any} + +Signals an error that causes the {ReadableStream} to error and close. 
+ +### Class: `ReadableByteStreamController` + + +Every {ReadableStream} has a controller that is responsible for +the internal state and management of the stream's queue. The +`ReadableByteStreamController` is for byte-oriented `ReadableStream`s. + +#### `readableByteStreamController.byobRequest` + + +* Type: {ReadableStreamBYOBRequest} + +#### `readableByteStreamController.close()` + + +Closes the {ReadableStream} to which this controller is associated. + +#### `readableByteStreamController.desiredSize` + + +* Type: {number} + +Returns the amount of data remaining to fill the {ReadableStream}'s +queue. + +#### `readableByteStreamController.enqueue(chunk)` + + +* `chunk`: {Buffer|TypedArray|DataView} + +Appends a new chunk of data to the {ReadableStream}'s queue. + +#### `readableByteStreamController.error(error)` + + +* `error` {any} + +Signals an error that causes the {ReadableStream} to error and close. + +### Class: `ReadableStreamBYOBRequest` + + +When using `ReadableByteStreamController` in byte-oriented +streams, and when using the `ReadableStreamBYOBReader`, +the `readableByteStreamController.byobRequest` property +provides access to a `ReadableStreamBYOBRequest` instance +that represents the current read request. The object +is used to gain access to the `ArrayBuffer`/`TypedArray` +that has been provided for the read request to fill, +and provides methods for signaling that the data has +been provided. + +#### `readableStreamBYOBRequest.respond(bytesWritten)` + + +* `bytesWritten` {number} + +Signals that a `bytesWritten` number of bytes have been written +to `readableStreamBYOBRequest.view`. + +#### `readableStreamBYOBRequest.respondWithNewView(view)` + + +* `view` {Buffer|TypedArray|DataView} + +Signals that the request has been fulfilled with bytes written +to a new `Buffer`, `TypedArray`, or `DataView`. 
+ +#### `readableStreamBYOBRequest.view` + + +* Type: {Buffer|TypedArray|DataView} + +### Class: `WritableStream` + + +The `WritableStream` is a destination to which stream data is sent. + +```mjs +import { + WritableStream +} from 'node:stream/web'; + +const stream = new WritableStream({ + write(chunk) { + console.log(chunk); + } +}); + +await stream.getWriter().write('Hello World'); +``` + +#### `new WritableStream([underlyingSink[, strategy]])` + + +* `underlyingSink` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `WritableStream` is created. + * `controller` {WritableStreamDefaultController} + * Returns: `undefined` or a promise fulfilled with `undefined`. + * `write` {Function} A user-defined function that is invoked when a chunk of + data has been written to the `WritableStream`. + * `chunk` {any} + * `controller` {WritableStreamDefaultController} + * Returns: A promise fulfilled with `undefined`. + * `close` {Function} A user-defined function that is called when the + `WritableStream` is closed. + * Returns: A promise fulfilled with `undefined`. + * `abort` {Function} A user-defined function that is called to abruptly close + the `WritableStream`. + * `reason` {any} + * Returns: A promise fulfilled with `undefined`. + * `type` {any} The `type` option is reserved for future use and *must* be + undefined. +* `strategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} + +#### `writableStream.abort([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Abruptly terminates the `WritableStream`. All queued writes will be +canceled with their associated promises rejected. + +#### `writableStream.close()` + + +* Returns: A promise fulfilled with `undefined`. 
 + +Closes the `WritableStream` when no additional writes are expected. + +#### `writableStream.getWriter()` + + +* Returns: {WritableStreamDefaultWriter} + +Creates and returns a new writer instance that can be used to write +data into the `WritableStream`. + +#### `writableStream.locked` + + +* Type: {boolean} + +The `writableStream.locked` property is `false` by default, and is +switched to `true` while there is an active writer attached to this +`WritableStream`. + +#### Transferring with postMessage() + +A {WritableStream} instance can be transferred using a {MessagePort}. + +```js +const stream = new WritableStream(getWritableSinkSomehow()); + +const { port1, port2 } = new MessageChannel(); + +port1.onmessage = ({ data }) => { + data.getWriter().write('hello'); +}; + +port2.postMessage(stream, [stream]); +``` + +### Class: `WritableStreamDefaultWriter` + + +#### `new WritableStreamDefaultWriter(stream)` + + +* `stream` {WritableStream} + +Creates a new `WritableStreamDefaultWriter` that is locked to the given +`WritableStream`. + +#### `writableStreamDefaultWriter.abort([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Abruptly terminates the `WritableStream`. All queued writes will be +canceled with their associated promises rejected. + +#### `writableStreamDefaultWriter.close()` + + +* Returns: A promise fulfilled with `undefined`. + +Closes the `WritableStream` when no additional writes are expected. + +#### `writableStreamDefaultWriter.closed` + + +* Type: A promise that is fulfilled with `undefined` when the + associated {WritableStream} is closed or this writer's lock is + released. + +#### `writableStreamDefaultWriter.desiredSize` + + +* Type: {number} + +The amount of data required to fill the {WritableStream}'s queue. + +#### `writableStreamDefaultWriter.ready` + + +* Type: A promise that is fulfilled with `undefined` when the + writer is ready to be used. 
 + +#### `writableStreamDefaultWriter.releaseLock()` + + +Releases this writer's lock on the underlying {WritableStream}. + +#### `writableStreamDefaultWriter.write([chunk])` + + +* `chunk`: {any} +* Returns: A promise fulfilled with `undefined`. + +Appends a new chunk of data to the {WritableStream}'s queue. + +### Class: `WritableStreamDefaultController` + + +The `WritableStreamDefaultController` manages the {WritableStream}'s +internal state. + +#### `writableStreamDefaultController.abortReason` + +* Type: {any} The `reason` value passed to `writableStream.abort()`. + +#### `writableStreamDefaultController.error(error)` + + +* `error` {any} + +Called by user-code to signal that an error has occurred while processing +the `WritableStream` data. When called, the {WritableStream} will be aborted, +with currently pending writes canceled. + +#### `writableStreamDefaultController.signal` + +* Type: {AbortSignal} An `AbortSignal` that can be used to cancel pending + write or close operations when a {WritableStream} is aborted. + +### Class: `TransformStream` + + +A `TransformStream` consists of a {ReadableStream} and a {WritableStream} that +are connected such that the data written to the `WritableStream` is received, +and potentially transformed, before being pushed into the `ReadableStream`'s +queue. + +```mjs +import { + TransformStream +} from 'node:stream/web'; + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +await Promise.all([ + transform.writable.getWriter().write('A'), + transform.readable.getReader().read(), +]); +``` + +#### `new TransformStream([transformer[, writableStrategy[, readableStrategy]]])` + + +* `transformer` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `TransformStream` is created. 
 + * `controller` {TransformStreamDefaultController} + * Returns: `undefined` or a promise fulfilled with `undefined` + * `transform` {Function} A user-defined function that receives, and + potentially modifies, a chunk of data written to `transformStream.writable`, + before forwarding that on to `transformStream.readable`. + * `chunk` {any} + * `controller` {TransformStreamDefaultController} + * Returns: A promise fulfilled with `undefined`. + * `flush` {Function} A user-defined function that is called immediately before + the writable side of the `TransformStream` is closed, signaling the end of + the transformation process. + * `controller` {TransformStreamDefaultController} + * Returns: A promise fulfilled with `undefined`. + * `readableType` {any} the `readableType` option is reserved for future use + and *must* be `undefined`. + * `writableType` {any} the `writableType` option is reserved for future use + and *must* be `undefined`. +* `writableStrategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} +* `readableStrategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} + +#### `transformStream.readable` + + +* Type: {ReadableStream} + +#### `transformStream.writable` + + +* Type: {WritableStream} + +#### Transferring with postMessage() + +A {TransformStream} instance can be transferred using a {MessagePort}. + +```js +const stream = new TransformStream(); + +const { port1, port2 } = new MessageChannel(); + +port1.onmessage = ({ data }) => { + const { writable, readable } = data; + // ... 
 +}; + +port2.postMessage(stream, [stream]); +``` + +### Class: `TransformStreamDefaultController` + + +The `TransformStreamDefaultController` manages the internal state +of the `TransformStream`. + +#### `transformStreamDefaultController.desiredSize` + + +* Type: {number} + +The amount of data required to fill the readable side's queue. + +#### `transformStreamDefaultController.enqueue([chunk])` + + +* `chunk` {any} + +Appends a chunk of data to the readable side's queue. + +#### `transformStreamDefaultController.error([reason])` + + +* `reason` {any} + +Signals to both the readable and writable side that an error has occurred +while processing the transform data, causing both sides to be abruptly +closed. + +#### `transformStreamDefaultController.terminate()` + + +Closes the readable side of the transform and causes the writable side +to be abruptly closed with an error. + +### Class: `ByteLengthQueuingStrategy` + + +#### `new ByteLengthQueuingStrategy(options)` + + +* `options` {Object} + * `highWaterMark` {number} + +#### `byteLengthQueuingStrategy.highWaterMark` + + +* Type: {number} + +#### `byteLengthQueuingStrategy.size` + + +* Type: {Function} + * `chunk` {any} + * Returns: {number} + +### Class: `CountQueuingStrategy` + + +#### `new CountQueuingStrategy(options)` + + +* `options` {Object} + * `highWaterMark` {number} + +#### `countQueuingStrategy.highWaterMark` + + +* Type: {number} + +#### `countQueuingStrategy.size` + + +* Type: {Function} + * `chunk` {any} + * Returns: {number} + +[Streams]: stream.md +[WHATWG Streams Standard]: https://streams.spec.whatwg.org/ diff --git a/doc/api/worker_threads.md b/doc/api/worker_threads.md index f345b9dba76913..1ca8852308902b 100644 --- a/doc/api/worker_threads.md +++ b/doc/api/worker_threads.md @@ -1259,6 +1259,7 @@ thread spawned will spawn another until the application crashes. 
[`SharedArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer [`Uint8Array`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array [`WebAssembly.Module`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Module +[`Worker constructor options`]: #worker_threads_new_worker_filename_options [`Worker`]: #worker_threads_class_worker [`cluster` module]: cluster.md [`data:` URL]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs @@ -1281,17 +1282,16 @@ thread spawned will spawn another until the application crashes. [`process.title`]: process.md#process_process_title [`require('worker_threads').isMainThread`]: #worker_threads_worker_ismainthread [`require('worker_threads').parentPort.on('message')`]: #worker_threads_event_message -[`require('worker_threads').parentPort`]: #worker_threads_worker_parentport [`require('worker_threads').parentPort.postMessage()`]: #worker_threads_worker_postmessage_value_transferlist +[`require('worker_threads').parentPort`]: #worker_threads_worker_parentport [`require('worker_threads').threadId`]: #worker_threads_worker_threadid [`require('worker_threads').workerData`]: #worker_threads_worker_workerdata [`trace_events`]: tracing.md [`v8.getHeapSnapshot()`]: v8.md#v8_v8_getheapsnapshot [`vm`]: vm.md -[`Worker constructor options`]: #worker_threads_new_worker_filename_options +[`worker.SHARE_ENV`]: #worker_threads_worker_share_env [`worker.on('message')`]: #worker_threads_event_message_1 [`worker.postMessage()`]: #worker_threads_worker_postmessage_value_transferlist -[`worker.SHARE_ENV`]: #worker_threads_worker_share_env [`worker.terminate()`]: #worker_threads_worker_terminate [`worker.threadId`]: #worker_threads_worker_threadid_1 [async-resource-worker-pool]: async_hooks.md#async-resource-worker-pool diff --git a/doc/api_assets/hljs.css b/doc/api_assets/hljs.css index 
bbf076987530d6..4893f9de26fab4 100644 --- a/doc/api_assets/hljs.css +++ b/doc/api_assets/hljs.css @@ -8,7 +8,8 @@ } .hljs-attribute, -.hljs-keyword { +.hljs-keyword, +.hljs-type { color: #338; } @@ -35,9 +36,10 @@ color: var(--green4); } -.dark-mode .hljs-keyword, .dark-mode .hljs-attribute, -.dark-mode .hljs-doctag { +.dark-mode .hljs-doctag, +.dark-mode .hljs-keyword, +.dark-mode .hljs-type { color: #66d9ef; } diff --git a/doc/api_assets/style.css b/doc/api_assets/style.css index ef8fec8d92912f..b1c8a763a638aa 100644 --- a/doc/api_assets/style.css +++ b/doc/api_assets/style.css @@ -25,30 +25,32 @@ --gray7: #c1c1c1; --grey8: #ddd; + --background-color-api-stability-link: rgba(255, 255, 255, .4); + --background-color-highlight: var(--white-smoke); --color-brand-primary: var(--gray6); --color-brand-secondary: var(--green1); - --color-text-primary: var(--gray6); - --color-text-secondary: var(--green2); --color-fill-app: var(--white); - --color-text-nav: var(--gray3); - --highlight-background-color: var(--white-smoke); - --color-links: var(--green1); --color-fill-side-nav: var(--gray6); - --api-stability-links-bg: rgba(255, 255, 255, .4) + --color-links: var(--green1); + --color-text-mark: var(--gray1); + --color-text-nav: var(--gray3); + --color-text-primary: var(--gray6); + --color-text-secondary: var(--green2); } .dark-mode { - --color-links: var(--green5); - --color-fill-app: var(--black1); - --color-text-primary: var(--white); - --color-fill-side-nav: var(--black3); - --highlight-background-color: var(--black2); + --background-color-highlight: var(--black2); + --color-fill-app: var(--black1); + --color-fill-side-nav: var(--black3); + --color-links: var(--green5); + --color-text-mark: var(--gray5); + --color-text-primary: var(--white); } .dark-mode code, .dark-mode tt { color: var(--grey-smoke); - background-color: var(--highlight-background-color); + background-color: var(--background-color-highlight); } .dark-mode a code { color: var(--green3); @@ -248,7 +250,7 
@@ ol.version-picker li:last-child a { .api_stability a:hover, .api_stability a:active, .api_stability a:focus { - background-color: var(--api-stability-links-bg); + background-color: var(--background-color-api-stability-link); } .api_stability a code { @@ -455,7 +457,7 @@ code { pre { padding: 1rem; vertical-align: top; - background-color: var(--highlight-background-color); + background-color: var(--background-color-highlight); margin: 1rem; overflow-x: auto; } @@ -632,7 +634,7 @@ a code { span > .mark, span > .mark:visited { - color: #707070; + color: var(--color-text-mark); position: absolute; top: 0; right: 0; @@ -679,11 +681,6 @@ kbd { visibility: hidden; } -.github_icon { - vertical-align: middle; - margin: -2px 3px 0 0; -} - /* API reference sidebar */ @media only screen and (min-width: 1025px) { .apidoc #column2 > .line { diff --git a/doc/changelogs/CHANGELOG_V16.md b/doc/changelogs/CHANGELOG_V16.md index 1195cb9e726c9e..6ea9b378110ba8 100644 --- a/doc/changelogs/CHANGELOG_V16.md +++ b/doc/changelogs/CHANGELOG_V16.md @@ -10,6 +10,7 @@ +16.5.0
16.4.2
16.4.1
16.4.0
@@ -39,6 +40,171 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2021-07-14, Version 16.5.0 (Current), @targos + +### Notable Changes + +#### Experimental Web Streams API + +Node.js now exposes an experimental implementation of the +[Web Streams API](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API). + +While it is experimental, the API is not exposed on the global object and is only +accessible using the new `stream/web` core module: + +```mjs +import { ReadableStream, WritableStream } from 'stream/web'; +// Or from 'node:stream/web' +``` + +Importing the module will emit a single experimental warning per process. + +The raw API is implemented and we are now working on its integration with +various existing core APIs. + +Contributed by James M Snell - [#39062](https://github.com/nodejs/node/pull/39062) + +#### Other notable changes + +* [[`83f3b959f9`](https://github.com/nodejs/node/commit/83f3b959f9)] - **(SEMVER-MINOR)** **fs**: allow empty string for temp directory prefix (Voltrex) [#39028](https://github.com/nodejs/node/pull/39028) +* [[`c04fd2bb97`](https://github.com/nodejs/node/commit/c04fd2bb97)] - **deps**: upgrade npm to 7.19.1 (npm team) [#39225](https://github.com/nodejs/node/pull/39225) + +### Commits + +* [[`aafa08d7b9`](https://github.com/nodejs/node/commit/aafa08d7b9)] - **bootstrap**: load perf\_hooks eagerly during bootstrap (Joyee Cheung) [#38971](https://github.com/nodejs/node/pull/38971) +* [[`6e46eb186c`](https://github.com/nodejs/node/commit/6e46eb186c)] - **bootstrap**: support perf hooks in snapshot (Joyee Cheung) [#38971](https://github.com/nodejs/node/pull/38971) +* [[`10681828ac`](https://github.com/nodejs/node/commit/10681828ac)] - **build**: update gcovr for gcc 8 compatibility (Richard Lau) [#39326](https://github.com/nodejs/node/pull/39326) +* [[`8381132f76`](https://github.com/nodejs/node/commit/8381132f76)] - **build**: add riscv into host\_arch\_cc (Lu Yahan) 
[#39004](https://github.com/nodejs/node/pull/39004) +* [[`a7ba21864d`](https://github.com/nodejs/node/commit/a7ba21864d)] - **build**: restore libplatform headers in distribution (Jeroen Ooms) [#39288](https://github.com/nodejs/node/pull/39288) +* [[`41161eabf2`](https://github.com/nodejs/node/commit/41161eabf2)] - **build**: remove unused comment in Makefile (LitoMore) [#39171](https://github.com/nodejs/node/pull/39171) +* [[`f6a1092471`](https://github.com/nodejs/node/commit/f6a1092471)] - **build**: allow to build riscv64 using Makefile (Makoto Kato) [#39048](https://github.com/nodejs/node/pull/39048) +* [[`a7cd40ed8d`](https://github.com/nodejs/node/commit/a7cd40ed8d)] - **build**: uvwasi honours node\_shared\_libuv (Jérémy Lal) [#39260](https://github.com/nodejs/node/pull/39260) +* [[`3ed04994b7`](https://github.com/nodejs/node/commit/3ed04994b7)] - **build**: shorten path used in tarball build workflow (Richard Lau) [#39192](https://github.com/nodejs/node/pull/39192) +* [[`65b56b3774`](https://github.com/nodejs/node/commit/65b56b3774)] - **build**: fix building with external builtins (Momtchil Momtchev) [#39091](https://github.com/nodejs/node/pull/39091) +* [[`412b1012d2`](https://github.com/nodejs/node/commit/412b1012d2)] - **build**: pass directory instead of list of files to js2c.py (Joyee Cheung) [#39069](https://github.com/nodejs/node/pull/39069) +* [[`171ca6bb3c`](https://github.com/nodejs/node/commit/171ca6bb3c)] - **build**: don't pass `--mode` argument to V8 test-runner (Richard Lau) [#39055](https://github.com/nodejs/node/pull/39055) +* [[`cf8536ea3f`](https://github.com/nodejs/node/commit/cf8536ea3f)] - **build**: fix commit linter on unrebased PRs (Mary Marchini) [#39121](https://github.com/nodejs/node/pull/39121) +* [[`cf0533b8b2`](https://github.com/nodejs/node/commit/cf0533b8b2)] - **build**: use Actions to validate commit message (Mary Marchini) [#32417](https://github.com/nodejs/node/pull/32417) +* 
[[`4202274851`](https://github.com/nodejs/node/commit/4202274851)] - **crypto**: move OPENSSL\_IS\_BORINGSSL guard (Shelley Vohr) [#39136](https://github.com/nodejs/node/pull/39136) +* [[`89f5a73ba5`](https://github.com/nodejs/node/commit/89f5a73ba5)] - **crypto**: use compatible ecdh function (Shelley Vohr) [#39054](https://github.com/nodejs/node/pull/39054) +* [[`30e878b603`](https://github.com/nodejs/node/commit/30e878b603)] - **crypto**: add OPENSSL\_IS\_BORINGSSL guard (Shelley Vohr) [#39138](https://github.com/nodejs/node/pull/39138) +* [[`630266cba2`](https://github.com/nodejs/node/commit/630266cba2)] - **debugger**: indicate server is ending (Rich Trott) [#39334](https://github.com/nodejs/node/pull/39334) +* [[`48d9680f84`](https://github.com/nodejs/node/commit/48d9680f84)] - **debugger**: remove final lint exceptions in inspect\_repl.js (Rich Trott) [#39078](https://github.com/nodejs/node/pull/39078) +* [[`4507714f9d`](https://github.com/nodejs/node/commit/4507714f9d)] - **deps**: V8: backport 5c76da8ddcf8 (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`0e64bd0dd6`](https://github.com/nodejs/node/commit/0e64bd0dd6)] - **deps**: V8: cherry-pick 359d44df4cdd (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`142ce6838b`](https://github.com/nodejs/node/commit/142ce6838b)] - **deps**: V8: cherry-pick 3805a698f7b6 (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`2657c305cb`](https://github.com/nodejs/node/commit/2657c305cb)] - **deps**: V8: cherry-pick 56fe020eec0c (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`5c5a93e533`](https://github.com/nodejs/node/commit/5c5a93e533)] - **deps**: V8: cherry-pick 2b77ca200c56 (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`cf49ebb052`](https://github.com/nodejs/node/commit/cf49ebb052)] - **deps**: V8: cherry-pick 53784bdb8f01 (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* 
[[`3d351b29c1`](https://github.com/nodejs/node/commit/3d351b29c1)] - **deps**: V8: cherry-pick cb4faa902e9f (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`165130a3e0`](https://github.com/nodejs/node/commit/165130a3e0)] - **deps**: patch V8 to 9.1.269.38 (Michaël Zasso) [#39196](https://github.com/nodejs/node/pull/39196) +* [[`c04fd2bb97`](https://github.com/nodejs/node/commit/c04fd2bb97)] - **deps**: upgrade npm to 7.19.1 (npm team) [#39225](https://github.com/nodejs/node/pull/39225) +* [[`bf4c50f9d9`](https://github.com/nodejs/node/commit/bf4c50f9d9)] - **deps**: upgrade npm to 7.19.0 (npm team) [#39148](https://github.com/nodejs/node/pull/39148) +* [[`8630b39376`](https://github.com/nodejs/node/commit/8630b39376)] - **deps**: update Acorn to v8.4.1 (Michaël Zasso) [#39166](https://github.com/nodejs/node/pull/39166) +* [[`38ae4077c7`](https://github.com/nodejs/node/commit/38ae4077c7)] - **doc**: fix typos in Web Streams API documentation (Tobias Nießen) [#39351](https://github.com/nodejs/node/pull/39351) +* [[`fb6616ecbb`](https://github.com/nodejs/node/commit/fb6616ecbb)] - **doc**: add text about moving long commit lists out of PR description (Danielle Adams) [#39186](https://github.com/nodejs/node/pull/39186) +* [[`29c9cc8f03`](https://github.com/nodejs/node/commit/29c9cc8f03)] - **doc**: do not use & for "and" in text (Rich Trott) [#39345](https://github.com/nodejs/node/pull/39345) +* [[`0b3b2695bc`](https://github.com/nodejs/node/commit/0b3b2695bc)] - **doc**: do not use tilde for "about" or "approximately" (Rich Trott) [#39344](https://github.com/nodejs/node/pull/39344) +* [[`64a185e595`](https://github.com/nodejs/node/commit/64a185e595)] - **doc**: use consistent abbreviation formatting (Rich Trott) [#39343](https://github.com/nodejs/node/pull/39343) +* [[`2573bf5116`](https://github.com/nodejs/node/commit/2573bf5116)] - **doc**: update AUTHORS (Rich Trott) [#39277](https://github.com/nodejs/node/pull/39277) +* 
[[`63b6084724`](https://github.com/nodejs/node/commit/63b6084724)] - **doc**: put information about the past in details tags (Rich Trott) [#39321](https://github.com/nodejs/node/pull/39321) +* [[`e26635085a`](https://github.com/nodejs/node/commit/e26635085a)] - **doc**: replace outdated `util.promisify` timer examples with references (foxxyz) [#39164](https://github.com/nodejs/node/pull/39164) +* [[`d101a85e36`](https://github.com/nodejs/node/commit/d101a85e36)] - **doc**: move AndreasMadsen to emeritus (Rich Trott) [#39315](https://github.com/nodejs/node/pull/39315) +* [[`2d552a32d6`](https://github.com/nodejs/node/commit/2d552a32d6)] - **doc**: move ofrobots to collaborator emeritus (Rich Trott) [#39307](https://github.com/nodejs/node/pull/39307) +* [[`131d676f64`](https://github.com/nodejs/node/commit/131d676f64)] - **doc**: simplify CRAN mirror text in benchmark guide (Rich Trott) [#39287](https://github.com/nodejs/node/pull/39287) +* [[`c92b80e631`](https://github.com/nodejs/node/commit/c92b80e631)] - **doc**: use "repository" instead of "repo" in onboarding.md (Rich Trott) [#39286](https://github.com/nodejs/node/pull/39286) +* [[`81df9b1e92`](https://github.com/nodejs/node/commit/81df9b1e92)] - **doc**: update collaborator email address (Rich Trott) [#39263](https://github.com/nodejs/node/pull/39263) +* [[`b8860f35c9`](https://github.com/nodejs/node/commit/b8860f35c9)] - **doc**: remove GitHub mark (Rich Trott) [#39251](https://github.com/nodejs/node/pull/39251) +* [[`f06ebf1775`](https://github.com/nodejs/node/commit/f06ebf1775)] - **doc**: remove emailing the TSC from offboarding doc (Rich Trott) [#39280](https://github.com/nodejs/node/pull/39280) +* [[`175a6569f4`](https://github.com/nodejs/node/commit/175a6569f4)] - **doc**: add annotation to writeFile `data` as `Object` (Jacob) [#39167](https://github.com/nodejs/node/pull/39167) +* [[`4d53c63c22`](https://github.com/nodejs/node/commit/4d53c63c22)] - **doc**: fix boldface punctuation for full sentences 
(Rich Trott) [#39278](https://github.com/nodejs/node/pull/39278) +* [[`146f733f43`](https://github.com/nodejs/node/commit/146f733f43)] - **doc**: fix constants usage in fs.access example (Cyrille Bourgois) [#39289](https://github.com/nodejs/node/pull/39289) +* [[`eacee0ab17`](https://github.com/nodejs/node/commit/eacee0ab17)] - **doc**: use "repository" in guides versus repo (Michael Dawson) [#39198](https://github.com/nodejs/node/pull/39198) +* [[`04bcfcfff1`](https://github.com/nodejs/node/commit/04bcfcfff1)] - **doc**: update Node-api version matrix (Michael Dawson) [#39197](https://github.com/nodejs/node/pull/39197) +* [[`4dd6ab389a`](https://github.com/nodejs/node/commit/4dd6ab389a)] - **doc**: remove onboarding-extras (Rich Trott) [#39252](https://github.com/nodejs/node/pull/39252) +* [[`a01dacfdcd`](https://github.com/nodejs/node/commit/a01dacfdcd)] - **doc**: move Sam Ruby to emeritus (Rich Trott) [#39264](https://github.com/nodejs/node/pull/39264) +* [[`2bb3713b74`](https://github.com/nodejs/node/commit/2bb3713b74)] - **doc**: update AUTHORS file (Rich Trott) [#39250](https://github.com/nodejs/node/pull/39250) +* [[`2227c1368f`](https://github.com/nodejs/node/commit/2227c1368f)] - **doc**: fix color contrast for anchor marks in dark mode (Rich Trott) [#39168](https://github.com/nodejs/node/pull/39168) +* [[`f8cdaad9d4`](https://github.com/nodejs/node/commit/f8cdaad9d4)] - **doc**: rename datatypes to data types (FrankEntriken) [#39209](https://github.com/nodejs/node/pull/39209) +* [[`250024eaec`](https://github.com/nodejs/node/commit/250024eaec)] - **doc**: normalize CSS variable names and indentation (Rich Trott) [#39199](https://github.com/nodejs/node/pull/39199) +* [[`db74a35348`](https://github.com/nodejs/node/commit/db74a35348)] - **doc**: remove unnecessary module format comments (Rich Trott) [#39219](https://github.com/nodejs/node/pull/39219) +* [[`24a1f7ec84`](https://github.com/nodejs/node/commit/24a1f7ec84)] - **doc**: use more consistent 
formatting for deprecations (Rich Trott) [#39218](https://github.com/nodejs/node/pull/39218) +* [[`24c0d7d872`](https://github.com/nodejs/node/commit/24c0d7d872)] - **doc**: update AUTHORS (Rich Trott) [#39217](https://github.com/nodejs/node/pull/39217) +* [[`3e5ed72b0a`](https://github.com/nodejs/node/commit/3e5ed72b0a)] - **doc**: use "pull request" instead of "PR" in packages.md (Rich Trott) [#39213](https://github.com/nodejs/node/pull/39213) +* [[`ddc24b2105`](https://github.com/nodejs/node/commit/ddc24b2105)] - **doc**: move v8.stopCoverage() to expected location in doc (Rich Trott) [#39212](https://github.com/nodejs/node/pull/39212) +* [[`68c334c8c9`](https://github.com/nodejs/node/commit/68c334c8c9)] - **doc**: move vm.measureMemory() to expected location in doc (Rich Trott) [#39211](https://github.com/nodejs/node/pull/39211) +* [[`81d52d7c79`](https://github.com/nodejs/node/commit/81d52d7c79)] - **doc**: fix CHANGELOG.md formatting (Richard Lau) [#39223](https://github.com/nodejs/node/pull/39223) +* [[`9c3a5fd53e`](https://github.com/nodejs/node/commit/9c3a5fd53e)] - **doc**: add cc oss-security@lists.openwall.com (Daniel Bevenius) [#39191](https://github.com/nodejs/node/pull/39191) +* [[`07ba2875ae`](https://github.com/nodejs/node/commit/07ba2875ae)] - **doc**: remove instructions for unsupported Node.js versions (Rich Trott) [#39185](https://github.com/nodejs/node/pull/39185) +* [[`482851f647`](https://github.com/nodejs/node/commit/482851f647)] - **doc**: remove obsolete cc recommendations (Rich Trott) [#39181](https://github.com/nodejs/node/pull/39181) +* [[`8311b29083`](https://github.com/nodejs/node/commit/8311b29083)] - **doc**: use "repository" in maintaining-V8 doc (Rich Trott) [#39179](https://github.com/nodejs/node/pull/39179) +* [[`952580e1bf`](https://github.com/nodejs/node/commit/952580e1bf)] - **doc**: fix broken link in errors.md (Rich Trott) [#39200](https://github.com/nodejs/node/pull/39200) +* 
[[`af1e1dba36`](https://github.com/nodejs/node/commit/af1e1dba36)] - **doc**: correct JavaScript primitive value names in n-api.md (legendecas) [#39129](https://github.com/nodejs/node/pull/39129) +* [[`00728d1301`](https://github.com/nodejs/node/commit/00728d1301)] - **doc**: apply logical ordering to CSS variables (Rich Trott) [#39169](https://github.com/nodejs/node/pull/39169) +* [[`aec2744e14`](https://github.com/nodejs/node/commit/aec2744e14)] - **doc**: remove file name from self-reference links (Antoine du Hamel) [#39165](https://github.com/nodejs/node/pull/39165) +* [[`74bb915178`](https://github.com/nodejs/node/commit/74bb915178)] - **doc**: use repository instead of repo (Rich Trott) [#39157](https://github.com/nodejs/node/pull/39157) +* [[`a669a191a1`](https://github.com/nodejs/node/commit/a669a191a1)] - **doc**: use ASCII order for md refs (Antoine du Hamel) [#39170](https://github.com/nodejs/node/pull/39170) +* [[`21e8720155`](https://github.com/nodejs/node/commit/21e8720155)] - **doc**: fix `EventTarget.dispatchEvent` docs (Rohan Sharma) [#39127](https://github.com/nodejs/node/pull/39127) +* [[`90ec7660bc`](https://github.com/nodejs/node/commit/90ec7660bc)] - **doc**: update AUTHORS file (Rich Trott) [#39082](https://github.com/nodejs/node/pull/39082) +* [[`81cebec5cc`](https://github.com/nodejs/node/commit/81cebec5cc)] - **doc**: esm examples /w imports for process, Buffer (Guy Bedford) [#39043](https://github.com/nodejs/node/pull/39043) +* [[`c1588887a6`](https://github.com/nodejs/node/commit/c1588887a6)] - **doc**: fix napi\_default\_property name (Davidson Francis) [#39104](https://github.com/nodejs/node/pull/39104) +* [[`a440f6c69c`](https://github.com/nodejs/node/commit/a440f6c69c)] - **doc**: fix dead links in packages.md (Michaël Zasso) [#39113](https://github.com/nodejs/node/pull/39113) +* [[`33cad271c5`](https://github.com/nodejs/node/commit/33cad271c5)] - **errors**: remove eager stack generation for node errors (Gus Caplan) 
[#39182](https://github.com/nodejs/node/pull/39182) +* [[`ac05a0a8a3`](https://github.com/nodejs/node/commit/ac05a0a8a3)] - **errors**: don't throw TypeError on missing export (Benjamin Coe) [#39017](https://github.com/nodejs/node/pull/39017) +* [[`83f3b959f9`](https://github.com/nodejs/node/commit/83f3b959f9)] - **(SEMVER-MINOR)** **fs**: allow empty string for temp directory prefix (Voltrex) [#39028](https://github.com/nodejs/node/pull/39028) +* [[`ac7184d8c7`](https://github.com/nodejs/node/commit/ac7184d8c7)] - **http**: clean up HttpParser correctly (Tobias Koppers) [#39292](https://github.com/nodejs/node/pull/39292) +* [[`35331cbd13`](https://github.com/nodejs/node/commit/35331cbd13)] - **http,https**: align server option of https with http (Qingyu Deng) [#38992](https://github.com/nodejs/node/pull/38992) +* [[`29194d4f88`](https://github.com/nodejs/node/commit/29194d4f88)] - **inspector**: move inspector async hooks to environment (Joyee Cheung) [#39112](https://github.com/nodejs/node/pull/39112) +* [[`ecf627a9af`](https://github.com/nodejs/node/commit/ecf627a9af)] - **lib**: rename TransferedReadableStream etc (Tobias Nießen) [#39352](https://github.com/nodejs/node/pull/39352) +* [[`0e55cb72df`](https://github.com/nodejs/node/commit/0e55cb72df)] - **lib**: make lazyDOMException more common (Khaidi Chu) [#39105](https://github.com/nodejs/node/pull/39105) +* [[`cfd96aa8f9`](https://github.com/nodejs/node/commit/cfd96aa8f9)] - **meta**: fix tls code owners (Robert Nagy) [#39355](https://github.com/nodejs/node/pull/39355) +* [[`e5c2d80560`](https://github.com/nodejs/node/commit/e5c2d80560)] - **meta**: use form schema for bug report template (Michaël Zasso) [#39194](https://github.com/nodejs/node/pull/39194) +* [[`bd472daf0c`](https://github.com/nodejs/node/commit/bd472daf0c)] - **meta**: add @nodejs/actions as CODEOWNERS (Mary Marchini) [#39119](https://github.com/nodejs/node/pull/39119) +* [[`63f87027e3`](https://github.com/nodejs/node/commit/63f87027e3)] - 
**node-api**: cctest on v8impl::Reference (legendecas) [#38970](https://github.com/nodejs/node/pull/38970) +* [[`7ea98fbccd`](https://github.com/nodejs/node/commit/7ea98fbccd)] - **perf_hooks**: refactor perf\_hooks for snapshot building (Joyee Cheung) [#38971](https://github.com/nodejs/node/pull/38971) +* [[`20cc8ec2af`](https://github.com/nodejs/node/commit/20cc8ec2af)] - **readline**: allow completer to rewrite existing input (Anna Henningsen) [#39178](https://github.com/nodejs/node/pull/39178) +* [[`b168ec2a2a`](https://github.com/nodejs/node/commit/b168ec2a2a)] - **repl**: processTopLevelAwait fallback error handling (ejose19) [#39290](https://github.com/nodejs/node/pull/39290) +* [[`a101fe68ad`](https://github.com/nodejs/node/commit/a101fe68ad)] - **repl**: correctly hoist top level await declarations (ejose19) [#39265](https://github.com/nodejs/node/pull/39265) +* [[`d441d91450`](https://github.com/nodejs/node/commit/d441d91450)] - **repl**: ensure correct syntax err for await parsing (Guy Bedford) [#39154](https://github.com/nodejs/node/pull/39154) +* [[`9184259a54`](https://github.com/nodejs/node/commit/9184259a54)] - **src**: add JSDoc typings for v8 (Voltrex) [#38944](https://github.com/nodejs/node/pull/38944) +* [[`66553feeba`](https://github.com/nodejs/node/commit/66553feeba)] - **src**: compare IPv4 addresses in host byte order (Colin Ihrig) [#39096](https://github.com/nodejs/node/pull/39096) +* [[`ea8d83bf59`](https://github.com/nodejs/node/commit/ea8d83bf59)] - **src,crypto**: fix 0-length output crash in webcrypto (Khaidi Chu) [#38913](https://github.com/nodejs/node/pull/38913) +* [[`683c995001`](https://github.com/nodejs/node/commit/683c995001)] - **src,zlib**: tighten up Z\_\*\_WINDOWBITS macros (Khaidi Chu) [#39115](https://github.com/nodejs/node/pull/39115) +* [[`cb32f69e00`](https://github.com/nodejs/node/commit/cb32f69e00)] - **stream**: cleanup async handling (Robert Nagy) [#39329](https://github.com/nodejs/node/pull/39329) +* 
[[`1fc6382942`](https://github.com/nodejs/node/commit/1fc6382942)] - **stream**: don't emit prefinish after error or close (Robert Nagy) [#39332](https://github.com/nodejs/node/pull/39332) +* [[`35b6669e13`](https://github.com/nodejs/node/commit/35b6669e13)] - **stream**: use finished for pump (Robert Nagy) [#39203](https://github.com/nodejs/node/pull/39203) +* [[`9af62a1357`](https://github.com/nodejs/node/commit/9af62a1357)] - **(SEMVER-MINOR)** **stream**: implement WHATWG streams (James M Snell) [#39062](https://github.com/nodejs/node/pull/39062) +* [[`0bb980aeaf`](https://github.com/nodejs/node/commit/0bb980aeaf)] - **test**: remove eslint-disable comment from fixture file (Rich Trott) [#39320](https://github.com/nodejs/node/pull/39320) +* [[`21f77031fb`](https://github.com/nodejs/node/commit/21f77031fb)] - **test**: move debugger test case to parallel (Rich Trott) [#39300](https://github.com/nodejs/node/pull/39300) +* [[`0ec93a1fc1`](https://github.com/nodejs/node/commit/0ec93a1fc1)] - **test**: use common.PORT instead of hardcoded port number (Rich Trott) [#39298](https://github.com/nodejs/node/pull/39298) +* [[`11a8b81caf`](https://github.com/nodejs/node/commit/11a8b81caf)] - **test**: remove debugger workaround for AIX (Rich Trott) [#39296](https://github.com/nodejs/node/pull/39296) +* [[`8e77aa23f1`](https://github.com/nodejs/node/commit/8e77aa23f1)] - **test**: add test for debugger restart message issue (Rich Trott) [#39273](https://github.com/nodejs/node/pull/39273) +* [[`13755599e1`](https://github.com/nodejs/node/commit/13755599e1)] - **test**: remove workaround code in debugger test (Rich Trott) [#39238](https://github.com/nodejs/node/pull/39238) +* [[`1f31e3c774`](https://github.com/nodejs/node/commit/1f31e3c774)] - **test**: remove checks for armv6 (Rich Trott) [#39162](https://github.com/nodejs/node/pull/39162) +* [[`d486d0117c`](https://github.com/nodejs/node/commit/d486d0117c)] - **test**: move test-debugger-address to parallel (Rich Trott) 
[#39236](https://github.com/nodejs/node/pull/39236) +* [[`cdc7a19f48`](https://github.com/nodejs/node/commit/cdc7a19f48)] - **test**: remove common.enoughTestCpu (Rich Trott) [#39161](https://github.com/nodejs/node/pull/39161) +* [[`cc32365f56`](https://github.com/nodejs/node/commit/cc32365f56)] - **(SEMVER-MINOR)** **test**: add WPT streams tests (James M Snell) [#39062](https://github.com/nodejs/node/pull/39062) +* [[`fff21a4afb`](https://github.com/nodejs/node/commit/fff21a4afb)] - **test**: replace "inspector-cli" with "debugger" (Rich Trott) [#39156](https://github.com/nodejs/node/pull/39156) +* [[`df17c62818`](https://github.com/nodejs/node/commit/df17c62818)] - **test**: use localhost test instead of connecting to remote (Adam Majer) [#39011](https://github.com/nodejs/node/pull/39011) +* [[`dfe99d2aac`](https://github.com/nodejs/node/commit/dfe99d2aac)] - **tls**: move legacy code into own file (Robert Nagy) [#39333](https://github.com/nodejs/node/pull/39333) +* [[`f338fddbb0`](https://github.com/nodejs/node/commit/f338fddbb0)] - **tools**: add GitHub Action to run find-inactive-collaborators.mjs (Rich Trott) [#39335](https://github.com/nodejs/node/pull/39335) +* [[`b3a0dd1e4a`](https://github.com/nodejs/node/commit/b3a0dd1e4a)] - **tools**: pass bot token to node-pr-labeler (Michaël Zasso) [#39271](https://github.com/nodejs/node/pull/39271) +* [[`b56a3d9009`](https://github.com/nodejs/node/commit/b56a3d9009)] - **tools**: update gyp-next to v0.9.3 (Jiawen Geng) [#39291](https://github.com/nodejs/node/pull/39291) +* [[`3cd9f5e298`](https://github.com/nodejs/node/commit/3cd9f5e298)] - **tools**: add find-inactive-collaborators.js (Rich Trott) [#39262](https://github.com/nodejs/node/pull/39262) +* [[`0673ede3ad`](https://github.com/nodejs/node/commit/0673ede3ad)] - **tools**: take ownership of deps/v8/tools/node (Michaël Zasso) [#39222](https://github.com/nodejs/node/pull/39222) +* [[`cb8c6ffbce`](https://github.com/nodejs/node/commit/cb8c6ffbce)] - **tools**: 
update ESLint to 7.30.0 (Colin Ihrig) [#39242](https://github.com/nodejs/node/pull/39242) +* [[`d5113f9e34`](https://github.com/nodejs/node/commit/d5113f9e34)] - **tools**: remove armv6 from test tools (Rich Trott) [#39162](https://github.com/nodejs/node/pull/39162) +* [[`802d9c4488`](https://github.com/nodejs/node/commit/802d9c4488)] - **tools**: update path-parse to 1.0.7 (Rich Trott) [#39232](https://github.com/nodejs/node/pull/39232) +* [[`ab9ccd014c`](https://github.com/nodejs/node/commit/ab9ccd014c)] - **tools**: remove unused `lint-pr-commit-message.sh` (Richard Lau) [#39120](https://github.com/nodejs/node/pull/39120) +* [[`6200f3b35f`](https://github.com/nodejs/node/commit/6200f3b35f)] - **tools**: update @babel/eslint-parser to 7.14.7 (Rich Trott) [#39160](https://github.com/nodejs/node/pull/39160) +* [[`dfe5d1139c`](https://github.com/nodejs/node/commit/dfe5d1139c)] - **tools**: update remark-preset-lint-node to 2.4.1 (Rich Trott) [#39201](https://github.com/nodejs/node/pull/39201) +* [[`4715105581`](https://github.com/nodejs/node/commit/4715105581)] - **tools**: upgrade `highlight.js` to version 11.0.1 (Antoine du Hamel) [#39032](https://github.com/nodejs/node/pull/39032) +* [[`2481ddd08d`](https://github.com/nodejs/node/commit/2481ddd08d)] - **tools,doc**: fix error message for unrecognized type (Antoine du Hamel) [#39221](https://github.com/nodejs/node/pull/39221) +* [[`adb812c042`](https://github.com/nodejs/node/commit/adb812c042)] - **typings**: add a few JSDoc typings for the net lib module (nerdthatnoonelikes) [#38953](https://github.com/nodejs/node/pull/38953) +* [[`29673b8ac8`](https://github.com/nodejs/node/commit/29673b8ac8)] - **typings**: add JSDoc typings for timers (Voltrex) [#38834](https://github.com/nodejs/node/pull/38834) +* [[`fe1c81f247`](https://github.com/nodejs/node/commit/fe1c81f247)] - **wasi**: use missing validator (Voltrex) [#39070](https://github.com/nodejs/node/pull/39070) + ## 2021-07-05, Version 16.4.2 (Current), 
@BethGriggs @@ -80,7 +246,7 @@ Vulnerabilities fixed: * **async_hooks**: * stabilize part of AsyncLocalStorage (Vladimir de Turckheim) [#37675](https://github.com/nodejs/node/pull/37675) * **deps**: - * upgrade npm to 7.18.1 (npm-robot) [#39065](https://github.com/nodejs/node/pull/39065) + * upgrade npm to 7.18.1 (npm team) [#39065](https://github.com/nodejs/node/pull/39065) * update V8 to 9.1.269.36 (Michaël Zasso) [#38273](https://github.com/nodejs/node/pull/38273) * **dns**: * allow `--dns-result-order` to change default dns verbatim (Ouyang Yadong) [#38099](https://github.com/nodejs/node/pull/38099) @@ -114,8 +280,8 @@ Vulnerabilities fixed: * [[`dc9218136b`](https://github.com/nodejs/node/commit/dc9218136b)] - **debugger**: use ERR\_DEBUGGER\_ERROR in debugger client (Rich Trott) [#39024](https://github.com/nodejs/node/pull/39024) * [[`711916a271`](https://github.com/nodejs/node/commit/711916a271)] - **debugger**: remove unnecessary boilerplate copyright comment (Rich Trott) [#38952](https://github.com/nodejs/node/pull/38952) * [[`0f65e41442`](https://github.com/nodejs/node/commit/0f65e41442)] - **debugger**: reduce scope of eslint disable comment (Rich Trott) [#38946](https://github.com/nodejs/node/pull/38946) -* [[`1fa724ec5a`](https://github.com/nodejs/node/commit/1fa724ec5a)] - **deps**: upgrade npm to 7.18.1 (npm-robot) [#39065](https://github.com/nodejs/node/pull/39065) -* [[`c6aa68598d`](https://github.com/nodejs/node/commit/c6aa68598d)] - **deps**: upgrade npm to 7.17.0 (npm-robot) [#38999](https://github.com/nodejs/node/pull/38999) +* [[`1fa724ec5a`](https://github.com/nodejs/node/commit/1fa724ec5a)] - **deps**: upgrade npm to 7.18.1 (npm team) [#39065](https://github.com/nodejs/node/pull/39065) +* [[`c6aa68598d`](https://github.com/nodejs/node/commit/c6aa68598d)] - **deps**: upgrade npm to 7.17.0 (npm team) [#38999](https://github.com/nodejs/node/pull/38999) * [[`864fe9910b`](https://github.com/nodejs/node/commit/864fe9910b)] - **deps**: make V8 9.1 
abi-compatible with 9.0 (Michaël Zasso) [#38991](https://github.com/nodejs/node/pull/38991) * [[`c93f3573eb`](https://github.com/nodejs/node/commit/c93f3573eb)] - **deps**: V8: cherry-pick fa4cb172cde2 (Michaël Zasso) [#38273](https://github.com/nodejs/node/pull/38273) * [[`3c6c28b0a1`](https://github.com/nodejs/node/commit/3c6c28b0a1)] - **deps**: V8: cherry-pick 4c074516397b (Michaël Zasso) [#38273](https://github.com/nodejs/node/pull/38273) @@ -131,7 +297,7 @@ Vulnerabilities fixed: * [[`4ef37c83a9`](https://github.com/nodejs/node/commit/4ef37c83a9)] - **deps**: V8: patch register-arm64.h (Refael Ackermann) [#32116](https://github.com/nodejs/node/pull/32116) * [[`7c61c6ee25`](https://github.com/nodejs/node/commit/7c61c6ee25)] - **deps**: V8: un-cherry-pick bd019bd (Refael Ackermann) [#32116](https://github.com/nodejs/node/pull/32116) * [[`e82ef4148e`](https://github.com/nodejs/node/commit/e82ef4148e)] - **(SEMVER-MINOR)** **deps**: update V8 to 9.1.269.36 (Michaël Zasso) [#38273](https://github.com/nodejs/node/pull/38273) -* [[`70af146745`](https://github.com/nodejs/node/commit/70af146745)] - **deps**: upgrade npm to 7.16.0 (npm-robot) [#38920](https://github.com/nodejs/node/pull/38920) +* [[`70af146745`](https://github.com/nodejs/node/commit/70af146745)] - **deps**: upgrade npm to 7.16.0 (npm team) [#38920](https://github.com/nodejs/node/pull/38920) * [[`a71df7630e`](https://github.com/nodejs/node/commit/a71df7630e)] - **(SEMVER-MINOR)** **dns**: allow `--dns-result-order` to change default dns verbatim (Ouyang Yadong) [#38099](https://github.com/nodejs/node/pull/38099) * [[`dce256b210`](https://github.com/nodejs/node/commit/dce256b210)] - **doc**: remove references to deleted freenode channels (devsnek) [#39047](https://github.com/nodejs/node/pull/39047) * [[`1afff98805`](https://github.com/nodejs/node/commit/1afff98805)] - **doc**: fix typos (bl-ue) [#39049](https://github.com/nodejs/node/pull/39049) @@ -230,7 +396,7 @@ Vulnerabilities fixed: * 
[[`eb7c932a6d`](https://github.com/nodejs/node/commit/eb7c932a6d)] - **debugger**: revise async iterator usage to comply with lint rules (Rich Trott) [#38847](https://github.com/nodejs/node/pull/38847) * [[`f1000e0e52`](https://github.com/nodejs/node/commit/f1000e0e52)] - **debugger**: removed unused function argument (Rich Trott) [#38850](https://github.com/nodejs/node/pull/38850) * [[`ee1056da60`](https://github.com/nodejs/node/commit/ee1056da60)] - **debugger**: wait for V8 debugger to be enabled (Michaël Zasso) [#38811](https://github.com/nodejs/node/pull/38811) -* [[`47ad448def`](https://github.com/nodejs/node/commit/47ad448def)] - **deps**: upgrade npm to 7.15.1 (npm-robot) [#38880](https://github.com/nodejs/node/pull/38880) +* [[`47ad448def`](https://github.com/nodejs/node/commit/47ad448def)] - **deps**: upgrade npm to 7.15.1 (npm team) [#38880](https://github.com/nodejs/node/pull/38880) * [[`e8192b5e89`](https://github.com/nodejs/node/commit/e8192b5e89)] - **deps**: upgrade npm to 7.14.0 (Ruy Adorno) [#38750](https://github.com/nodejs/node/pull/38750) * [[`15aaf14690`](https://github.com/nodejs/node/commit/15aaf14690)] - **deps**: update llhttp to 6.0.2 (Fedor Indutny) [#38665](https://github.com/nodejs/node/pull/38665) * [[`108ffdb68f`](https://github.com/nodejs/node/commit/108ffdb68f)] - **doc**: fixed typo in n-api.md (julianjany) [#38822](https://github.com/nodejs/node/pull/38822) diff --git a/doc/guides/adding-new-napi-api.md b/doc/guides/adding-new-napi-api.md index 21328912c51bf7..18f551929e4e03 100644 --- a/doc/guides/adding-new-napi-api.md +++ b/doc/guides/adding-new-napi-api.md @@ -11,7 +11,7 @@ Node-API. * **Must** return `napi_status`. * **Should** consume `napi_env`. * **Must** operate only on primitive data types, pointers to primitive - datatypes or opaque handles. + data types or opaque handles. * **Must** be a necessary API and not a nice to have. Convenience APIs belong in node-addon-api. 
* **Must** not change the signature of an existing Node-API API or break diff --git a/doc/guides/collaborator-guide.md b/doc/guides/collaborator-guide.md index ae860f6446eb6f..cdc6c6da1dbad3 100644 --- a/doc/guides/collaborator-guide.md +++ b/doc/guides/collaborator-guide.md @@ -95,7 +95,7 @@ issues. If a user opens a security issue in the public repository: * Ask the user to submit a report through HackerOne as outlined in [SECURITY.md][]. -* Move the issue to the private repo called +* Move the issue to the private repository called [premature-disclosures](https://github.com/nodejs/premature-disclosures). * For any related pull requests, create an associated issue in the `premature-disclosures` repository. Add a copy of the patch for the @@ -552,7 +552,7 @@ Checkout proper target branch: $ git checkout master ``` -Update the tree (assumes your repo is set up as detailed in +Update the tree (assumes your repository is set up as detailed in [CONTRIBUTING.md](./contributing/pull-requests.md#step-1-fork)): ```text @@ -831,6 +831,86 @@ When things need extra attention, are controversial, or `semver-major`: If you cannot find who to cc for a file, `git shortlog -n -s <file>` can help. +## Labels + +### General labels + +* `confirmed-bug`: Bugs you have verified +* `discuss`: Things that need larger discussion +* `feature request`: Any issue that requests a new feature +* `good first issue`: Issues suitable for newcomers to fix +* `meta`: Governance, policies, procedures, etc. +* `tsc-agenda`: Open issues and pull requests with this label will be added to + the Technical Steering Committee meeting agenda + +--- + +* `author-ready` - A pull request is _author ready_ when: + * There is a CI run in progress or completed. + * There is at least one Collaborator approval (or two TSC approvals for + semver-major pull requests). + * There are no outstanding review comments. + +Please always add the `author ready` label to pull requests that qualify. 
+Please always remove it again as soon as the conditions are not met anymore, +such as if the CI run fails or a new outstanding review comment is posted. + +--- + +* `semver-{minor,major}` + * be conservative – that is, if a change has the remote *chance* of breaking + something, go for semver-major + * when adding a semver label, add a comment explaining why you're adding it + * minor vs. patch: roughly: "does it add a new method / does it add a new + section to the docs" + * major vs. everything else: run last versions tests against this version, if + they pass, **probably** minor or patch + +### LTS/version labels + +We use labels to keep track of which branches a commit should land on: + +* `dont-land-on-v?.x` + * For changes that do not apply to a certain release line + * Also used when the work of backporting a change outweighs the benefits +* `land-on-v?.x` + * Used by releasers to mark a pull request as scheduled for inclusion in an + LTS release + * Applied to the original pull request for clean cherry-picks, to the backport + pull request otherwise +* `backport-requested-v?.x` + * Used to indicate that a pull request needs a manual backport to a branch in + order to land the changes on that branch + * Typically applied by a releaser when the pull request does not apply cleanly + or it breaks the tests after applying + * Will be replaced by either `dont-land-on-v?.x` or `backported-to-v?.x` +* `backported-to-v?.x` + * Applied to pull requests for which a backport pull request has been merged +* `lts-watch-v?.x` + * Applied to pull requests which the Release working group should consider + including in an LTS release + * Does not indicate that any specific action will be taken, but can be + effective as messaging to non-collaborators +* `release-agenda` + * For things that need discussion by the Release working group + * (for example semver-minor changes that need or should go into an LTS + release) +* `v?.x` + * Automatically applied to changes that do not 
target `master` but rather the + `v?.x-staging` branch + +Once a release line enters maintenance mode, the corresponding labels do not +need to be attached anymore, as only important bugfixes will be included. + +### Other labels + +* Operating system labels + * `macos`, `windows`, `smartos`, `aix` + * No `linux` label because it is the implied default +* Architecture labels + * `arm`, `mips`, `s390`, `ppc` + * No `x86{_64}` label because it is the implied default + ["Merge Pull Request"]: https://help.github.com/articles/merging-a-pull-request/#merging-a-pull-request-on-github [Deprecation]: https://en.wikipedia.org/wiki/Deprecation [SECURITY.md]: https://github.com/nodejs/node/blob/HEAD/SECURITY.md diff --git a/doc/guides/investigating_native_memory_leak.md b/doc/guides/investigating_native_memory_leak.md index 55ba1b1ec3d60b..f808675f5328bc 100644 --- a/doc/guides/investigating_native_memory_leak.md +++ b/doc/guides/investigating_native_memory_leak.md @@ -92,7 +92,7 @@ operating systems will clean up the memory of the process after the shutdown while attempting to free all memory to get a clean report may have a negative impact on the code complexity and shutdown times. Node.js does a pretty good job only leaving on -the order of 6KB that are not freed on shutdown. +the order of 6 KB that are not freed on shutdown. ## An obvious memory leak @@ -100,8 +100,8 @@ Leaks can be introduced in native addons and the following is a simple example leak based on the "Hello world" addon from [node-addon-examples](https://github.com/nodejs/node-addon-examples). 
-In this example, a loop which allocates ~1MB of memory and never frees it -has been added: +In this example, a loop which allocates approximately 1 MB of memory and never +frees it has been added: ```cpp void* malloc_holder = nullptr; diff --git a/doc/guides/maintaining-V8.md b/doc/guides/maintaining-V8.md index da940758a25d94..6234b29bc2b225 100644 --- a/doc/guides/maintaining-V8.md +++ b/doc/guides/maintaining-V8.md @@ -179,7 +179,7 @@ fixed upstream first. ### Backporting to active branches If the bug exists in any of the active V8 branches, we may need to get the fix -backported. At any given time there are [two active branches][V8ActiveBranches] +backported. At any given time, there are [two active branches][V8ActiveBranches] (beta and stable) in addition to master. The following steps are needed to backport the fix: @@ -191,9 +191,7 @@ backport the fix: * If a bug already exists * Add a reference to the GitHub issue. * Attach *merge-request-x.x* labels to the bug for any active branches - that still contain the bug. (e.g. merge-request-5.3, - merge-request-5.4) - * Add ofrobots-at-google.com to the cc list. + that still contain the bug. * Once the merge has been approved, it should be merged using the [merge script documented in the V8 wiki][V8MergingPatching]. Merging requires commit access to the V8 repository. If you don't have commit access you can @@ -215,17 +213,12 @@ to be cherry-picked in the Node.js repository and V8-CI must test the change. * Checkout a branch off the appropriate *vY.x-staging* branch (e.g. *v6.x-staging* to fix an issue in V8 5.1). * Cherry-pick the commit(s) from the V8 repository. - * On Node.js < 9.0.0: Increase the patch level version in `v8-version.h`. - This will not cause any problems with versioning because V8 will not - publish other patches for this branch, so Node.js can effectively bump the - patch version. - * On Node.js >= 9.0.0: Increase the `v8_embedder_string` number in - `common.gypi`. 
+ * Increase the `v8_embedder_string` number in `common.gypi`. * In some cases the patch may require extra effort to merge in case V8 has - changed substantially. For important issues we may be able to lean on the + changed substantially. For important issues, we may be able to lean on the V8 team to get help with reimplementing the patch. - * Open a cherry-pick PR on `nodejs/node` targeting the *vY.x-staging* branch - and notify the `@nodejs/v8` team. + * Open a cherry-pick pull request on `nodejs/node` targeting the + *vY.x-staging* branch and notify the `@nodejs/v8` team. * Run the Node.js [V8 CI][] in addition to the [Node.js CI][]. The CI uses the `test-v8` target in the `Makefile`, which uses `tools/make-v8.sh` to reconstruct a git tree in the `deps/v8` directory to @@ -246,8 +239,7 @@ fix needed to be cherry-picked. To cherry-pick, here's an example workflow: not apply cleanly. It may help to try to cherry-pick the merge to the oldest branch that was done upstream in V8. In this example, this would be the patch from the merge to 5.2. The hope is that this would be closer to the V8 5.1, - and has a better chance of applying cleanly. If you're stuck, feel free to - ping @ofrobots for help. + and has a better chance of applying cleanly. * Modify the commit message to match the format we use for V8 backports and replace yourself as the author. `git commit --amend --reset-author`. You may want to add extra description if necessary to indicate the impact of the fix @@ -274,9 +266,9 @@ Refs: https://github.com/v8/v8/commit/a51f429772d1e796744244128c9feeab4c26a854 PR-URL: https://github.com/nodejs/node/pull/7833 ``` -* Open a PR against the `v6.x-staging` branch in the Node.js repo. Launch the - normal and [V8 CI][] using the Node.js CI system. We only needed to backport - to `v6.x` as the other LTS branches weren't affected by this bug. +* Open a PR against the `v6.x-staging` branch in the Node.js repository. 
Launch + the normal and [V8 CI][] using the Node.js CI system. We only needed to + backport to `v6.x` as the other LTS branches weren't affected by this bug. ### Backports identified by the V8 team @@ -382,7 +374,7 @@ git node v8 major --branch=5.1-lkgr This should be followed up with manual refloating of all relevant patches. -## Proposal: using a fork repo to track upstream V8 +## Proposal: Using a fork repository to track upstream V8 The fact that Node.js keeps a vendored, potentially edited copy of V8 in deps/ makes the above processes a bit complicated. An alternative proposal would be to diff --git a/doc/guides/offboarding.md b/doc/guides/offboarding.md index 13f602bb0f8286..0d73e412089b7f 100644 --- a/doc/guides/offboarding.md +++ b/doc/guides/offboarding.md @@ -6,8 +6,6 @@ Emeritus or leaves the project. * Remove the Collaborator from the @nodejs/collaborators team. * Open a fast-track pull request to move the Collaborator to Collaborator Emeriti in README.md. -* Email the TSC mailing list to notify TSC members that the Collaborator is - moving to Collaborator Emeritus. * Determine what GitHub teams the Collaborator belongs to. In consultation with the Collaborator, determine which of those teams they should be removed from. * Some teams may also require a pull request to remove the Collaborator from diff --git a/doc/guides/onboarding-extras.md b/doc/guides/onboarding-extras.md deleted file mode 100644 index 79951a433926c6..00000000000000 --- a/doc/guides/onboarding-extras.md +++ /dev/null @@ -1,80 +0,0 @@ -# Additional onboarding information - -## Labels - -### General - -* `confirmed-bug`: Bugs you have verified -* `discuss`: Things that need larger discussion -* `feature request`: Any issue that requests a new feature -* `good first issue`: Issues suitable for newcomers to fix -* `meta`: Governance, policies, procedures, etc. 
-* `tsc-agenda`: Open issues and pull requests with this label will be added to - the Technical Steering Committee meeting agenda - ---- - -* `author-ready` - A pull request is _author ready_ when: - * There is a CI run in progress or completed. - * There is at least one Collaborator approval (or two TSC approvals for - semver-major PRs). - * There are no outstanding review comments. - -Please always add the `author ready` label to pull requests that qualify. -Please always remove it again as soon as the conditions are not met anymore, -such as if the CI run fails or a new outstanding review comment is posted. - ---- - -* `semver-{minor,major}` - * be conservative – that is, if a change has the remote *chance* of breaking - something, go for semver-major - * when adding a semver label, add a comment explaining why you're adding it - * minor vs. patch: roughly: "does it add a new method / does it add a new - section to the docs" - * major vs. everything else: run last versions tests against this version, if - they pass, **probably** minor or patch - -### LTS/version labels - -We use labels to keep track of which branches a commit should land on: - -* `dont-land-on-v?.x` - * For changes that do not apply to a certain release line - * Also used when the work of backporting a change outweighs the benefits -* `land-on-v?.x` - * Used by releasers to mark a PR as scheduled for inclusion in an LTS release - * Applied to the original PR for clean cherry-picks, to the backport PR - otherwise -* `backport-requested-v?.x` - * Used to indicate that a PR needs a manual backport to a branch in order to - land the changes on that branch - * Typically applied by a releaser when the PR does not apply cleanly or it - breaks the tests after applying - * Will be replaced by either `dont-land-on-v?.x` or `backported-to-v?.x` -* `backported-to-v?.x` - * Applied to PRs for which a backport PR has been merged -* `lts-watch-v?.x` - * Applied to PRs which the LTS working group should 
consider including in a - LTS release - * Does not indicate that any specific action will be taken, but can be - effective as messaging to non-collaborators -* `lts-agenda` - * For things that need discussion by the LTS working group - * (for example semver-minor changes that need or should go into an LTS - release) -* `v?.x` - * Automatically applied to changes that do not target `master` but rather the - `v?.x-staging` branch - -Once a release line enters maintenance mode, the corresponding labels do not -need to be attached anymore, as only important bugfixes will be included. - -### Other labels - -* Operating system labels - * `macos`, `windows`, `smartos`, `aix` - * No `linux` label because it is the implied default -* Architecture labels - * `arm`, `mips`, `s390`, `ppc` - * No `x86{_64}` label because it is the implied default diff --git a/doc/guides/releases.md b/doc/guides/releases.md index 832b8c8eda0680..8a357b595a8c89 100644 --- a/doc/guides/releases.md +++ b/doc/guides/releases.md @@ -398,7 +398,19 @@ Create a pull request targeting the correct release line. For example, a `v5.3.0-proposal` PR should target `v5.x`, not master. Paste the CHANGELOG modifications into the body of the PR so that collaborators can see what is changing. These PRs should be left open for at least 24 hours, and can be -updated as new commits land. +updated as new commits land. If the CHANGELOG pasted into the pull request +is long enough that it slows down the GitHub UI, consider pasting the commits +into `
` tags or in follow up comments. + +If using the `
` tag, use the following format: + +```markdown +
+Commits + +* Full list of commits... +
+``` If you need any additional information about any of the commits, this PR is a good place to @-mention the relevant contributors. @@ -583,9 +595,9 @@ $ git push upstream master ### 14. Push the release tag -Push the tag to the repo before you promote the builds. If you haven't pushed -your tag first, then build promotion won't work properly. Push the tag using the -following command: +Push the tag to the repository before you promote the builds. If you +haven't pushed your tag first, then build promotion won't work properly. +Push the tag using the following command: ```console $ git push diff --git a/doc/guides/security-release-process.md b/doc/guides/security-release-process.md index 10465c57501328..9517ea674f0c70 100644 --- a/doc/guides/security-release-process.md +++ b/doc/guides/security-release-process.md @@ -38,9 +38,10 @@ information described. * Described in the pre/post announcements * [ ] Pre-release announcement [email][]: ***LINK TO EMAIL*** + * CC: `oss-security@lists.openwall.com` + * Subject: `Node.js security updates for all active release lines, Month Year` + * Body: ```text - Security updates for all active release lines, Month Year - The Node.js project will release new versions of all supported release lines on or shortly after Day of week, Month Day of Month, Year For more information see: https://nodejs.org/en/blog/vulnerability/month-year-security-releases/ ``` @@ -70,9 +71,10 @@ information described. * [ ] [Unlock CI](https://github.com/nodejs/build/blob/HEAD/doc/jenkins-guide.md#after-the-release) * [ ] Post-release announcement in reply [email][]: ***LINK TO EMAIL*** + * CC: `oss-security@lists.openwall.com` + * Subject: `Node.js security updates for all active release lines, Month Year` + * Body: ```text - Security updates for all active release lines, Month Year - The Node.js project has now released new versions of all supported release lines. 
For more information see: https://nodejs.org/en/blog/vulnerability/month-year-security-releases/ ``` diff --git a/doc/guides/writing-and-running-benchmarks.md b/doc/guides/writing-and-running-benchmarks.md index 7e00e65a7b8b67..1198abe19bf921 100644 --- a/doc/guides/writing-and-running-benchmarks.md +++ b/doc/guides/writing-and-running-benchmarks.md @@ -74,11 +74,8 @@ install.packages("ggplot2") install.packages("plyr") ``` -In the event that a message is reported stating that a CRAN mirror must be -selected first, specify a mirror by adding in the repo parameter. - -If we used the "" mirror, it could look something -like this: +If a message states that a CRAN mirror must be selected first, specify a mirror +with the `repo` parameter. ```r install.packages("ggplot2", repo="http://cran.us.r-project.org") diff --git a/doc/guides/writing-tests.md b/doc/guides/writing-tests.md index 7d33ee15858cc5..1972c897b0a5eb 100644 --- a/doc/guides/writing-tests.md +++ b/doc/guides/writing-tests.md @@ -20,7 +20,7 @@ Add tests when: ## Test directory structure -See [directory structure overview][] for outline of existing test & locations. +See [directory structure overview][] for outline of existing test and locations. When deciding on whether to expand an existing test file or create a new one, consider going through the files related to the subsystem. For example, look for `test-streams` when writing a test for `lib/streams.js`. diff --git a/doc/node.1 b/doc/node.1 index a0bbb2b781b7ba..2cfc4dbda91d07 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -266,7 +266,7 @@ This flag is inherited from V8 and is subject to change upstream. It may disappear in a non-semver-major release. . .It Fl -max-http-header-size Ns = Ns Ar size -Specify the maximum size of HTTP headers in bytes. Defaults to 16KB. +Specify the maximum size of HTTP headers in bytes. Defaults to 16 KB. . .It Fl -napi-modules This option is a no-op. @@ -698,7 +698,7 @@ Documentation: .Sy https://nodejs.org/api/ . 
.Pp -GitHub repository & Issue Tracker: +GitHub repository and issue tracker: .Sy https://github.com/nodejs/node . .Pp diff --git a/lib/_http_common.js b/lib/_http_common.js index e3e732a8a180c6..642cdea41f68b1 100644 --- a/lib/_http_common.js +++ b/lib/_http_common.js @@ -46,6 +46,7 @@ let debug = require('internal/util/debuglog').debuglog('http', (fn) => { const kIncomingMessage = Symbol('IncomingMessage'); const kRequestTimeout = Symbol('RequestTimeout'); +const kOnMessageBegin = HTTPParser.kOnMessageBegin | 0; const kOnHeaders = HTTPParser.kOnHeaders | 0; const kOnHeadersComplete = HTTPParser.kOnHeadersComplete | 0; const kOnBody = HTTPParser.kOnBody | 0; @@ -239,6 +240,7 @@ function cleanParser(parser) { parser.incoming = null; parser.outgoing = null; parser.maxHeaderPairs = MAX_HEADER_PAIRS; + parser[kOnMessageBegin] = null; parser[kOnExecute] = null; parser[kOnTimeout] = null; parser._consumed = false; diff --git a/lib/_http_server.js b/lib/_http_server.js index 97df58a007daba..11119169d56f2c 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -353,18 +353,7 @@ function writeHead(statusCode, reason, obj) { // Docs-only deprecated: DEP0063 ServerResponse.prototype.writeHeader = ServerResponse.prototype.writeHead; -function Server(options, requestListener) { - if (!(this instanceof Server)) return new Server(options, requestListener); - - if (typeof options === 'function') { - requestListener = options; - options = {}; - } else if (options == null || typeof options === 'object') { - options = { ...options }; - } else { - throw new ERR_INVALID_ARG_TYPE('options', 'object', options); - } - +function storeHTTPOptions(options) { this[kIncomingMessage] = options.IncomingMessage || IncomingMessage; this[kServerResponse] = options.ServerResponse || ServerResponse; @@ -377,7 +366,21 @@ function Server(options, requestListener) { if (insecureHTTPParser !== undefined) validateBoolean(insecureHTTPParser, 'options.insecureHTTPParser'); this.insecureHTTPParser = 
insecureHTTPParser; +} + +function Server(options, requestListener) { + if (!(this instanceof Server)) return new Server(options, requestListener); + + if (typeof options === 'function') { + requestListener = options; + options = {}; + } else if (options == null || typeof options === 'object') { + options = { ...options }; + } else { + throw new ERR_INVALID_ARG_TYPE('options', 'object', options); + } + storeHTTPOptions.call(this, options); net.Server.call(this, { allowHalfOpen: true }); if (requestListener) { @@ -991,6 +994,7 @@ module.exports = { STATUS_CODES, Server, ServerResponse, + storeHTTPOptions, _connectionListener: connectionListener, kServerResponse }; diff --git a/lib/_tls_common.js b/lib/_tls_common.js index 5ca6d65181d0ae..21b22a42507c5b 100644 --- a/lib/_tls_common.js +++ b/lib/_tls_common.js @@ -52,8 +52,11 @@ const { const { configSecureContext, +} = require('internal/tls/secure-context'); + +const { parseCertString, -} = require('internal/tls'); +} = require('internal/tls/parse-cert-string'); function toV(which, v, def) { if (v == null) v = def; diff --git a/lib/assert.js b/lib/assert.js index 61d6bda6b37632..89949c01614fd4 100644 --- a/lib/assert.js +++ b/lib/assert.js @@ -243,21 +243,9 @@ function getCode(fd, line, column) { function parseCode(code, offset) { // Lazy load acorn. 
if (parseExpressionAt === undefined) { - const acorn = require('internal/deps/acorn/acorn/dist/acorn'); - const privateMethods = - require('internal/deps/acorn-plugins/acorn-private-methods/index'); - const classFields = - require('internal/deps/acorn-plugins/acorn-class-fields/index'); - const staticClassFeatures = - require('internal/deps/acorn-plugins/acorn-static-class-features/index'); - + const Parser = require('internal/deps/acorn/acorn/dist/acorn').Parser; ({ findNodeAround } = require('internal/deps/acorn/acorn-walk/dist/walk')); - const Parser = acorn.Parser.extend( - privateMethods, - classFields, - staticClassFeatures - ); parseExpressionAt = FunctionPrototypeBind(Parser.parseExpressionAt, Parser); } let node; diff --git a/lib/buffer.js b/lib/buffer.js index 18b90f40527921..278a67cbbfb37e 100644 --- a/lib/buffer.js +++ b/lib/buffer.js @@ -75,6 +75,7 @@ const { const { customInspectSymbol, isInsideNodeModules, + lazyDOMException, normalizeEncoding, kIsEncodingSymbol } = require('internal/util'); @@ -1208,14 +1209,6 @@ if (internalBinding('config').hasIntl) { }; } -let DOMException; - -const lazyInvalidCharError = hideStackFrames((message, name) => { - if (DOMException === undefined) - DOMException = internalBinding('messaging').DOMException; - throw new DOMException('Invalid character', 'InvalidCharacterError'); -}); - function btoa(input) { // The implementation here has not been performance optimized in any way and // should not be. 
@@ -1223,7 +1216,7 @@ function btoa(input) { input = `${input}`; for (let n = 0; n < input.length; n++) { if (input[n].charCodeAt(0) > 0xff) - lazyInvalidCharError(); + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); } const buf = Buffer.from(input, 'latin1'); return buf.toString('base64'); @@ -1239,7 +1232,7 @@ function atob(input) { input = `${input}`; for (let n = 0; n < input.length; n++) { if (!kBase64Digits.includes(input[n])) - lazyInvalidCharError(); + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); } return Buffer.from(input, 'base64').toString('latin1'); } diff --git a/lib/fs.js b/lib/fs.js index 46209a3f4d58c0..cf3c885b31cdf8 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -74,7 +74,6 @@ const { codes: { ERR_FS_FILE_TOO_LARGE, ERR_INVALID_ARG_VALUE, - ERR_INVALID_ARG_TYPE, ERR_FEATURE_UNAVAILABLE_ON_PLATFORM, }, AbortError, @@ -136,6 +135,7 @@ const { validateEncoding, validateFunction, validateInteger, + validateString, } = require('internal/validators'); const watchers = require('internal/fs/watchers'); @@ -2712,9 +2712,8 @@ realpath.native = (path, options, callback) => { function mkdtemp(prefix, options, callback) { callback = makeCallback(typeof options === 'function' ? 
options : callback); options = getOptions(options, {}); - if (!prefix || typeof prefix !== 'string') { - throw new ERR_INVALID_ARG_TYPE('prefix', 'string', prefix); - } + + validateString(prefix, 'prefix'); nullCheck(prefix, 'prefix'); warnOnNonPortableTemplate(prefix); const req = new FSReqCallback(); @@ -2730,9 +2729,8 @@ function mkdtemp(prefix, options, callback) { */ function mkdtempSync(prefix, options) { options = getOptions(options, {}); - if (!prefix || typeof prefix !== 'string') { - throw new ERR_INVALID_ARG_TYPE('prefix', 'string', prefix); - } + + validateString(prefix, 'prefix'); nullCheck(prefix, 'prefix'); warnOnNonPortableTemplate(prefix); const path = `${prefix}XXXXXX`; diff --git a/lib/https.js b/lib/https.js index 765e1a22b60696..695a9020994852 100644 --- a/lib/https.js +++ b/lib/https.js @@ -40,16 +40,14 @@ const tls = require('tls'); const { Agent: HttpAgent } = require('_http_agent'); const { Server: HttpServer, + storeHTTPOptions, _connectionListener, - kServerResponse } = require('_http_server'); const { ClientRequest } = require('_http_client'); let debug = require('internal/util/debuglog').debuglog('https', (fn) => { debug = fn; }); const { URL, urlToHttpOptions, searchParamsSymbol } = require('internal/url'); -const { IncomingMessage, ServerResponse } = require('http'); -const { kIncomingMessage } = require('_http_common'); function Server(opts, requestListener) { if (!(this instanceof Server)) return new Server(opts, requestListener); @@ -67,9 +65,7 @@ function Server(opts, requestListener) { opts.ALPNProtocols = ['http/1.1']; } - this[kIncomingMessage] = opts.IncomingMessage || IncomingMessage; - this[kServerResponse] = opts.ServerResponse || ServerResponse; - + FunctionPrototypeCall(storeHTTPOptions, this, opts); FunctionPrototypeCall(tls.Server, this, opts, _connectionListener); this.httpAllowHalfOpen = false; diff --git a/lib/internal/abort_controller.js b/lib/internal/abort_controller.js index 6c80aa7bf4f2b3..e6ee07052617d5 100644 
--- a/lib/internal/abort_controller.js +++ b/lib/internal/abort_controller.js @@ -143,6 +143,7 @@ ObjectDefineProperty(AbortController.prototype, SymbolToStringTag, { }); module.exports = { + kAborted, AbortController, AbortSignal, }; diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js index 863d4ef5608bce..58f7396990dddb 100644 --- a/lib/internal/bootstrap/node.js +++ b/lib/internal/bootstrap/node.js @@ -241,10 +241,9 @@ if (!config.noBrowserGlobals) { defineOperation(globalThis, 'queueMicrotask', queueMicrotask); - defineLazyGlobal(globalThis, 'performance', () => { - const { performance } = require('perf_hooks'); - return performance; - }); + // https://www.w3.org/TR/hr-time-2/#the-performance-attribute + defineReplacableAttribute(globalThis, 'performance', + require('perf_hooks').performance); // Non-standard extensions: defineOperation(globalThis, 'clearImmediate', timers.clearImmediate); @@ -494,20 +493,12 @@ function defineOperation(target, name, method) { }); } -function defineLazyGlobal(target, name, loader) { - let value; - let overridden = false; +// https://heycam.github.io/webidl/#Replaceable +function defineReplacableAttribute(target, name, value) { ObjectDefineProperty(target, name, { + writable: true, enumerable: true, configurable: true, - get() { - if (value === undefined && !overridden) - value = loader(); - return value; - }, - set(val) { - value = val; - overridden = true; - } + value, }); } diff --git a/lib/internal/bootstrap/pre_execution.js b/lib/internal/bootstrap/pre_execution.js index 83ccfe90c11065..3b69844dc4ea0c 100644 --- a/lib/internal/bootstrap/pre_execution.js +++ b/lib/internal/bootstrap/pre_execution.js @@ -27,6 +27,7 @@ function prepareMainThreadExecution(expandArgv1 = false) { // Patch the process object with legacy properties and normalizations patchProcessObject(expandArgv1); setupTraceCategoryState(); + setupPerfHooks(); setupInspectorHooks(); setupWarningHandler(); @@ -222,6 +223,11 @@ function 
setupTraceCategoryState() { toggleTraceCategoryState(isTraceCategoryEnabled('node.async_hooks')); } +function setupPerfHooks() { + require('internal/perf/performance').refreshTimeOrigin(); + require('internal/perf/utils').refreshTimeOrigin(); +} + function setupInspectorHooks() { // If Debugger.setAsyncCallStackDepth is sent during bootstrap, // we cannot immediately call into JS to enable the hooks, which could @@ -474,6 +480,7 @@ module.exports = { setupCoverageHooks, setupWarningHandler, setupDebugEnv, + setupPerfHooks, prepareMainThreadExecution, initializeDeprecations, initializeESMLoader, diff --git a/lib/internal/crypto/aes.js b/lib/internal/crypto/aes.js index dd6aa49ff454ab..0675c59ec368ca 100644 --- a/lib/internal/crypto/aes.js +++ b/lib/internal/crypto/aes.js @@ -36,7 +36,6 @@ const { getArrayBufferOrView, hasAnyNotIn, jobPromise, - lazyDOMException, validateByteLength, validateKeyOps, validateMaxBufferLength, @@ -45,6 +44,10 @@ const { kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { PromiseReject } = primordials; const { diff --git a/lib/internal/crypto/diffiehellman.js b/lib/internal/crypto/diffiehellman.js index 2e38e95ea1d774..2efdbdd5bac9d3 100644 --- a/lib/internal/crypto/diffiehellman.js +++ b/lib/internal/crypto/diffiehellman.js @@ -47,6 +47,10 @@ const { isAnyArrayBuffer, } = require('internal/util/types'); +const { + lazyDOMException, +} = require('internal/util'); + const { KeyObject, InternalCryptoKey, @@ -66,7 +70,6 @@ const { getUsagesUnion, hasAnyNotIn, jobPromise, - lazyDOMException, toBuf, kHandle, kKeyObject, diff --git a/lib/internal/crypto/dsa.js b/lib/internal/crypto/dsa.js index b615c3a9cb932f..54bd70d9e2eac5 100644 --- a/lib/internal/crypto/dsa.js +++ b/lib/internal/crypto/dsa.js @@ -44,13 +44,16 @@ const { getUsagesUnion, hasAnyNotIn, jobPromise, - lazyDOMException, normalizeHashName, validateKeyOps, kKeyObject, kHandle, } = require('internal/crypto/util'); 
+const { + lazyDOMException, +} = require('internal/util'); + function verifyAcceptableDsaKeyUse(name, type, usages) { let checkSet; switch (type) { diff --git a/lib/internal/crypto/ec.js b/lib/internal/crypto/ec.js index ee14eed7d083a7..8bc7d9a28a42ea 100644 --- a/lib/internal/crypto/ec.js +++ b/lib/internal/crypto/ec.js @@ -38,7 +38,6 @@ const { getUsagesUnion, hasAnyNotIn, jobPromise, - lazyDOMException, normalizeHashName, validateKeyOps, kHandle, @@ -46,6 +45,10 @@ const { kNamedCurveAliases, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { generateKeyPair, } = require('internal/crypto/keygen'); diff --git a/lib/internal/crypto/hkdf.js b/lib/internal/crypto/hkdf.js index 8cff0489a553e0..84d39ac61b2b2e 100644 --- a/lib/internal/crypto/hkdf.js +++ b/lib/internal/crypto/hkdf.js @@ -23,7 +23,6 @@ const { kMaxLength } = require('buffer'); const { getArrayBufferOrView, - lazyDOMException, normalizeHashName, toBuf, validateByteSource, @@ -35,6 +34,10 @@ const { isKeyObject, } = require('internal/crypto/keys'); +const { + lazyDOMException, +} = require('internal/util'); + const { isAnyArrayBuffer, isArrayBufferView, diff --git a/lib/internal/crypto/mac.js b/lib/internal/crypto/mac.js index 5ee1f0918db7e1..61fcc88a923dad 100644 --- a/lib/internal/crypto/mac.js +++ b/lib/internal/crypto/mac.js @@ -18,7 +18,6 @@ const { getHashLength, hasAnyNotIn, jobPromise, - lazyDOMException, normalizeHashName, validateBitLength, validateKeyOps, @@ -26,6 +25,10 @@ const { kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { codes: { ERR_MISSING_OPTION, diff --git a/lib/internal/crypto/pbkdf2.js b/lib/internal/crypto/pbkdf2.js index d600f8f036284b..753c4f2d9da597 100644 --- a/lib/internal/crypto/pbkdf2.js +++ b/lib/internal/crypto/pbkdf2.js @@ -25,11 +25,14 @@ const { ERR_MISSING_OPTION } = require('internal/errors').codes; const { getArrayBufferOrView, 
getDefaultEncoding, - lazyDOMException, normalizeHashName, kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + function pbkdf2(password, salt, iterations, keylen, digest, callback) { if (typeof digest === 'function') { callback = digest; diff --git a/lib/internal/crypto/random.js b/lib/internal/crypto/random.js index cf84507515a505..5ce158324851d4 100644 --- a/lib/internal/crypto/random.js +++ b/lib/internal/crypto/random.js @@ -27,7 +27,7 @@ const { const { lazyDOMException, -} = require('internal/crypto/util'); +} = require('internal/util'); const { Buffer, kMaxLength } = require('buffer'); diff --git a/lib/internal/crypto/rsa.js b/lib/internal/crypto/rsa.js index e7b793fa184817..a0d27f3715e211 100644 --- a/lib/internal/crypto/rsa.js +++ b/lib/internal/crypto/rsa.js @@ -40,7 +40,6 @@ const { getUsagesUnion, hasAnyNotIn, jobPromise, - lazyDOMException, normalizeHashName, validateKeyOps, validateMaxBufferLength, @@ -48,6 +47,10 @@ const { kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { isUint8Array, } = require('internal/util/types'); diff --git a/lib/internal/crypto/scrypt.js b/lib/internal/crypto/scrypt.js index 45a04905bfd447..63a5547e4cbf79 100644 --- a/lib/internal/crypto/scrypt.js +++ b/lib/internal/crypto/scrypt.js @@ -30,10 +30,13 @@ const { const { getArrayBufferOrView, getDefaultEncoding, - lazyDOMException, kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const defaults = { N: 16384, r: 8, diff --git a/lib/internal/crypto/util.js b/lib/internal/crypto/util.js index 8343940b99e169..eafcc3d9669288 100644 --- a/lib/internal/crypto/util.js +++ b/lib/internal/crypto/util.js @@ -51,6 +51,7 @@ const { Buffer } = require('buffer'); const { cachedResult, filterDuplicateStrings, + lazyDOMException, } = require('internal/util'); const { @@ -70,13 +71,6 @@ function 
lazyRequire(name) { return ret; } -let DOMException; -const lazyDOMException = hideStackFrames((message, name) => { - if (DOMException === undefined) - DOMException = internalBinding('messaging').DOMException; - return new DOMException(message, name); -}); - var defaultEncoding = 'buffer'; function setDefaultEncoding(val) { @@ -428,7 +422,6 @@ module.exports = { normalizeAlgorithm, normalizeHashName, hasAnyNotIn, - lazyDOMException, validateBitLength, validateByteLength, validateByteSource, diff --git a/lib/internal/crypto/webcrypto.js b/lib/internal/crypto/webcrypto.js index 900ac7a6e8ad7b..0dcdc28a6c2f63 100644 --- a/lib/internal/crypto/webcrypto.js +++ b/lib/internal/crypto/webcrypto.js @@ -49,7 +49,6 @@ const { const { getArrayBufferOrView, hasAnyNotIn, - lazyDOMException, lazyRequire, normalizeAlgorithm, normalizeHashName, @@ -59,6 +58,10 @@ const { kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { getRandomValues, } = require('internal/crypto/random'); diff --git a/lib/internal/debugger/inspect_repl.js b/lib/internal/debugger/inspect_repl.js index 5393cb32718300..a93a8dfecf2720 100644 --- a/lib/internal/debugger/inspect_repl.js +++ b/lib/internal/debugger/inspect_repl.js @@ -1,6 +1,3 @@ -// TODO(trott): enable ESLint -/* eslint-disable getter-return */ - 'use strict'; const { @@ -899,7 +896,7 @@ function createRepl(inspector) { copyOwnProperties(context, { get help() { - print(HELP); + return print(HELP); }, get run() { @@ -1078,7 +1075,7 @@ function createRepl(inspector) { repl.setPrompt('> '); print('Press Ctrl+C to leave debug repl'); - repl.displayPrompt(); + return repl.displayPrompt(); }, get version() { diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 13b56311d370b8..fb01b8f6731ff1 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -33,6 +33,7 @@ const { Number, NumberIsInteger, ObjectDefineProperty, + ObjectDefineProperties, ObjectIsExtensible, 
ObjectGetOwnPropertyDescriptor, ObjectKeys, @@ -58,6 +59,8 @@ const { URIError, } = primordials; +const kIsNodeError = Symbol('kIsNodeError'); + const isWindows = process.platform === 'win32'; const messages = new SafeMap(); @@ -116,7 +119,12 @@ const prepareStackTrace = (globalThis, error, trace) => { // Error: Message // at function (file) // at file - const errorString = ErrorPrototypeToString(error); + let errorString; + if (kIsNodeError in error) { + errorString = `${error.name} [${error.code}]: ${error.message}`; + } else { + errorString = ErrorPrototypeToString(error); + } if (trace.length === 0) { return errorString; } @@ -186,27 +194,6 @@ function lazyBuffer() { return buffer; } -const addCodeToName = hideStackFrames(function addCodeToName(err, name, code) { - // Set the stack - err = captureLargerStackTrace(err); - // Add the error code to the name to include it in the stack trace. - err.name = `${name} [${code}]`; - // Access the stack to generate the error message including the error code - // from the name. - err.stack; // eslint-disable-line no-unused-expressions - // Reset the name to the actual name. 
- if (name === 'SystemError') { - ObjectDefineProperty(err, 'name', { - value: name, - enumerable: false, - writable: true, - configurable: true - }); - } else { - delete err.name; - } -}); - function isErrorStackTraceLimitWritable() { const desc = ObjectGetOwnPropertyDescriptor(Error, 'stackTraceLimit'); if (desc === undefined) { @@ -242,43 +229,55 @@ class SystemError extends Error { if (context.dest !== undefined) message += ` => ${context.dest}`; - ObjectDefineProperty(this, 'message', { - value: message, - enumerable: false, - writable: true, - configurable: true - }); - addCodeToName(this, 'SystemError', key); + captureLargerStackTrace(this); this.code = key; - ObjectDefineProperty(this, 'info', { - value: context, - enumerable: true, - configurable: true, - writable: false - }); - - ObjectDefineProperty(this, 'errno', { - get() { - return context.errno; + ObjectDefineProperties(this, { + [kIsNodeError]: { + value: true, + enumerable: false, + writable: false, + configurable: true, }, - set: (value) => { - context.errno = value; + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, }, - enumerable: true, - configurable: true - }); - - ObjectDefineProperty(this, 'syscall', { - get() { - return context.syscall; + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get() { + return context.errno; + }, + set: (value) => { + context.errno = value; + }, + enumerable: true, + configurable: true, }, - set: (value) => { - context.syscall = value; + syscall: { + get() { + return context.syscall; + }, + set: (value) => { + context.syscall = value; + }, + enumerable: true, + configurable: true, }, - enumerable: true, - configurable: true }); if (context.path !== undefined) { @@ -346,21 +345,29 @@ function makeNodeErrorWithCode(Base, key) { // Reset the limit and setting the name 
property. if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit; const message = getMessage(key, args, error); - ObjectDefineProperty(error, 'message', { - value: message, - enumerable: false, - writable: true, - configurable: true, - }); - ObjectDefineProperty(error, 'toString', { - value() { - return `${this.name} [${key}]: ${this.message}`; + ObjectDefineProperties(error, { + [kIsNodeError]: { + value: true, + enumerable: false, + writable: false, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + toString: { + value() { + return `${this.name} [${key}]: ${this.message}`; + }, + enumerable: false, + writable: true, + configurable: true, }, - enumerable: false, - writable: true, - configurable: true, }); - addCodeToName(error, Base.name, key); + captureLargerStackTrace(error); error.code = key; return error; }; @@ -792,7 +799,6 @@ class AbortError extends Error { } } module.exports = { - addCodeToName, // Exported for NghttpError aggregateTwoErrors, codes, dnsException, @@ -815,7 +821,9 @@ module.exports = { maybeOverridePrepareStackTrace, overrideStackTrace, kEnhanceStackBeforeInspector, - fatalExceptionStackEnhancers + fatalExceptionStackEnhancers, + kIsNodeError, + captureLargerStackTrace, }; // To declare an error message, use the E(sym, val, def) function above. 
The sym @@ -1033,6 +1041,7 @@ E('ERR_HTTP_SOCKET_ENCODING', 'Changing the socket encoding is not allowed per RFC7230 Section 3.', Error); E('ERR_HTTP_TRAILER_INVALID', 'Trailers are invalid with this transfer encoding', Error); +E('ERR_ILLEGAL_CONSTRUCTOR', 'Illegal constructor', TypeError); E('ERR_INCOMPATIBLE_OPTION_PAIR', 'Option "%s" cannot be used in combination with option "%s"', TypeError); E('ERR_INPUT_TYPE_NOT_ALLOWED', '--input-type can only be used with string ' + @@ -1256,8 +1265,8 @@ E('ERR_INVALID_RETURN_VALUE', (input, name, value) => { } return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`; -}, TypeError); -E('ERR_INVALID_STATE', 'Invalid state: %s', Error); +}, TypeError, RangeError); +E('ERR_INVALID_STATE', 'Invalid state: %s', Error, TypeError, RangeError); E('ERR_INVALID_SYNC_FORK_INPUT', 'Asynchronous forks do not support ' + 'Buffer, TypedArray, DataView or string input: %s', @@ -1361,7 +1370,7 @@ E('ERR_NO_CRYPTO', 'Node.js is not compiled with OpenSSL crypto support', Error); E('ERR_NO_ICU', '%s is not supported on Node.js compiled without ICU', TypeError); -E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error); +E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error, TypeError); E('ERR_OUT_OF_RANGE', (str, range, input, replaceDefaultBoolean = false) => { assert(range, 'Missing "range" argument'); diff --git a/lib/internal/event_target.js b/lib/internal/event_target.js index 026746825b7767..825e1e8b2597ab 100644 --- a/lib/internal/event_target.js +++ b/lib/internal/event_target.js @@ -59,13 +59,7 @@ const kRemoveListener = Symbol('kRemoveListener'); const kIsNodeStyleListener = Symbol('kIsNodeStyleListener'); const kTrustEvent = Symbol('kTrustEvent'); -// Lazy load perf_hooks to avoid the additional overhead on startup -let perf_hooks; -function lazyNow() { - if (perf_hooks === undefined) - perf_hooks = require('perf_hooks'); - return perf_hooks.performance.now(); -} +const { now } = 
require('internal/perf/utils'); // TODO(joyeecheung): V8 snapshot does not support instance member // initializers for now: @@ -98,7 +92,7 @@ class Event { this[kComposed] = !!composed; this[kType] = `${type}`; this[kDefaultPrevented] = false; - this[kTimestamp] = lazyNow(); + this[kTimestamp] = now(); this[kPropagationStopped] = false; if (options?.[kTrustEvent]) { isTrustedSet.add(this); diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index 62b4ae10b21fc4..6007e7384cbca3 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -28,7 +28,6 @@ const { Buffer } = require('buffer'); const { codes: { ERR_FS_FILE_TOO_LARGE, - ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_METHOD_NOT_IMPLEMENTED, }, @@ -72,9 +71,10 @@ const { validateBuffer, validateEncoding, validateInteger, + validateString, } = require('internal/validators'); const pathModule = require('path'); -const { promisify } = require('internal/util'); +const { lazyDOMException, promisify } = require('internal/util'); const { EventEmitterMixin } = require('internal/event_target'); const { watch } = require('internal/fs/watchers'); const { isIterable } = require('internal/streams/utils'); @@ -209,8 +209,7 @@ class FileHandle extends EventEmitterMixin(JSTransferable) { [kTransfer]() { if (this[kClosePromise] || this[kRefs] > 1) { - const DOMException = internalBinding('messaging').DOMException; - throw new DOMException('Cannot transfer FileHandle while in use', + throw lazyDOMException('Cannot transfer FileHandle while in use', 'DataCloneError'); } @@ -693,9 +692,8 @@ async function realpath(path, options) { async function mkdtemp(prefix, options) { options = getOptions(options, {}); - if (!prefix || typeof prefix !== 'string') { - throw new ERR_INVALID_ARG_TYPE('prefix', 'string', prefix); - } + + validateString(prefix, 'prefix'); nullCheck(prefix); warnOnNonPortableTemplate(prefix); return binding.mkdtemp(`${prefix}XXXXXX`, options.encoding, kUsePromises); diff 
--git a/lib/internal/http.js b/lib/internal/http.js index badfaa5c4a88d8..56187a2b1cc315 100644 --- a/lib/internal/http.js +++ b/lib/internal/http.js @@ -9,7 +9,7 @@ const { const { setUnrefTimeout } = require('internal/timers'); -const { InternalPerformanceEntry } = require('internal/perf/perf'); +const { InternalPerformanceEntry } = require('internal/perf/performance_entry'); const { enqueue, diff --git a/lib/internal/http2/util.js b/lib/internal/http2/util.js index f8252fffba65f5..78ff2937c3a317 100644 --- a/lib/internal/http2/util.js +++ b/lib/internal/http2/util.js @@ -9,6 +9,7 @@ const { MathMax, Number, ObjectCreate, + ObjectDefineProperty, ObjectKeys, SafeSet, String, @@ -28,9 +29,10 @@ const { ERR_INVALID_ARG_TYPE, ERR_INVALID_HTTP_TOKEN }, - addCodeToName, + captureLargerStackTrace, getMessage, - hideStackFrames + hideStackFrames, + kIsNodeError, } = require('internal/errors'); const kSensitiveHeaders = Symbol('nodejs.http2.sensitiveHeaders'); @@ -550,7 +552,13 @@ class NghttpError extends Error { binding.nghttp2ErrorString(integerCode)); this.code = customErrorCode || 'ERR_HTTP2_ERROR'; this.errno = integerCode; - addCodeToName(this, super.name, this.code); + captureLargerStackTrace(this); + ObjectDefineProperty(this, kIsNodeError, { + value: true, + enumerable: false, + writable: false, + configurable: true, + }); } toString() { diff --git a/lib/internal/main/worker_thread.js b/lib/internal/main/worker_thread.js index e22fd17fa2d214..d6434ff96e1185 100644 --- a/lib/internal/main/worker_thread.js +++ b/lib/internal/main/worker_thread.js @@ -18,6 +18,7 @@ const { setupInspectorHooks, setupWarningHandler, setupDebugEnv, + setupPerfHooks, initializeDeprecations, initializeWASI, initializeCJSLoader, @@ -114,6 +115,7 @@ port.on('message', (message) => { } = message; setupTraceCategoryState(); + setupPerfHooks(); initializeReport(); if (manifestSrc) { require('internal/process/policy').setup(manifestSrc, manifestURL); diff --git 
a/lib/internal/modules/esm/module_job.js b/lib/internal/modules/esm/module_job.js index b899c233d45a09..0ef8ebdeb9245b 100644 --- a/lib/internal/modules/esm/module_job.js +++ b/lib/internal/modules/esm/module_job.js @@ -24,6 +24,9 @@ const { const { ModuleWrap } = internalBinding('module_wrap'); const { decorateErrorStack } = require('internal/util'); +const { + getSourceMapsEnabled, +} = require('internal/source_map/source_map_cache'); const assert = require('internal/assert'); const resolvedPromise = PromiseResolve(); @@ -122,7 +125,12 @@ class ModuleJob { } } catch (e) { decorateErrorStack(e); - if (StringPrototypeIncludes(e.message, + // TODO(@bcoe): Add source map support to exception that occurs as result + // of missing named export. This is currently not possible because + // stack trace originates in module_job, not the file itself. A hidden + // symbol with filename could be set in node_errors.cc to facilitate this. + if (!getSourceMapsEnabled() && + StringPrototypeIncludes(e.message, ' does not provide an export named')) { const splitStack = StringPrototypeSplit(e.stack, '\n'); const parentFileUrl = StringPrototypeReplace( diff --git a/lib/internal/per_context/primordials.js b/lib/internal/per_context/primordials.js index 42250ffb422d6e..4dfb4dea85ef2a 100644 --- a/lib/internal/per_context/primordials.js +++ b/lib/internal/per_context/primordials.js @@ -415,5 +415,10 @@ primordials.SafePromisePrototypeFinally = (thisPromise, onFinally) => .then(a, b) ); +primordials.AsyncIteratorPrototype = + primordials.ReflectGetPrototypeOf( + primordials.ReflectGetPrototypeOf( + async function* () {}).prototype); + ObjectSetPrototypeOf(primordials, null); ObjectFreeze(primordials); diff --git a/lib/internal/perf/event_loop_utilization.js b/lib/internal/perf/event_loop_utilization.js index 398c4ad4e42f58..d73b2f5a831ab9 100644 --- a/lib/internal/perf/event_loop_utilization.js +++ b/lib/internal/perf/event_loop_utilization.js @@ -2,7 +2,7 @@ const nodeTiming = 
require('internal/perf/nodetiming'); -const { now } = require('internal/perf/perf'); +const { now } = require('internal/perf/utils'); function eventLoopUtilization(util1, util2) { const ls = nodeTiming.loopStart; diff --git a/lib/internal/perf/nodetiming.js b/lib/internal/perf/nodetiming.js index 5ff6dd38cd86d3..fcbd7efff49099 100644 --- a/lib/internal/perf/nodetiming.js +++ b/lib/internal/perf/nodetiming.js @@ -3,15 +3,14 @@ const { ObjectDefineProperties, ObjectSetPrototypeOf, - SafeArrayIterator, - SafeSet, } = primordials; +const { PerformanceEntry } = require('internal/perf/performance_entry'); + const { - PerformanceEntry, - kReadOnlyAttributes, now, -} = require('internal/perf/perf'); + getMilestoneTimestamp, +} = require('internal/perf/utils'); const { customInspectSymbol: kInspect, @@ -29,26 +28,8 @@ const { NODE_PERFORMANCE_MILESTONE_ENVIRONMENT }, loopIdleTime, - milestones, - timeOrigin, } = internalBinding('performance'); -function getMilestoneTimestamp(milestoneIdx) { - const ns = milestones[milestoneIdx]; - if (ns === -1) - return ns; - return ns / 1e6 - timeOrigin; -} - -const readOnlyAttributes = new SafeSet(new SafeArrayIterator([ - 'nodeStart', - 'v8Start', - 'environment', - 'loopStart', - 'loopExit', - 'bootstrapComplete', -])); - class PerformanceNodeTiming { constructor() { ObjectDefineProperties(this, { @@ -159,10 +140,6 @@ class PerformanceNodeTiming { idleTime: this.idleTime, }; } - - static get [kReadOnlyAttributes]() { - return readOnlyAttributes; - } } ObjectSetPrototypeOf( diff --git a/lib/internal/perf/observe.js b/lib/internal/perf/observe.js index c96925c723f64e..8ec8512434510b 100644 --- a/lib/internal/perf/observe.js +++ b/lib/internal/perf/observe.js @@ -31,7 +31,7 @@ const { const { InternalPerformanceEntry, isPerformanceEntry, -} = require('internal/perf/perf'); +} = require('internal/perf/performance_entry'); const { codes: { @@ -174,11 +174,13 @@ class PerformanceObserverEntryList { } class PerformanceObserver { - [kBuffer] = 
[]; - [kEntryTypes] = new SafeSet(); - [kType] = undefined; - constructor(callback) { + // TODO(joyeecheung): V8 snapshot does not support instance member + // initializers for now: + // https://bugs.chromium.org/p/v8/issues/detail?id=10704 + this[kBuffer] = []; + this[kEntryTypes] = new SafeSet(); + this[kType] = undefined; validateCallback(callback); this[kCallback] = callback; } diff --git a/lib/internal/perf/performance.js b/lib/internal/perf/performance.js new file mode 100644 index 00000000000000..ca4aed90e4e270 --- /dev/null +++ b/lib/internal/perf/performance.js @@ -0,0 +1,129 @@ +'use strict'; + +const { + ObjectDefineProperty, + ObjectDefineProperties, + ObjectSetPrototypeOf, + TypeError, +} = primordials; + +const { + EventTarget, +} = require('internal/event_target'); + +const { now } = require('internal/perf/utils'); + +const { + mark, + measure, + clearMarks, +} = require('internal/perf/usertiming'); + +const eventLoopUtilization = require('internal/perf/event_loop_utilization'); +const nodeTiming = require('internal/perf/nodetiming'); +const timerify = require('internal/perf/timerify'); +const { customInspectSymbol: kInspect } = require('internal/util'); +const { inspect } = require('util'); + +const { + getTimeOriginTimestamp +} = internalBinding('performance'); + +class Performance extends EventTarget { + constructor() { + // eslint-disable-next-line no-restricted-syntax + throw new TypeError('Illegal constructor'); + } + + [kInspect](depth, options) { + if (depth < 0) return this; + + const opts = { + ...options, + depth: options.depth == null ? 
null : options.depth - 1 + }; + + return `Performance ${inspect({ + nodeTiming: this.nodeTiming, + timeOrigin: this.timeOrigin, + }, opts)}`; + } + +} + +function toJSON() { + return { + nodeTiming: this.nodeTiming, + timeOrigin: this.timeOrigin, + eventLoopUtilization: this.eventLoopUtilization() + }; +} + +class InternalPerformance extends EventTarget {} +InternalPerformance.prototype.constructor = Performance.prototype.constructor; +ObjectSetPrototypeOf(InternalPerformance.prototype, Performance.prototype); + +ObjectDefineProperties(Performance.prototype, { + clearMarks: { + configurable: true, + enumerable: false, + value: clearMarks, + }, + eventLoopUtilization: { + configurable: true, + enumerable: false, + value: eventLoopUtilization, + }, + mark: { + configurable: true, + enumerable: false, + value: mark, + }, + measure: { + configurable: true, + enumerable: false, + value: measure, + }, + nodeTiming: { + configurable: true, + enumerable: false, + value: nodeTiming, + }, + now: { + configurable: true, + enumerable: false, + value: now, + }, + timerify: { + configurable: true, + enumerable: false, + value: timerify, + }, + // This would be updated during pre-execution in case + // the process is launched from a snapshot. + // TODO(joyeecheung): we may want to warn about access to + // this during snapshot building. 
+ timeOrigin: { + configurable: true, + enumerable: true, + value: getTimeOriginTimestamp(), + }, + toJSON: { + configurable: true, + enumerable: true, + value: toJSON, + } +}); + +function refreshTimeOrigin() { + ObjectDefineProperty(Performance.prototype, 'timeOrigin', { + configurable: true, + enumerable: true, + value: getTimeOriginTimestamp(), + }); +} + +module.exports = { + InternalPerformance, + refreshTimeOrigin +}; diff --git a/lib/internal/perf/perf.js b/lib/internal/perf/performance_entry.js similarity index 87% rename from lib/internal/perf/perf.js rename to lib/internal/perf/performance_entry.js index d049d3c68fff04..f9f1c9e8966e2d 100644 --- a/lib/internal/perf/perf.js +++ b/lib/internal/perf/performance_entry.js @@ -6,10 +6,6 @@ const { TypeError, } = primordials; -const { - timeOrigin, -} = internalBinding('performance'); - const { customInspectSymbol: kInspect, } = require('internal/util'); @@ -21,12 +17,6 @@ const kType = Symbol('kType'); const kStart = Symbol('kStart'); const kDuration = Symbol('kDuration'); const kDetail = Symbol('kDetail'); -const kReadOnlyAttributes = Symbol('kReadOnlyAttributes'); - -function now() { - const hr = process.hrtime(); - return (hr[0] * 1000 + hr[1] / 1e6) - timeOrigin; -} function isPerformanceEntry(obj) { return obj?.[kName] !== undefined; @@ -88,7 +78,5 @@ ObjectSetPrototypeOf( module.exports = { InternalPerformanceEntry, PerformanceEntry, - kReadOnlyAttributes, isPerformanceEntry, - now, }; diff --git a/lib/internal/perf/timerify.js b/lib/internal/perf/timerify.js index d730f62aae7eb1..dae0b06bf80c8a 100644 --- a/lib/internal/perf/timerify.js +++ b/lib/internal/perf/timerify.js @@ -9,10 +9,8 @@ const { Symbol, } = primordials; -const { - InternalPerformanceEntry, - now, -} = require('internal/perf/perf'); +const { InternalPerformanceEntry } = require('internal/perf/performance_entry'); +const { now } = require('internal/perf/utils'); const { validateFunction, diff --git a/lib/internal/perf/usertiming.js 
b/lib/internal/perf/usertiming.js index 6672a3f4dfdeb0..f83091de1919a8 100644 --- a/lib/internal/perf/usertiming.js +++ b/lib/internal/perf/usertiming.js @@ -3,16 +3,13 @@ const { ObjectKeys, SafeMap, + SafeSet, + SafeArrayIterator, } = primordials; -const { - InternalPerformanceEntry, - kReadOnlyAttributes, - now, -} = require('internal/perf/perf'); - +const { InternalPerformanceEntry } = require('internal/perf/performance_entry'); +const { now } = require('internal/perf/utils'); const { enqueue } = require('internal/perf/observe'); - const nodeTiming = require('internal/perf/nodetiming'); const { @@ -31,8 +28,15 @@ const { } = require('internal/errors'); const marks = new SafeMap(); -const nodeTimingReadOnlyAttributes = - nodeTiming.constructor[kReadOnlyAttributes]; + +const nodeTimingReadOnlyAttributes = new SafeSet(new SafeArrayIterator([ + 'nodeStart', + 'v8Start', + 'environment', + 'loopStart', + 'loopExit', + 'bootstrapComplete', +])); function getMark(name) { if (name === undefined) return; diff --git a/lib/internal/perf/utils.js b/lib/internal/perf/utils.js new file mode 100644 index 00000000000000..bcc7e223b8c882 --- /dev/null +++ b/lib/internal/perf/utils.js @@ -0,0 +1,33 @@ +'use strict'; + +const binding = internalBinding('performance'); +const { + milestones, + getTimeOrigin, +} = binding; + +// TODO(joyeecheung): we may want to warn about access to +// this during snapshot building. 
+let timeOrigin = getTimeOrigin(); + +function now() { + const hr = process.hrtime(); + return (hr[0] * 1000 + hr[1] / 1e6) - timeOrigin; +} + +function getMilestoneTimestamp(milestoneIdx) { + const ns = milestones[milestoneIdx]; + if (ns === -1) + return ns; + return ns / 1e6 - timeOrigin; +} + +function refreshTimeOrigin() { + timeOrigin = getTimeOrigin(); +} + +module.exports = { + now, + getMilestoneTimestamp, + refreshTimeOrigin +}; diff --git a/lib/internal/repl/await.js b/lib/internal/repl/await.js index 9d36c7147509b4..e36fa5bfd735b1 100644 --- a/lib/internal/repl/await.js +++ b/lib/internal/repl/await.js @@ -3,34 +3,41 @@ const { ArrayFrom, ArrayPrototypeForEach, + ArrayPrototypeIncludes, ArrayPrototypeJoin, ArrayPrototypePop, ArrayPrototypePush, FunctionPrototype, ObjectKeys, + RegExpPrototypeSymbolReplace, + StringPrototypeEndsWith, + StringPrototypeIncludes, + StringPrototypeIndexOf, + StringPrototypeRepeat, + StringPrototypeSplit, + StringPrototypeStartsWith, + SyntaxError, } = primordials; -const acorn = require('internal/deps/acorn/acorn/dist/acorn'); +const parser = require('internal/deps/acorn/acorn/dist/acorn').Parser; const walk = require('internal/deps/acorn/acorn-walk/dist/walk'); -const privateMethods = - require('internal/deps/acorn-plugins/acorn-private-methods/index'); -const classFields = - require('internal/deps/acorn-plugins/acorn-class-fields/index'); -const staticClassFeatures = - require('internal/deps/acorn-plugins/acorn-static-class-features/index'); - -const parser = acorn.Parser.extend( - privateMethods, - classFields, - staticClassFeatures -); +const { Recoverable } = require('internal/repl'); + +function isTopLevelDeclaration(state) { + return state.ancestors[state.ancestors.length - 2] === state.body; +} const noop = FunctionPrototype; const visitorsWithoutAncestors = { ClassDeclaration(node, state, c) { - if (state.ancestors[state.ancestors.length - 2] === state.body) { + if (isTopLevelDeclaration(state)) { state.prepend(node, 
`${node.id.name}=`); + ArrayPrototypePush( + state.hoistedDeclarationStatements, + `let ${node.id.name}; ` + ); } + walk.base.ClassDeclaration(node, state, c); }, ForOfStatement(node, state, c) { @@ -41,6 +48,10 @@ const visitorsWithoutAncestors = { }, FunctionDeclaration(node, state, c) { state.prepend(node, `${node.id.name}=`); + ArrayPrototypePush( + state.hoistedDeclarationStatements, + `var ${node.id.name}; ` + ); }, FunctionExpression: noop, ArrowFunctionExpression: noop, @@ -54,22 +65,72 @@ const visitorsWithoutAncestors = { walk.base.ReturnStatement(node, state, c); }, VariableDeclaration(node, state, c) { - if (node.kind === 'var' || - state.ancestors[state.ancestors.length - 2] === state.body) { - if (node.declarations.length === 1) { - state.replace(node.start, node.start + node.kind.length, 'void'); - } else { - state.replace(node.start, node.start + node.kind.length, 'void ('); + const variableKind = node.kind; + const isIterableForDeclaration = ArrayPrototypeIncludes( + ['ForOfStatement', 'ForInStatement'], + state.ancestors[state.ancestors.length - 2].type + ); + + if (variableKind === 'var' || isTopLevelDeclaration(state)) { + state.replace( + node.start, + node.start + variableKind.length + (isIterableForDeclaration ? 1 : 0), + variableKind === 'var' && isIterableForDeclaration ? + '' : + 'void' + (node.declarations.length === 1 ? '' : ' (') + ); + + if (!isIterableForDeclaration) { + ArrayPrototypeForEach(node.declarations, (decl) => { + state.prepend(decl, '('); + state.append(decl, decl.init ? ')' : '=undefined)'); + }); + + if (node.declarations.length !== 1) { + state.append(node.declarations[node.declarations.length - 1], ')'); + } + } + + const variableIdentifiersToHoist = [ + ['var', []], + ['let', []], + ]; + function registerVariableDeclarationIdentifiers(node) { + switch (node.type) { + case 'Identifier': + ArrayPrototypePush( + variableIdentifiersToHoist[variableKind === 'var' ? 
0 : 1][1], + node.name + ); + break; + case 'ObjectPattern': + ArrayPrototypeForEach(node.properties, (property) => { + registerVariableDeclarationIdentifiers(property.value); + }); + break; + case 'ArrayPattern': + ArrayPrototypeForEach(node.elements, (element) => { + registerVariableDeclarationIdentifiers(element); + }); + break; + } } ArrayPrototypeForEach(node.declarations, (decl) => { - state.prepend(decl, '('); - state.append(decl, decl.init ? ')' : '=undefined)'); + registerVariableDeclarationIdentifiers(decl.id); }); - if (node.declarations.length !== 1) { - state.append(node.declarations[node.declarations.length - 1], ')'); - } + ArrayPrototypeForEach( + variableIdentifiersToHoist, + ({ 0: kind, 1: identifiers }) => { + if (identifiers.length > 0) { + ArrayPrototypePush( + state.hoistedDeclarationStatements, + `${kind} ${ArrayPrototypeJoin(identifiers, ', ')}; ` + ); + } + } + ); } walk.base.VariableDeclaration(node, state, c); @@ -92,18 +153,49 @@ for (const nodeType of ObjectKeys(walk.base)) { } function processTopLevelAwait(src) { - const wrapped = `(async () => { ${src} })()`; + const wrapPrefix = '(async () => { '; + const wrapped = `${wrapPrefix}${src} })()`; const wrappedArray = ArrayFrom(wrapped); let root; try { root = parser.parse(wrapped, { ecmaVersion: 'latest' }); - } catch { - return null; + } catch (e) { + if (StringPrototypeStartsWith(e.message, 'Unterminated ')) + throw new Recoverable(e); + // If the parse error is before the first "await", then use the execution + // error. Otherwise we must emit this parse error, making it look like a + // proper syntax error. + const awaitPos = StringPrototypeIndexOf(src, 'await'); + const errPos = e.pos - wrapPrefix.length; + if (awaitPos > errPos) + return null; + // Convert keyword parse errors on await into their original errors when + // possible. 
+ if (errPos === awaitPos + 6 && + StringPrototypeIncludes(e.message, 'Expecting Unicode escape sequence')) + return null; + if (errPos === awaitPos + 7 && + StringPrototypeIncludes(e.message, 'Unexpected token')) + return null; + const line = e.loc.line; + const column = line === 1 ? e.loc.column - wrapPrefix.length : e.loc.column; + let message = '\n' + StringPrototypeSplit(src, '\n')[line - 1] + '\n' + + StringPrototypeRepeat(' ', column) + + '^\n\n' + RegExpPrototypeSymbolReplace(/ \([^)]+\)/, e.message, ''); + // V8 unexpected token errors include the token string. + if (StringPrototypeEndsWith(message, 'Unexpected token')) + message += " '" + + // Wrapper end may cause acorn to report error position after the source + (src[e.pos - wrapPrefix.length] ?? src[src.length - 1]) + + "'"; + // eslint-disable-next-line no-restricted-syntax + throw new SyntaxError(message); } const body = root.body[0].expression.callee.body; const state = { body, ancestors: [], + hoistedDeclarationStatements: [], replace(from, to, str) { for (let i = from; i < to; i++) { wrappedArray[i] = ''; @@ -148,7 +240,10 @@ function processTopLevelAwait(src) { state.append(last.expression, ')'); } - return ArrayPrototypeJoin(wrappedArray, ''); + return ( + ArrayPrototypeJoin(state.hoistedDeclarationStatements, '') + + ArrayPrototypeJoin(wrappedArray, '') + ); } module.exports = { diff --git a/lib/internal/repl/utils.js b/lib/internal/repl/utils.js index aff7dafe16e95a..4ee177f6eb02bd 100644 --- a/lib/internal/repl/utils.js +++ b/lib/internal/repl/utils.js @@ -23,12 +23,6 @@ const { const { tokTypes: tt, Parser: AcornParser } = require('internal/deps/acorn/acorn/dist/acorn'); -const privateMethods = - require('internal/deps/acorn-plugins/acorn-private-methods/index'); -const classFields = - require('internal/deps/acorn-plugins/acorn-class-fields/index'); -const staticClassFeatures = - require('internal/deps/acorn-plugins/acorn-static-class-features/index'); const { sendInspectorCommand } = 
require('internal/util/inspector'); @@ -98,9 +92,6 @@ function isRecoverableError(e, code) { // const RecoverableParser = AcornParser .extend( - privateMethods, - classFields, - staticClassFeatures, (Parser) => { return class extends Parser { // eslint-disable-next-line no-useless-constructor diff --git a/lib/internal/source_map/prepare_stack_trace.js b/lib/internal/source_map/prepare_stack_trace.js index 6b8d4e566ff1b1..9502cfef6fe029 100644 --- a/lib/internal/source_map/prepare_stack_trace.js +++ b/lib/internal/source_map/prepare_stack_trace.js @@ -21,7 +21,8 @@ const { findSourceMap } = require('internal/source_map/source_map_cache'); const { kNoOverride, overrideStackTrace, - maybeOverridePrepareStackTrace + maybeOverridePrepareStackTrace, + kIsNodeError, } = require('internal/errors'); const { fileURLToPath } = require('internal/url'); @@ -41,7 +42,12 @@ const prepareStackTrace = (globalThis, error, trace) => { maybeOverridePrepareStackTrace(globalThis, error, trace); if (globalOverride !== kNoOverride) return globalOverride; - const errorString = ErrorPrototypeToString(error); + let errorString; + if (kIsNodeError in error) { + errorString = `${error.name} [${error.code}]: ${error.message}`; + } else { + errorString = ErrorPrototypeToString(error); + } if (trace.length === 0) { return errorString; diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js index a2892c67a0fcfa..6d50b09f0948b4 100644 --- a/lib/internal/streams/destroy.js +++ b/lib/internal/streams/destroy.js @@ -69,12 +69,16 @@ function destroy(err, cb) { function _destroy(self, err, cb) { let called = false; - const result = self._destroy(err || null, (err) => { - const r = self._readableState; - const w = self._writableState; + function onDestroy(err) { + if (called) { + return; + } called = true; + const r = self._readableState; + const w = self._writableState; + checkError(err, w, r); if (w) { @@ -93,64 +97,24 @@ function _destroy(self, err, cb) { } else { 
process.nextTick(emitCloseNT, self); } - }); - if (result !== undefined && result !== null) { - try { + } + try { + const result = self._destroy(err || null, onDestroy); + if (result != null) { const then = result.then; if (typeof then === 'function') { then.call( result, function() { - if (called) - return; - - const r = self._readableState; - const w = self._writableState; - - if (w) { - w.closed = true; - } - if (r) { - r.closed = true; - } - - if (typeof cb === 'function') { - process.nextTick(cb); - } - - process.nextTick(emitCloseNT, self); + process.nextTick(onDestroy, null); }, function(err) { - const r = self._readableState; - const w = self._writableState; - err.stack; // eslint-disable-line no-unused-expressions - - called = true; - - if (w && !w.errored) { - w.errored = err; - } - if (r && !r.errored) { - r.errored = err; - } - - if (w) { - w.closed = true; - } - if (r) { - r.closed = true; - } - - if (typeof cb === 'function') { - process.nextTick(cb, err); - } - - process.nextTick(emitErrorCloseNT, self, err); + process.nextTick(onDestroy, err); }); } - } catch (err) { - process.nextTick(emitErrorNT, self, err); } + } catch (err) { + onDestroy(err); } } @@ -284,13 +248,19 @@ function construct(stream, cb) { } function constructNT(stream) { - const r = stream._readableState; - const w = stream._writableState; - // With duplex streams we use the writable side for state. - const s = w || r; - let called = false; - const result = stream._construct((err) => { + + function onConstruct(err) { + if (called) { + errorOrDestroy(stream, err ?? 
new ERR_MULTIPLE_CALLBACK()); + return; + } + called = true; + + const r = stream._readableState; + const w = stream._writableState; + const s = w || r; + if (r) { r.constructed = true; } @@ -298,12 +268,6 @@ function constructNT(stream) { w.constructed = true; } - if (called) { - err = new ERR_MULTIPLE_CALLBACK(); - } else { - called = true; - } - if (s.destroyed) { stream.emit(kDestroy, err); } else if (err) { @@ -311,47 +275,25 @@ function constructNT(stream) { } else { process.nextTick(emitConstructNT, stream); } - }); - if (result !== undefined && result !== null) { - try { + } + + try { + const result = stream._construct(onConstruct); + if (result != null) { const then = result.then; if (typeof then === 'function') { then.call( result, function() { - // If the callback was invoked, do nothing further. - if (called) - return; - if (r) { - r.constructed = true; - } - if (w) { - w.constructed = true; - } - if (s.destroyed) { - process.nextTick(() => stream.emit(kDestroy)); - } else { - process.nextTick(emitConstructNT, stream); - } + process.nextTick(onConstruct, null); }, function(err) { - if (r) { - r.constructed = true; - } - if (w) { - w.constructed = true; - } - called = true; - if (s.destroyed) { - process.nextTick(() => stream.emit(kDestroy, err)); - } else { - process.nextTick(errorOrDestroy, stream, err); - } + process.nextTick(onConstruct, err); }); } - } catch (err) { - process.nextTick(emitErrorNT, stream, err); } + } catch (err) { + onConstruct(err); } } diff --git a/lib/internal/streams/duplexpair.js b/lib/internal/streams/duplexpair.js deleted file mode 100644 index ec92cbe8716df4..00000000000000 --- a/lib/internal/streams/duplexpair.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict'; - -const { - Symbol, -} = primordials; - -const { Duplex } = require('stream'); - -const kCallback = Symbol('Callback'); -const kOtherSide = Symbol('Other'); - -class DuplexSocket extends Duplex { - constructor() { - super(); - this[kCallback] = null; - this[kOtherSide] 
= null; - } - - _read() { - const callback = this[kCallback]; - if (callback) { - this[kCallback] = null; - callback(); - } - } - - _write(chunk, encoding, callback) { - if (chunk.length === 0) { - process.nextTick(callback); - } else { - this[kOtherSide].push(chunk); - this[kOtherSide][kCallback] = callback; - } - } - - _final(callback) { - this[kOtherSide].on('end', callback); - this[kOtherSide].push(null); - } -} - -class DuplexPair { - constructor() { - this.socket1 = new DuplexSocket(); - this.socket2 = new DuplexSocket(); - this.socket1[kOtherSide] = this.socket2; - this.socket2[kOtherSide] = this.socket1; - } -} - -module.exports = DuplexPair; diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js index 202a8cf9810d62..5759dbd4a580a3 100644 --- a/lib/internal/streams/pipeline.js +++ b/lib/internal/streams/pipeline.js @@ -20,20 +20,16 @@ const { ERR_INVALID_RETURN_VALUE, ERR_MISSING_ARGS, ERR_STREAM_DESTROYED, - ERR_STREAM_PREMATURE_CLOSE, }, } = require('internal/errors'); const { validateCallback } = require('internal/validators'); -function noop() {} - const { isIterable, isReadable, isStream, } = require('internal/streams/utils'); -const assert = require('internal/assert'); let PassThrough; let Readable; @@ -109,62 +105,58 @@ async function* fromReadable(val) { async function pump(iterable, writable, finish) { let error; - let callback = noop; + let onresolve = null; + const resume = (err) => { - error = aggregateTwoErrors(error, err); - const _callback = callback; - callback = noop; - _callback(); - }; - const onClose = () => { - resume(new ERR_STREAM_PREMATURE_CLOSE()); + if (err) { + error = err; + } + + if (onresolve) { + const callback = onresolve; + onresolve = null; + callback(); + } }; - const waitForDrain = () => new Promise((resolve) => { - assert(callback === noop); - if (error || writable.destroyed) { - resolve(); + const wait = () => new Promise((resolve, reject) => { + if (error) { + reject(error); } else { - 
callback = resolve; + onresolve = () => { + if (error) { + reject(error); + } else { + resolve(); + } + }; } }); - writable - .on('drain', resume) - .on('error', resume) - .on('close', onClose); + writable.on('drain', resume); + const cleanup = eos(writable, { readable: false }, resume); try { if (writable.writableNeedDrain) { - await waitForDrain(); - } - - if (error) { - return; + await wait(); } for await (const chunk of iterable) { if (!writable.write(chunk)) { - await waitForDrain(); + await wait(); } - if (error) { - return; - } - } - - if (error) { - return; } writable.end(); + + await wait(); + + finish(); } catch (err) { - error = aggregateTwoErrors(error, err); + finish(error !== err ? aggregateTwoErrors(error, err) : err); } finally { - writable - .off('drain', resume) - .off('error', resume) - .off('close', onClose); - finish(error); + cleanup(); + writable.off('drain', resume); } } diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js index 7f6876599cc7fc..d2d4f19ed3fa5c 100644 --- a/lib/internal/streams/readable.js +++ b/lib/internal/streams/readable.js @@ -479,8 +479,10 @@ Readable.prototype.read = function(n) { // If the length is currently zero, then we *need* a readable event. if (state.length === 0) state.needReadable = true; + // Call internal read method this._read(state.highWaterMark); + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, // and we need to re-evaluate how much data we can return to the user. 
diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js index 03f63b16bd04ff..f41cc183f0939a 100644 --- a/lib/internal/streams/writable.js +++ b/lib/internal/streams/writable.js @@ -654,13 +654,21 @@ function needFinish(state) { !state.errored && state.buffered.length === 0 && !state.finished && - !state.writing); + !state.writing && + !state.errorEmitted && + !state.closeEmitted); } function callFinal(stream, state) { - state.sync = true; - state.pendingcb++; - const result = stream._final((err) => { + let called = false; + + function onFinish(err) { + if (called) { + errorOrDestroy(stream, err ?? ERR_MULTIPLE_CALLBACK()); + return; + } + called = true; + state.pendingcb--; if (err) { const onfinishCallbacks = state[kOnFinished].splice(0); @@ -677,33 +685,30 @@ function callFinal(stream, state) { state.pendingcb++; process.nextTick(finish, stream, state); } - }); - if (result !== undefined && result !== null) { - try { + } + + state.sync = true; + state.pendingcb++; + + try { + const result = stream._final(onFinish); + if (result != null) { const then = result.then; if (typeof then === 'function') { then.call( result, function() { - if (state.prefinished) - return; - state.prefinish = true; - process.nextTick(() => stream.emit('prefinish')); - state.pendingcb++; - process.nextTick(finish, stream, state); + process.nextTick(onFinish, null); }, function(err) { - const onfinishCallbacks = state[kOnFinished].splice(0); - for (let i = 0; i < onfinishCallbacks.length; i++) { - process.nextTick(onfinishCallbacks[i], err); - } - process.nextTick(errorOrDestroy, stream, err, state.sync); + process.nextTick(onFinish, err); }); } - } catch (err) { - process.nextTick(errorOrDestroy, stream, err, state.sync); } + } catch (err) { + onFinish(stream, state, err); } + state.sync = false; } @@ -735,10 +740,6 @@ function finishMaybe(stream, state, sync) { function finish(stream, state) { state.pendingcb--; - // TODO (ronag): Unify with needFinish. 
- if (state.errorEmitted || state.closeEmitted) - return; - state.finished = true; const onfinishCallbacks = state[kOnFinished].splice(0); diff --git a/lib/internal/tls/parse-cert-string.js b/lib/internal/tls/parse-cert-string.js new file mode 100644 index 00000000000000..a499df886097b4 --- /dev/null +++ b/lib/internal/tls/parse-cert-string.js @@ -0,0 +1,35 @@ +'use strict'; + +const { + ArrayIsArray, + ArrayPrototypeForEach, + ArrayPrototypePush, + StringPrototypeIndexOf, + StringPrototypeSlice, + StringPrototypeSplit, + ObjectCreate, +} = primordials; + +// Example: +// C=US\nST=CA\nL=SF\nO=Joyent\nOU=Node.js\nCN=ca1\nemailAddress=ry@clouds.org +function parseCertString(s) { + const out = ObjectCreate(null); + ArrayPrototypeForEach(StringPrototypeSplit(s, '\n'), (part) => { + const sepIndex = StringPrototypeIndexOf(part, '='); + if (sepIndex > 0) { + const key = StringPrototypeSlice(part, 0, sepIndex); + const value = StringPrototypeSlice(part, sepIndex + 1); + if (key in out) { + if (!ArrayIsArray(out[key])) { + out[key] = [out[key]]; + } + ArrayPrototypePush(out[key], value); + } else { + out[key] = value; + } + } + }); + return out; +} + +exports.parseCertString = parseCertString; diff --git a/lib/internal/tls.js b/lib/internal/tls/secure-context.js similarity index 92% rename from lib/internal/tls.js rename to lib/internal/tls/secure-context.js index 0a9eea8f3eb026..50a68df092c981 100644 --- a/lib/internal/tls.js +++ b/lib/internal/tls/secure-context.js @@ -5,12 +5,8 @@ const { ArrayPrototypeFilter, ArrayPrototypeForEach, ArrayPrototypeJoin, - ArrayPrototypePush, - StringPrototypeIndexOf, - StringPrototypeSlice, StringPrototypeSplit, StringPrototypeStartsWith, - ObjectCreate, } = primordials; const { @@ -42,28 +38,6 @@ const { }, } = internalBinding('constants'); -// Example: -// C=US\nST=CA\nL=SF\nO=Joyent\nOU=Node.js\nCN=ca1\nemailAddress=ry@clouds.org -function parseCertString(s) { - const out = ObjectCreate(null); - 
ArrayPrototypeForEach(StringPrototypeSplit(s, '\n'), (part) => { - const sepIndex = StringPrototypeIndexOf(part, '='); - if (sepIndex > 0) { - const key = StringPrototypeSlice(part, 0, sepIndex); - const value = StringPrototypeSlice(part, sepIndex + 1); - if (key in out) { - if (!ArrayIsArray(out[key])) { - out[key] = [out[key]]; - } - ArrayPrototypePush(out[key], value); - } else { - out[key] = value; - } - } - }); - return out; -} - function getDefaultEcdhCurve() { // We do it this way because DEFAULT_ECDH_CURVE can be // changed by users, so we need to grab the current @@ -340,5 +314,4 @@ function configSecureContext(context, options = {}, name = 'options') { module.exports = { configSecureContext, - parseCertString, }; diff --git a/lib/internal/tls/secure-pair.js b/lib/internal/tls/secure-pair.js new file mode 100644 index 00000000000000..b3f0930a3c7118 --- /dev/null +++ b/lib/internal/tls/secure-pair.js @@ -0,0 +1,86 @@ +'use strict'; + +const EventEmitter = require('events'); +const { Duplex } = require('stream'); +const _tls_wrap = require('_tls_wrap'); +const _tls_common = require('_tls_common'); + +const { + Symbol, + ReflectConstruct, +} = primordials; + +const kCallback = Symbol('Callback'); +const kOtherSide = Symbol('Other'); + +class DuplexSocket extends Duplex { + constructor() { + super(); + this[kCallback] = null; + this[kOtherSide] = null; + } + + _read() { + const callback = this[kCallback]; + if (callback) { + this[kCallback] = null; + callback(); + } + } + + _write(chunk, encoding, callback) { + if (chunk.length === 0) { + process.nextTick(callback); + } else { + this[kOtherSide].push(chunk); + this[kOtherSide][kCallback] = callback; + } + } + + _final(callback) { + this[kOtherSide].on('end', callback); + this[kOtherSide].push(null); + } +} + +class DuplexPair { + constructor() { + this.socket1 = new DuplexSocket(); + this.socket2 = new DuplexSocket(); + this.socket1[kOtherSide] = this.socket2; + this.socket2[kOtherSide] = this.socket1; + } +} 
+ +class SecurePair extends EventEmitter { + constructor(secureContext = _tls_common.createSecureContext(), + isServer = false, + requestCert = !isServer, + rejectUnauthorized = false, + options = {}) { + super(); + const { socket1, socket2 } = new DuplexPair(); + + this.server = options.server; + this.credentials = secureContext; + + this.encrypted = socket1; + this.cleartext = new _tls_wrap.TLSSocket(socket2, { + secureContext, + isServer, + requestCert, + rejectUnauthorized, + ...options + }); + this.cleartext.once('secure', () => this.emit('secure')); + } + + destroy() { + this.cleartext.destroy(); + this.encrypted.destroy(); + } +} + +exports.createSecurePair = function createSecurePair(...args) { + return ReflectConstruct(SecurePair, args); +}; diff --git a/lib/internal/util.js b/lib/internal/util.js index f0a04f61818392..101fbec67775b0 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -28,6 +28,7 @@ const { } = primordials; const { + hideStackFrames, codes: { ERR_INVALID_ARG_TYPE, ERR_NO_CRYPTO, @@ -441,6 +442,13 @@ function createDeferredPromise() { return { promise, resolve, reject }; } +let DOMException; +const lazyDOMException = hideStackFrames((message, name) => { + if (DOMException === undefined) + DOMException = internalBinding('messaging').DOMException; + return new DOMException(message, name); +}); + module.exports = { assertCrypto, cachedResult, @@ -457,6 +465,7 @@ module.exports = { isError, isInsideNodeModules, join, + lazyDOMException, normalizeEncoding, once, promisify, diff --git a/lib/internal/validators.js b/lib/internal/validators.js index 6abe332fc5fdc0..9bca0aa747725e 100644 --- a/lib/internal/validators.js +++ b/lib/internal/validators.js @@ -229,6 +229,11 @@ const validateFunction = hideStackFrames((value, name) => { throw new ERR_INVALID_ARG_TYPE(name, 'Function', value); }); +const validateUndefined = hideStackFrames((value, name) => { + if (value !== undefined) + throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value); 
+}); + module.exports = { isInt32, isUint32, @@ -247,6 +252,7 @@ module.exports = { validateSignalName, validateString, validateUint32, + validateUndefined, validateCallback, validateAbortSignal, }; diff --git a/lib/internal/webstreams/queuingstrategies.js b/lib/internal/webstreams/queuingstrategies.js new file mode 100644 index 00000000000000..d8750665bd5e86 --- /dev/null +++ b/lib/internal/webstreams/queuingstrategies.js @@ -0,0 +1,168 @@ +'use strict'; + +const { + ObjectDefineProperties, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_INVALID_THIS, + ERR_MISSING_OPTION, + }, +} = require('internal/errors'); + +const { + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + customInspect, + isBrandCheck, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + validateObject, +} = require('internal/validators'); + +const isByteLengthQueuingStrategy = + isBrandCheck('ByteLengthQueuingStrategy'); + +const isCountQueuingStrategy = + isBrandCheck('CountQueuingStrategy'); + +/** + * @callback QueuingStrategySize + * @param {any} chunk + * @returns {number} + * + * @typedef {{ + * highWaterMark : number, + * size? : QueuingStrategySize, + * }} QueuingStrategy + */ + +// eslint-disable-next-line func-name-matching,func-style +const byteSizeFunction = function size(chunk) { return chunk.byteLength; }; + +// eslint-disable-next-line func-name-matching,func-style +const countSizeFunction = function size() { return 1; }; + +/** + * @type {QueuingStrategy} + */ +class ByteLengthQueuingStrategy { + [kType] = 'ByteLengthQueuingStrategy'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {{ + * highWaterMark : number + * }} init + */ + constructor(init) { + validateObject(init, 'init'); + if (init.highWaterMark === undefined) + throw new ERR_MISSING_OPTION('options.highWaterMark'); + + // The highWaterMark value is not checked until the strategy + // is actually used, per the spec. 
+ this[kState] = { + highWaterMark: +init.highWaterMark, + }; + } + + /** + * @readonly + * @type {number} + */ + get highWaterMark() { + if (!isByteLengthQueuingStrategy(this)) + throw new ERR_INVALID_THIS('ByteLengthQueuingStrategy'); + return this[kState].highWaterMark; + } + + /** + * @type {QueuingStrategySize} + */ + get size() { + if (!isByteLengthQueuingStrategy(this)) + throw new ERR_INVALID_THIS('ByteLengthQueuingStrategy'); + return byteSizeFunction; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + highWaterMark: this.highWaterMark, + }); + } +} + +ObjectDefineProperties(ByteLengthQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true }, +}); + +/** + * @type {QueuingStrategy} + */ +class CountQueuingStrategy { + [kType] = 'CountQueuingStrategy'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {{ + * highWaterMark : number + * }} init + */ + constructor(init) { + validateObject(init, 'init'); + if (init.highWaterMark === undefined) + throw new ERR_MISSING_OPTION('options.highWaterMark'); + + // The highWaterMark value is not checked until the strategy + // is actually used, per the spec. 
+ this[kState] = { + highWaterMark: +init.highWaterMark, + }; + } + + /** + * @readonly + * @type {number} + */ + get highWaterMark() { + if (!isCountQueuingStrategy(this)) + throw new ERR_INVALID_THIS('CountQueuingStrategy'); + return this[kState].highWaterMark; + } + + /** + * @type {QueuingStrategySize} + */ + get size() { + if (!isCountQueuingStrategy(this)) + throw new ERR_INVALID_THIS('CountQueuingStrategy'); + return countSizeFunction; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + highWaterMark: this.highWaterMark, + }); + } +} + +ObjectDefineProperties(CountQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true }, +}); + +module.exports = { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +}; diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js new file mode 100644 index 00000000000000..978281dfa324e8 --- /dev/null +++ b/lib/internal/webstreams/readablestream.js @@ -0,0 +1,2740 @@ +'use strict'; + +/* eslint-disable no-use-before-define */ + +const { + ArrayBuffer, + ArrayBufferPrototypeSlice, + ArrayPrototypePush, + ArrayPrototypeShift, + DataViewCtor, + FunctionPrototypeBind, + FunctionPrototypeCall, + MathMin, + NumberIsInteger, + ObjectCreate, + ObjectDefineProperties, + ObjectSetPrototypeOf, + Promise, + PromisePrototypeCatch, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + PromiseAll, + ReflectConstruct, + Symbol, + SymbolAsyncIterator, + SymbolToStringTag, + Uint8Array, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + isArrayBufferView, + isDataView, +} = require('util/types'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { 
+ serialize, + deserialize, +} = require('v8'); + +const { + validateObject, +} = require('internal/validators'); + +const { + kAborted, +} = require('internal/abort_controller'); + +const { + MessageChannel, +} = require('internal/worker/io'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + queueMicrotask, +} = require('internal/process/task_queues'); + +const { + ArrayBufferViewGetBuffer, + ArrayBufferViewGetByteLength, + ArrayBufferViewGetByteOffset, + ArrayBufferGetByteLength, + AsyncIterator, + copyArrayBuffer, + customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + resetQueue, + setPromiseHandled, + transferArrayBuffer, + nonOpCancel, + nonOpPull, + nonOpStart, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + WritableStreamDefaultWriter, + + isWritableStream, + isWritableStreamLocked, + isWritableStreamDefaultController, + isWritableStreamDefaultWriter, + + writableStreamAbort, + writableStreamCloseQueuedOrInFlight, + writableStreamDefaultWriterCloseWithErrorPropagation, + writableStreamDefaultWriterRelease, + writableStreamDefaultWriterWrite, +} = require('internal/webstreams/writablestream'); + +const assert = require('internal/assert'); + +const kCancel = Symbol('kCancel'); +const kClose = Symbol('kClose'); +const kChunk = Symbol('kChunk'); +const kError = Symbol('kError'); +const kPull = Symbol('kPull'); + +/** + * @typedef {import('../abort_controller').AbortSignal} AbortSignal + * @typedef {import('./queuingstrategies').QueuingStrategy} QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * @typedef {import('./writablestream').WritableStream} WritableStream + * + * @typedef {ReadableStreamDefaultController | ReadableByteStreamController + * } ReadableStreamController + * + * @typedef 
{ReadableStreamDefaultReader | ReadableStreamBYOBReader + * } ReadableStreamReader + * + * @callback UnderlyingSourceStartCallback + * @param {ReadableStreamController} controller + * @returns { any | Promise } + * + * @callback UnderlyingSourcePullCallback + * @param {ReadableStreamController} controller + * @returns { Promise } + * + * @callback UnderlyingSourceCancelCallback + * @param {any} reason + * @returns { Promise } + * + * @typedef {{ + * readable: ReadableStream, + * writable: WritableStream, + * }} ReadableWritablePair + * + * @typedef {{ + * preventClose? : boolean, + * preventAbort? : boolean, + * preventCancel? : boolean, + * signal? : AbortSignal, + * }} StreamPipeOptions + * + * @typedef {{ + * start? : UnderlyingSourceStartCallback, + * pull? : UnderlyingSourcePullCallback, + * cancel? : UnderlyingSourceCancelCallback, + * type? : "bytes", + * autoAllocateChunkSize? : number + * }} UnderlyingSource + * + */ + +class ReadableStream { + [kType] = 'ReadableStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {UnderlyingSource} [source] + * @param {QueuingStrategy} [strategy] + */ + constructor(source = {}, strategy = {}) { + if (source === null) + throw new ERR_INVALID_ARG_VALUE('source', 'Object', source); + this[kState] = { + disturbed: false, + state: 'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port1: undefined, + port2: undefined, + promise: undefined, + } + }; + // The spec requires handling of the strategy first + // here. Specifically, if getting the size and + // highWaterMark from the strategy fail, that has + // to trigger a throw before getting the details + // from the source. So be sure to keep these in + // this order. 
+ const size = strategy?.size; + const highWaterMark = strategy?.highWaterMark; + const type = source.type; + + if (`${type}` === 'bytes') { + if (size !== undefined) + throw new ERR_INVALID_ARG_VALUE.RangeError('strategy.size', size); + setupReadableByteStreamControllerFromSource( + this, + source, + extractHighWaterMark(highWaterMark, 0)); + return; + } + + if (type !== undefined) + throw new ERR_INVALID_ARG_VALUE('source.type', type); + setupReadableStreamDefaultControllerFromSource( + this, + source, + extractHighWaterMark(highWaterMark, 1), + extractSizeAlgorithm(size)); + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {boolean} + */ + get locked() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + return isReadableStreamLocked(this); + } + + /** + * @param {any} [reason] + * @returns { Promise } + */ + cancel(reason = undefined) { + if (!isReadableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStream')); + if (isReadableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('ReadableStream is locked')); + } + return readableStreamCancel(this, reason); + } + + /** + * @param {{ + * mode? 
: "byob" + * }} [options] + * @returns {ReadableStreamReader} + */ + getReader(options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + validateObject(options, 'options', { nullable: true, allowFunction: true }); + const mode = options?.mode; + + if (mode === undefined) + return new ReadableStreamDefaultReader(this); + + if (`${mode}` !== 'byob') + throw new ERR_INVALID_ARG_VALUE('options.mode', mode); + return new ReadableStreamBYOBReader(this); + } + + /** + * @param {ReadableWritablePair} transform + * @param {StreamPipeOptions} [options] + * @returns {ReadableStream} + */ + pipeThrough(transform, options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + const readable = transform?.readable; + if (!isReadableStream(readable)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.readable', + 'ReadableStream', + readable); + } + const writable = transform?.writable; + if (!isWritableStream(writable)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.writable', + 'WritableStream', + writable); + } + + // The web platform tests require that these be handled one at a + // time and in a specific order. options can be null or undefined. 
+ const preventAbort = options?.preventAbort; + const preventCancel = options?.preventCancel; + const preventClose = options?.preventClose; + const signal = options?.signal; + + if (signal !== undefined && signal?.[kAborted] === undefined) + throw new ERR_INVALID_ARG_TYPE('options.signal', 'AbortSignal', signal); + + if (isReadableStreamLocked(this)) + throw new ERR_INVALID_STATE.TypeError('The ReadableStream is locked'); + if (isWritableStreamLocked(writable)) + throw new ERR_INVALID_STATE.TypeError('The WritableStream is locked'); + + const promise = readableStreamPipeTo( + this, + writable, + !!preventClose, + !!preventAbort, + !!preventCancel, + signal); + setPromiseHandled(promise); + + return readable; + } + + /** + * @param {WritableStream} destination + * @param {StreamPipeOptions} [options] + * @returns {Promise} + */ + pipeTo(destination, options = {}) { + try { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + if (!isWritableStream(destination)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.writable', + 'WritableStream', + destination); + } + + const preventAbort = options?.preventAbort; + const preventCancel = options?.preventCancel; + const preventClose = options?.preventClose; + const signal = options?.signal; + + if (signal !== undefined && signal?.[kAborted] === undefined) + throw new ERR_INVALID_ARG_TYPE('options.signal', 'AbortSignal', signal); + + if (isReadableStreamLocked(this)) + throw new ERR_INVALID_STATE.TypeError('The ReadableStream is locked'); + if (isWritableStreamLocked(destination)) + throw new ERR_INVALID_STATE.TypeError('The WritableStream is locked'); + + return readableStreamPipeTo( + this, + destination, + !!preventClose, + !!preventAbort, + !!preventCancel, + signal); + } catch (error) { + return PromiseReject(error); + } + } + + /** + * @returns {ReadableStream[]} + */ + tee() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + return readableStreamTee(this, 
false); + } + + /** + * @param {{ + * preventCancel? : boolean, + * }} [options] + * @returns {AsyncIterable} + */ + values(options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + validateObject(options, 'options'); + const { + preventCancel = false, + } = options; + + const reader = new ReadableStreamDefaultReader(this); + let done = false; + let started = false; + let current; + + // The nextSteps function is not an async function in order + // to make it more efficient. Because nextSteps explicitly + // creates a Promise and returns it in the common case, + // making it an async function just causes two additional + // unnecessary Promise allocations to occur, which just add + // cost. + function nextSteps() { + if (done) + return PromiseResolve({ done: true, value: undefined }); + + if (reader[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not bound to a ReadableStream')); + } + const promise = createDeferredPromise(); + + readableStreamDefaultReaderRead(reader, { + [kChunk](chunk) { + current = undefined; + promise.resolve({ value: chunk, done: false }); + }, + [kClose]() { + current = undefined; + done = true; + readableStreamReaderGenericRelease(reader); + promise.resolve({ done: true, value: undefined }); + }, + [kError](error) { + current = undefined; + done = true; + readableStreamReaderGenericRelease(reader); + promise.reject(error); + } + }); + return promise.promise; + } + + async function returnSteps(value) { + if (done) + return { done: true, value }; + done = true; + + if (reader[kState].stream === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'The reader is not bound to a ReadableStream'); + } + assert(!reader[kState].readRequests.length); + if (!preventCancel) { + const result = readableStreamReaderGenericCancel(reader, value); + readableStreamReaderGenericRelease(reader); + await result; + return { done: true, value }; + } + + 
readableStreamReaderGenericRelease(reader); + return { done: true, value }; + } + + // TODO(@jasnell): Explore whether an async generator + // can be used here instead of a custom iterator object. + return ObjectSetPrototypeOf({ + // Changing either of these functions (next or return) + // to async functions causes a failure in the streams + // Web Platform Tests that check for use of a modified + // Promise.prototype.then. Since the await keyword + // uses Promise.prototype.then, it is open to prototype + // pollution, which causes the test to fail. The other + // await uses here do not trigger that failure because + // the test that fails does not trigger those code paths. + next() { + // If this is the first read, delay by one microtask + // to ensure that the controller has had an opportunity + // to properly start and perform the initial pull. + // TODO(@jasnell): The spec doesn't call this out so + // need to investigate if it's a bug in our impl or + // the spec. + if (!started) { + current = PromiseResolve(); + started = true; + } + current = current !== undefined ? + PromisePrototypeThen(current, nextSteps, nextSteps) : + nextSteps(); + return current; + }, + + return(error) { + return current ? 
+ PromisePrototypeThen( + current, + () => returnSteps(error), + () => returnSteps(error)) : + returnSteps(error); + }, + + [SymbolAsyncIterator]() { return this; } + }, AsyncIterator); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + locked: this.locked, + state: this[kState].state, + }); + } + + [kTransfer]() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + if (this.locked) { + this[kState].transfer.port1?.close(); + this[kState].transfer.port1 = undefined; + this[kState].transfer.port2 = undefined; + throw new DOMException( + 'Cannot transfer a locked ReadableStream', + 'DataCloneError'); + } + + const { + writable, + promise, + } = lazyTransfer().newCrossRealmWritableSink( + this, + this[kState].transfer.port1); + + this[kState].transfer.writable = writable; + this[kState].transfer.promise = promise; + + return { + data: { port: this[kState].transfer.port2 }, + deserializeInfo: + 'internal/webstreams/readablestream:TransferredReadableStream' + }; + } + + [kTransferList]() { + const { port1, port2 } = new MessageChannel(); + this[kState].transfer.port1 = port1; + this[kState].transfer.port2 = port2; + return [ port2 ]; + } + + [kDeserialize]({ port }) { + const transfer = lazyTransfer(); + setupReadableStreamDefaultControllerFromSource( + this, + new transfer.CrossRealmTransformReadableSource(port), + 0, () => 1); + } +} + +ObjectDefineProperties(ReadableStream.prototype, { + [SymbolAsyncIterator]: { + configurable: true, + enumerable: false, + writable: true, + value: ReadableStream.prototype.values, + }, + locked: { enumerable: true }, + cancel: { enumerable: true }, + getReader: { enumerable: true }, + pipeThrough: { enumerable: true }, + pipeTo: { enumerable: true }, + tee: { enumerable: true }, +}); + +function TransferredReadableStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'ReadableStream'; + this[kState] = { + disturbed: false, + state: 
'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port: undefined, + promise: undefined, + } + }; + }, + [], ReadableStream)); +} +TransferredReadableStream.prototype[kDeserialize] = () => {}; + +class ReadableStreamBYOBRequest { + [kType] = 'ReadableStreamBYOBRequest'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {ArrayBufferView} + */ + get view() { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + return this[kState].view; + } + + /** + * @param {number} bytesWritten + */ + respond(bytesWritten) { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + const { + view, + controller, + } = this[kState]; + if (controller === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'This BYOB request has been invalidated'); + } + + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = ArrayBufferGetByteLength(viewBuffer); + + if (viewByteLength === 0 || viewBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'View ArrayBuffer is zero-length or detached'); + } + + readableByteStreamControllerRespond(controller, bytesWritten); + } + + /** + * @param {ArrayBufferView} view + */ + respondWithNewView(view) { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + const { + controller, + } = this[kState]; + + if (controller === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'This BYOB request has been invalidated'); + } + + readableByteStreamControllerRespondWithNewView(controller, view); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + view: this.view, + controller: this[kState].controller, + }); + } +} + 
+ObjectDefineProperties(ReadableStreamBYOBRequest.prototype, { + view: { enumerable: true }, + respond: { enumerable: true }, + respondWithNewView: { enumerable: true }, +}); + +function createReadableStreamBYOBRequest(controller, view) { + return ReflectConstruct( + function() { + this[kType] = 'ReadableStreamBYOBRequest'; + this[kState] = { + controller, + view, + }; + }, + [], + ReadableStreamBYOBRequest + ); +} + +class DefaultReadRequest { + constructor() { + this[kState] = createDeferredPromise(); + } + + [kChunk](value) { + this[kState].resolve?.({ value, done: false }); + } + + [kClose]() { + this[kState].resolve?.({ value: undefined, done: true }); + } + + [kError](error) { + this[kState].reject?.(error); + } + + get promise() { return this[kState].promise; } +} + +class ReadIntoRequest { + constructor() { + this[kState] = createDeferredPromise(); + } + + [kChunk](value) { + this[kState].resolve?.({ value, done: false }); + } + + [kClose](value) { + this[kState].resolve?.({ value, done: true }); + } + + [kError](error) { + this[kState].reject?.(error); + } + + get promise() { return this[kState].promise; } +} + +class ReadableStreamDefaultReader { + [kType] = 'ReadableStreamDefaultReader'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {ReadableStream} stream + */ + constructor(stream) { + if (!isReadableStream(stream)) + throw new ERR_INVALID_ARG_TYPE('stream', 'ReadableStream', stream); + this[kState] = { + readRequests: [], + stream: undefined, + close: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + }; + setupReadableStreamDefaultReader(this, stream); + } + + /** + * @returns {Promise<{ + * value : any, + * done : boolean + * }>} + */ + read() { + if (!isReadableStreamDefaultReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a 
stream')); + } + const readRequest = new DefaultReadRequest(); + readableStreamDefaultReaderRead(this, readRequest); + return readRequest.promise; + } + + releaseLock() { + if (!isReadableStreamDefaultReader(this)) + throw new ERR_INVALID_THIS('ReadableStreamDefaultReader'); + if (this[kState].stream === undefined) + return; + if (this[kState].readRequests.length) { + throw new ERR_INVALID_STATE.TypeError( + 'Cannot release with pending read requests'); + } + readableStreamReaderGenericRelease(this); + } + + /** + * @readonly + * @type {Promise} + */ + get closed() { + if (!isReadableStreamDefaultReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader')); + return this[kState].close.promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + cancel(reason = undefined) { + if (!isReadableStreamDefaultReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader')); + if (this[kState].stream === undefined) { + return PromiseReject(new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a stream')); + } + return readableStreamReaderGenericCancel(this, reason); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + readRequests: this[kState].readRequests.length, + close: this[kState].close.promise, + }); + } +} + +ObjectDefineProperties(ReadableStreamDefaultReader.prototype, { + closed: { enumerable: true }, + read: { enumerable: true }, + releaseLock: { enumerable: true }, + cancel: { enumerable: true }, +}); + +class ReadableStreamBYOBReader { + [kType] = 'ReadableStreamBYOBReader'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {ReadableStream} stream + */ + constructor(stream) { + if (!isReadableStream(stream)) + throw new ERR_INVALID_ARG_TYPE('stream', 'ReadableStream', stream); + this[kState] = { + stream: undefined, + requestIntoRequests: [], + close: { + promise: undefined, + resolve: 
undefined, + reject: undefined, + }, + }; + setupReadableStreamBYOBReader(this, stream); + } + + /** + * @param {ArrayBufferView} view + * @returns {Promise<{ + * view : ArrayBufferView, + * done : boolean, + * }>} + */ + read(view) { + if (!isReadableStreamBYOBReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader')); + if (!isArrayBufferView(view)) { + return PromiseReject( + new ERR_INVALID_ARG_TYPE( + 'view', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + view)); + } + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = ArrayBufferGetByteLength(viewBuffer); + + if (viewByteLength === 0 || viewBufferByteLength === 0) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'View ArrayBuffer is zero-length or detached')); + } + // Supposed to assert here that the view's buffer is not + // detached, but there's no API available to use to check that. + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a stream')); + } + const readIntoRequest = new ReadIntoRequest(); + readableStreamBYOBReaderRead(this, view, readIntoRequest); + return readIntoRequest.promise; + } + + releaseLock() { + if (!isReadableStreamBYOBReader(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBReader'); + if (this[kState].stream === undefined) + return; + if (this[kState].readIntoRequests.length) { + throw new ERR_INVALID_STATE.TypeError( + 'Cannot release with pending read requests'); + } + readableStreamReaderGenericRelease(this); + } + + /** + * @readonly + * @type {Promise} + */ + get closed() { + if (!isReadableStreamBYOBReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader')); + return this[kState].close.promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + cancel(reason = undefined) { + if 
(!isReadableStreamBYOBReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader')); + if (this[kState].stream === undefined) { + return PromiseReject(new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a stream')); + } + return readableStreamReaderGenericCancel(this, reason); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + requestIntoRequests: this[kState].requestIntoRequests.length, + close: this[kState].close.promise, + }); + } +} + +ObjectDefineProperties(ReadableStreamBYOBReader.prototype, { + closed: { enumerable: true }, + read: { enumerable: true }, + releaseLock: { enumerable: true }, + cancel: { enumerable: true }, +}); + +class ReadableStreamDefaultController { + [kType] = 'ReadableStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + return readableStreamDefaultControllerGetDesiredSize(this); + } + + close() { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(this)) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + readableStreamDefaultControllerClose(this); + } + + /** + * @param {any} chunk + */ + enqueue(chunk = undefined) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(this)) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + readableStreamDefaultControllerEnqueue(this, chunk); + } + + /** + * @param {any} error + */ + error(error = undefined) { + readableStreamDefaultControllerError(this, error); + } + + [kCancel](reason) { + return readableStreamDefaultControllerCancelSteps(this, reason); + } + + [kPull](readRequest) { + readableStreamDefaultControllerPullSteps(this, readRequest); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { }); + } +} + 
+ObjectDefineProperties(ReadableStreamDefaultController.prototype, { + desiredSize: { enumerable: true }, + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, +}); + +function createReadableStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'ReadableStreamDefaultController'; + this[kState] = {}; + }, + [], + ReadableStreamDefaultController, + ); +} + +class ReadableByteStreamController { + [kType] = 'ReadableByteStreamController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {ReadableStreamBYOBRequest} + */ + get byobRequest() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + if (this[kState].byobRequest === null && + this[kState].pendingPullIntos.length) { + const { + buffer, + byteOffset, + bytesFilled, + byteLength, + } = this[kState].pendingPullIntos[0]; + const view = + new Uint8Array( + buffer, + byteOffset + bytesFilled, + byteLength - bytesFilled); + this[kState].byobRequest = createReadableStreamBYOBRequest(this, view); + } + return this[kState].byobRequest; + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + return readableByteStreamControllerGetDesiredSize(this); + } + + close() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + if (this[kState].closeRequested) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + if (this[kState].stream[kState].state !== 'readable') + throw new ERR_INVALID_STATE.TypeError('ReadableStream is already closed'); + readableByteStreamControllerClose(this); + } + + /** + * @param {ArrayBufferView} chunk + */ + enqueue(chunk) { + if (!isReadableByteStreamController(this)) + throw new 
ERR_INVALID_THIS('ReadableByteStreamController'); + if (!isArrayBufferView(chunk)) { + throw new ERR_INVALID_ARG_TYPE( + 'chunk', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + chunk); + } + const chunkByteLength = ArrayBufferViewGetByteLength(chunk); + const chunkByteOffset = ArrayBufferViewGetByteOffset(chunk); + const chunkBuffer = ArrayBufferViewGetBuffer(chunk); + const chunkBufferByteLength = ArrayBufferGetByteLength(chunkBuffer); + if (chunkByteLength === 0 || chunkBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'chunk ArrayBuffer is zero-length or detached'); + } + if (this[kState].closeRequested) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + if (this[kState].stream[kState].state !== 'readable') + throw new ERR_INVALID_STATE.TypeError('ReadableStream is already closed'); + readableByteStreamControllerEnqueue( + this, + chunkBuffer, + chunkByteLength, + chunkByteOffset); + } + + /** + * @param {any} error + */ + error(error = undefined) { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + readableByteStreamControllerError(this, error); + } + + [kCancel](reason) { + return readableByteStreamControllerCancelSteps(this, reason); + } + + [kPull](readRequest) { + readableByteStreamControllerPullSteps(this, readRequest); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { }); + } +} + +ObjectDefineProperties(ReadableByteStreamController.prototype, { + byobRequest: { enumerable: true }, + desiredSize: { enumerable: true }, + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, +}); + +function createReadableByteStreamController() { + return ReflectConstruct( + function() { + this[kType] = 'ReadableByteStreamController'; + this[kState] = {}; + }, + [], + ReadableByteStreamController, + ); +} + +function createTeeReadableStream(start, pull, cancel) { + return ReflectConstruct( 
+ function() { + this[kType] = 'ReadableStream'; + this[kState] = { + disturbed: false, + state: 'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port: undefined, + promise: undefined, + } + }; + setupReadableStreamDefaultControllerFromSource( + this, + ObjectCreate(null, { + start: { value: start }, + pull: { value: pull }, + cancel: { value: cancel } + }), + 1, + () => 1); + return makeTransferable(this); + }, [], ReadableStream, + ); +} + +const isReadableStream = + isBrandCheck('ReadableStream'); +const isReadableByteStreamController = + isBrandCheck('ReadableByteStreamController'); +const isReadableStreamBYOBRequest = + isBrandCheck('ReadableStreamBYOBRequest'); +const isReadableStreamDefaultReader = + isBrandCheck('ReadableStreamDefaultReader'); +const isReadableStreamBYOBReader = + isBrandCheck('ReadableStreamBYOBReader'); + +// ---- ReadableStream Implementation + +function readableStreamPipeTo( + source, + dest, + preventClose, + preventAbort, + preventCancel, + signal) { + + let reader; + let writer; + // Both of these can throw synchronously. We want to capture + // the error and return a rejected promise instead. + try { + reader = new ReadableStreamDefaultReader(source); + writer = new WritableStreamDefaultWriter(dest); + } catch (error) { + return PromiseReject(error); + } + + source[kState].disturbed = true; + + let shuttingDown = false; + + if (signal !== undefined && signal?.[kAborted] === undefined) { + return PromiseReject( + new ERR_INVALID_ARG_TYPE( + 'options.signal', + 'AbortSignal', + signal)); + } + + const promise = createDeferredPromise(); + + let currentWrite = PromiseResolve(); + + // The error here can be undefined. The rejected arg + // tells us that the promise must be rejected even + // when error is undefined. 
+ function finalize(rejected, error) { + writableStreamDefaultWriterRelease(writer); + readableStreamReaderGenericRelease(reader); + if (signal !== undefined) + signal.removeEventListener('abort', abortAlgorithm); + if (rejected) + promise.reject(error); + else + promise.resolve(); + } + + async function waitForCurrentWrite() { + const write = currentWrite; + await write; + if (write !== currentWrite) + await waitForCurrentWrite(); + } + + function shutdownWithAnAction(action, rejected, originalError) { + if (shuttingDown) return; + shuttingDown = true; + if (dest[kState].state === 'writable' && + !writableStreamCloseQueuedOrInFlight(dest)) { + PromisePrototypeThen( + waitForCurrentWrite(), + complete, + (error) => finalize(true, error)); + return; + } + complete(); + + function complete() { + PromisePrototypeThen( + action(), + () => finalize(rejected, originalError), + (error) => finalize(true, error)); + } + } + + function shutdown(rejected, error) { + if (shuttingDown) return; + shuttingDown = true; + if (dest[kState].state === 'writable' && + !writableStreamCloseQueuedOrInFlight(dest)) { + PromisePrototypeThen( + waitForCurrentWrite(), + () => finalize(rejected, error), + (error) => finalize(true, error)); + return; + } + finalize(rejected, error); + } + + function abortAlgorithm() { + // Cannot use the AbortError class here. 
It must be a DOMException + const error = new DOMException('The operation was aborted', 'AbortError'); + const actions = []; + if (!preventAbort) { + ArrayPrototypePush( + actions, + () => { + if (dest[kState].state === 'writable') + return writableStreamAbort(dest, error); + return PromiseResolve(); + }); + } + if (!preventCancel) { + ArrayPrototypePush( + actions, + () => { + if (source[kState].state === 'readable') + return readableStreamCancel(source, error); + return PromiseResolve(); + }); + } + + shutdownWithAnAction( + async () => PromiseAll(actions.map((action) => action())), + true, + error); + } + + function watchErrored(stream, promise, action) { + if (stream[kState].state === 'errored') + action(stream[kState].storedError); + else + PromisePrototypeCatch(promise, action); + } + + function watchClosed(stream, promise, action) { + if (stream[kState].state === 'closed') + action(stream[kState].storedError); + else + PromisePrototypeThen(promise, action, () => {}); + } + + async function step() { + if (shuttingDown) + return true; + await writer[kState].ready.promise; + return new Promise((resolve, reject) => { + readableStreamDefaultReaderRead( + reader, + { + [kChunk](chunk) { + currentWrite = writableStreamDefaultWriterWrite(writer, chunk); + setPromiseHandled(currentWrite); + resolve(false); + }, + [kClose]: () => resolve(true), + [kError]: reject, + }); + }); + } + + async function run() { + // Run until step resolves as true + while (!await step()) {} + } + + if (signal !== undefined) { + if (signal.aborted) { + abortAlgorithm(); + return promise.promise; + } + signal.addEventListener('abort', abortAlgorithm, { once: true }); + } + + setPromiseHandled(run()); + + watchErrored(source, reader[kState].close.promise, (error) => { + if (!preventAbort) { + return shutdownWithAnAction( + () => writableStreamAbort(dest, error), + true, + error); + } + shutdown(true, error); + }); + + watchErrored(dest, writer[kState].close.promise, (error) => { + if 
(!preventCancel) { + return shutdownWithAnAction( + () => readableStreamCancel(source, error), + true, + error); + } + shutdown(true, error); + }); + + watchClosed(source, reader[kState].close.promise, () => { + if (!preventClose) { + return shutdownWithAnAction( + () => writableStreamDefaultWriterCloseWithErrorPropagation(writer)); + } + shutdown(); + }); + + if (writableStreamCloseQueuedOrInFlight(dest) || + dest[kState].state === 'closed') { + const error = new ERR_INVALID_STATE.TypeError( + 'Destination WritableStream is closed'); + if (!preventCancel) { + shutdownWithAnAction( + () => readableStreamCancel(source, error), true, error); + } else { + shutdown(true, error); + } + } + + return promise.promise; +} + +function readableStreamTee(stream, cloneForBranch2) { + const reader = new ReadableStreamDefaultReader(stream); + let reading = false; + let canceled1 = false; + let canceled2 = false; + let reason1; + let reason2; + let branch1; + let branch2; + const cancelPromise = createDeferredPromise(); + + async function pullAlgorithm() { + if (reading) return; + reading = true; + const readRequest = { + [kChunk](value) { + queueMicrotask(() => { + reading = false; + const value1 = value; + let value2 = value; + if (!canceled2 && cloneForBranch2) { + // Structured Clone + value2 = deserialize(serialize(value2)); + } + if (!canceled1) { + readableStreamDefaultControllerEnqueue( + branch1[kState].controller, + value1); + } + if (!canceled2) { + readableStreamDefaultControllerEnqueue( + branch2[kState].controller, + value2); + } + }); + }, + [kClose]() { + reading = false; + if (!canceled1) + readableStreamDefaultControllerClose(branch1[kState].controller); + if (!canceled2) + readableStreamDefaultControllerClose(branch2[kState].controller); + if (!canceled1 || !canceled2) + cancelPromise.resolve(); + }, + [kError]() { + reading = false; + }, + }; + readableStreamDefaultReaderRead(reader, readRequest); + } + + function cancel1Algorithm(reason) { + canceled1 = true; 
+ reason1 = reason; + if (canceled2) { + const compositeReason = [reason1, reason2]; + cancelPromise.resolve(readableStreamCancel(stream, compositeReason)); + } + return cancelPromise.promise; + } + + function cancel2Algorithm(reason) { + canceled2 = true; + reason2 = reason; + if (canceled1) { + const compositeReason = [reason1, reason2]; + cancelPromise.resolve(readableStreamCancel(stream, compositeReason)); + } + return cancelPromise.promise; + } + + branch1 = + createTeeReadableStream(nonOpStart, pullAlgorithm, cancel1Algorithm); + branch2 = + createTeeReadableStream(nonOpStart, pullAlgorithm, cancel2Algorithm); + + PromisePrototypeCatch( + reader[kState].close.promise, + (error) => { + readableStreamDefaultControllerError(branch1[kState].controller, error); + readableStreamDefaultControllerError(branch2[kState].controller, error); + if (!canceled1 || !canceled2) + cancelPromise.resolve(); + }); + + return [branch1, branch2]; +} + +function readableByteStreamControllerConvertPullIntoDescriptor(desc) { + const { + buffer, + bytesFilled, + byteLength, + byteOffset, + ctor, + elementSize, + } = desc; + if (bytesFilled > byteLength) + throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); + assert(!(bytesFilled % elementSize)); + const transferredBuffer = transferArrayBuffer(buffer); + return new ctor(transferredBuffer, byteOffset, bytesFilled / elementSize); +} + +function isReadableStreamLocked(stream) { + return stream[kState].reader !== undefined; +} + +function readableStreamCancel(stream, reason) { + stream[kState].disturbed = true; + switch (stream[kState].state) { + case 'closed': + return PromiseResolve(); + case 'errored': + return PromiseReject(stream[kState].storedError); + } + readableStreamClose(stream); + const { + reader, + } = stream[kState]; + if (reader !== undefined && readableStreamHasBYOBReader(stream)) { + for (let n = 0; n < reader[kState].readIntoRequests.length; n++) + reader[kState].readIntoRequests[n][kClose](); + 
reader[kState].readIntoRequests = []; + } + + return PromisePrototypeThen( + ensureIsPromise( + stream[kState].controller[kCancel], + stream[kState].controller, + reason), + () => {}); +} + +function readableStreamClose(stream) { + assert(stream[kState].state === 'readable'); + stream[kState].state = 'closed'; + + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return; + + reader[kState].close.resolve(); + + if (readableStreamHasDefaultReader(stream)) { + for (let n = 0; n < reader[kState].readRequests.length; n++) + reader[kState].readRequests[n][kClose](); + reader[kState].readRequests = []; + } +} + +function readableStreamError(stream, error) { + assert(stream[kState].state === 'readable'); + stream[kState].state = 'errored'; + stream[kState].storedError = error; + + const { + reader + } = stream[kState]; + + if (reader === undefined) + return; + + reader[kState].close.reject(error); + setPromiseHandled(reader[kState].close.promise); + + if (readableStreamHasDefaultReader(stream)) { + for (let n = 0; n < reader[kState].readRequests.length; n++) + reader[kState].readRequests[n][kError](error); + reader[kState].readRequests = []; + } else { + assert(readableStreamHasBYOBReader(stream)); + for (let n = 0; n < reader[kState].readIntoRequests.length; n++) + reader[kState].readIntoRequests[n][kError](error); + reader[kState].readIntoRequests = []; + } +} + +function readableStreamHasDefaultReader(stream) { + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return false; + + return reader[kState] !== undefined && + reader[kType] === 'ReadableStreamDefaultReader'; +} + +function readableStreamGetNumReadRequests(stream) { + assert(readableStreamHasDefaultReader(stream)); + return stream[kState].reader[kState].readRequests.length; +} + +function readableStreamHasBYOBReader(stream) { + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return false; + + return reader[kState] !== undefined && + 
reader[kType] === 'ReadableStreamBYOBReader'; +} + +function readableStreamGetNumReadIntoRequests(stream) { + assert(readableStreamHasBYOBReader(stream)); + return stream[kState].reader[kState].readIntoRequests.length; +} + +function readableStreamFulfillReadRequest(stream, chunk, done) { + assert(readableStreamHasDefaultReader(stream)); + const { + reader, + } = stream[kState]; + assert(reader[kState].readRequests.length); + const readRequest = ArrayPrototypeShift(reader[kState].readRequests); + + // TODO(@jasnell): It's not clear under what exact conditions done + // will be true here. The spec requires this check but none of the + // WPT's or other tests trigger it. Will need to investigate how to + // get coverage for this. + if (done) + readRequest[kClose](); + else + readRequest[kChunk](chunk); +} + +function readableStreamFulfillReadIntoRequest(stream, chunk, done) { + assert(readableStreamHasBYOBReader(stream)); + const { + reader, + } = stream[kState]; + assert(reader[kState].readIntoRequests.length); + const readIntoRequest = ArrayPrototypeShift(reader[kState].readIntoRequests); + if (done) + readIntoRequest[kClose](chunk); + else + readIntoRequest[kChunk](chunk); +} + +function readableStreamAddReadRequest(stream, readRequest) { + assert(readableStreamHasDefaultReader(stream)); + assert(stream[kState].state === 'readable'); + ArrayPrototypePush(stream[kState].reader[kState].readRequests, readRequest); +} + +function readableStreamAddReadIntoRequest(stream, readIntoRequest) { + assert(readableStreamHasBYOBReader(stream)); + assert(stream[kState].state !== 'errored'); + ArrayPrototypePush( + stream[kState].reader[kState].readIntoRequests, + readIntoRequest); +} + +function readableStreamReaderGenericCancel(reader, reason) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + return readableStreamCancel(stream, reason); +} + +function readableStreamReaderGenericInitialize(reader, stream) { + reader[kState].stream = stream; + 
stream[kState].reader = reader; + switch (stream[kState].state) { + case 'readable': + reader[kState].close = createDeferredPromise(); + break; + case 'closed': + reader[kState].close = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + break; + case 'errored': + reader[kState].close = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(reader[kState].close.promise); + break; + } +} + +function readableStreamReaderGenericRelease(reader) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + assert(stream[kState].reader === reader); + + if (stream[kState].state === 'readable') { + reader[kState].close.reject?.( + new ERR_INVALID_STATE.TypeError('Reader released')); + } else { + reader[kState].close = { + promise: PromiseReject( + new ERR_INVALID_STATE.TypeError('Reader released')), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(reader[kState].close.promise); + stream[kState].reader = undefined; + reader[kState].stream = undefined; +} + +function readableStreamBYOBReaderRead(reader, view, readIntoRequest) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + stream[kState].disturbed = true; + if (stream[kState].state === 'errored') { + readIntoRequest[kError](stream[kState].storedError); + return; + } + readableByteStreamControllerPullInto( + stream[kState].controller, + view, + readIntoRequest); +} + +function readableStreamDefaultReaderRead(reader, readRequest) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + stream[kState].disturbed = true; + switch (stream[kState].state) { + case 'closed': + readRequest[kClose](); + break; + case 'errored': + readRequest[kError](stream[kState].storedError); + break; + case 'readable': + stream[kState].controller[kPull](readRequest); + } +} + +function setupReadableStreamBYOBReader(reader, stream) { + if 
(isReadableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('ReadableStream is locked'); + const { + controller, + } = stream[kState]; + if (!isReadableByteStreamController(controller)) + throw new ERR_INVALID_ARG_VALUE('reader', reader, 'must be a byte stream'); + readableStreamReaderGenericInitialize(reader, stream); + reader[kState].readIntoRequests = []; +} + +function setupReadableStreamDefaultReader(reader, stream) { + if (isReadableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('ReadableStream is locked'); + readableStreamReaderGenericInitialize(reader, stream); + reader[kState].readRequests = []; +} + +function readableStreamDefaultControllerClose(controller) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) + return; + controller[kState].closeRequested = true; + if (!controller[kState].queue.length) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(controller[kState].stream); + } +} + +function readableStreamDefaultControllerEnqueue(controller, chunk) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) + return; + + const { + stream, + } = controller[kState]; + + if (isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream)) { + readableStreamFulfillReadRequest(stream, chunk, false); + } else { + try { + const chunkSize = + FunctionPrototypeCall( + controller[kState].sizeAlgorithm, + undefined, + chunk); + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (error) { + readableStreamDefaultControllerError(controller, error); + throw error; + } + } + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +function readableStreamDefaultControllerHasBackpressure(controller) { + return !readableStreamDefaultControllerShouldCallPull(controller); +} + +function readableStreamDefaultControllerCanCloseOrEnqueue(controller) { + const { + stream, + } = controller[kState]; + return !controller[kState].closeRequested && + 
stream[kState].state === 'readable'; +} + +function readableStreamDefaultControllerGetDesiredSize(controller) { + const { + stream, + highWaterMark, + queueTotalSize, + } = controller[kState]; + switch (stream[kState].state) { + case 'errored': return null; + case 'closed': return 0; + default: + return highWaterMark - queueTotalSize; + } +} + +function readableStreamDefaultControllerShouldCallPull(controller) { + const { + stream, + } = controller[kState]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller) || + !controller[kState].started) + return false; + + if (isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream)) { + return true; + } + + const desiredSize = readableStreamDefaultControllerGetDesiredSize(controller); + assert(desiredSize !== null); + + return desiredSize > 0; +} + +function readableStreamDefaultControllerCallPullIfNeeded(controller) { + if (!readableStreamDefaultControllerShouldCallPull(controller)) + return; + if (controller[kState].pulling) { + controller[kState].pullAgain = true; + return; + } + assert(!controller[kState].pullAgain); + controller[kState].pulling = true; + PromisePrototypeThen( + ensureIsPromise(controller[kState].pullAlgorithm, controller), + () => { + controller[kState].pulling = false; + if (controller[kState].pullAgain) { + controller[kState].pullAgain = false; + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + }, + (error) => readableStreamDefaultControllerError(controller, error)); +} + +function readableStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].pullAlgorithm = undefined; + controller[kState].cancelAlgorithm = undefined; + controller[kState].sizeAlgorithm = undefined; +} + +function readableStreamDefaultControllerError(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state === 'readable') { + resetQueue(controller); + readableStreamDefaultControllerClearAlgorithms(controller); + 
readableStreamError(stream, error); + } +} + +function readableStreamDefaultControllerCancelSteps(controller, reason) { + resetQueue(controller); + const result = controller[kState].cancelAlgorithm(reason); + readableStreamDefaultControllerClearAlgorithms(controller); + return result; +} + +function readableStreamDefaultControllerPullSteps(controller, readRequest) { + const { + stream, + queue, + } = controller[kState]; + if (queue.length) { + const chunk = dequeueValue(controller); + if (controller[kState].closeRequested && !queue.length) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(stream); + } else { + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + readRequest[kChunk](chunk); + return; + } + readableStreamAddReadRequest(stream, readRequest); + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +function setupReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm) { + assert(stream[kState].controller === undefined); + controller[kState] = { + cancelAlgorithm, + closeRequested: false, + highWaterMark, + pullAgain: false, + pullAlgorithm, + pulling: false, + queue: [], + queueTotalSize: 0, + started: false, + sizeAlgorithm, + stream, + }; + stream[kState].controller = controller; + + const startResult = startAlgorithm(); + + PromisePrototypeThen( + PromiseResolve(startResult), + () => { + controller[kState].started = true; + assert(!controller[kState].pulling); + assert(!controller[kState].pullAgain); + readableStreamDefaultControllerCallPullIfNeeded(controller); + }, + (error) => readableStreamDefaultControllerError(controller, error)); +} + +function setupReadableStreamDefaultControllerFromSource( + stream, + source, + highWaterMark, + sizeAlgorithm) { + const controller = createReadableStreamDefaultController(); + const start = source?.start; + const pull = source?.pull; + const cancel = 
source?.cancel; + const startAlgorithm = start ? + FunctionPrototypeBind(start, source, controller) : + nonOpStart; + const pullAlgorithm = pull ? + FunctionPrototypeBind(pull, source, controller) : + nonOpPull; + + const cancelAlgorithm = cancel ? + FunctionPrototypeBind(cancel, source) : + nonOpCancel; + + setupReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm); +} + +function readableByteStreamControllerClose(controller) { + const { + closeRequested, + pendingPullIntos, + queueTotalSize, + stream, + } = controller[kState]; + + if (closeRequested || stream[kState].state !== 'readable') + return; + + if (queueTotalSize) { + controller[kState].closeRequested = true; + return; + } + + if (pendingPullIntos.length) { + const firstPendingPullInto = pendingPullIntos[0]; + if (firstPendingPullInto.bytesFilled > 0) { + const error = new ERR_INVALID_STATE.TypeError('Partial read'); + readableByteStreamControllerError(controller, error); + throw error; + } + } + + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); +} + +function readableByteStreamControllerCommitPullIntoDescriptor(stream, desc) { + assert(stream[kState].state !== 'errored'); + let done = false; + if (stream[kState].state === 'closed') { + desc.bytesFilled = 0; + done = true; + } + + const filledView = + readableByteStreamControllerConvertPullIntoDescriptor(desc); + + if (desc.type === 'default') { + readableStreamFulfillReadRequest(stream, filledView, done); + } else { + assert(desc.type === 'byob'); + readableStreamFulfillReadIntoRequest(stream, filledView, done); + } +} + +function readableByteStreamControllerInvalidateBYOBRequest(controller) { + if (controller[kState].byobRequest === null) + return; + controller[kState].byobRequest[kState].controller = undefined; + controller[kState].byobRequest[kState].view = null; + controller[kState].byobRequest = null; +} + +function 
readableByteStreamControllerClearAlgorithms(controller) { + controller[kState].pullAlgorithm = undefined; + controller[kState].cancelAlgorithm = undefined; +} + +function readableByteStreamControllerClearPendingPullIntos(controller) { + readableByteStreamControllerInvalidateBYOBRequest(controller); + controller[kState].pendingPullIntos = []; +} + +function readableByteStreamControllerGetDesiredSize(controller) { + const { + stream, + highWaterMark, + queueTotalSize, + } = controller[kState]; + switch (stream[kState].state) { + case 'errored': return null; + case 'closed': return 0; + default: return highWaterMark - queueTotalSize; + } +} + +function readableByteStreamControllerShouldCallPull(controller) { + const { + stream, + } = controller[kState]; + if (stream[kState].state !== 'readable' || + controller[kState].closeRequested || + !controller[kState].started) { + return false; + } + if (readableStreamHasDefaultReader(stream) && + readableStreamGetNumReadRequests(stream) > 0) { + return true; + } + + if (readableStreamHasBYOBReader(stream) && + readableStreamGetNumReadIntoRequests(stream) > 0) { + return true; + } + + const desiredSize = readableByteStreamControllerGetDesiredSize(controller); + assert(desiredSize !== null); + + return desiredSize > 0; +} + +function readableByteStreamControllerHandleQueueDrain(controller) { + const { + closeRequested, + queueTotalSize, + stream, + } = controller[kState]; + assert(stream[kState].state === 'readable'); + if (!queueTotalSize && closeRequested) { + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); + return; + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerPullInto( + controller, + view, + readIntoRequest) { + const { + closeRequested, + stream, + pendingPullIntos, + } = controller[kState]; + let elementSize = 1; + let ctor = DataViewCtor; + if (isArrayBufferView(view) && !isDataView(view)) { + elementSize = 
view.constructor.BYTES_PER_ELEMENT; + ctor = view.constructor; + } + const buffer = ArrayBufferViewGetBuffer(view); + const byteOffset = ArrayBufferViewGetByteOffset(view); + const byteLength = ArrayBufferViewGetByteLength(view); + const bufferByteLength = ArrayBufferGetByteLength(buffer); + + let transferredBuffer; + try { + transferredBuffer = transferArrayBuffer(buffer); + } catch (error) { + readIntoRequest[kError](error); + return; + } + const desc = { + buffer: transferredBuffer, + bufferByteLength, + byteOffset, + byteLength, + bytesFilled: 0, + elementSize, + ctor, + type: 'byob', + }; + if (pendingPullIntos.length) { + ArrayPrototypePush(pendingPullIntos, desc); + readableStreamAddReadIntoRequest(stream, readIntoRequest); + return; + } + if (stream[kState].state === 'closed') { + const emptyView = new ctor(desc.buffer, byteOffset, 0); + readIntoRequest[kClose](emptyView); + return; + } + if (controller[kState].queueTotalSize) { + if (readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc)) { + const filledView = + readableByteStreamControllerConvertPullIntoDescriptor(desc); + readableByteStreamControllerHandleQueueDrain(controller); + readIntoRequest[kChunk](filledView); + return; + } + if (closeRequested) { + const error = new ERR_INVALID_STATE.TypeError('ReadableStream closed'); + readableByteStreamControllerError(controller, error); + readIntoRequest[kError](error); + return; + } + } + ArrayPrototypePush(pendingPullIntos, desc); + readableStreamAddReadIntoRequest(stream, readIntoRequest); + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerRespondInternal(controller, bytesWritten) { + const { + stream, + pendingPullIntos, + } = controller[kState]; + const desc = pendingPullIntos[0]; + readableByteStreamControllerInvalidateBYOBRequest(controller); + if (stream[kState].state === 'closed') { + if (bytesWritten) + throw new ERR_INVALID_STATE.TypeError( + 'Controller is closed but 
view is not zero-length'); + readableByteStreamControllerRespondInClosedState(controller, desc); + } else { + assert(stream[kState].state === 'readable'); + if (!bytesWritten) + throw new ERR_INVALID_STATE.TypeError('View cannot be zero-length'); + readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + desc); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerRespond(controller, bytesWritten) { + const { + pendingPullIntos, + stream, + } = controller[kState]; + assert(pendingPullIntos.length); + const desc = pendingPullIntos[0]; + + if (stream[kState].state === 'closed') { + if (bytesWritten !== 0) + throw new ERR_INVALID_ARG_VALUE('bytesWritten', bytesWritten); + } else { + assert(stream[kState].state === 'readable'); + + if (!bytesWritten) + throw new ERR_INVALID_ARG_VALUE('bytesWritten', bytesWritten); + + if ((desc.bytesFilled + bytesWritten) > desc.byteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('bytesWritten', bytesWritten); + } + + desc.buffer = transferArrayBuffer(desc.buffer); + + readableByteStreamControllerRespondInternal(controller, bytesWritten); +} + +function readableByteStreamControllerRespondInClosedState(controller, desc) { + assert(!desc.bytesFilled); + const { + stream, + } = controller[kState]; + if (readableStreamHasBYOBReader(stream)) { + while (readableStreamGetNumReadIntoRequests(stream) > 0) { + readableByteStreamControllerCommitPullIntoDescriptor( + stream, + readableByteStreamControllerShiftPendingPullInto(controller)); + } + } +} + +function readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + size, + desc) { + const { + pendingPullIntos, + byobRequest, + } = controller[kState]; + assert(!pendingPullIntos.length || pendingPullIntos[0] === desc); + assert(byobRequest === null); + desc.bytesFilled += size; +} + +function readableByteStreamControllerEnqueue( + controller, + buffer, + byteLength, + byteOffset) { + const { + 
closeRequested, + pendingPullIntos, + queue, + stream, + } = controller[kState]; + + if (closeRequested || stream[kState].state !== 'readable') + return; + + const transferredBuffer = transferArrayBuffer(buffer); + + if (pendingPullIntos.length) { + const firstPendingPullInto = pendingPullIntos[0]; + + const pendingBufferByteLength = + ArrayBufferGetByteLength(firstPendingPullInto.buffer); + if (pendingBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'Destination ArrayBuffer is zero-length or detached'); + } + + firstPendingPullInto.buffer = + transferArrayBuffer(firstPendingPullInto.buffer); + } + + readableByteStreamControllerInvalidateBYOBRequest(controller); + + if (readableStreamHasDefaultReader(stream)) { + if (!readableStreamGetNumReadRequests(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength); + } else { + assert(!queue.length); + const transferredView = + new Uint8Array(transferredBuffer, byteOffset, byteLength); + readableStreamFulfillReadRequest(stream, transferredView, false); + } + } else if (readableStreamHasBYOBReader(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller); + } else { + assert(!isReadableStreamLocked(stream)); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerEnqueueChunkToQueue( + controller, + buffer, + byteOffset, + byteLength) { + ArrayPrototypePush( + controller[kState].queue, + { + buffer, + byteOffset, + byteLength, + }); + controller[kState].queueTotalSize += byteLength; +} + +function readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc) { + const { + buffer, + byteLength, + byteOffset, 
+ bytesFilled, + elementSize, + } = desc; + const currentAlignedBytes = bytesFilled - (bytesFilled % elementSize); + const maxBytesToCopy = MathMin( + controller[kState].queueTotalSize, + byteLength - bytesFilled); + const maxBytesFilled = bytesFilled + maxBytesToCopy; + const maxAlignedBytes = maxBytesFilled - (maxBytesFilled % elementSize); + let totalBytesToCopyRemaining = maxBytesToCopy; + let ready = false; + if (maxAlignedBytes > currentAlignedBytes) { + totalBytesToCopyRemaining = maxAlignedBytes - bytesFilled; + ready = true; + } + const { + queue, + } = controller[kState]; + + while (totalBytesToCopyRemaining) { + const headOfQueue = queue[0]; + const bytesToCopy = MathMin( + totalBytesToCopyRemaining, + headOfQueue.byteLength); + const destStart = byteOffset + desc.bytesFilled; + const arrayBufferByteLength = ArrayBufferGetByteLength(buffer); + if (arrayBufferByteLength - destStart < bytesToCopy) { + throw new ERR_INVALID_STATE.RangeError( + 'view ArrayBuffer size is invalid'); + } + assert(arrayBufferByteLength - destStart >= bytesToCopy); + copyArrayBuffer( + buffer, + destStart, + headOfQueue.buffer, + headOfQueue.byteOffset, + bytesToCopy); + if (headOfQueue.byteLength === bytesToCopy) { + ArrayPrototypeShift(queue); + } else { + headOfQueue.byteOffset += bytesToCopy; + headOfQueue.byteLength -= bytesToCopy; + } + controller[kState].queueTotalSize -= bytesToCopy; + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesToCopy, + desc); + totalBytesToCopyRemaining -= bytesToCopy; + } + + if (!ready) { + assert(!controller[kState].queueTotalSize); + assert(desc.bytesFilled > 0); + assert(desc.bytesFilled < elementSize); + } + return ready; +} + +function readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller) { + const { + closeRequested, + pendingPullIntos, + stream, + } = controller[kState]; + assert(!closeRequested); + while (pendingPullIntos.length) { + if (!controller[kState].queueTotalSize) + return; + 
const desc = pendingPullIntos[0]; + if (readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc)) { + readableByteStreamControllerShiftPendingPullInto(controller); + readableByteStreamControllerCommitPullIntoDescriptor(stream, desc); + } + } +} + +function readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + desc) { + const { + buffer, + bytesFilled, + byteLength, + } = desc; + + if (bytesFilled + bytesWritten > byteLength) + throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); + + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesWritten, + desc); + + if (desc.bytesFilled < desc.elementSize) + return; + + readableByteStreamControllerShiftPendingPullInto(controller); + + const remainderSize = desc.bytesFilled % desc.elementSize; + + if (remainderSize) { + const end = desc.byteOffset + desc.bytesFilled; + const start = end - remainderSize; + const remainder = + ArrayBufferPrototypeSlice( + buffer, + start, + end); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + remainder, + 0, + ArrayBufferGetByteLength(remainder)); + } + desc.bytesFilled -= remainderSize; + readableByteStreamControllerCommitPullIntoDescriptor( + controller[kState].stream, + desc); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); +} + +function readableByteStreamControllerRespondWithNewView(controller, view) { + const { + stream, + pendingPullIntos, + } = controller[kState]; + assert(pendingPullIntos.length); + + const desc = pendingPullIntos[0]; + assert(stream[kState].state !== 'errored'); + + if (!isArrayBufferView(view)) { + throw new ERR_INVALID_ARG_TYPE( + 'view', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + view); + } + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewByteOffset = ArrayBufferViewGetByteOffset(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = 
ArrayBufferGetByteLength(viewBuffer); + + const { + byteOffset, + byteLength, + bytesFilled, + bufferByteLength, + } = desc; + + if (byteOffset + bytesFilled !== viewByteOffset) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + if (bytesFilled + viewByteOffset > byteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + if (bufferByteLength !== viewBufferByteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + desc.buffer = transferArrayBuffer(viewBuffer); + + readableByteStreamControllerRespondInternal(controller, viewByteLength); +} + +function readableByteStreamControllerShiftPendingPullInto(controller) { + assert(controller[kState].byobRequest === null); + return ArrayPrototypeShift(controller[kState].pendingPullIntos); +} + +function readableByteStreamControllerCallPullIfNeeded(controller) { + if (!readableByteStreamControllerShouldCallPull(controller)) + return; + if (controller[kState].pulling) { + controller[kState].pullAgain = true; + return; + } + assert(!controller[kState].pullAgain); + controller[kState].pulling = true; + PromisePrototypeThen( + ensureIsPromise(controller[kState].pullAlgorithm, controller), + () => { + controller[kState].pulling = false; + if (controller[kState].pullAgain) { + controller[kState].pullAgain = false; + readableByteStreamControllerCallPullIfNeeded(controller); + } + }, + (error) => readableByteStreamControllerError(controller, error)); +} + +function readableByteStreamControllerError(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state !== 'readable') + return; + readableByteStreamControllerClearPendingPullIntos(controller); + resetQueue(controller); + readableByteStreamControllerClearAlgorithms(controller); + readableStreamError(stream, error); +} + +function readableByteStreamControllerCancelSteps(controller, reason) { + readableByteStreamControllerClearPendingPullIntos(controller); + resetQueue(controller); + const result = 
controller[kState].cancelAlgorithm(reason); + readableByteStreamControllerClearAlgorithms(controller); + return result; +} + +function readableByteStreamControllerPullSteps(controller, readRequest) { + const { + pendingPullIntos, + queue, + queueTotalSize, + stream, + } = controller[kState]; + assert(readableStreamHasDefaultReader(stream)); + if (queueTotalSize) { + assert(!readableStreamGetNumReadRequests(stream)); + const { + buffer, + byteOffset, + byteLength, + } = ArrayPrototypeShift(queue); + controller[kState].queueTotalSize -= byteLength; + readableByteStreamControllerHandleQueueDrain(controller); + const view = new Uint8Array(buffer, byteOffset, byteLength); + readRequest[kChunk](view); + return; + } + const { + autoAllocateChunkSize, + } = controller[kState]; + if (autoAllocateChunkSize !== undefined) { + try { + const buffer = new ArrayBuffer(autoAllocateChunkSize); + ArrayPrototypePush( + pendingPullIntos, + { + buffer, + byteOffset: 0, + byteLength: autoAllocateChunkSize, + bytesFilled: 0, + elementSize: 1, + ctor: Uint8Array, + type: 'default', + }); + } catch (error) { + readRequest[kError](error); + return; + } + } + + readableStreamAddReadRequest(stream, readRequest); + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function setupReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize) { + assert(stream[kState].controller === undefined); + if (autoAllocateChunkSize !== undefined) { + assert(NumberIsInteger(autoAllocateChunkSize)); + assert(autoAllocateChunkSize > 0); + } + controller[kState] = { + byobRequest: null, + closeRequested: false, + pullAgain: false, + pulling: false, + started: false, + stream, + queue: [], + queueTotalSize: 0, + highWaterMark, + pullAlgorithm, + cancelAlgorithm, + autoAllocateChunkSize, + pendingPullIntos: [], + }; + stream[kState].controller = controller; + + const startResult = startAlgorithm(); + + 
PromisePrototypeThen( + PromiseResolve(startResult), + () => { + controller[kState].started = true; + assert(!controller[kState].pulling); + assert(!controller[kState].pullAgain); + readableByteStreamControllerCallPullIfNeeded(controller); + }, + (error) => readableByteStreamControllerError(controller, error)); +} + +function setupReadableByteStreamControllerFromSource( + stream, + source, + highWaterMark) { + const controller = createReadableByteStreamController(); + const start = source?.start; + const pull = source?.pull; + const cancel = source?.cancel; + const autoAllocateChunkSize = source?.autoAllocateChunkSize; + const startAlgorithm = start ? + FunctionPrototypeBind(start, source, controller) : + nonOpStart; + const pullAlgorithm = pull ? + FunctionPrototypeBind(pull, source, controller) : + nonOpPull; + const cancelAlgorithm = cancel ? + FunctionPrototypeBind(cancel, source) : + nonOpCancel; + + if (autoAllocateChunkSize === 0) { + throw new ERR_INVALID_ARG_VALUE( + 'source.autoAllocateChunkSize', + autoAllocateChunkSize); + } + setupReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize); +} + +module.exports = { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransferredReadableStream, + + // Exported Brand Checks + isReadableStream, + isReadableByteStreamController, + isReadableStreamBYOBRequest, + isReadableStreamDefaultReader, + isReadableStreamBYOBReader, + isWritableStreamDefaultWriter, + isWritableStreamDefaultController, + + readableStreamPipeTo, + readableStreamTee, + readableByteStreamControllerConvertPullIntoDescriptor, + isReadableStreamLocked, + readableStreamCancel, + readableStreamClose, + readableStreamError, + readableStreamHasDefaultReader, + readableStreamGetNumReadRequests, + readableStreamHasBYOBReader, + 
readableStreamGetNumReadIntoRequests, + readableStreamFulfillReadRequest, + readableStreamFulfillReadIntoRequest, + readableStreamAddReadRequest, + readableStreamAddReadIntoRequest, + readableStreamReaderGenericCancel, + readableStreamReaderGenericInitialize, + readableStreamReaderGenericRelease, + readableStreamBYOBReaderRead, + readableStreamDefaultReaderRead, + setupReadableStreamBYOBReader, + setupReadableStreamDefaultReader, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerHasBackpressure, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableStreamDefaultControllerGetDesiredSize, + readableStreamDefaultControllerShouldCallPull, + readableStreamDefaultControllerCallPullIfNeeded, + readableStreamDefaultControllerClearAlgorithms, + readableStreamDefaultControllerError, + readableStreamDefaultControllerCancelSteps, + readableStreamDefaultControllerPullSteps, + setupReadableStreamDefaultController, + setupReadableStreamDefaultControllerFromSource, + readableByteStreamControllerClose, + readableByteStreamControllerCommitPullIntoDescriptor, + readableByteStreamControllerInvalidateBYOBRequest, + readableByteStreamControllerClearAlgorithms, + readableByteStreamControllerClearPendingPullIntos, + readableByteStreamControllerGetDesiredSize, + readableByteStreamControllerShouldCallPull, + readableByteStreamControllerHandleQueueDrain, + readableByteStreamControllerPullInto, + readableByteStreamControllerRespondInternal, + readableByteStreamControllerRespond, + readableByteStreamControllerRespondInClosedState, + readableByteStreamControllerFillHeadPullIntoDescriptor, + readableByteStreamControllerEnqueue, + readableByteStreamControllerEnqueueChunkToQueue, + readableByteStreamControllerFillPullIntoDescriptorFromQueue, + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue, + readableByteStreamControllerRespondInReadableState, + readableByteStreamControllerRespondWithNewView, + 
readableByteStreamControllerShiftPendingPullInto, + readableByteStreamControllerCallPullIfNeeded, + readableByteStreamControllerError, + readableByteStreamControllerCancelSteps, + readableByteStreamControllerPullSteps, + setupReadableByteStreamController, + setupReadableByteStreamControllerFromSource, +}; + +/* eslint-enable no-use-before-define */ diff --git a/lib/internal/webstreams/transfer.js b/lib/internal/webstreams/transfer.js new file mode 100644 index 00000000000000..985d7e86738f35 --- /dev/null +++ b/lib/internal/webstreams/transfer.js @@ -0,0 +1,299 @@ +'use strict'; + +const { + ObjectDefineProperties, + PromiseResolve, + ReflectConstruct, +} = primordials; + +const { + kState, + setPromiseHandled, +} = require('internal/webstreams/util'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + ReadableStream, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerError, + readableStreamPipeTo, +} = require('internal/webstreams/readablestream'); + +const { + WritableStream, + writableStreamDefaultControllerErrorIfNeeded, +} = require('internal/webstreams/writablestream'); + +const { + createDeferredPromise, +} = require('internal/util'); + +const assert = require('internal/assert'); + +const { + makeTransferable, + kClone, + kDeserialize, +} = require('internal/worker/js_transferable'); + +// This class is a bit of a hack. The Node.js implementation of +// DOMException is not transferable/cloneable. This provides us +// with a variant that is. Unfortunately, it means playing around +// a bit with the message, name, and code properties and the +// prototype. We can revisit this if DOMException is ever made +// properly cloneable. 
+class CloneableDOMException extends DOMException { + constructor(message, name) { + super(message, name); + this[kDeserialize]({ + message: this.message, + name: this.name, + code: this.code, + }); + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + [kClone]() { + return { + data: { + message: this.message, + name: this.name, + code: this.code, + }, + deserializeInfo: + 'internal/webstreams/transfer:InternalCloneableDOMException' + }; + } + + [kDeserialize]({ message, name, code }) { + ObjectDefineProperties(this, { + message: { + configurable: true, + enumerable: true, + get() { return message; }, + }, + name: { + configurable: true, + enumerable: true, + get() { return name; }, + }, + code: { + configurable: true, + enumerable: true, + get() { return code; }, + }, + }); + } +} + +function InternalCloneableDOMException() { + return makeTransferable( + ReflectConstruct( + CloneableDOMException, + [], + DOMException)); +} +InternalCloneableDOMException[kDeserialize] = () => {}; + +class CrossRealmTransformReadableSource { + constructor(port) { + this[kState] = { + port, + controller: undefined, + }; + + port.onmessage = ({ data }) => { + const { + controller, + } = this[kState]; + const { + type, + value, + } = data; + switch (type) { + case 'chunk': + readableStreamDefaultControllerEnqueue( + controller, + value); + break; + case 'close': + readableStreamDefaultControllerClose(controller); + port.close(); + break; + case 'error': + readableStreamDefaultControllerError(controller, value); + port.close(); + break; + } + }; + + port.onmessageerror = () => { + const error = new CloneableDOMException( + 'Internal transferred ReadableStream error', + 'DataCloneError'); + port.postMessage({ type: 'error', value: error }); + readableStreamDefaultControllerError( + this[kState].controller, + error); + port.close(); + }; + } + + start(controller) { + this[kState].controller = controller; + } + + async pull() { + 
this[kState].port.postMessage({ type: 'pull' }); + } + + async cancel(reason) { + try { + this[kState].port.postMessage({ type: 'error', value: reason }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + throw error; + } finally { + this[kState].port.close(); + } + } +} + +class CrossRealmTransformWritableSink { + constructor(port) { + this[kState] = { + port, + controller: undefined, + backpressurePromise: createDeferredPromise(), + }; + + port.onmessage = ({ data }) => { + assert(typeof data === 'object'); + const { + type, + value + } = { ...data }; + assert(typeof type === 'string'); + switch (type) { + case 'pull': + if (this[kState].backpressurePromise !== undefined) + this[kState].backpressurePromise.resolve?.(); + this[kState].backpressurePromise = undefined; + break; + case 'error': + writableStreamDefaultControllerErrorIfNeeded( + this[kState].controller, + value); + if (this[kState].backpressurePromise !== undefined) + this[kState].backpressurePromise.resolve?.(); + this[kState].backpressurePromise = undefined; + break; + } + }; + port.onmessageerror = () => { + const error = new CloneableDOMException( + 'Internal transferred ReadableStream error', + 'DataCloneError'); + port.postMessage({ type: 'error', value: error }); + writableStreamDefaultControllerErrorIfNeeded( + this[kState].controller, + error); + port.close(); + }; + + } + + start(controller) { + this[kState].controller = controller; + } + + async write(chunk) { + if (this[kState].backpressurePromise === undefined) { + this[kState].backpressurePromise = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + } + await this[kState].backpressurePromise.promise; + this[kState].backpressurePromise = createDeferredPromise(); + try { + this[kState].port.postMessage({ type: 'chunk', value: 
chunk }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + this[kState].port.close(); + throw error; + } + } + + close() { + this[kState].port.postMessage({ type: 'close' }); + this[kState].port.close(); + } + + abort(reason) { + try { + this[kState].port.postMessage({ type: 'error', value: reason }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + throw error; + } finally { + this[kState].port.close(); + } + } +} + +function newCrossRealmReadableStream(writable, port) { + const readable = + new ReadableStream( + new CrossRealmTransformReadableSource(port)); + + const promise = + readableStreamPipeTo(readable, writable, false, false, false); + + setPromiseHandled(promise); + + return { + readable, + promise, + }; +} + +function newCrossRealmWritableSink(readable, port) { + const writable = + new WritableStream( + new CrossRealmTransformWritableSink(port)); + + const promise = readableStreamPipeTo(readable, writable, false, false, false); + setPromiseHandled(promise); + return { + writable, + promise, + }; +} + +module.exports = { + newCrossRealmReadableStream, + newCrossRealmWritableSink, + CrossRealmTransformWritableSink, + CrossRealmTransformReadableSource, + CloneableDOMException, + InternalCloneableDOMException, +}; diff --git a/lib/internal/webstreams/transformstream.js b/lib/internal/webstreams/transformstream.js new file mode 100644 index 00000000000000..62940a1fb9da76 --- /dev/null +++ b/lib/internal/webstreams/transformstream.js @@ -0,0 +1,591 @@ +'use strict'; + +const { + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + PromisePrototypeCatch, + 
PromisePrototypeThen, + PromiseResolve, + ReflectConstruct, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + customInspect, + ensureIsPromise, + extractHighWaterMark, + extractSizeAlgorithm, + isBrandCheck, + nonOpFlush, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + ReadableStream, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerError, + readableStreamDefaultControllerGetDesiredSize, + readableStreamDefaultControllerHasBackpressure, +} = require('internal/webstreams/readablestream'); + +const { + WritableStream, + writableStreamDefaultControllerErrorIfNeeded, +} = require('internal/webstreams/writablestream'); + +const assert = require('internal/assert'); + +/** + * @typedef {import('./queuingstrategies').QueuingStrategy + * } QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * + * @callback TransformerStartCallback + * @param {TransformStreamDefaultController} controller; + * + * @callback TransformerFlushCallback + * @param {TransformStreamDefaultController} controller; + * @returns {Promise} + * + * @callback TransformerTransformCallback + * @param {any} chunk + * @param {TransformStreamDefaultController} controller + * @returns {Promise} + * + * @typedef {{ + * start? : TransformerStartCallback, + * transform? : TransformerTransformCallback, + * flush? : TransformerFlushCallback, + * readableType? : any, + * writableType? 
: any, + * }} Transformer + */ + +class TransformStream { + [kType] = 'TransformStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {Transformer} [transformer] + * @param {QueuingStrategy} [writableStrategy] + * @param {QueuingStrategy} [readableStrategy] + */ + constructor( + transformer = null, + writableStrategy = {}, + readableStrategy = {}) { + const readableType = transformer?.readableType; + const writableType = transformer?.writableType; + const start = transformer?.start; + + if (readableType !== undefined) { + throw new ERR_INVALID_ARG_VALUE.RangeError( + 'transformer.readableType', + readableType); + } + if (writableType !== undefined) { + throw new ERR_INVALID_ARG_VALUE.RangeError( + 'transformer.writableType', + writableType); + } + + const readableHighWaterMark = readableStrategy?.highWaterMark; + const readableSize = readableStrategy?.size; + + const writableHighWaterMark = writableStrategy?.highWaterMark; + const writableSize = writableStrategy?.size; + + const actualReadableHighWaterMark = + extractHighWaterMark(readableHighWaterMark, 0); + const actualReadableSize = extractSizeAlgorithm(readableSize); + + const actualWritableHighWaterMark = + extractHighWaterMark(writableHighWaterMark, 1); + const actualWritableSize = extractSizeAlgorithm(writableSize); + + const startPromise = createDeferredPromise(); + + initializeTransformStream( + this, + startPromise, + actualWritableHighWaterMark, + actualWritableSize, + actualReadableHighWaterMark, + actualReadableSize); + + setupTransformStreamDefaultControllerFromTransformer(this, transformer); + + if (start !== undefined) { + startPromise.resolve( + FunctionPrototypeCall( + start, + transformer, + this[kState].controller)); + } else { + startPromise.resolve(); + } + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {ReadableStream} + */ + get readable() { + if (!isTransformStream(this)) + throw new 
ERR_INVALID_THIS('TransformStream'); + return this[kState].readable; + } + + /** + * @readonly + * @type {WritableStream} + */ + get writable() { + if (!isTransformStream(this)) + throw new ERR_INVALID_THIS('TransformStream'); + return this[kState].writable; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + readable: this.readable, + writable: this.writable, + backpressure: this[kState].backpressure, + }); + } + + [kTransfer]() { + if (!isTransformStream(this)) + throw new ERR_INVALID_THIS('TransformStream'); + const { + readable, + writable, + } = this[kState]; + if (readable.locked) { + throw new DOMException( + 'Cannot transfer a locked ReadableStream', + 'DataCloneError'); + } + if (writable.locked) { + throw new DOMException( + 'Cannot transfer a locked WritableStream', + 'DataCloneError'); + } + return { + data: { + readable, + writable, + }, + deserializeInfo: + 'internal/webstreams/transformstream:TransferredTransformStream' + }; + } + + [kTransferList]() { + return [ this[kState].readable, this[kState].writable ]; + } + + [kDeserialize]({ readable, writable }) { + this[kState].readable = readable; + this[kState].writable = writable; + } +} + +ObjectDefineProperties(TransformStream.prototype, { + readable: { enumerable: true }, + writable: { enumerable: true }, +}); + +function TransferredTransformStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'TransformStream'; + this[kState] = { + readable: undefined, + writable: undefined, + backpressure: undefined, + backpressureChange: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + controller: undefined, + }; + }, + [], TransformStream)); +} +TransferredTransformStream.prototype[kDeserialize] = () => {}; + +class TransformStreamDefaultController { + [kType] = 'TransformStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + 
+ /** + * @readonly + * @type {number} + */ + get desiredSize() { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + const { + stream, + } = this[kState]; + const { + readable, + } = stream[kState]; + const { + controller: readableController, + } = readable[kState]; + return readableStreamDefaultControllerGetDesiredSize(readableController); + } + + /** + * @param {any} chunk + */ + enqueue(chunk = undefined) { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerEnqueue(this, chunk); + } + + /** + * @param {any} reason + */ + error(reason = undefined) { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerError(this, reason); + } + + terminate() { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerTerminate(this); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + }); + } +} + +ObjectDefineProperties(TransformStreamDefaultController.prototype, { + desiredSize: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, + terminate: { enumerable: true }, +}); + +function createTransformStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'TransformStreamDefaultController'; + }, + [], + TransformStreamDefaultController); +} + +const isTransformStream = + isBrandCheck('TransformStream'); +const isTransformStreamDefaultController = + isBrandCheck('TransformStreamDefaultController'); + +async function defaultTransformAlgorithm(chunk, controller) { + transformStreamDefaultControllerEnqueue(controller, chunk); +} + +function initializeTransformStream( + stream, + startPromise, + 
writableHighWaterMark, + writableSizeAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm) { + + const writable = new WritableStream({ + start() { return startPromise.promise; }, + write(chunk) { + return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); + }, + abort(reason) { + return transformStreamDefaultSinkAbortAlgorithm(stream, reason); + }, + close() { + return transformStreamDefaultSinkCloseAlgorithm(stream); + }, + }, { + highWaterMark: writableHighWaterMark, + size: writableSizeAlgorithm, + }); + + const readable = new ReadableStream({ + start() { return startPromise.promise; }, + pull() { + return transformStreamDefaultSourcePullAlgorithm(stream); + }, + cancel(reason) { + transformStreamErrorWritableAndUnblockWrite(stream, reason); + return PromiseResolve(); + }, + }, { + highWaterMark: readableHighWaterMark, + size: readableSizeAlgorithm, + }); + + stream[kState] = { + readable, + writable, + controller: undefined, + backpressure: undefined, + backpressureChange: { + promise: undefined, + resolve: undefined, + reject: undefined, + } + }; + + transformStreamSetBackpressure(stream, true); +} + +function transformStreamError(stream, error) { + const { + readable, + } = stream[kState]; + const { + controller, + } = readable[kState]; + readableStreamDefaultControllerError(controller, error); + transformStreamErrorWritableAndUnblockWrite(stream, error); +} + +function transformStreamErrorWritableAndUnblockWrite(stream, error) { + const { + controller, + writable, + } = stream[kState]; + transformStreamDefaultControllerClearAlgorithms(controller); + writableStreamDefaultControllerErrorIfNeeded( + writable[kState].controller, + error); + if (stream[kState].backpressure) + transformStreamSetBackpressure(stream, false); +} + +function transformStreamSetBackpressure(stream, backpressure) { + assert(stream[kState].backpressure !== backpressure); + if (stream[kState].backpressureChange.promise !== undefined) + 
stream[kState].backpressureChange.resolve?.(); + stream[kState].backpressureChange = createDeferredPromise(); + stream[kState].backpressure = backpressure; +} + +function setupTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm) { + assert(isTransformStream(stream)); + assert(stream[kState].controller === undefined); + controller[kState] = { + stream, + transformAlgorithm, + flushAlgorithm, + }; + stream[kState].controller = controller; +} + +function setupTransformStreamDefaultControllerFromTransformer( + stream, + transformer) { + const controller = createTransformStreamDefaultController(); + const transform = transformer?.transform || defaultTransformAlgorithm; + const flush = transformer?.flush || nonOpFlush; + const transformAlgorithm = + FunctionPrototypeBind(transform, transformer); + const flushAlgorithm = + FunctionPrototypeBind(flush, transformer); + + setupTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm); +} + +function transformStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].transformAlgorithm = undefined; + controller[kState].flushAlgorithm = undefined; +} + +function transformStreamDefaultControllerEnqueue(controller, chunk) { + const { + stream, + } = controller[kState]; + const { + readable, + } = stream[kState]; + const { + controller: readableController, + } = readable[kState]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) + throw new ERR_INVALID_STATE.TypeError('Unable to enqueue'); + try { + readableStreamDefaultControllerEnqueue(readableController, chunk); + } catch (error) { + transformStreamErrorWritableAndUnblockWrite(stream, error); + throw readable[kState].storedError; + } + const backpressure = + readableStreamDefaultControllerHasBackpressure(readableController); + if (backpressure !== stream[kState].backpressure) { + assert(backpressure); + transformStreamSetBackpressure(stream, true); + } 
+} + +function transformStreamDefaultControllerError(controller, error) { + transformStreamError(controller[kState].stream, error); +} + +function transformStreamDefaultControllerPerformTransform(controller, chunk) { + const transformPromise = + ensureIsPromise( + controller[kState].transformAlgorithm, + controller, + chunk, + controller); + return PromisePrototypeCatch( + transformPromise, + (error) => { + transformStreamError(controller[kState].stream, error); + throw error; + }); +} + +function transformStreamDefaultControllerTerminate(controller) { + const { + stream, + } = controller[kState]; + const { + readable, + } = stream[kState]; + assert(readable !== undefined); + const { + controller: readableController, + } = readable[kState]; + readableStreamDefaultControllerClose(readableController); + transformStreamErrorWritableAndUnblockWrite( + stream, + new ERR_INVALID_STATE.TypeError('TransformStream has been terminated')); +} + +function transformStreamDefaultSinkWriteAlgorithm(stream, chunk) { + const { + writable, + controller, + } = stream[kState]; + assert(writable[kState].state === 'writable'); + if (stream[kState].backpressure) { + const backpressureChange = stream[kState].backpressureChange.promise; + return PromisePrototypeThen( + backpressureChange, + () => { + const { + writable, + } = stream[kState]; + if (writable[kState].state === 'erroring') + throw writable[kState].storedError; + assert(writable[kState].state === 'writable'); + return transformStreamDefaultControllerPerformTransform( + controller, + chunk); + }); + } + return transformStreamDefaultControllerPerformTransform(controller, chunk); +} + +async function transformStreamDefaultSinkAbortAlgorithm(stream, reason) { + transformStreamError(stream, reason); +} + +function transformStreamDefaultSinkCloseAlgorithm(stream) { + const { + readable, + controller, + } = stream[kState]; + + const flushPromise = + ensureIsPromise( + controller[kState].flushAlgorithm, + controller, + controller); + 
transformStreamDefaultControllerClearAlgorithms(controller); + return PromisePrototypeThen( + flushPromise, + () => { + if (readable[kState].state === 'errored') + throw readable[kState].storedError; + readableStreamDefaultControllerClose(readable[kState].controller); + }, + (error) => { + transformStreamError(stream, error); + throw readable[kState].storedError; + }); +} + +function transformStreamDefaultSourcePullAlgorithm(stream) { + assert(stream[kState].backpressure); + assert(stream[kState].backpressureChange.promise !== undefined); + transformStreamSetBackpressure(stream, false); + return stream[kState].backpressureChange.promise; +} + +module.exports = { + TransformStream, + TransformStreamDefaultController, + TransferredTransformStream, + + // Exported Brand Checks + isTransformStream, + isTransformStreamDefaultController, +}; diff --git a/lib/internal/webstreams/util.js b/lib/internal/webstreams/util.js new file mode 100644 index 00000000000000..8cf31d1c307ca9 --- /dev/null +++ b/lib/internal/webstreams/util.js @@ -0,0 +1,237 @@ +'use strict'; + +const { + ArrayBufferPrototype, + ArrayPrototypePush, + ArrayPrototypeShift, + AsyncIteratorPrototype, + FunctionPrototypeCall, + MathMax, + NumberIsNaN, + ObjectCreate, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + ReflectGet, + Symbol, +} = primordials; + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_OPERATION_FAILED, + }, +} = require('internal/errors'); + +const { + copyArrayBuffer, + detachArrayBuffer +} = internalBinding('buffer'); + +const { + isPromise, +} = require('util/types'); + +const { + inspect, +} = require('util'); + +const { + getPromiseDetails, + kPending, +} = internalBinding('util'); + +const assert = require('internal/assert'); + +const kState = Symbol('kState'); +const kType = Symbol('kType'); + +const AsyncIterator = ObjectCreate(AsyncIteratorPrototype, { + next: { + configurable: true, + enumerable: true, + writable: true, + }, + return: { + 
configurable: true, + enumerable: true, + writable: true, + }, +}); + +function extractHighWaterMark(value, defaultHWM) { + if (value === undefined) return defaultHWM; + value = +value; + if (typeof value !== 'number' || + NumberIsNaN(value) || + value < 0) + throw new ERR_INVALID_ARG_VALUE.RangeError('strategy.highWaterMark', value); + return value; +} + +function extractSizeAlgorithm(size) { + if (size === undefined) return () => 1; + if (typeof size !== 'function') + throw new ERR_INVALID_ARG_TYPE('strategy.size', 'Function', size); + return size; +} + +function customInspect(depth, options, name, data) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? null : options.depth - 1 + }; + + return `${name} ${inspect(data, opts)}`; +} + +// These are defensive to work around the possibility that +// the buffer, byteLength, and byteOffset properties on +// ArrayBuffer and ArrayBufferView's may have been tampered with. + +function ArrayBufferViewGetBuffer(view) { + return ReflectGet(view.constructor.prototype, 'buffer', view); +} + +function ArrayBufferViewGetByteLength(view) { + return ReflectGet(view.constructor.prototype, 'byteLength', view); +} + +function ArrayBufferViewGetByteOffset(view) { + return ReflectGet(view.constructor.prototype, 'byteOffset', view); +} + +function ArrayBufferGetByteLength(view) { + return ReflectGet(ArrayBufferPrototype, 'byteLength', view); +} + +function isBrandCheck(brand) { + return (value) => { + return value != null && + value[kState] !== undefined && + value[kType] === brand; + }; +} + +function transferArrayBuffer(buffer) { + const res = detachArrayBuffer(buffer); + if (res === undefined) { + throw new ERR_OPERATION_FAILED.TypeError( + 'The ArrayBuffer could not be transferred'); + } + return res; +} + +function dequeueValue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + 
assert(controller[kState].queue.length); + const { + value, + size, + } = ArrayPrototypeShift(controller[kState].queue); + controller[kState].queueTotalSize = + MathMax(0, controller[kState].queueTotalSize - size); + return value; +} + +function resetQueue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + controller[kState].queue = []; + controller[kState].queueTotalSize = 0; +} + +function peekQueueValue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + assert(controller[kState].queue.length); + return controller[kState].queue[0].value; +} + +function enqueueValueWithSize(controller, value, size) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + size = +size; + if (typeof size !== 'number' || + size < 0 || + NumberIsNaN(size) || + size === Infinity) { + throw new ERR_INVALID_ARG_VALUE.RangeError('size', size); + } + ArrayPrototypePush(controller[kState].queue, { value, size }); + controller[kState].queueTotalSize += size; +} + +function ensureIsPromise(fn, thisArg, ...args) { + try { + const value = FunctionPrototypeCall(fn, thisArg, ...args); + return isPromise(value) ? value : PromiseResolve(value); + } catch (error) { + return PromiseReject(error); + } +} + +function isPromisePending(promise) { + if (promise === undefined) return false; + const details = getPromiseDetails(promise); + return details?.[0] === kPending; +} + +function setPromiseHandled(promise) { + // Alternatively, we could use the native API + // MarkAsHandled, but this avoids the extra boundary cross + // and is hopefully faster at the cost of an extra Promise + // allocation. 
+ PromisePrototypeThen(promise, () => {}, () => {}); +} + +async function nonOpFlush() {} + +function nonOpStart() {} + +async function nonOpPull() {} + +async function nonOpCancel() {} + +async function nonOpWrite() {} + +let transfer; +function lazyTransfer() { + if (transfer === undefined) + transfer = require('internal/webstreams/transfer'); + return transfer; +} + +module.exports = { + ArrayBufferViewGetBuffer, + ArrayBufferViewGetByteLength, + ArrayBufferViewGetByteOffset, + ArrayBufferGetByteLength, + AsyncIterator, + copyArrayBuffer, + customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + isPromisePending, + peekQueueValue, + resetQueue, + setPromiseHandled, + transferArrayBuffer, + nonOpCancel, + nonOpFlush, + nonOpPull, + nonOpStart, + nonOpWrite, + kType, + kState, +}; diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js new file mode 100644 index 00000000000000..c36ce82fcdcfbf --- /dev/null +++ b/lib/internal/webstreams/writablestream.js @@ -0,0 +1,1329 @@ +'use strict'; + +/* eslint-disable no-use-before-define */ + +const { + ArrayPrototypePush, + ArrayPrototypeShift, + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + ReflectConstruct, + Symbol, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + MessageChannel, +} = require('internal/worker/io'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + 
customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + isPromisePending, + peekQueueValue, + resetQueue, + setPromiseHandled, + nonOpCancel, + nonOpStart, + nonOpWrite, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + AbortController, +} = require('internal/abort_controller'); + +const assert = require('internal/assert'); + +const kAbort = Symbol('kAbort'); +const kCloseSentinel = Symbol('kCloseSentinel'); +const kError = Symbol('kError'); + +/** + * @typedef {import('../abort_controller').AbortSignal} AbortSignal + * @typedef {import('./queuingstrategies').QueuingStrategy + * } QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * + * @callback UnderlyingSinkStartCallback + * @param {WritableStreamDefaultController} controller + * + * @callback UnderlyingSinkWriteCallback + * @param {any} chunk + * @param {WritableStreamDefaultController} controller + * @returns {Promise} + * + * @callback UnderlyingSinkCloseCallback + * @returns {Promise} + * + * @callback UnderlyingSinkAbortCallback + * @param {any} reason + * @returns {Promise} + * + * @typedef {{ + * start? : UnderlyingSinkStartCallback, + * write? : UnderlyingSinkWriteCallback, + * close? : UnderlyingSinkCloseCallback, + * abort? : UnderlyingSinkAbortCallback, + * type? 
: any, + * }} UnderlyingSink + */ + +class WritableStream { + [kType] = 'WritableStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {UnderlyingSink} [sink] + * @param {QueuingStrategy} [strategy] + */ + constructor(sink = null, strategy = {}) { + const type = sink?.type; + if (type !== undefined) + throw new ERR_INVALID_ARG_VALUE.RangeError('type', type); + + this[kState] = { + close: createDeferredPromise(), + closeRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightWriteRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightCloseRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + pendingAbortRequest: { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }, + backpressure: false, + controller: undefined, + state: 'writable', + storedError: undefined, + writeRequests: [], + writer: undefined, + transfer: { + readable: undefined, + port1: undefined, + port2: undefined, + promise: undefined, + } + }; + + const size = extractSizeAlgorithm(strategy?.size); + const highWaterMark = extractHighWaterMark(strategy?.highWaterMark, 1); + + setupWritableStreamDefaultControllerFromSink( + this, + sink, + highWaterMark, + size); + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {boolean} + */ + get locked() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + return isWritableStreamLocked(this); + } + + /** + * @param {any} reason + * @returns {Promise} + */ + abort(reason = undefined) { + if (!isWritableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStream')); + if (isWritableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is locked')); + } + return writableStreamAbort(this, reason); + } + + /** 
+ * @returns {Promise} + */ + close() { + if (!isWritableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStream')); + if (isWritableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is locked')); + } + if (writableStreamCloseQueuedOrInFlight(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Failure closing WritableStream')); + } + return writableStreamClose(this); + } + + /** + * @returns {WritableStreamDefaultWriter} + */ + getWriter() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + return new WritableStreamDefaultWriter(this); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + locked: this.locked, + state: this[kState].state, + }); + } + + [kTransfer]() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + if (this.locked) { + this[kState].transfer.port1?.close(); + this[kState].transfer.port1 = undefined; + this[kState].transfer.port2 = undefined; + throw new DOMException( + 'Cannot transfer a locked WritableStream', + 'DataCloneError'); + } + + const { + readable, + promise, + } = lazyTransfer().newCrossRealmReadableStream( + this, + this[kState].transfer.port1); + + this[kState].transfer.readable = readable; + this[kState].transfer.promise = promise; + + setPromiseHandled(this[kState].transfer.promise); + + return { + data: { port: this[kState].transfer.port2 }, + deserializeInfo: + 'internal/webstreams/writablestream:TransferredWritableStream' + }; + } + + [kTransferList]() { + const { port1, port2 } = new MessageChannel(); + this[kState].transfer.port1 = port1; + this[kState].transfer.port2 = port2; + return [ port2 ]; + } + + [kDeserialize]({ port }) { + const transfer = lazyTransfer(); + setupWritableStreamDefaultControllerFromSink( + this, + new transfer.CrossRealmTransformWritableSink(port), + 1, + () => 1); + } +} + +ObjectDefineProperties(WritableStream.prototype, { 
+ locked: { enumerable: true }, + abort: { enumerable: true }, + close: { enumerable: true }, + getWriter: { enumerable: true }, +}); + +function TransferredWritableStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'WritableStream'; + this[kState] = { + close: createDeferredPromise(), + closeRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightWriteRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightCloseRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + pendingAbortRequest: { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }, + backpressure: false, + controller: undefined, + state: 'writable', + storedError: undefined, + writeRequests: [], + writer: undefined, + transfer: { + promise: undefined, + port1: undefined, + port2: undefined, + readable: undefined, + }, + }; + }, + [], WritableStream)); +} +TransferredWritableStream.prototype[kDeserialize] = () => {}; + +class WritableStreamDefaultWriter { + [kType] = 'WritableStreamDefaultWriter'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {WritableStream} stream + */ + constructor(stream) { + if (!isWritableStream(stream)) + throw new ERR_INVALID_ARG_TYPE('stream', 'WritableStream', stream); + this[kState] = { + stream: undefined, + close: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + ready: { + promise: undefined, + resolve: undefined, + reject: undefined, + } + }; + setupWritableStreamDefaultWriter(this, stream); + } + + /** + * @readonly + * @type {Promise} + */ + get closed() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + return this[kState].close.promise; + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + if 
(!isWritableStreamDefaultWriter(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultWriter'); + if (this[kState].stream === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream'); + } + return writableStreamDefaultWriterGetDesiredSize(this); + } + + /** + * @readonly + * @type {Promise} + */ + get ready() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + return this[kState].ready.promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + abort(reason = undefined) { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + return writableStreamDefaultWriterAbort(this, reason); + } + + /** + * @returns {Promise} + */ + close() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + const { + stream, + } = this[kState]; + if (stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + if (writableStreamCloseQueuedOrInFlight(stream)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Failure to close WritableStream')); + } + return writableStreamDefaultWriterClose(this); + } + + releaseLock() { + if (!isWritableStreamDefaultWriter(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultWriter'); + const { + stream, + } = this[kState]; + if (stream === undefined) + return; + assert(stream[kState].writer !== undefined); + writableStreamDefaultWriterRelease(this); + } + + /** + * @param {any} chunk + * @returns {Promise} + */ + write(chunk = undefined) { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new 
ERR_INVALID_THIS('WritableStreamDefaultWriter')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + return writableStreamDefaultWriterWrite(this, chunk); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + close: this[kState].close.promise, + ready: this[kState].ready.promise, + desiredSize: this.desiredSize, + }); + } +} + +ObjectDefineProperties(WritableStreamDefaultWriter.prototype, { + closed: { enumerable: true }, + ready: { enumerable: true }, + desiredSize: { enumerable: true }, + abort: { enumerable: true }, + close: { enumerable: true }, + releaseLock: { enumerable: true }, + write: { enumerable: true }, +}); + +class WritableStreamDefaultController { + [kType] = 'WritableStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + [kAbort](reason) { + const result = this[kState].abortAlgorithm(reason); + writableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + [kError]() { + resetQueue(this); + } + + /** + * @type {any} + */ + get abortReason() { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + return this[kState].abortReason; + } + + /** + * @type {AbortSignal} + */ + get signal() { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + return this[kState].abortController.signal; + } + + /** + * @param {any} error + */ + error(error = undefined) { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + if (this[kState].stream[kState].state !== 'writable') + return; + writableStreamDefaultControllerError(this, error); + } + + [kInspect](depth, options) { + return customInspect(depth, options, 
this[kType], { + stream: this[kState].stream, + }); + } +} + +ObjectDefineProperties(WritableStreamDefaultController.prototype, { + abortReason: { enumerable: true }, + signal: { enumerable: true }, + error: { enumerable: true }, +}); + +function createWritableStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'WritableStreamDefaultController'; + }, + [], WritableStreamDefaultController); +} + +const isWritableStream = + isBrandCheck('WritableStream'); +const isWritableStreamDefaultWriter = + isBrandCheck('WritableStreamDefaultWriter'); +const isWritableStreamDefaultController = + isBrandCheck('WritableStreamDefaultController'); + +function isWritableStreamLocked(stream) { + return stream[kState].writer !== undefined; +} + +function setupWritableStreamDefaultWriter(writer, stream) { + if (isWritableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('WritableStream is locked'); + writer[kState].stream = stream; + stream[kState].writer = writer; + switch (stream[kState].state) { + case 'writable': + if (!writableStreamCloseQueuedOrInFlight(stream) && + stream[kState].backpressure) { + writer[kState].ready = createDeferredPromise(); + } else { + writer[kState].ready = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + } + setClosedPromiseToNewPromise(); + break; + case 'erroring': + writer[kState].ready = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(writer[kState].ready.promise); + setClosedPromiseToNewPromise(); + break; + case 'closed': + writer[kState].ready = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + writer[kState].close = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + break; + default: + writer[kState].ready = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + writer[kState].close = { 
+ promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(writer[kState].ready.promise); + setPromiseHandled(writer[kState].close.promise); + } + + function setClosedPromiseToNewPromise() { + writer[kState].close = createDeferredPromise(); + } +} + +function writableStreamAbort(stream, reason) { + const { + state, + controller, + } = stream[kState]; + if (state === 'closed' || state === 'errored') + return PromiseResolve(); + + controller[kState].abortReason = reason; + controller[kState].abortController.abort(); + + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) + return stream[kState].pendingAbortRequest.abort.promise; + + assert(state === 'writable' || state === 'erroring'); + + let wasAlreadyErroring = false; + if (state === 'erroring') { + wasAlreadyErroring = true; + reason = undefined; + } + + const abort = createDeferredPromise(); + + stream[kState].pendingAbortRequest = { + abort, + reason, + wasAlreadyErroring, + }; + + if (!wasAlreadyErroring) + writableStreamStartErroring(stream, reason); + + return abort.promise; +} + +function writableStreamClose(stream) { + const { + state, + writer, + backpressure, + controller, + } = stream[kState]; + if (state === 'closed' || state === 'errored') { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is closed')); + } + assert(state === 'writable' || state === 'erroring'); + assert(!writableStreamCloseQueuedOrInFlight(stream)); + stream[kState].closeRequest = createDeferredPromise(); + const { promise } = stream[kState].closeRequest; + if (writer !== undefined && backpressure && state === 'writable') + writer[kState].ready.resolve?.(); + writableStreamDefaultControllerClose(controller); + return promise; +} + +function writableStreamUpdateBackpressure(stream, backpressure) { + assert(stream[kState].state === 'writable'); + assert(!writableStreamCloseQueuedOrInFlight(stream)); + const { + writer, + } = 
stream[kState]; + if (writer !== undefined && stream[kState].backpressure !== backpressure) { + if (backpressure) { + writer[kState].ready = createDeferredPromise(); + } else { + writer[kState].ready.resolve?.(); + } + } + stream[kState].backpressure = backpressure; +} + +function writableStreamStartErroring(stream, reason) { + assert(stream[kState].storedError === undefined); + assert(stream[kState].state === 'writable'); + const { + controller, + writer, + } = stream[kState]; + assert(controller !== undefined); + stream[kState].state = 'erroring'; + stream[kState].storedError = reason; + if (writer !== undefined) { + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); + } + if (!writableStreamHasOperationMarkedInFlight(stream) && + controller[kState].started) { + writableStreamFinishErroring(stream); + } +} + +function writableStreamRejectCloseAndClosedPromiseIfNeeded(stream) { + assert(stream[kState].state === 'errored'); + if (stream[kState].closeRequest.promise !== undefined) { + assert(stream[kState].inFlightCloseRequest.promise === undefined); + stream[kState].closeRequest.reject?.(stream[kState].storedError); + stream[kState].closeRequest = { + promise: undefined, + reject: undefined, + resolve: undefined, + }; + } + const { + writer, + } = stream[kState]; + if (writer !== undefined) { + writer[kState].close.reject?.(stream[kState].storedError); + setPromiseHandled(writer[kState].close.promise); + } +} + +function writableStreamMarkFirstWriteRequestInFlight(stream) { + assert(stream[kState].inFlightWriteRequest.promise === undefined); + assert(stream[kState].writeRequests.length); + const writeRequest = ArrayPrototypeShift(stream[kState].writeRequests); + stream[kState].inFlightWriteRequest = writeRequest; +} + +function writableStreamMarkCloseRequestInFlight(stream) { + assert(stream[kState].inFlightWriteRequest.promise === undefined); + assert(stream[kState].closeRequest.promise !== undefined); + stream[kState].inFlightCloseRequest = 
stream[kState].closeRequest; + stream[kState].closeRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; +} + +function writableStreamHasOperationMarkedInFlight(stream) { + const { + inFlightWriteRequest, + inFlightCloseRequest, + } = stream[kState]; + if (inFlightWriteRequest.promise === undefined && + inFlightCloseRequest.promise === undefined) { + return false; + } + return true; +} + +function writableStreamFinishInFlightWriteWithError(stream, error) { + assert(stream[kState].inFlightWriteRequest.promise !== undefined); + stream[kState].inFlightWriteRequest.reject?.(error); + stream[kState].inFlightWriteRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + writableStreamDealWithRejection(stream, error); +} + +function writableStreamFinishInFlightWrite(stream) { + assert(stream[kState].inFlightWriteRequest.promise !== undefined); + stream[kState].inFlightWriteRequest.resolve?.(); + stream[kState].inFlightWriteRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; +} + +function writableStreamFinishInFlightCloseWithError(stream, error) { + assert(stream[kState].inFlightCloseRequest.promise !== undefined); + stream[kState].inFlightCloseRequest.reject?.(error); + stream[kState].inFlightCloseRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) { + stream[kState].pendingAbortRequest.abort.reject?.(error); + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + } + writableStreamDealWithRejection(stream, error); +} + +function writableStreamFinishInFlightClose(stream) { + 
assert(stream[kState].inFlightCloseRequest.promise !== undefined); + stream[kState].inFlightCloseRequest.resolve?.(); + stream[kState].inFlightCloseRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + if (stream[kState].state === 'erroring') { + stream[kState].storedError = undefined; + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) { + stream[kState].pendingAbortRequest.abort.resolve?.(); + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + } + } + stream[kState].state = 'closed'; + if (stream[kState].writer !== undefined) + stream[kState].writer[kState].close.resolve?.(); + assert(stream[kState].pendingAbortRequest.abort.promise === undefined); + assert(stream[kState].storedError === undefined); +} + +function writableStreamFinishErroring(stream) { + assert(stream[kState].state === 'erroring'); + assert(!writableStreamHasOperationMarkedInFlight(stream)); + stream[kState].state = 'errored'; + stream[kState].controller[kError](); + const storedError = stream[kState].storedError; + for (let n = 0; n < stream[kState].writeRequests.length; n++) + stream[kState].writeRequests[n].reject?.(storedError); + stream[kState].writeRequests = []; + + if (stream[kState].pendingAbortRequest.abort.promise === undefined) { + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + + const abortRequest = stream[kState].pendingAbortRequest; + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + if (abortRequest.wasAlreadyErroring) { + abortRequest.abort.reject?.(storedError); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + PromisePrototypeThen( + ensureIsPromise( + stream[kState].controller[kAbort], + stream[kState].controller, + 
abortRequest.reason), + () => { + abortRequest.abort.resolve?.(); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }, + (error) => { + abortRequest.abort.reject?.(error); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }); +} + +function writableStreamDealWithRejection(stream, error) { + const { + state, + } = stream[kState]; + if (state === 'writable') { + writableStreamStartErroring(stream, error); + return; + } + + assert(state === 'erroring'); + writableStreamFinishErroring(stream); +} + +function writableStreamCloseQueuedOrInFlight(stream) { + if (stream[kState].closeRequest.promise === undefined && + stream[kState].inFlightCloseRequest.promise === undefined) { + return false; + } + return true; +} + +function writableStreamAddWriteRequest(stream) { + assert(isWritableStreamLocked(stream)); + assert(stream[kState].state === 'writable'); + const { + promise, + resolve, + reject, + } = createDeferredPromise(); + ArrayPrototypePush( + stream[kState].writeRequests, + { + promise, + resolve, + reject, + }); + return promise; +} + +function writableStreamDefaultWriterWrite(writer, chunk) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + const { + controller, + } = stream[kState]; + const chunkSize = writableStreamDefaultControllerGetChunkSize( + controller, + chunk); + if (stream !== writer[kState].stream) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Mismatched WritableStreams')); + } + const { + state, + } = stream[kState]; + + if (state === 'errored') + return PromiseReject(stream[kState].storedError); + + if (writableStreamCloseQueuedOrInFlight(stream) || state === 'closed') { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is closed')); + } + + if (state === 'erroring') + return PromiseReject(stream[kState].storedError); + + assert(state === 'writable'); + + const promise = writableStreamAddWriteRequest(stream); + writableStreamDefaultControllerWrite(controller, 
chunk, chunkSize); + return promise; +} + +function writableStreamDefaultWriterRelease(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + assert(stream[kState].writer === writer); + const releasedError = + new ERR_INVALID_STATE.TypeError('Writer has been released'); + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); + writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); + stream[kState].writer = undefined; + writer[kState].stream = undefined; +} + +function writableStreamDefaultWriterGetDesiredSize(writer) { + const { + stream, + } = writer[kState]; + switch (stream[kState].state) { + case 'errored': + // Fall through + case 'erroring': + return null; + case 'closed': + return 0; + } + return writableStreamDefaultControllerGetDesiredSize( + stream[kState].controller); +} + +function writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) { + if (isPromisePending(writer[kState].ready.promise)) { + writer[kState].ready.reject?.(error); + } else { + writer[kState].ready = { + promise: PromiseReject(error), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(writer[kState].ready.promise); +} + +function writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) { + if (isPromisePending(writer[kState].close.promise)) { + writer[kState].close.reject?.(error); + } else { + writer[kState].close = { + promise: PromiseReject(error), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(writer[kState].close.promise); +} + +function writableStreamDefaultWriterCloseWithErrorPropagation(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + const { + state, + } = stream[kState]; + if (writableStreamCloseQueuedOrInFlight(stream) || state === 'closed') + return PromiseResolve(); + + if (state === 'errored') + return PromiseReject(stream[kState].storedError); + + assert(state === 'writable' || state 
=== 'erroring'); + + return writableStreamDefaultWriterClose(writer); +} + +function writableStreamDefaultWriterClose(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + return writableStreamClose(stream); +} + +function writableStreamDefaultWriterAbort(writer, reason) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + return writableStreamAbort(stream, reason); +} + +function writableStreamDefaultControllerWrite(controller, chunk, chunkSize) { + try { + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (error) { + writableStreamDefaultControllerErrorIfNeeded(controller, error); + return; + } + const { + stream, + } = controller[kState]; + if (!writableStreamCloseQueuedOrInFlight(stream) && + stream[kState].state === 'writable') { + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +} + +function writableStreamDefaultControllerProcessWrite(controller, chunk) { + const { + stream, + writeAlgorithm, + } = controller[kState]; + writableStreamMarkFirstWriteRequestInFlight(stream); + + PromisePrototypeThen( + ensureIsPromise(writeAlgorithm, controller, chunk, controller), + () => { + writableStreamFinishInFlightWrite(stream); + const { + state, + } = stream[kState]; + assert(state === 'writable' || state === 'erroring'); + dequeueValue(controller); + if (!writableStreamCloseQueuedOrInFlight(stream) && + state === 'writable') { + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (error) => { + if (stream[kState].state === 'writable') + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamFinishInFlightWriteWithError(stream, error); + }); + +} + +function writableStreamDefaultControllerProcessClose(controller) { + const 
{ + closeAlgorithm, + queue, + stream, + } = controller[kState]; + writableStreamMarkCloseRequestInFlight(stream); + dequeueValue(controller); + assert(!queue.length); + const sinkClosePromise = ensureIsPromise(closeAlgorithm, controller); + writableStreamDefaultControllerClearAlgorithms(controller); + PromisePrototypeThen( + sinkClosePromise, + () => writableStreamFinishInFlightClose(stream), + (error) => writableStreamFinishInFlightCloseWithError(stream, error)); +} + +function writableStreamDefaultControllerGetDesiredSize(controller) { + const { + highWaterMark, + queueTotalSize, + } = controller[kState]; + return highWaterMark - queueTotalSize; +} + +function writableStreamDefaultControllerGetChunkSize(controller, chunk) { + try { + return FunctionPrototypeCall( + controller[kState].sizeAlgorithm, + undefined, + chunk); + } catch (error) { + writableStreamDefaultControllerErrorIfNeeded(controller, error); + return 1; + } +} + +function writableStreamDefaultControllerErrorIfNeeded(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state === 'writable') + writableStreamDefaultControllerError(controller, error); +} + +function writableStreamDefaultControllerError(controller, error) { + const { + stream, + } = controller[kState]; + assert(stream[kState].state === 'writable'); + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamStartErroring(stream, error); +} + +function writableStreamDefaultControllerClose(controller) { + enqueueValueWithSize(controller, kCloseSentinel, 0); + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +} + +function writableStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].writeAlgorithm = undefined; + controller[kState].closeAlgorithm = undefined; + controller[kState].abortAlgorithm = undefined; + controller[kState].sizeAlgorithm = undefined; +} + +function writableStreamDefaultControllerGetBackpressure(controller) { + return 
writableStreamDefaultControllerGetDesiredSize(controller) <= 0; +} + +function writableStreamDefaultControllerAdvanceQueueIfNeeded(controller) { + const { + queue, + started, + stream, + } = controller[kState]; + if (!started || stream[kState].inFlightWriteRequest.promise !== undefined) + return; + + if (stream[kState].state === 'erroring') { + writableStreamFinishErroring(stream); + return; + } + + if (!queue.length) + return; + + const value = peekQueueValue(controller); + if (value === kCloseSentinel) + writableStreamDefaultControllerProcessClose(controller); + else + writableStreamDefaultControllerProcessWrite(controller, value); +} + +function setupWritableStreamDefaultControllerFromSink( + stream, + sink, + highWaterMark, + sizeAlgorithm) { + const controller = createWritableStreamDefaultController(); + const start = sink?.start; + const write = sink?.write; + const close = sink?.close; + const abort = sink?.abort; + const startAlgorithm = start ? + FunctionPrototypeBind(start, sink, controller) : + nonOpStart; + const writeAlgorithm = write ? + FunctionPrototypeBind(write, sink) : + nonOpWrite; + const closeAlgorithm = close ? + FunctionPrototypeBind(close, sink) : nonOpCancel; + const abortAlgorithm = abort ? 
+ FunctionPrototypeBind(abort, sink) : nonOpCancel; + setupWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm); +} + +function setupWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm) { + assert(isWritableStream(stream)); + assert(stream[kState].controller === undefined); + controller[kState] = { + abortAlgorithm, + abortReason: undefined, + closeAlgorithm, + highWaterMark, + queue: [], + queueTotalSize: 0, + abortController: new AbortController(), + sizeAlgorithm, + started: false, + stream, + writeAlgorithm, + }; + stream[kState].controller = controller; + + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + + const startResult = startAlgorithm(); + + PromisePrototypeThen( + PromiseResolve(startResult), + () => { + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + controller[kState].started = true; + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (error) => { + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + controller[kState].started = true; + writableStreamDealWithRejection(stream, error); + }); +} + +module.exports = { + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + TransferredWritableStream, + + // Exported Brand Checks + isWritableStream, + isWritableStreamDefaultController, + isWritableStreamDefaultWriter, + + isWritableStreamLocked, + setupWritableStreamDefaultWriter, + writableStreamAbort, + writableStreamClose, + writableStreamUpdateBackpressure, + writableStreamStartErroring, + writableStreamRejectCloseAndClosedPromiseIfNeeded, + writableStreamMarkFirstWriteRequestInFlight, + writableStreamMarkCloseRequestInFlight, + 
writableStreamHasOperationMarkedInFlight, + writableStreamFinishInFlightWriteWithError, + writableStreamFinishInFlightWrite, + writableStreamFinishInFlightCloseWithError, + writableStreamFinishInFlightClose, + writableStreamFinishErroring, + writableStreamDealWithRejection, + writableStreamCloseQueuedOrInFlight, + writableStreamAddWriteRequest, + writableStreamDefaultWriterWrite, + writableStreamDefaultWriterRelease, + writableStreamDefaultWriterGetDesiredSize, + writableStreamDefaultWriterEnsureReadyPromiseRejected, + writableStreamDefaultWriterEnsureClosedPromiseRejected, + writableStreamDefaultWriterCloseWithErrorPropagation, + writableStreamDefaultWriterClose, + writableStreamDefaultWriterAbort, + writableStreamDefaultControllerWrite, + writableStreamDefaultControllerProcessWrite, + writableStreamDefaultControllerProcessClose, + writableStreamDefaultControllerGetDesiredSize, + writableStreamDefaultControllerGetChunkSize, + writableStreamDefaultControllerErrorIfNeeded, + writableStreamDefaultControllerError, + writableStreamDefaultControllerClose, + writableStreamDefaultControllerClearAlgorithms, + writableStreamDefaultControllerGetBackpressure, + writableStreamDefaultControllerAdvanceQueueIfNeeded, + setupWritableStreamDefaultControllerFromSink, + setupWritableStreamDefaultController, +}; + +/* eslint-enable no-use-before-define */ diff --git a/lib/internal/worker.js b/lib/internal/worker.js index f2414ebeec4aae..931bce0c518fc3 100644 --- a/lib/internal/worker.js +++ b/lib/internal/worker.js @@ -28,7 +28,7 @@ const { const EventEmitter = require('events'); const assert = require('internal/assert'); const path = require('path'); -const { timeOrigin } = internalBinding('performance'); +const { now } = require('internal/perf/utils'); const errorCodes = require('internal/errors').codes; const { @@ -504,12 +504,6 @@ function eventLoopUtilization(util1, util2) { return { idle: idle_delta, active: active_delta, utilization }; } -// Duplicate code from 
performance.now() so don't need to require perf_hooks. -function now() { - const hr = process.hrtime(); - return (hr[0] * 1000 + hr[1] / 1e6) - timeOrigin; -} - module.exports = { ownsProcessState, isMainThread, diff --git a/lib/net.js b/lib/net.js index 899e73f6ff167d..20601007fab695 100644 --- a/lib/net.js +++ b/lib/net.js @@ -163,6 +163,16 @@ function isPipeName(s) { return typeof s === 'string' && toNumber(s) === false; } +/** + * Creates a new TCP or IPC server + * @param {{ + * allowHalfOpen?: boolean; + * pauseOnConnect?: boolean; + * }} [options] + * @param {Function} [connectionListener] + * @returns {Server} + */ + function createServer(options, connectionListener) { return new Server(options, connectionListener); } @@ -1548,6 +1558,11 @@ function onconnection(err, clientHandle) { self.emit('connection', socket); } +/** + * Gets the number of concurrent connections on the server + * @param {Function} cb + * @returns {Server} + */ Server.prototype.getConnections = function(cb) { const self = this; diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js index a92a040f2de839..339d3ca4ff0ab4 100644 --- a/lib/perf_hooks.js +++ b/lib/perf_hooks.js @@ -1,126 +1,23 @@ 'use strict'; const { - ObjectDefineProperties, ObjectDefineProperty, - ObjectSetPrototypeOf, - TypeError, } = primordials; const { - timeOriginTimestamp, constants, } = internalBinding('performance'); -const { - EventTarget, -} = require('internal/event_target'); - -const { - PerformanceEntry, - now, -} = require('internal/perf/perf'); +const { PerformanceEntry } = require('internal/perf/performance_entry'); const { PerformanceObserver } = require('internal/perf/observe'); - -const { - PerformanceMark, - mark, - measure, - clearMarks, -} = require('internal/perf/usertiming'); +const { PerformanceMark } = require('internal/perf/usertiming'); +const { InternalPerformance } = require('internal/perf/performance'); const { createHistogram } = require('internal/histogram'); -const eventLoopUtilization = 
require('internal/perf/event_loop_utilization'); const monitorEventLoopDelay = require('internal/perf/event_loop_delay'); -const nodeTiming = require('internal/perf/nodetiming'); -const timerify = require('internal/perf/timerify'); -const { customInspectSymbol: kInspect } = require('internal/util'); -const { inspect } = require('util'); - -class Performance extends EventTarget { - constructor() { - // eslint-disable-next-line no-restricted-syntax - throw new TypeError('Illegal constructor'); - } - - [kInspect](depth, options) { - if (depth < 0) return this; - - const opts = { - ...options, - depth: options.depth == null ? null : options.depth - 1 - }; - - return `Performance ${inspect({ - nodeTiming: this.nodeTiming, - timeOrigin: this.timeOrigin, - }, opts)}`; - } - -} - -function toJSON() { - return { - nodeTiming: this.nodeTiming, - timeOrigin: this.timeOrigin, - eventLoopUtilization: this.eventLoopUtilization() - }; -} - -class InternalPerformance extends EventTarget {} -InternalPerformance.prototype.constructor = Performance.prototype.constructor; -ObjectSetPrototypeOf(InternalPerformance.prototype, Performance.prototype); - -ObjectDefineProperties(Performance.prototype, { - clearMarks: { - configurable: true, - enumerable: false, - value: clearMarks, - }, - eventLoopUtilization: { - configurable: true, - enumerable: false, - value: eventLoopUtilization, - }, - mark: { - configurable: true, - enumerable: false, - value: mark, - }, - measure: { - configurable: true, - enumerable: false, - value: measure, - }, - nodeTiming: { - configurable: true, - enumerable: false, - value: nodeTiming, - }, - now: { - configurable: true, - enumerable: false, - value: now, - }, - timerify: { - configurable: true, - enumerable: false, - value: timerify, - }, - timeOrigin: { - configurable: true, - enumerable: true, - value: timeOriginTimestamp, - }, - toJSON: { - configurable: true, - enumerable: true, - value: toJSON, - } -}); module.exports = { PerformanceEntry, diff --git 
a/lib/readline.js b/lib/readline.js index b40c8019746120..1b8b276a6d6db0 100644 --- a/lib/readline.js +++ b/lib/readline.js @@ -705,9 +705,21 @@ Interface.prototype._tabComplete = function(lastKeypressWasTab) { // If there is a common prefix to all matches, then apply that portion. const prefix = commonPrefix(ArrayPrototypeFilter(completions, (e) => e !== '')); - if (prefix.length > completeOn.length) { + if (StringPrototypeStartsWith(prefix, completeOn) && + prefix.length > completeOn.length) { this._insertString(StringPrototypeSlice(prefix, completeOn.length)); return; + } else if (!StringPrototypeStartsWith(completeOn, prefix)) { + this.line = StringPrototypeSlice(this.line, + 0, + this.cursor - completeOn.length) + + prefix + + StringPrototypeSlice(this.line, + this.cursor, + this.line.length); + this.cursor = this.cursor - completeOn.length + prefix.length; + this._refreshLine(); + return; } if (!lastKeypressWasTab) { diff --git a/lib/repl.js b/lib/repl.js index 2e80d652669ec5..c73ad5d9d61ade 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -78,6 +78,7 @@ const { ReflectApply, RegExp, RegExpPrototypeExec, + RegExpPrototypeSymbolReplace, RegExpPrototypeTest, SafeSet, SafeWeakSet, @@ -426,11 +427,47 @@ function REPLServer(prompt, ({ processTopLevelAwait } = require('internal/repl/await')); } - const potentialWrappedCode = processTopLevelAwait(code); - if (potentialWrappedCode !== null) { - code = potentialWrappedCode; - wrappedCmd = true; - awaitPromise = true; + try { + const potentialWrappedCode = processTopLevelAwait(code); + if (potentialWrappedCode !== null) { + code = potentialWrappedCode; + wrappedCmd = true; + awaitPromise = true; + } + } catch (e) { + let recoverableError = false; + if (e.name === 'SyntaxError') { + let parentURL; + try { + const { pathToFileURL } = require('url'); + // Adding `/repl` prevents dynamic imports from loading relative + // to the parent of `process.cwd()`. 
+ parentURL = pathToFileURL(path.join(process.cwd(), 'repl')).href; + } catch { + } + + // Remove all "await"s and attempt running the script + // in order to detect if error is truly non recoverable + const fallbackCode = RegExpPrototypeSymbolReplace(/\bawait\b/g, code, ''); + try { + vm.createScript(fallbackCode, { + filename: file, + displayErrors: true, + importModuleDynamically: async (specifier) => { + return asyncESM.ESMLoader.import(specifier, parentURL); + } + }); + } catch (fallbackError) { + if (isRecoverableError(fallbackError, fallbackCode)) { + recoverableError = true; + err = new Recoverable(e); + } + } + } + if (!recoverableError) { + decorateErrorStack(e); + err = e; + } } } @@ -438,47 +475,49 @@ function REPLServer(prompt, if (code === '\n') return cb(null); - let parentURL; - try { - const { pathToFileURL } = require('url'); - // Adding `/repl` prevents dynamic imports from loading relative - // to the parent of `process.cwd()`. - parentURL = pathToFileURL(path.join(process.cwd(), 'repl')).href; - } catch { - } - while (true) { + if (err === null) { + let parentURL; try { - if (self.replMode === module.exports.REPL_MODE_STRICT && - !RegExpPrototypeTest(/^\s*$/, code)) { - // "void 0" keeps the repl from returning "use strict" as the result - // value for statements and declarations that don't return a value. - code = `'use strict'; void 0;\n${code}`; - } - script = vm.createScript(code, { - filename: file, - displayErrors: true, - importModuleDynamically: async (specifier) => { - return asyncESM.ESMLoader.import(specifier, parentURL); + const { pathToFileURL } = require('url'); + // Adding `/repl` prevents dynamic imports from loading relative + // to the parent of `process.cwd()`. 
+ parentURL = pathToFileURL(path.join(process.cwd(), 'repl')).href; + } catch { + } + while (true) { + try { + if (self.replMode === module.exports.REPL_MODE_STRICT && + !RegExpPrototypeTest(/^\s*$/, code)) { + // "void 0" keeps the repl from returning "use strict" as the result + // value for statements and declarations that don't return a value. + code = `'use strict'; void 0;\n${code}`; } - }); - } catch (e) { - debug('parse error %j', code, e); - if (wrappedCmd) { - // Unwrap and try again - wrappedCmd = false; - awaitPromise = false; - code = input; - wrappedErr = e; - continue; + script = vm.createScript(code, { + filename: file, + displayErrors: true, + importModuleDynamically: async (specifier) => { + return asyncESM.ESMLoader.import(specifier, parentURL); + } + }); + } catch (e) { + debug('parse error %j', code, e); + if (wrappedCmd) { + // Unwrap and try again + wrappedCmd = false; + awaitPromise = false; + code = input; + wrappedErr = e; + continue; + } + // Preserve original error for wrapped command + const error = wrappedErr || e; + if (isRecoverableError(error, code)) + err = new Recoverable(error); + else + err = error; } - // Preserve original error for wrapped command - const error = wrappedErr || e; - if (isRecoverableError(error, code)) - err = new Recoverable(error); - else - err = error; + break; } - break; } // This will set the values from `savedRegExMatches` to corresponding diff --git a/lib/stream/web.js b/lib/stream/web.js new file mode 100644 index 00000000000000..929abd19044458 --- /dev/null +++ b/lib/stream/web.js @@ -0,0 +1,48 @@ +'use strict'; + +const { + emitExperimentalWarning, +} = require('internal/util'); + +emitExperimentalWarning('stream/web'); + +const { + TransformStream, + TransformStreamDefaultController, +} = require('internal/webstreams/transformstream'); + +const { + WritableStream, + WritableStreamDefaultController, + WritableStreamDefaultWriter, +} = require('internal/webstreams/writablestream'); + +const { + 
ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, +} = require('internal/webstreams/readablestream'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +} = require('internal/webstreams/queuingstrategies'); + +module.exports = { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransformStream, + TransformStreamDefaultController, + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + ByteLengthQueuingStrategy, + CountQueuingStrategy, +}; diff --git a/lib/timers.js b/lib/timers.js index 485f577d29f2f0..a4543fea1df6bc 100644 --- a/lib/timers.js +++ b/lib/timers.js @@ -127,10 +127,16 @@ function enroll(item, msecs) { } -/* - * DOM-style timers +/** + * Schedules the execution of a one-time `callback` + * after `after` milliseconds. + * @param {Function} callback + * @param {number} [after] + * @param {any} [arg1] + * @param {any} [arg2] + * @param {any} [arg3] + * @returns {Timeout} */ - function setTimeout(callback, after, arg1, arg2, arg3) { validateCallback(callback); @@ -170,6 +176,11 @@ ObjectDefineProperty(setTimeout, customPromisify, { } }); +/** + * Cancels a timeout. + * @param {Timeout | string | number} timer + * @returns {void} + */ function clearTimeout(timer) { if (timer && timer._onTimeout) { timer._onTimeout = null; @@ -185,6 +196,16 @@ function clearTimeout(timer) { } } +/** + * Schedules repeated execution of `callback` + * every `repeat` milliseconds. 
+ * @param {Function} callback + * @param {number} [repeat] + * @param {any} [arg1] + * @param {any} [arg2] + * @param {any} [arg3] + * @returns {Timeout} + */ function setInterval(callback, repeat, arg1, arg2, arg3) { validateCallback(callback); @@ -215,6 +236,11 @@ function setInterval(callback, repeat, arg1, arg2, arg3) { return timeout; } +/** + * Cancels an interval. + * @param {Timeout | string | number} timer + * @returns {void} + */ function clearInterval(timer) { // clearTimeout and clearInterval can be used to clear timers created from // both setTimeout and setInterval, as specified by HTML Living Standard: @@ -227,6 +253,10 @@ Timeout.prototype.close = function() { return this; }; +/** + * Coerces a `Timeout` to a primitive. + * @returns {number} + */ Timeout.prototype[SymbolToPrimitive] = function() { const id = this[async_id_symbol]; if (!this[kHasPrimitive]) { @@ -236,6 +266,15 @@ Timeout.prototype[SymbolToPrimitive] = function() { return id; }; +/** + * Schedules the immediate execution of `callback` + * after I/O events' callbacks. + * @param {Function} callback + * @param {any} [arg1] + * @param {any} [arg2] + * @param {any} [arg3] + * @returns {Immediate} + */ function setImmediate(callback, arg1, arg2, arg3) { validateCallback(callback); @@ -271,7 +310,11 @@ ObjectDefineProperty(setImmediate, customPromisify, { } }); - +/** + * Cancels an immediate. 
+ * @param {Immediate} immediate + * @returns {void} + */ function clearImmediate(immediate) { if (!immediate || immediate._destroyed) return; diff --git a/lib/tls.js b/lib/tls.js index 2282fd33008868..683736460b1ef7 100644 --- a/lib/tls.js +++ b/lib/tls.js @@ -32,7 +32,6 @@ const { ArrayPrototypeSome, ObjectDefineProperty, ObjectFreeze, - ReflectConstruct, RegExpPrototypeTest, StringFromCharCode, StringPrototypeCharCodeAt, @@ -50,19 +49,18 @@ const { } = require('internal/errors').codes; const internalUtil = require('internal/util'); internalUtil.assertCrypto(); -const internalTLS = require('internal/tls'); const { isArrayBufferView } = require('internal/util/types'); const net = require('net'); const { getOptionValue } = require('internal/options'); const { getRootCertificates, getSSLCiphers } = internalBinding('crypto'); const { Buffer } = require('buffer'); -const EventEmitter = require('events'); const { URL } = require('internal/url'); -const DuplexPair = require('internal/streams/duplexpair'); const { canonicalizeIP } = internalBinding('cares_wrap'); const _tls_common = require('_tls_common'); const _tls_wrap = require('_tls_wrap'); +const { createSecurePair } = require('internal/tls/secure-pair'); +const { parseCertString } = require('internal/tls/parse-cert-string'); // Allow {CLIENT_RENEG_LIMIT} client-initiated session renegotiations // every {CLIENT_RENEG_WINDOW} seconds. 
An error event is emitted if more @@ -300,43 +298,6 @@ exports.checkServerIdentity = function checkServerIdentity(hostname, cert) { } }; - -class SecurePair extends EventEmitter { - constructor(secureContext = exports.createSecureContext(), - isServer = false, - requestCert = !isServer, - rejectUnauthorized = false, - options = {}) { - super(); - const { socket1, socket2 } = new DuplexPair(); - - this.server = options.server; - this.credentials = secureContext; - - this.encrypted = socket1; - this.cleartext = new exports.TLSSocket(socket2, { - secureContext, - isServer, - requestCert, - rejectUnauthorized, - ...options - }); - this.cleartext.once('secure', () => this.emit('secure')); - } - - destroy() { - this.cleartext.destroy(); - this.encrypted.destroy(); - } -} - - -exports.parseCertString = internalUtil.deprecate( - internalTLS.parseCertString, - 'tls.parseCertString() is deprecated. ' + - 'Please use querystring.parse() instead.', - 'DEP0076'); - exports.createSecureContext = _tls_common.createSecureContext; exports.SecureContext = _tls_common.SecureContext; exports.TLSSocket = _tls_wrap.TLSSocket; @@ -344,9 +305,13 @@ exports.Server = _tls_wrap.Server; exports.createServer = _tls_wrap.createServer; exports.connect = _tls_wrap.connect; +exports.parseCertString = internalUtil.deprecate( + parseCertString, + 'tls.parseCertString() is deprecated. ' + + 'Please use querystring.parse() instead.', + 'DEP0076'); + exports.createSecurePair = internalUtil.deprecate( - function createSecurePair(...args) { - return ReflectConstruct(SecurePair, args); - }, + createSecurePair, 'tls.createSecurePair() is deprecated. 
Please use ' + 'tls.TLSSocket instead.', 'DEP0064'); diff --git a/lib/v8.js b/lib/v8.js index e7a44331b8b350..b4e2d5cd1a751a 100644 --- a/lib/v8.js +++ b/lib/v8.js @@ -58,6 +58,12 @@ const { } = internalBinding('heap_utils'); const { HeapSnapshotStream } = require('internal/heap_utils'); +/** + * Generates a snapshot of the current V8 heap + * and writes it to a JSON file. + * @param {string} [filename] + * @returns {string} + */ function writeHeapSnapshot(filename) { if (filename !== undefined) { filename = getValidatedPath(filename); @@ -66,6 +72,11 @@ function writeHeapSnapshot(filename) { return triggerHeapSnapshot(filename); } +/** + * Generates a snapshot of the current V8 heap + * and returns a Readable Stream. + * @returns {import('./stream.js').Readable} + */ function getHeapSnapshot() { const handle = createHeapSnapshotStream(); assert(handle); @@ -111,11 +122,32 @@ const { const kNumberOfHeapSpaces = kHeapSpaces.length; +/** + * Sets V8 command-line flags. + * @param {string} flags + * @returns {void} + */ function setFlagsFromString(flags) { validateString(flags, 'flags'); _setFlagsFromString(flags); } +/** + * Gets the current V8 heap statistics. + * @returns {{ + * total_heap_size: number; + * total_heap_size_executable: number; + * total_physical_size: number; + * total_available_size: number; + * used_heap_size: number; + * heap_size_limit: number; + * malloced_memory: number; + * peak_malloced_memory: number; + * does_zap_garbage: number; + * number_of_native_contexts: number; + * number_of_detached_contexts: number; + * }} + */ function getHeapStatistics() { const buffer = binding.heapStatisticsBuffer; @@ -136,6 +168,16 @@ function getHeapStatistics() { }; } +/** + * Gets the current V8 heap space statistics. 
+ * @returns {{ + * space_name: string; + * space_size: number; + * space_used_size: number; + * space_available_size: number; + * physical_space_size: number; + * }[]} + */ function getHeapSpaceStatistics() { const heapSpaceStatistics = new Array(kNumberOfHeapSpaces); const buffer = binding.heapSpaceStatisticsBuffer; @@ -154,6 +196,14 @@ function getHeapSpaceStatistics() { return heapSpaceStatistics; } +/** + * Gets the current V8 heap code statistics. + * @returns {{ + * code_and_metadata_size: number; + * bytecode_and_metadata_size: number; + * external_script_source_size: number; + * }} + */ function getHeapCodeStatistics() { const buffer = binding.heapCodeStatisticsBuffer; @@ -170,6 +220,11 @@ function getHeapCodeStatistics() { /* JS methods for the base objects */ Serializer.prototype._getDataCloneError = Error; +/** + * Reads raw bytes from the deserializer's internal buffer. + * @param {number} length + * @returns {Buffer} + */ Deserializer.prototype.readRawBytes = function readRawBytes(length) { const offset = this._readRawBytes(length); // `this.buffer` can be a Buffer or a plain Uint8Array, so just calling @@ -210,6 +265,12 @@ class DefaultSerializer extends Serializer { this._setTreatArrayBufferViewsAsHostObjects(true); } + /** + * Used to write some kind of host object, i.e. an + * object that is created by native C++ bindings. + * @param {Object} abView + * @returns {void} + */ _writeHostObject(abView) { let i = 0; if (abView.constructor === Buffer) { @@ -232,6 +293,11 @@ class DefaultSerializer extends Serializer { } class DefaultDeserializer extends Deserializer { + /** + * Used to read some kind of host object, i.e. an + * object that is created by native C++ bindings. + * @returns {any} + */ _readHostObject() { const typeIndex = this.readUint32(); const ctor = arrayBufferViewTypes[typeIndex]; @@ -254,6 +320,12 @@ class DefaultDeserializer extends Deserializer { } } +/** + * Uses a `DefaultSerializer` to serialize `value` + * into a buffer. 
+ * @param {any} value + * @returns {Buffer} + */ function serialize(value) { const ser = new DefaultSerializer(); ser.writeHeader(); @@ -261,6 +333,12 @@ function serialize(value) { return ser.releaseBuffer(); } +/** + * Uses a `DefaultDeserializer` with default options + * to read a JavaScript value from a buffer. + * @param {Buffer | TypedArray | DataView} buffer + * @returns {any} + */ function deserialize(buffer) { const der = new DefaultDeserializer(buffer); der.readHeader(); diff --git a/lib/wasi.js b/lib/wasi.js index 63209ce716cb9a..43ecf94b4cb050 100644 --- a/lib/wasi.js +++ b/lib/wasi.js @@ -21,6 +21,7 @@ const { validateFunction, validateInt32, validateObject, + validateUndefined, } = require('internal/validators'); const { WASI: _WASI } = internalBinding('wasi'); const kExitCode = Symbol('kExitCode'); @@ -120,10 +121,7 @@ class WASI { const { _start, _initialize } = this[kInstance].exports; validateFunction(_start, 'instance.exports._start'); - if (_initialize !== undefined) { - throw new ERR_INVALID_ARG_TYPE( - 'instance.exports._initialize', 'undefined', _initialize); - } + validateUndefined(_initialize, 'instance.exports._initialize'); try { _start(); @@ -147,16 +145,9 @@ class WASI { const { _start, _initialize } = this[kInstance].exports; - if (typeof _initialize !== 'function' && _initialize !== undefined) { - throw new ERR_INVALID_ARG_TYPE( - 'instance.exports._initialize', 'function', _initialize); - } - if (_start !== undefined) { - throw new ERR_INVALID_ARG_TYPE( - 'instance.exports._start', 'undefined', _initialize); - } - + validateUndefined(_start, 'instance.exports._start'); if (_initialize !== undefined) { + validateFunction(_initialize, 'instance.exports._initialize'); _initialize(); } } diff --git a/node.gyp b/node.gyp index 1de9edc07f610d..691ce22ff3d664 100644 --- a/node.gyp +++ b/node.gyp @@ -27,251 +27,15 @@ 'node_lib_target_name%': 'libnode', 'node_intermediate_lib_type%': 'static_library', 'node_builtin_modules_path%': '', + # We 
list the deps/ files out instead of globbing them in js2c.py since we + # only include a subset of all the files under these directories. + # The lengths of their file names combined should not exceed the + # Windows command length limit or there would be an error. + # See https://docs.microsoft.com/en-us/troubleshoot/windows-client/shell-experience/command-line-string-limitation 'library_files': [ - 'lib/internal/bootstrap/environment.js', - 'lib/internal/bootstrap/loaders.js', - 'lib/internal/bootstrap/node.js', - 'lib/internal/bootstrap/pre_execution.js', - 'lib/internal/bootstrap/switches/does_own_process_state.js', - 'lib/internal/bootstrap/switches/does_not_own_process_state.js', - 'lib/internal/bootstrap/switches/is_main_thread.js', - 'lib/internal/bootstrap/switches/is_not_main_thread.js', - 'lib/internal/per_context/primordials.js', - 'lib/internal/per_context/domexception.js', - 'lib/internal/per_context/messageport.js', - 'lib/async_hooks.js', - 'lib/assert.js', - 'lib/assert/strict.js', - 'lib/buffer.js', - 'lib/child_process.js', - 'lib/console.js', - 'lib/constants.js', - 'lib/crypto.js', - 'lib/cluster.js', - 'lib/diagnostics_channel.js', - 'lib/dgram.js', - 'lib/dns.js', - 'lib/dns/promises.js', - 'lib/domain.js', - 'lib/events.js', - 'lib/fs.js', - 'lib/fs/promises.js', - 'lib/http.js', - 'lib/http2.js', - 'lib/_http_agent.js', - 'lib/_http_client.js', - 'lib/_http_common.js', - 'lib/_http_incoming.js', - 'lib/_http_outgoing.js', - 'lib/_http_server.js', - 'lib/https.js', - 'lib/inspector.js', - 'lib/module.js', - 'lib/net.js', - 'lib/os.js', - 'lib/path.js', - 'lib/path/posix.js', - 'lib/path/win32.js', - 'lib/perf_hooks.js', - 'lib/process.js', - 'lib/punycode.js', - 'lib/querystring.js', - 'lib/readline.js', - 'lib/repl.js', - 'lib/stream.js', - 'lib/stream/promises.js', - 'lib/_stream_readable.js', - 'lib/_stream_writable.js', - 'lib/_stream_duplex.js', - 'lib/_stream_transform.js', - 'lib/_stream_passthrough.js', - 'lib/_stream_wrap.js', - 
'lib/string_decoder.js', - 'lib/sys.js', - 'lib/timers/promises.js', - 'lib/timers.js', - 'lib/tls.js', - 'lib/_tls_common.js', - 'lib/_tls_wrap.js', - 'lib/trace_events.js', - 'lib/tty.js', - 'lib/url.js', - 'lib/util.js', - 'lib/util/types.js', - 'lib/v8.js', - 'lib/vm.js', - 'lib/wasi.js', - 'lib/worker_threads.js', - 'lib/zlib.js', - 'lib/internal/abort_controller.js', - 'lib/internal/assert.js', - 'lib/internal/assert/assertion_error.js', - 'lib/internal/assert/calltracker.js', - 'lib/internal/async_hooks.js', - 'lib/internal/blob.js', - 'lib/internal/blocklist.js', - 'lib/internal/buffer.js', - 'lib/internal/cli_table.js', - 'lib/internal/child_process.js', - 'lib/internal/child_process/serialization.js', - 'lib/internal/cluster/child.js', - 'lib/internal/cluster/primary.js', - 'lib/internal/cluster/round_robin_handle.js', - 'lib/internal/cluster/shared_handle.js', - 'lib/internal/cluster/utils.js', - 'lib/internal/cluster/worker.js', - 'lib/internal/console/constructor.js', - 'lib/internal/console/global.js', - 'lib/internal/crypto/aes.js', - 'lib/internal/crypto/certificate.js', - 'lib/internal/crypto/cipher.js', - 'lib/internal/crypto/diffiehellman.js', - 'lib/internal/crypto/dsa.js', - 'lib/internal/crypto/ec.js', - 'lib/internal/crypto/hash.js', - 'lib/internal/crypto/hashnames.js', - 'lib/internal/crypto/hkdf.js', - 'lib/internal/crypto/keygen.js', - 'lib/internal/crypto/keys.js', - 'lib/internal/crypto/mac.js', - 'lib/internal/crypto/pbkdf2.js', - 'lib/internal/crypto/random.js', - 'lib/internal/crypto/rsa.js', - 'lib/internal/crypto/scrypt.js', - 'lib/internal/crypto/sig.js', - 'lib/internal/crypto/util.js', - 'lib/internal/crypto/webcrypto.js', - 'lib/internal/crypto/x509.js', - 'lib/internal/constants.js', - 'lib/internal/debugger/_inspect.js', - 'lib/internal/debugger/inspect_client.js', - 'lib/internal/debugger/inspect_repl.js', - 'lib/internal/dgram.js', - 'lib/internal/dns/promises.js', - 'lib/internal/dns/utils.js', - 'lib/internal/dtrace.js', 
- 'lib/internal/encoding.js', - 'lib/internal/errors.js', - 'lib/internal/error_serdes.js', - 'lib/internal/event_target.js', - 'lib/internal/fixed_queue.js', - 'lib/internal/freelist.js', - 'lib/internal/freeze_intrinsics.js', - 'lib/internal/fs/dir.js', - 'lib/internal/fs/promises.js', - 'lib/internal/fs/read_file_context.js', - 'lib/internal/fs/rimraf.js', - 'lib/internal/fs/streams.js', - 'lib/internal/fs/sync_write_stream.js', - 'lib/internal/fs/utils.js', - 'lib/internal/fs/watchers.js', - 'lib/internal/http.js', - 'lib/internal/heap_utils.js', - 'lib/internal/histogram.js', - 'lib/internal/idna.js', - 'lib/internal/inspector_async_hook.js', - 'lib/internal/js_stream_socket.js', - 'lib/internal/legacy/processbinding.js', - 'lib/internal/linkedlist.js', - 'lib/internal/main/check_syntax.js', - 'lib/internal/main/eval_string.js', - 'lib/internal/main/eval_stdin.js', - 'lib/internal/main/inspect.js', - 'lib/internal/main/print_help.js', - 'lib/internal/main/prof_process.js', - 'lib/internal/main/repl.js', - 'lib/internal/main/run_main_module.js', - 'lib/internal/main/worker_thread.js', - 'lib/internal/modules/run_main.js', - 'lib/internal/modules/package_json_reader.js', - 'lib/internal/modules/cjs/helpers.js', - 'lib/internal/modules/cjs/loader.js', - 'lib/internal/modules/esm/loader.js', - 'lib/internal/modules/esm/create_dynamic_module.js', - 'lib/internal/modules/esm/get_format.js', - 'lib/internal/modules/esm/get_source.js', - 'lib/internal/modules/esm/module_job.js', - 'lib/internal/modules/esm/module_map.js', - 'lib/internal/modules/esm/resolve.js', - 'lib/internal/modules/esm/transform_source.js', - 'lib/internal/modules/esm/translators.js', - 'lib/internal/net.js', - 'lib/internal/options.js', - 'lib/internal/perf/perf.js', - 'lib/internal/perf/nodetiming.js', - 'lib/internal/perf/usertiming.js', - 'lib/internal/perf/observe.js', - 'lib/internal/perf/event_loop_delay.js', - 'lib/internal/perf/event_loop_utilization.js', - 
'lib/internal/perf/timerify.js', - 'lib/internal/policy/manifest.js', - 'lib/internal/policy/sri.js', - 'lib/internal/priority_queue.js', - 'lib/internal/process/esm_loader.js', - 'lib/internal/process/execution.js', - 'lib/internal/process/per_thread.js', - 'lib/internal/process/policy.js', - 'lib/internal/process/promises.js', - 'lib/internal/process/warning.js', - 'lib/internal/process/worker_thread_only.js', - 'lib/internal/process/report.js', - 'lib/internal/process/signal.js', - 'lib/internal/process/task_queues.js', - 'lib/internal/querystring.js', - 'lib/internal/readline/callbacks.js', - 'lib/internal/readline/emitKeypressEvents.js', - 'lib/internal/readline/utils.js', - 'lib/internal/repl.js', - 'lib/internal/repl/await.js', - 'lib/internal/repl/history.js', - 'lib/internal/repl/utils.js', - 'lib/internal/socketaddress.js', - 'lib/internal/socket_list.js', - 'lib/internal/source_map/prepare_stack_trace.js', - 'lib/internal/source_map/source_map.js', - 'lib/internal/source_map/source_map_cache.js', - 'lib/internal/test/binding.js', - 'lib/internal/test/transfer.js', - 'lib/internal/timers.js', - 'lib/internal/tls.js', - 'lib/internal/trace_events_async_hooks.js', - 'lib/internal/tty.js', - 'lib/internal/url.js', - 'lib/internal/util.js', - 'lib/internal/util/comparisons.js', - 'lib/internal/util/debuglog.js', - 'lib/internal/util/inspect.js', - 'lib/internal/util/inspector.js', - 'lib/internal/util/iterable_weak_map.js', - 'lib/internal/util/types.js', - 'lib/internal/http2/core.js', - 'lib/internal/http2/compat.js', - 'lib/internal/http2/util.js', - 'lib/internal/v8_prof_polyfill.js', - 'lib/internal/v8_prof_processor.js', - 'lib/internal/validators.js', - 'lib/internal/stream_base_commons.js', - 'lib/internal/vm/module.js', - 'lib/internal/worker.js', - 'lib/internal/worker/io.js', - 'lib/internal/worker/js_transferable.js', - 'lib/internal/watchdog.js', - 'lib/internal/streams/lazy_transform.js', - 'lib/internal/streams/add-abort-signal.js', - 
'lib/internal/streams/buffer_list.js', - 'lib/internal/streams/duplexpair.js', - 'lib/internal/streams/from.js', - 'lib/internal/streams/legacy.js', - 'lib/internal/streams/readable.js', - 'lib/internal/streams/writable.js', - 'lib/internal/streams/duplex.js', - 'lib/internal/streams/passthrough.js', - 'lib/internal/streams/transform.js', - 'lib/internal/streams/destroy.js', - 'lib/internal/streams/state.js', - 'lib/internal/streams/pipeline.js', - 'lib/internal/streams/end-of-stream.js', - 'lib/internal/streams/utils.js', + '& args) { [exception handling]: #exception-handling [internal field]: #internal-fields [introduction for V8 embedders]: https://v8.dev/docs/embed +[libuv]: https://libuv.org/ [libuv handles]: #libuv-handles-and-requests [libuv requests]: #libuv-handles-and-requests -[libuv]: https://libuv.org/ [reference documentation for the libuv API]: http://docs.libuv.org/en/v1.x/ diff --git a/src/crypto/crypto_aes.cc b/src/crypto/crypto_aes.cc index edca8fbd0dcb5c..d6f6393df6c878 100644 --- a/src/crypto/crypto_aes.cc +++ b/src/crypto/crypto_aes.cc @@ -130,7 +130,17 @@ WebCryptoCipherStatus AES_Cipher( ByteSource buf = ByteSource::Allocated(data, buf_len); unsigned char* ptr = reinterpret_cast(data); - if (!EVP_CipherUpdate( + // In some outdated version of OpenSSL (e.g. + // ubi81_sharedlibs_openssl111fips_x64) may be used in sharedlib mode, the + // logic will be failed when input size is zero. The newly OpenSSL has fixed + // it up. But we still have to regard zero as special in Node.js code to + // prevent old OpenSSL failure. 
+ // + // Refs: https://github.com/openssl/openssl/commit/420cb707b880e4fb649094241371701013eeb15f + // Refs: https://github.com/nodejs/node/pull/38913#issuecomment-866505244 + if (in.size() == 0) { + out_len = 0; + } else if (!EVP_CipherUpdate( ctx.get(), ptr, &out_len, diff --git a/src/crypto/crypto_cipher.h b/src/crypto/crypto_cipher.h index c8dd3e48f718fd..b9b850a1d64c8b 100644 --- a/src/crypto/crypto_cipher.h +++ b/src/crypto/crypto_cipher.h @@ -249,16 +249,17 @@ class CipherJob final : public CryptoJob { v8::Local* result) override { Environment* env = AsyncWrap::env(); CryptoErrorStore* errors = CryptoJob::errors(); - if (out_.size() > 0) { + + if (errors->Empty()) + errors->Capture(); + + if (out_.size() > 0 || errors->Empty()) { CHECK(errors->Empty()); *err = v8::Undefined(env->isolate()); *result = out_.ToArrayBuffer(env); return v8::Just(!result->IsEmpty()); } - if (errors->Empty()) - errors->Capture(); - CHECK(!errors->Empty()); *result = v8::Undefined(env->isolate()); return v8::Just(errors->ToException(env).ToLocal(err)); } diff --git a/src/crypto/crypto_ec.cc b/src/crypto/crypto_ec.cc index ea4c70ad5d8c84..f0ad45529e0039 100644 --- a/src/crypto/crypto_ec.cc +++ b/src/crypto/crypto_ec.cc @@ -314,7 +314,7 @@ void ECDH::SetPrivateKey(const FunctionCallbackInfo& args) { return THROW_ERR_CRYPTO_OPERATION_FAILED(env, "Failed to set generated public key"); - EC_KEY_copy(ecdh->key_.get(), new_key.get()); + ecdh->key_ = std::move(new_key); ecdh->group_ = EC_KEY_get0_group(ecdh->key_.get()); } diff --git a/src/crypto/crypto_util.cc b/src/crypto/crypto_util.cc index 13c40dcb757661..f18304cd655842 100644 --- a/src/crypto/crypto_util.cc +++ b/src/crypto/crypto_util.cc @@ -139,7 +139,6 @@ void InitCryptoOnce() { OPENSSL_init_ssl(0, settings); OPENSSL_INIT_free(settings); settings = nullptr; -#endif #ifndef _WIN32 if (per_process::cli_options->secure_heap != 0) { @@ -160,6 +159,8 @@ void InitCryptoOnce() { } #endif +#endif // OPENSSL_IS_BORINGSSL + // Turn off 
compression. Saves memory and protects against CRIME attacks. // No-op with OPENSSL_NO_COMP builds of OpenSSL. sk_SSL_COMP_zero(SSL_COMP_get_compression_methods()); diff --git a/src/env.h b/src/env.h index 7b136f70fbad1e..e1b89261fcb1e9 100644 --- a/src/env.h +++ b/src/env.h @@ -520,6 +520,8 @@ constexpr size_t kFsStatsBufferLength = V(internal_binding_loader, v8::Function) \ V(immediate_callback_function, v8::Function) \ V(inspector_console_extension_installer, v8::Function) \ + V(inspector_disable_async_hooks, v8::Function) \ + V(inspector_enable_async_hooks, v8::Function) \ V(messaging_deserialize_create_object, v8::Function) \ V(message_port, v8::Object) \ V(native_module_require, v8::Function) \ diff --git a/src/gtest/LICENSE b/src/gtest/LICENSE new file mode 100644 index 00000000000000..1941a11f8ce943 --- /dev/null +++ b/src/gtest/LICENSE @@ -0,0 +1,28 @@ +Copyright 2008, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/src/gtest/gtest_prod.h b/src/gtest/gtest_prod.h new file mode 100644 index 00000000000000..473387f170c838 --- /dev/null +++ b/src/gtest/gtest_prod.h @@ -0,0 +1,61 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Google C++ Testing and Mocking Framework definitions useful in production +// code. GOOGLETEST_CM0003 DO NOT DELETE + +#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_PROD_H_ // NOLINT +#define GOOGLETEST_INCLUDE_GTEST_GTEST_PROD_H_ + +// When you need to test the private or protected members of a class, +// use the FRIEND_TEST macro to declare your tests as friends of the +// class. For example: +// +// class MyClass { +// private: +// void PrivateMethod(); +// FRIEND_TEST(MyClassTest, PrivateMethodWorks); +// }; +// +// class MyClassTest : public testing::Test { +// // ... +// }; +// +// TEST_F(MyClassTest, PrivateMethodWorks) { +// // Can call MyClass::PrivateMethod() here. +// } +// +// Note: The test class must be in the same namespace as the class being tested. +// For example, putting MyClassTest in an anonymous namespace will not work. 
+ +#define FRIEND_TEST(test_case_name, test_name) \ + friend class test_case_name##_##test_name##_Test + +#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_PROD_H_ // NOLINT diff --git a/src/histogram.cc b/src/histogram.cc index d21cf2883a0ca8..6fbb0eda6c0792 100644 --- a/src/histogram.cc +++ b/src/histogram.cc @@ -1,8 +1,10 @@ #include "histogram.h" // NOLINT(build/include_inline) -#include "histogram-inl.h" #include "base_object-inl.h" +#include "histogram-inl.h" #include "memory_tracker-inl.h" #include "node_errors.h" +#include "node_external_reference.h" + namespace node { using v8::BigInt; @@ -197,6 +199,21 @@ Local HistogramBase::GetConstructorTemplate( return tmpl; } +void HistogramBase::RegisterExternalReferences( + ExternalReferenceRegistry* registry) { + registry->Register(New); + registry->Register(GetExceeds); + registry->Register(GetMin); + registry->Register(GetMax); + registry->Register(GetMean); + registry->Register(GetStddev); + registry->Register(GetPercentile); + registry->Register(GetPercentiles); + registry->Register(DoReset); + registry->Register(Record); + registry->Register(RecordDelta); +} + void HistogramBase::Initialize(Environment* env, Local target) { env->SetConstructorFunction(target, "Histogram", GetConstructorTemplate(env)); } @@ -240,6 +257,20 @@ Local IntervalHistogram::GetConstructorTemplate( return tmpl; } +void IntervalHistogram::RegisterExternalReferences( + ExternalReferenceRegistry* registry) { + registry->Register(GetExceeds); + registry->Register(GetMin); + registry->Register(GetMax); + registry->Register(GetMean); + registry->Register(GetStddev); + registry->Register(GetPercentile); + registry->Register(GetPercentiles); + registry->Register(DoReset); + registry->Register(Start); + registry->Register(Stop); +} + IntervalHistogram::IntervalHistogram( Environment* env, Local wrap, diff --git a/src/histogram.h b/src/histogram.h index 8c164f54cfd9ed..00b4f7796f8be6 100644 --- a/src/histogram.h +++ b/src/histogram.h @@ -18,6 +18,8 @@ 
namespace node { +class ExternalReferenceRegistry; + constexpr int kDefaultHistogramFigures = 3; class Histogram : public MemoryRetainer { @@ -78,6 +80,7 @@ class HistogramBase : public BaseObject, public HistogramImpl { static v8::Local GetConstructorTemplate( Environment* env); static void Initialize(Environment* env, v8::Local target); + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); static BaseObjectPtr Create( Environment* env, @@ -154,6 +157,8 @@ class IntervalHistogram : public HandleWrap, public HistogramImpl { RESET }; + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); + static v8::Local GetConstructorTemplate( Environment* env); diff --git a/src/inspector_agent.cc b/src/inspector_agent.cc index a3c901dca2eafc..612ce2b78b41ba 100644 --- a/src/inspector_agent.cc +++ b/src/inspector_agent.cc @@ -40,7 +40,6 @@ using node::FatalError; using v8::Context; using v8::Function; -using v8::Global; using v8::HandleScope; using v8::Isolate; using v8::Local; @@ -802,8 +801,8 @@ void Agent::PauseOnNextJavascriptStatement(const std::string& reason) { void Agent::RegisterAsyncHook(Isolate* isolate, Local enable_function, Local disable_function) { - enable_async_hook_function_.Reset(isolate, enable_function); - disable_async_hook_function_.Reset(isolate, disable_function); + parent_env_->set_inspector_enable_async_hooks(enable_function); + parent_env_->set_inspector_disable_async_hooks(disable_function); if (pending_enable_async_hook_) { CHECK(!pending_disable_async_hook_); pending_enable_async_hook_ = false; @@ -816,8 +815,10 @@ void Agent::RegisterAsyncHook(Isolate* isolate, } void Agent::EnableAsyncHook() { - if (!enable_async_hook_function_.IsEmpty()) { - ToggleAsyncHook(parent_env_->isolate(), enable_async_hook_function_); + HandleScope scope(parent_env_->isolate()); + Local enable = parent_env_->inspector_enable_async_hooks(); + if (!enable.IsEmpty()) { + ToggleAsyncHook(parent_env_->isolate(), enable); } else 
if (pending_disable_async_hook_) { CHECK(!pending_enable_async_hook_); pending_disable_async_hook_ = false; @@ -827,8 +828,10 @@ void Agent::EnableAsyncHook() { } void Agent::DisableAsyncHook() { - if (!disable_async_hook_function_.IsEmpty()) { - ToggleAsyncHook(parent_env_->isolate(), disable_async_hook_function_); + HandleScope scope(parent_env_->isolate()); + Local disable = parent_env_->inspector_enable_async_hooks(); + if (!disable.IsEmpty()) { + ToggleAsyncHook(parent_env_->isolate(), disable); } else if (pending_enable_async_hook_) { CHECK(!pending_disable_async_hook_); pending_enable_async_hook_ = false; @@ -837,8 +840,7 @@ void Agent::DisableAsyncHook() { } } -void Agent::ToggleAsyncHook(Isolate* isolate, - const Global& fn) { +void Agent::ToggleAsyncHook(Isolate* isolate, Local fn) { // Guard against running this during cleanup -- no async events will be // emitted anyway at that point anymore, and calling into JS is not possible. // This should probably not be something we're attempting in the first place, @@ -849,7 +851,7 @@ void Agent::ToggleAsyncHook(Isolate* isolate, CHECK(!fn.IsEmpty()); auto context = parent_env_->context(); v8::TryCatch try_catch(isolate); - USE(fn.Get(isolate)->Call(context, Undefined(isolate), 0, nullptr)); + USE(fn->Call(context, Undefined(isolate), 0, nullptr)); if (try_catch.HasCaught() && !try_catch.HasTerminated()) { PrintCaughtException(isolate, context, try_catch); FatalError("\nnode::inspector::Agent::ToggleAsyncHook", diff --git a/src/inspector_agent.h b/src/inspector_agent.h index 1c8d496ba27a9e..744e139df8f87a 100644 --- a/src/inspector_agent.h +++ b/src/inspector_agent.h @@ -117,8 +117,7 @@ class Agent { inline Environment* env() const { return parent_env_; } private: - void ToggleAsyncHook(v8::Isolate* isolate, - const v8::Global& fn); + void ToggleAsyncHook(v8::Isolate* isolate, v8::Local fn); node::Environment* parent_env_; // Encapsulates majority of the Inspector functionality @@ -137,8 +136,6 @@ class Agent { 
bool pending_enable_async_hook_ = false; bool pending_disable_async_hook_ = false; - v8::Global enable_async_hook_function_; - v8::Global disable_async_hook_function_; }; } // namespace inspector diff --git a/src/inspector_socket_server.cc b/src/inspector_socket_server.cc index 29e0c128026ed0..299664da9a1693 100644 --- a/src/inspector_socket_server.cc +++ b/src/inspector_socket_server.cc @@ -234,6 +234,7 @@ void PrintDebuggerReadyMessage( const std::string& host, const std::vector& server_sockets, const std::vector& ids, + const char* verb, bool publish_uid_stderr, FILE* out) { if (!publish_uid_stderr || out == nullptr) { @@ -241,7 +242,8 @@ void PrintDebuggerReadyMessage( } for (const auto& server_socket : server_sockets) { for (const std::string& id : ids) { - fprintf(out, "Debugger listening on %s\n", + fprintf(out, "Debugger %s on %s\n", + verb, FormatWsAddress(host, server_socket->port(), id, true).c_str()); } } @@ -300,6 +302,7 @@ void InspectorSocketServer::SessionTerminated(int session_id) { PrintDebuggerReadyMessage(host_, server_sockets_, delegate_->GetTargetIds(), + "ending", inspect_publish_uid_.console, out_); } @@ -425,6 +428,7 @@ bool InspectorSocketServer::Start() { PrintDebuggerReadyMessage(host_, server_sockets_, delegate_->GetTargetIds(), + "listening", inspect_publish_uid_.console, out_); return true; diff --git a/src/js_native_api_v8.cc b/src/js_native_api_v8.cc index 33587bc2a79b27..9c0f2b56c26d60 100644 --- a/src/js_native_api_v8.cc +++ b/src/js_native_api_v8.cc @@ -188,296 +188,6 @@ inline static napi_status ConcludeDeferred(napi_env env, return GET_RETURN_STATUS(env); } -// Wrapper around v8impl::Persistent that implements reference counting. 
-class RefBase : protected Finalizer, RefTracker { - protected: - RefBase(napi_env env, - uint32_t initial_refcount, - bool delete_self, - napi_finalize finalize_callback, - void* finalize_data, - void* finalize_hint) - : Finalizer(env, finalize_callback, finalize_data, finalize_hint), - _refcount(initial_refcount), - _delete_self(delete_self) { - Link(finalize_callback == nullptr - ? &env->reflist - : &env->finalizing_reflist); - } - - public: - static RefBase* New(napi_env env, - uint32_t initial_refcount, - bool delete_self, - napi_finalize finalize_callback, - void* finalize_data, - void* finalize_hint) { - return new RefBase(env, - initial_refcount, - delete_self, - finalize_callback, - finalize_data, - finalize_hint); - } - - virtual ~RefBase() { Unlink(); } - - inline void* Data() { - return _finalize_data; - } - - // Delete is called in 2 ways. Either from the finalizer or - // from one of Unwrap or napi_delete_reference. - // - // When it is called from Unwrap or napi_delete_reference we only - // want to do the delete if the finalizer has already run or - // cannot have been queued to run (ie the reference count is > 0), - // otherwise we may crash when the finalizer does run. - // If the finalizer may have been queued and has not already run - // delay the delete until the finalizer runs by not doing the delete - // and setting _delete_self to true so that the finalizer will - // delete it when it runs. - // - // The second way this is called is from - // the finalizer and _delete_self is set. In this case we - // know we need to do the deletion so just do it. 
- static inline void Delete(RefBase* reference) { - if ((reference->RefCount() != 0) || - (reference->_delete_self) || - (reference->_finalize_ran)) { - delete reference; - } else { - // defer until finalizer runs as - // it may already be queued - reference->_delete_self = true; - } - } - - inline uint32_t Ref() { - return ++_refcount; - } - - inline uint32_t Unref() { - if (_refcount == 0) { - return 0; - } - return --_refcount; - } - - inline uint32_t RefCount() { - return _refcount; - } - - protected: - inline void Finalize(bool is_env_teardown = false) override { - // In addition to being called during environment teardown, this method is - // also the entry point for the garbage collector. During environment - // teardown we have to remove the garbage collector's reference to this - // method so that, if, as part of the user's callback, JS gets executed, - // resulting in a garbage collection pass, this method is not re-entered as - // part of that pass, because that'll cause a double free (as seen in - // https://github.com/nodejs/node/issues/37236). - // - // Since this class does not have access to the V8 persistent reference, - // this method is overridden in the `Reference` class below. Therein the - // weak callback is removed, ensuring that the garbage collector does not - // re-enter this method, and the method chains up to continue the process of - // environment-teardown-induced finalization. - - // During environment teardown we have to convert a strong reference to - // a weak reference to force the deferring behavior if the user's finalizer - // happens to delete this reference so that the code in this function that - // follows the call to the user's finalizer may safely access variables from - // this instance. - if (is_env_teardown && RefCount() > 0) _refcount = 0; - - if (_finalize_callback != nullptr) { - // This ensures that we never call the finalizer twice. 
- napi_finalize fini = _finalize_callback; - _finalize_callback = nullptr; - _env->CallFinalizer(fini, _finalize_data, _finalize_hint); - } - - // this is safe because if a request to delete the reference - // is made in the finalize_callback it will defer deletion - // to this block and set _delete_self to true - if (_delete_self || is_env_teardown) { - Delete(this); - } else { - _finalize_ran = true; - } - } - - private: - uint32_t _refcount; - bool _delete_self; -}; - -class Reference : public RefBase { - using SecondPassCallParameterRef = Reference*; - - protected: - template - Reference(napi_env env, v8::Local value, Args&&... args) - : RefBase(env, std::forward(args)...), - _persistent(env->isolate, value), - _secondPassParameter(new SecondPassCallParameterRef(this)), - _secondPassScheduled(false) { - if (RefCount() == 0) { - SetWeak(); - } - } - - public: - static inline Reference* New(napi_env env, - v8::Local value, - uint32_t initial_refcount, - bool delete_self, - napi_finalize finalize_callback = nullptr, - void* finalize_data = nullptr, - void* finalize_hint = nullptr) { - return new Reference(env, - value, - initial_refcount, - delete_self, - finalize_callback, - finalize_data, - finalize_hint); - } - - virtual ~Reference() { - // If the second pass callback is scheduled, it will delete the - // parameter passed to it, otherwise it will never be scheduled - // and we need to delete it here. 
- if (!_secondPassScheduled) { - delete _secondPassParameter; - } - } - - inline uint32_t Ref() { - uint32_t refcount = RefBase::Ref(); - if (refcount == 1) { - ClearWeak(); - } - return refcount; - } - - inline uint32_t Unref() { - uint32_t old_refcount = RefCount(); - uint32_t refcount = RefBase::Unref(); - if (old_refcount == 1 && refcount == 0) { - SetWeak(); - } - return refcount; - } - - inline v8::Local Get() { - if (_persistent.IsEmpty()) { - return v8::Local(); - } else { - return v8::Local::New(_env->isolate, _persistent); - } - } - - protected: - inline void Finalize(bool is_env_teardown = false) override { - if (is_env_teardown) env_teardown_finalize_started_ = true; - if (!is_env_teardown && env_teardown_finalize_started_) return; - - // During env teardown, `~napi_env()` alone is responsible for finalizing. - // Thus, we don't want any stray gc passes to trigger a second call to - // `RefBase::Finalize()`. ClearWeak will ensure that even if the - // gc is in progress no Finalization will be run for this Reference - // by the gc. - if (is_env_teardown) { - ClearWeak(); - } - - // Chain up to perform the rest of the finalization. - RefBase::Finalize(is_env_teardown); - } - - private: - // ClearWeak is marking the Reference so that the gc should not - // collect it, but it is possible that a second pass callback - // may have been scheduled already if we are in shutdown. We clear - // the secondPassParameter so that even if it has been - // secheduled no Finalization will be run. - inline void ClearWeak() { - if (!_persistent.IsEmpty()) { - _persistent.ClearWeak(); - } - if (_secondPassParameter != nullptr) { - *_secondPassParameter = nullptr; - } - } - - // Mark the reference as weak and eligible for collection - // by the gc. 
- inline void SetWeak() { - if (_secondPassParameter == nullptr) { - // This means that the Reference has already been processed - // by the second pass callback, so its already been Finalized, do - // nothing - return; - } - _persistent.SetWeak( - _secondPassParameter, FinalizeCallback, - v8::WeakCallbackType::kParameter); - *_secondPassParameter = this; - } - - // The N-API finalizer callback may make calls into the engine. V8's heap is - // not in a consistent state during the weak callback, and therefore it does - // not support calls back into it. However, it provides a mechanism for adding - // a finalizer which may make calls back into the engine by allowing us to - // attach such a second-pass finalizer from the first pass finalizer. Thus, - // we do that here to ensure that the N-API finalizer callback is free to call - // into the engine. - static void FinalizeCallback( - const v8::WeakCallbackInfo& data) { - SecondPassCallParameterRef* parameter = data.GetParameter(); - Reference* reference = *parameter; - if (reference == nullptr) { - return; - } - - // The reference must be reset during the first pass. - reference->_persistent.Reset(); - // Mark the parameter not delete-able until the second pass callback is - // invoked. - reference->_secondPassScheduled = true; - data.SetSecondPassCallback(SecondPassCallback); - } - - // Second pass callbacks are scheduled with platform tasks. At env teardown, - // the tasks may have already be scheduled and we are unable to cancel the - // second pass callback task. We have to make sure that parameter is kept - // alive until the second pass callback is been invoked. In order to do - // this and still allow our code to Finalize/delete the Reference during - // shutdown we have to use a separately allocated parameter instead - // of a parameter within the Reference object itself. This is what - // is stored in _secondPassParameter and it is allocated in the - // constructor for the Reference. 
- static void SecondPassCallback( - const v8::WeakCallbackInfo& data) { - SecondPassCallParameterRef* parameter = data.GetParameter(); - Reference* reference = *parameter; - delete parameter; - if (reference == nullptr) { - // the reference itself has already been deleted so nothing to do - return; - } - reference->_secondPassParameter = nullptr; - reference->Finalize(); - } - - bool env_teardown_finalize_started_ = false; - v8impl::Persistent _persistent; - SecondPassCallParameterRef* _secondPassParameter; - bool _secondPassScheduled; -}; - enum UnwrapAction { KeepWrap, RemoveWrap @@ -740,6 +450,273 @@ inline napi_status Wrap(napi_env env, } // end of anonymous namespace +// Wrapper around v8impl::Persistent that implements reference counting. +RefBase::RefBase(napi_env env, + uint32_t initial_refcount, + bool delete_self, + napi_finalize finalize_callback, + void* finalize_data, + void* finalize_hint) + : Finalizer(env, finalize_callback, finalize_data, finalize_hint), + _refcount(initial_refcount), + _delete_self(delete_self) { + Link(finalize_callback == nullptr ? &env->reflist : &env->finalizing_reflist); +} + +RefBase* RefBase::New(napi_env env, + uint32_t initial_refcount, + bool delete_self, + napi_finalize finalize_callback, + void* finalize_data, + void* finalize_hint) { + return new RefBase(env, + initial_refcount, + delete_self, + finalize_callback, + finalize_data, + finalize_hint); +} + +RefBase::~RefBase() { + Unlink(); +} + +void* RefBase::Data() { + return _finalize_data; +} + +// Delete is called in 2 ways. Either from the finalizer or +// from one of Unwrap or napi_delete_reference. +// +// When it is called from Unwrap or napi_delete_reference we only +// want to do the delete if the finalizer has already run or +// cannot have been queued to run (ie the reference count is > 0), +// otherwise we may crash when the finalizer does run. 
+// If the finalizer may have been queued and has not already run +// delay the delete until the finalizer runs by not doing the delete +// and setting _delete_self to true so that the finalizer will +// delete it when it runs. +// +// The second way this is called is from +// the finalizer and _delete_self is set. In this case we +// know we need to do the deletion so just do it. +void RefBase::Delete(RefBase* reference) { + if ((reference->RefCount() != 0) || (reference->_delete_self) || + (reference->_finalize_ran)) { + delete reference; + } else { + // defer until finalizer runs as + // it may already be queued + reference->_delete_self = true; + } +} + +uint32_t RefBase::Ref() { + return ++_refcount; +} + +uint32_t RefBase::Unref() { + if (_refcount == 0) { + return 0; + } + return --_refcount; +} + +uint32_t RefBase::RefCount() { + return _refcount; +} + +void RefBase::Finalize(bool is_env_teardown) { + // In addition to being called during environment teardown, this method is + // also the entry point for the garbage collector. During environment + // teardown we have to remove the garbage collector's reference to this + // method so that, if, as part of the user's callback, JS gets executed, + // resulting in a garbage collection pass, this method is not re-entered as + // part of that pass, because that'll cause a double free (as seen in + // https://github.com/nodejs/node/issues/37236). + // + // Since this class does not have access to the V8 persistent reference, + // this method is overridden in the `Reference` class below. Therein the + // weak callback is removed, ensuring that the garbage collector does not + // re-enter this method, and the method chains up to continue the process of + // environment-teardown-induced finalization. 
+ + // During environment teardown we have to convert a strong reference to + // a weak reference to force the deferring behavior if the user's finalizer + // happens to delete this reference so that the code in this function that + // follows the call to the user's finalizer may safely access variables from + // this instance. + if (is_env_teardown && RefCount() > 0) _refcount = 0; + + if (_finalize_callback != nullptr) { + // This ensures that we never call the finalizer twice. + napi_finalize fini = _finalize_callback; + _finalize_callback = nullptr; + _env->CallFinalizer(fini, _finalize_data, _finalize_hint); + } + + // this is safe because if a request to delete the reference + // is made in the finalize_callback it will defer deletion + // to this block and set _delete_self to true + if (_delete_self || is_env_teardown) { + Delete(this); + } else { + _finalize_ran = true; + } +} + +template +Reference::Reference(napi_env env, v8::Local value, Args&&... args) + : RefBase(env, std::forward(args)...), + _persistent(env->isolate, value), + _secondPassParameter(new SecondPassCallParameterRef(this)), + _secondPassScheduled(false) { + if (RefCount() == 0) { + SetWeak(); + } +} + +Reference* Reference::New(napi_env env, + v8::Local value, + uint32_t initial_refcount, + bool delete_self, + napi_finalize finalize_callback, + void* finalize_data, + void* finalize_hint) { + return new Reference(env, + value, + initial_refcount, + delete_self, + finalize_callback, + finalize_data, + finalize_hint); +} + +Reference::~Reference() { + // If the second pass callback is scheduled, it will delete the + // parameter passed to it, otherwise it will never be scheduled + // and we need to delete it here. 
+ if (!_secondPassScheduled) { + delete _secondPassParameter; + } +} + +uint32_t Reference::Ref() { + uint32_t refcount = RefBase::Ref(); + if (refcount == 1) { + ClearWeak(); + } + return refcount; +} + +uint32_t Reference::Unref() { + uint32_t old_refcount = RefCount(); + uint32_t refcount = RefBase::Unref(); + if (old_refcount == 1 && refcount == 0) { + SetWeak(); + } + return refcount; +} + +v8::Local Reference::Get() { + if (_persistent.IsEmpty()) { + return v8::Local(); + } else { + return v8::Local::New(_env->isolate, _persistent); + } +} + +void Reference::Finalize(bool is_env_teardown) { + if (is_env_teardown) env_teardown_finalize_started_ = true; + if (!is_env_teardown && env_teardown_finalize_started_) return; + + // During env teardown, `~napi_env()` alone is responsible for finalizing. + // Thus, we don't want any stray gc passes to trigger a second call to + // `RefBase::Finalize()`. ClearWeak will ensure that even if the + // gc is in progress no Finalization will be run for this Reference + // by the gc. + if (is_env_teardown) { + ClearWeak(); + } + + // Chain up to perform the rest of the finalization. + RefBase::Finalize(is_env_teardown); +} + +// ClearWeak is marking the Reference so that the gc should not +// collect it, but it is possible that a second pass callback +// may have been scheduled already if we are in shutdown. We clear +// the secondPassParameter so that even if it has been +// scheduled no Finalization will be run. +void Reference::ClearWeak() { + if (!_persistent.IsEmpty()) { + _persistent.ClearWeak(); + } + if (_secondPassParameter != nullptr) { + *_secondPassParameter = nullptr; + } +} + +// Mark the reference as weak and eligible for collection +// by the gc. 
+void Reference::SetWeak() { + if (_secondPassParameter == nullptr) { + // This means that the Reference has already been processed + // by the second pass callback, so its already been Finalized, do + // nothing + return; + } + _persistent.SetWeak( + _secondPassParameter, FinalizeCallback, v8::WeakCallbackType::kParameter); + *_secondPassParameter = this; +} + +// The N-API finalizer callback may make calls into the engine. V8's heap is +// not in a consistent state during the weak callback, and therefore it does +// not support calls back into it. However, it provides a mechanism for adding +// a finalizer which may make calls back into the engine by allowing us to +// attach such a second-pass finalizer from the first pass finalizer. Thus, +// we do that here to ensure that the N-API finalizer callback is free to call +// into the engine. +void Reference::FinalizeCallback( + const v8::WeakCallbackInfo& data) { + SecondPassCallParameterRef* parameter = data.GetParameter(); + Reference* reference = *parameter; + if (reference == nullptr) { + return; + } + + // The reference must be reset during the first pass. + reference->_persistent.Reset(); + // Mark the parameter not delete-able until the second pass callback is + // invoked. + reference->_secondPassScheduled = true; + + data.SetSecondPassCallback(SecondPassCallback); +} + +// Second pass callbacks are scheduled with platform tasks. At env teardown, +// the tasks may have already be scheduled and we are unable to cancel the +// second pass callback task. We have to make sure that parameter is kept +// alive until the second pass callback is been invoked. In order to do +// this and still allow our code to Finalize/delete the Reference during +// shutdown we have to use a separately allocated parameter instead +// of a parameter within the Reference object itself. This is what +// is stored in _secondPassParameter and it is allocated in the +// constructor for the Reference. 
+void Reference::SecondPassCallback( + const v8::WeakCallbackInfo& data) { + SecondPassCallParameterRef* parameter = data.GetParameter(); + Reference* reference = *parameter; + delete parameter; + if (reference == nullptr) { + // the reference itself has already been deleted so nothing to do + return; + } + reference->_secondPassParameter = nullptr; + reference->Finalize(); +} + } // end of namespace v8impl // Warning: Keep in-sync with napi_status enum diff --git a/src/js_native_api_v8.h b/src/js_native_api_v8.h index f619248a84326a..057113f2cb7549 100644 --- a/src/js_native_api_v8.h +++ b/src/js_native_api_v8.h @@ -366,6 +366,81 @@ class TryCatch : public v8::TryCatch { napi_env _env; }; +// Wrapper around v8impl::Persistent that implements reference counting. +class RefBase : protected Finalizer, RefTracker { + protected: + RefBase(napi_env env, + uint32_t initial_refcount, + bool delete_self, + napi_finalize finalize_callback, + void* finalize_data, + void* finalize_hint); + + public: + static RefBase* New(napi_env env, + uint32_t initial_refcount, + bool delete_self, + napi_finalize finalize_callback, + void* finalize_data, + void* finalize_hint); + + static inline void Delete(RefBase* reference); + + virtual ~RefBase(); + void* Data(); + uint32_t Ref(); + uint32_t Unref(); + uint32_t RefCount(); + + protected: + void Finalize(bool is_env_teardown = false) override; + + private: + uint32_t _refcount; + bool _delete_self; +}; + +class Reference : public RefBase { + using SecondPassCallParameterRef = Reference*; + + protected: + template + Reference(napi_env env, v8::Local value, Args&&... 
args); + + public: + static Reference* New(napi_env env, + v8::Local value, + uint32_t initial_refcount, + bool delete_self, + napi_finalize finalize_callback = nullptr, + void* finalize_data = nullptr, + void* finalize_hint = nullptr); + + virtual ~Reference(); + uint32_t Ref(); + uint32_t Unref(); + v8::Local Get(); + + protected: + void Finalize(bool is_env_teardown = false) override; + + private: + void ClearWeak(); + void SetWeak(); + + static void FinalizeCallback( + const v8::WeakCallbackInfo& data); + static void SecondPassCallback( + const v8::WeakCallbackInfo& data); + + bool env_teardown_finalize_started_ = false; + v8impl::Persistent _persistent; + SecondPassCallParameterRef* _secondPassParameter; + bool _secondPassScheduled; + + FRIEND_TEST(JsNativeApiV8Test, Reference); +}; + } // end of namespace v8impl #define STATUS_CALL(call) \ diff --git a/src/js_native_api_v8_internals.h b/src/js_native_api_v8_internals.h index ddd219818cdfa9..8428390ef1eaf3 100644 --- a/src/js_native_api_v8_internals.h +++ b/src/js_native_api_v8_internals.h @@ -16,6 +16,7 @@ #include "node_version.h" #include "env.h" #include "node_internals.h" +#include "gtest/gtest_prod.h" #define NAPI_ARRAYSIZE(array) \ node::arraysize((array)) diff --git a/src/node.cc b/src/node.cc index b60be116b6139b..3ee25ebbd67a01 100644 --- a/src/node.cc +++ b/src/node.cc @@ -1030,7 +1030,7 @@ InitializationResult InitializeOncePerProcess( } if (init_flags & kInitOpenSSL) { -#if HAVE_OPENSSL +#if HAVE_OPENSSL && !defined(OPENSSL_IS_BORINGSSL) { std::string extra_ca_certs; if (credentials::SafeGetenv("NODE_EXTRA_CA_CERTS", &extra_ca_certs)) @@ -1091,7 +1091,7 @@ InitializationResult InitializeOncePerProcess( // V8 on Windows doesn't have a good source of entropy. Seed it from // OpenSSL's pool. 
V8::SetEntropySource(crypto::EntropySource); -#endif // HAVE_OPENSSL +#endif // HAVE_OPENSSL && !defined(OPENSSL_IS_BORINGSSL) } per_process::v8_platform.Initialize( static_cast(per_process::cli_options->v8_thread_pool_size)); diff --git a/src/node_api.cc b/src/node_api.cc index 3fcc9ecbf4a63f..8304ccb7e86a08 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -4,6 +4,7 @@ #include "js_native_api_v8.h" #include "memory_tracker-inl.h" #include "node_api.h" +#include "node_api_internals.h" #include "node_binding.h" #include "node_buffer.h" #include "node_errors.h" @@ -15,51 +16,36 @@ #include #include -struct node_napi_env__ : public napi_env__ { - explicit node_napi_env__(v8::Local context, - const std::string& module_filename): - napi_env__(context), filename(module_filename) { - CHECK_NOT_NULL(node_env()); - } - - inline node::Environment* node_env() const { - return node::Environment::GetCurrent(context()); - } +node_napi_env__::node_napi_env__(v8::Local context, + const std::string& module_filename) + : napi_env__(context), filename(module_filename) { + CHECK_NOT_NULL(node_env()); +} - bool can_call_into_js() const override { - return node_env()->can_call_into_js(); - } +bool node_napi_env__::can_call_into_js() const { + return node_env()->can_call_into_js(); +} - v8::Maybe mark_arraybuffer_as_untransferable( - v8::Local ab) const override { - return ab->SetPrivate( - context(), - node_env()->untransferable_object_private_symbol(), - v8::True(isolate)); - } +v8::Maybe node_napi_env__::mark_arraybuffer_as_untransferable( + v8::Local ab) const { + return ab->SetPrivate(context(), + node_env()->untransferable_object_private_symbol(), + v8::True(isolate)); +} - void CallFinalizer(napi_finalize cb, void* data, void* hint) override { - // we need to keep the env live until the finalizer has been run - // EnvRefHolder provides an exception safe wrapper to Ref and then - // Unref once the lamba is freed - EnvRefHolder liveEnv(static_cast(this)); - 
node_env()->SetImmediate([=, liveEnv = std::move(liveEnv)] - (node::Environment* node_env) { - napi_env env = liveEnv.env(); - v8::HandleScope handle_scope(env->isolate); - v8::Context::Scope context_scope(env->context()); - env->CallIntoModule([&](napi_env env) { - cb(env, data, hint); +void node_napi_env__::CallFinalizer(napi_finalize cb, void* data, void* hint) { + // we need to keep the env live until the finalizer has been run + // EnvRefHolder provides an exception safe wrapper to Ref and then + // Unref once the lamba is freed + EnvRefHolder liveEnv(static_cast(this)); + node_env()->SetImmediate( + [=, liveEnv = std::move(liveEnv)](node::Environment* node_env) { + napi_env env = liveEnv.env(); + v8::HandleScope handle_scope(env->isolate); + v8::Context::Scope context_scope(env->context()); + env->CallIntoModule([&](napi_env env) { cb(env, data, hint); }); }); - }); - } - - const char* GetFilename() const { return filename.c_str(); } - - std::string filename; -}; - -typedef node_napi_env__* node_napi_env; +} namespace v8impl { diff --git a/src/node_api_internals.h b/src/node_api_internals.h new file mode 100644 index 00000000000000..318ada38083435 --- /dev/null +++ b/src/node_api_internals.h @@ -0,0 +1,30 @@ +#ifndef SRC_NODE_API_INTERNALS_H_ +#define SRC_NODE_API_INTERNALS_H_ + +#include "v8.h" +#define NAPI_EXPERIMENTAL +#include "env-inl.h" +#include "js_native_api_v8.h" +#include "node_api.h" +#include "util-inl.h" + +struct node_napi_env__ : public napi_env__ { + node_napi_env__(v8::Local context, + const std::string& module_filename); + + bool can_call_into_js() const override; + v8::Maybe mark_arraybuffer_as_untransferable( + v8::Local ab) const override; + void CallFinalizer(napi_finalize cb, void* data, void* hint) override; + + inline node::Environment* node_env() const { + return node::Environment::GetCurrent(context()); + } + inline const char* GetFilename() const { return filename.c_str(); } + + std::string filename; +}; + +using node_napi_env = 
node_napi_env__*; + +#endif // SRC_NODE_API_INTERNALS_H_ diff --git a/src/node_buffer.cc b/src/node_buffer.cc index e816ba131644ad..b5651b5e325fc9 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -67,6 +67,7 @@ using v8::MaybeLocal; using v8::Nothing; using v8::Number; using v8::Object; +using v8::SharedArrayBuffer; using v8::String; using v8::Uint32; using v8::Uint32Array; @@ -1158,6 +1159,60 @@ void GetZeroFillToggle(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(Uint32Array::New(ab, 0, 1)); } +void DetachArrayBuffer(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + if (args[0]->IsArrayBuffer()) { + Local buf = args[0].As(); + if (buf->IsDetachable()) { + std::shared_ptr store = buf->GetBackingStore(); + buf->Detach(); + args.GetReturnValue().Set(ArrayBuffer::New(env->isolate(), store)); + } + } +} + +void CopyArrayBuffer(const FunctionCallbackInfo& args) { + // args[0] == Destination ArrayBuffer + // args[1] == Destination ArrayBuffer Offset + // args[2] == Source ArrayBuffer + // args[3] == Source ArrayBuffer Offset + // args[4] == bytesToCopy + + CHECK(args[0]->IsArrayBuffer() || args[0]->IsSharedArrayBuffer()); + CHECK(args[1]->IsUint32()); + CHECK(args[2]->IsArrayBuffer() || args[2]->IsSharedArrayBuffer()); + CHECK(args[3]->IsUint32()); + CHECK(args[4]->IsUint32()); + + std::shared_ptr destination; + std::shared_ptr source; + + if (args[0]->IsArrayBuffer()) { + destination = args[0].As()->GetBackingStore(); + } else if (args[0]->IsSharedArrayBuffer()) { + destination = args[0].As()->GetBackingStore(); + } + + if (args[2]->IsArrayBuffer()) { + source = args[2].As()->GetBackingStore(); + } else if (args[0]->IsSharedArrayBuffer()) { + source = args[2].As()->GetBackingStore(); + } + + uint32_t destination_offset = args[1].As()->Value(); + uint32_t source_offset = args[3].As()->Value(); + size_t bytes_to_copy = args[4].As()->Value(); + + CHECK_GE(destination->ByteLength() - destination_offset, 
bytes_to_copy); + CHECK_GE(source->ByteLength() - source_offset, bytes_to_copy); + + uint8_t* dest = + static_cast(destination->Data()) + destination_offset; + uint8_t* src = + static_cast(source->Data()) + source_offset; + memcpy(dest, src, bytes_to_copy); +} + void Initialize(Local target, Local unused, Local context, @@ -1176,6 +1231,9 @@ void Initialize(Local target, env->SetMethodNoSideEffect(target, "indexOfNumber", IndexOfNumber); env->SetMethodNoSideEffect(target, "indexOfString", IndexOfString); + env->SetMethod(target, "detachArrayBuffer", DetachArrayBuffer); + env->SetMethod(target, "copyArrayBuffer", CopyArrayBuffer); + env->SetMethod(target, "swap16", Swap16); env->SetMethod(target, "swap32", Swap32); env->SetMethod(target, "swap64", Swap64); @@ -1251,6 +1309,9 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(StringWrite); registry->Register(GetZeroFillToggle); + registry->Register(DetachArrayBuffer); + registry->Register(CopyArrayBuffer); + Blob::RegisterExternalReferences(registry); FixedSizeBlobCopyJob::RegisterExternalReferences(registry); } diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 2d6bc34322fd61..094558783f770e 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -61,6 +61,7 @@ class ExternalReferenceRegistry { V(heap_utils) \ V(messaging) \ V(native_module) \ + V(performance) \ V(process_methods) \ V(process_object) \ V(task_queue) \ diff --git a/src/node_internals.h b/src/node_internals.h index 31076551e70c46..8f7929994f3243 100644 --- a/src/node_internals.h +++ b/src/node_internals.h @@ -39,11 +39,6 @@ #include #include -// Custom constants used by both node_constants.cc and node_zlib.cc -#define Z_MIN_WINDOWBITS 8 -#define Z_MAX_WINDOWBITS 15 -#define Z_DEFAULT_WINDOWBITS 15 - struct sockaddr; namespace node { diff --git a/src/node_native_module.cc b/src/node_native_module.cc index f788732ae569d4..2642982330e8cf 100644 --- 
a/src/node_native_module.cc +++ b/src/node_native_module.cc @@ -1,5 +1,6 @@ #include "node_native_module.h" #include "util-inl.h" +#include "debug_utils-inl.h" namespace node { namespace native_module { @@ -98,7 +99,9 @@ void NativeModuleLoader::InitializeModuleCategories() { "tls", "_tls_common", "_tls_wrap", - "internal/tls", + "internal/tls/secure-pair", + "internal/tls/parse-cert-string", + "internal/tls/secure-context", "internal/http2/core", "internal/http2/compat", "internal/policy/manifest", diff --git a/src/node_perf.cc b/src/node_perf.cc index 8c5778ecb10bb0..acbb0e0d902c3e 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -1,10 +1,11 @@ +#include "node_perf.h" #include "aliased_buffer.h" #include "env-inl.h" #include "histogram-inl.h" #include "memory_tracker-inl.h" -#include "node_internals.h" -#include "node_perf.h" #include "node_buffer.h" +#include "node_external_reference.h" +#include "node_internals.h" #include "node_process-inl.h" #include "util-inl.h" @@ -250,6 +251,12 @@ void ELDHistogram::Initialize(Environment* env, Local target) { env->SetConstructorFunction(target, "ELDHistogram", tmpl); } +void ELDHistogram::RegisterExternalReferences( + ExternalReferenceRegistry* registry) { + registry->Register(New); + IntervalHistogram::RegisterExternalReferences(registry); +} + ELDHistogram::ELDHistogram( Environment* env, Local wrap, @@ -274,6 +281,15 @@ void ELDHistogram::OnInterval() { "stddev", histogram()->Stddev()); } +void GetTimeOrigin(const FunctionCallbackInfo& args) { + args.GetReturnValue().Set(Number::New(args.GetIsolate(), timeOrigin / 1e6)); +} + +void GetTimeOriginTimeStamp(const FunctionCallbackInfo& args) { + args.GetReturnValue().Set( + Number::New(args.GetIsolate(), timeOriginTimestamp / MICROS_PER_MILLIS)); +} + void Initialize(Local target, Local unused, Local context, @@ -308,6 +324,8 @@ void Initialize(Local target, RemoveGarbageCollectionTracking); env->SetMethod(target, "notify", Notify); env->SetMethod(target, 
"loopIdleTime", LoopIdleTime); + env->SetMethod(target, "getTimeOrigin", GetTimeOrigin); + env->SetMethod(target, "getTimeOriginTimestamp", GetTimeOriginTimeStamp); Local constants = Object::New(isolate); @@ -344,17 +362,6 @@ void Initialize(Local target, PropertyAttribute attr = static_cast(ReadOnly | DontDelete); - target->DefineOwnProperty(context, - FIXED_ONE_BYTE_STRING(isolate, "timeOrigin"), - Number::New(isolate, timeOrigin / 1e6), - attr).ToChecked(); - - target->DefineOwnProperty( - context, - FIXED_ONE_BYTE_STRING(isolate, "timeOriginTimestamp"), - Number::New(isolate, timeOriginTimestamp / MICROS_PER_MILLIS), - attr).ToChecked(); - target->DefineOwnProperty(context, env->constants_string(), constants, @@ -364,7 +371,21 @@ void Initialize(Local target, ELDHistogram::Initialize(env, target); } +void RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(MarkMilestone); + registry->Register(SetupPerformanceObservers); + registry->Register(InstallGarbageCollectionTracking); + registry->Register(RemoveGarbageCollectionTracking); + registry->Register(Notify); + registry->Register(LoopIdleTime); + registry->Register(GetTimeOrigin); + registry->Register(GetTimeOriginTimeStamp); + HistogramBase::RegisterExternalReferences(registry); + ELDHistogram::RegisterExternalReferences(registry); +} } // namespace performance } // namespace node NODE_MODULE_CONTEXT_AWARE_INTERNAL(performance, node::performance::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(performance, + node::performance::RegisterExternalReferences) diff --git a/src/node_perf.h b/src/node_perf.h index 33cf9f2ec651bf..64913ab9de7c61 100644 --- a/src/node_perf.h +++ b/src/node_perf.h @@ -3,10 +3,11 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS -#include "node.h" -#include "node_perf_common.h" #include "base_object-inl.h" #include "histogram.h" +#include "node.h" +#include "node_internals.h" +#include "node_perf_common.h" #include "v8.h" #include "uv.h" @@ -16,6 
+17,7 @@ namespace node { class Environment; +class ExternalReferenceRegistry; namespace performance { @@ -160,6 +162,7 @@ using GCPerformanceEntry = PerformanceEntry; class ELDHistogram : public IntervalHistogram { public: + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); static void Initialize(Environment* env, v8::Local target); static void New(const v8::FunctionCallbackInfo& args); diff --git a/src/node_sockaddr.cc b/src/node_sockaddr.cc index b15b6ab47194c1..09a74f302923f7 100644 --- a/src/node_sockaddr.cc +++ b/src/node_sockaddr.cc @@ -159,10 +159,12 @@ SocketAddress::CompareResult compare_ipv4( reinterpret_cast(one.data()); const sockaddr_in* two_in = reinterpret_cast(two.data()); + const uint32_t s_addr_one = ntohl(one_in->sin_addr.s_addr); + const uint32_t s_addr_two = ntohl(two_in->sin_addr.s_addr); - if (one_in->sin_addr.s_addr < two_in->sin_addr.s_addr) + if (s_addr_one < s_addr_two) return SocketAddress::CompareResult::LESS_THAN; - else if (one_in->sin_addr.s_addr == two_in->sin_addr.s_addr) + else if (s_addr_one == s_addr_two) return SocketAddress::CompareResult::SAME; else return SocketAddress::CompareResult::GREATER_THAN; diff --git a/src/node_version.h b/src/node_version.h index 4eb0f2108776f1..0523885212d429 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 16 -#define NODE_MINOR_VERSION 4 -#define NODE_PATCH_VERSION 3 +#define NODE_MINOR_VERSION 5 +#define NODE_PATCH_VERSION 0 #define NODE_VERSION_IS_LTS 0 #define NODE_VERSION_LTS_CODENAME "" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n) diff --git a/src/node_zlib.cc b/src/node_zlib.cc index 2a2466052c92a5..b8733229b0b06b 100644 --- a/src/node_zlib.cc +++ b/src/node_zlib.cc @@ -71,6 +71,9 @@ namespace { #define Z_MIN_LEVEL -1 #define Z_MAX_LEVEL 9 #define Z_DEFAULT_LEVEL 
Z_DEFAULT_COMPRESSION +#define Z_MIN_WINDOWBITS 8 +#define Z_MAX_WINDOWBITS 15 +#define Z_DEFAULT_WINDOWBITS 15 #define ZLIB_ERROR_CODES(V) \ V(Z_OK) \ diff --git a/test/cctest/test_js_native_api_v8.cc b/test/cctest/test_js_native_api_v8.cc new file mode 100644 index 00000000000000..2d95c86d09d5d5 --- /dev/null +++ b/test/cctest/test_js_native_api_v8.cc @@ -0,0 +1,102 @@ +#include +#include +#include +#include "env-inl.h" +#include "gtest/gtest.h" +#include "js_native_api_v8.h" +#include "node_api_internals.h" +#include "node_binding.h" +#include "node_test_fixture.h" + +namespace v8impl { + +using v8::Local; +using v8::Object; + +static napi_env addon_env; +static uint32_t finalizer_call_count = 0; + +class JsNativeApiV8Test : public EnvironmentTestFixture { + private: + void SetUp() override { + EnvironmentTestFixture::SetUp(); + finalizer_call_count = 0; + } + + void TearDown() override { NodeTestFixture::TearDown(); } +}; + +TEST_F(JsNativeApiV8Test, Reference) { + const v8::HandleScope handle_scope(isolate_); + Argv argv; + + napi_ref ref; + void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr, nullptr}; + v8::WeakCallbackInfo::Callback + callback; + Reference::SecondPassCallParameterRef* parameter = nullptr; + + { + Env test_env{handle_scope, argv}; + + node::Environment* env = *test_env; + node::LoadEnvironment(env, ""); + + napi_addon_register_func init = [](napi_env env, napi_value exports) { + addon_env = env; + return exports; + }; + Local module_obj = Object::New(isolate_); + Local exports_obj = Object::New(isolate_); + napi_module_register_by_symbol( + exports_obj, module_obj, env->context(), init); + ASSERT_NE(addon_env, nullptr); + node_napi_env internal_env = reinterpret_cast(addon_env); + EXPECT_EQ(internal_env->node_env(), env); + + // Create a new scope to manage the handles. 
+ { + const v8::HandleScope handle_scope(isolate_); + napi_value value; + napi_create_object(addon_env, &value); + // Create a weak reference; + napi_add_finalizer( + addon_env, + value, + nullptr, + [](napi_env env, void* finalize_data, void* finalize_hint) { + finalizer_call_count++; + }, + nullptr, + &ref); + parameter = reinterpret_cast(ref)->_secondPassParameter; + } + + // We can hardly trigger a non-forced Garbage Collection in a stable way. + // Here we just invoke the weak callbacks directly. + // The persistant handles should be reset in the weak callback in respect + // to the API contract of v8 weak callbacks. + v8::WeakCallbackInfo data( + reinterpret_cast(isolate_), + parameter, + embedder_fields, + &callback); + Reference::FinalizeCallback(data); + EXPECT_EQ(callback, &Reference::SecondPassCallback); + } + // Env goes out of scope, the environment has been teardown. All node-api ref + // trackers should have been destroyed. + + // Now we call the second pass callback to verify the method do not abort with + // memory violations. 
+ v8::WeakCallbackInfo data( + reinterpret_cast(isolate_), + parameter, + embedder_fields, + nullptr); + Reference::SecondPassCallback(data); + + // After Environment Teardown + EXPECT_EQ(finalizer_call_count, uint32_t(1)); +} +} // namespace v8impl diff --git a/test/cctest/test_node_api.cc b/test/cctest/test_node_api.cc new file mode 100644 index 00000000000000..ad5be52fc8ffcc --- /dev/null +++ b/test/cctest/test_node_api.cc @@ -0,0 +1,41 @@ +#include +#include +#include +#include "env-inl.h" +#include "gtest/gtest.h" +#include "node_api_internals.h" +#include "node_binding.h" +#include "node_test_fixture.h" + +using v8::Local; +using v8::Object; + +static napi_env addon_env; + +class NodeApiTest : public EnvironmentTestFixture { + private: + void SetUp() override { EnvironmentTestFixture::SetUp(); } + + void TearDown() override { NodeTestFixture::TearDown(); } +}; + +TEST_F(NodeApiTest, CreateNodeApiEnv) { + const v8::HandleScope handle_scope(isolate_); + Argv argv; + + Env test_env{handle_scope, argv}; + + node::Environment* env = *test_env; + node::LoadEnvironment(env, ""); + + napi_addon_register_func init = [](napi_env env, napi_value exports) { + addon_env = env; + return exports; + }; + Local module_obj = Object::New(isolate_); + Local exports_obj = Object::New(isolate_); + napi_module_register_by_symbol(exports_obj, module_obj, env->context(), init); + ASSERT_NE(addon_env, nullptr); + node_napi_env internal_env = reinterpret_cast(addon_env); + EXPECT_EQ(internal_env->node_env(), env); +} diff --git a/test/common/README.md b/test/common/README.md index 7b76c1a746c656..6b42fa1070d015 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -62,13 +62,6 @@ On non-Windows platforms, this always returns `true`. Creates a 10 MB file of all null characters. -### `enoughTestCpu` - -* [<boolean>][] - -Indicates if there is more than 1 CPU or that the single CPU has a speed of at -least 1 GHz. 
- ### `enoughTestMem` * [<boolean>][] diff --git a/test/common/debugger.js b/test/common/debugger.js index c127b1dc292e7f..36d6328dcc9b39 100644 --- a/test/common/debugger.js +++ b/test/common/debugger.js @@ -23,10 +23,7 @@ function startCLI(args, flags = [], spawnOpts = {}) { if (this === child.stderr) { stderrOutput += chunk; } - // TODO(trott): Figure out why the "breakpoints restored." message appears - // in unpredictable places especially on AIX in CI. We shouldn't be - // excluding it, but it gets in the way of the output checking for tests. - outputBuffer.push(chunk.replace(/\n*\d+ breakpoints restored\.\n*/mg, '')); + outputBuffer.push(chunk); } function getOutput() { diff --git a/test/common/index.js b/test/common/index.js index a5d74a3c5b49d6..df61b3c3cda1a0 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -252,9 +252,6 @@ function platformTimeout(ms) { const armv = process.config.variables.arm_version; - if (armv === '6') - return multipliers.seven * ms; // ARMv6 - if (armv === '7') return multipliers.two * ms; // ARMv7 @@ -788,11 +785,6 @@ const common = { skipIfInspectorDisabled, skipIfWorker, - get enoughTestCpu() { - const cpus = require('os').cpus(); - return Array.isArray(cpus) && (cpus.length > 1 || cpus[0].speed > 999); - }, - get enoughTestMem() { return require('os').totalmem() > 0x70000000; /* 1.75 Gb */ }, diff --git a/test/common/index.mjs b/test/common/index.mjs index 9700fe8039c771..1824497ff6533f 100644 --- a/test/common/index.mjs +++ b/test/common/index.mjs @@ -18,7 +18,6 @@ const { isLinux, isOSX, enoughTestMem, - enoughTestCpu, buildType, localIPv6Hosts, opensslCli, @@ -64,7 +63,6 @@ export { isLinux, isOSX, enoughTestMem, - enoughTestCpu, buildType, localIPv6Hosts, opensslCli, diff --git a/test/fixtures/inspector-cli/alive.js b/test/fixtures/debugger/alive.js similarity index 100% rename from test/fixtures/inspector-cli/alive.js rename to test/fixtures/debugger/alive.js diff --git 
a/test/fixtures/inspector-cli/backtrace.js b/test/fixtures/debugger/backtrace.js similarity index 100% rename from test/fixtures/inspector-cli/backtrace.js rename to test/fixtures/debugger/backtrace.js diff --git a/test/fixtures/inspector-cli/break.js b/test/fixtures/debugger/break.js similarity index 100% rename from test/fixtures/inspector-cli/break.js rename to test/fixtures/debugger/break.js diff --git a/test/fixtures/inspector-cli/cjs/index.js b/test/fixtures/debugger/cjs/index.js similarity index 100% rename from test/fixtures/inspector-cli/cjs/index.js rename to test/fixtures/debugger/cjs/index.js diff --git a/test/fixtures/inspector-cli/cjs/other.js b/test/fixtures/debugger/cjs/other.js similarity index 100% rename from test/fixtures/inspector-cli/cjs/other.js rename to test/fixtures/debugger/cjs/other.js diff --git a/test/fixtures/inspector-cli/empty.js b/test/fixtures/debugger/empty.js similarity index 100% rename from test/fixtures/inspector-cli/empty.js rename to test/fixtures/debugger/empty.js diff --git a/test/fixtures/inspector-cli/exceptions.js b/test/fixtures/debugger/exceptions.js similarity index 100% rename from test/fixtures/inspector-cli/exceptions.js rename to test/fixtures/debugger/exceptions.js diff --git a/test/fixtures/inspector-cli/three-lines.js b/test/fixtures/debugger/three-lines.js similarity index 100% rename from test/fixtures/inspector-cli/three-lines.js rename to test/fixtures/debugger/three-lines.js diff --git a/test/fixtures/inspector-cli/use-strict.js b/test/fixtures/debugger/use-strict.js similarity index 100% rename from test/fixtures/inspector-cli/use-strict.js rename to test/fixtures/debugger/use-strict.js diff --git a/test/fixtures/source-map/esm-export-missing-module.mjs b/test/fixtures/source-map/esm-export-missing-module.mjs new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/test/fixtures/source-map/esm-export-missing-module.mjs.map b/test/fixtures/source-map/esm-export-missing-module.mjs.map new 
file mode 100644 index 00000000000000..17417c928dc04d --- /dev/null +++ b/test/fixtures/source-map/esm-export-missing-module.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"esm-export-missing-module.esm","sourceRoot":"","sources":["./exm-export-missing-module.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,SAAS;AAEzB,CAAC"} \ No newline at end of file diff --git a/test/fixtures/source-map/esm-export-missing.mjs b/test/fixtures/source-map/esm-export-missing.mjs new file mode 100644 index 00000000000000..4bda755a86b389 --- /dev/null +++ b/test/fixtures/source-map/esm-export-missing.mjs @@ -0,0 +1,2 @@ +import { Something } from './esm-export-missing-module.mjs'; +//# sourceMappingURL=esm-export-missing.mjs.map diff --git a/test/fixtures/source-map/esm-export-missing.mjs.map b/test/fixtures/source-map/esm-export-missing.mjs.map new file mode 100644 index 00000000000000..2d1d482dc97083 --- /dev/null +++ b/test/fixtures/source-map/esm-export-missing.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"esm-export-missing.ts","sourceRoot":"","sources":["./esm-export-missing.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAC3C,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC"} \ No newline at end of file diff --git a/test/fixtures/source-map/esm-export-missing.ts b/test/fixtures/source-map/esm-export-missing.ts new file mode 100644 index 00000000000000..14797c69feaebc --- /dev/null +++ b/test/fixtures/source-map/esm-export-missing.ts @@ -0,0 +1,3 @@ + +import { Something } from './exm-export-missing-module.mjs'; +console.info(Something); diff --git a/test/fixtures/test-resolution-inspect-brk-resolver.js b/test/fixtures/test-resolution-inspect-brk-resolver.js index fdfb5ca5b170c2..b5569e69fcf698 100644 --- a/test/fixtures/test-resolution-inspect-brk-resolver.js +++ b/test/fixtures/test-resolution-inspect-brk-resolver.js @@ -1,5 +1,4 @@ 'use strict'; -// eslint-disable-next-line no-unused-vars const common = require('../common'); require.extensions['.ext'] = 
require.extensions['.js']; diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index b68466264ebdf8..60d3aeb98c17da 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -19,9 +19,10 @@ Last update: - html/webappapis/atob: https://github.com/web-platform-tests/wpt/tree/f267e1dca6/html/webappapis/atob - html/webappapis/microtask-queuing: https://github.com/web-platform-tests/wpt/tree/2c5c3c4c27/html/webappapis/microtask-queuing - html/webappapis/timers: https://github.com/web-platform-tests/wpt/tree/5873f2d8f1/html/webappapis/timers -- interfaces: https://github.com/web-platform-tests/wpt/tree/79fa4cf76e/interfaces +- interfaces: https://github.com/web-platform-tests/wpt/tree/fcb671ed8b/interfaces - resources: https://github.com/web-platform-tests/wpt/tree/972ca5b669/resources +- streams: https://github.com/web-platform-tests/wpt/tree/8f60d94439/streams - url: https://github.com/web-platform-tests/wpt/tree/1fcb39223d/url [Web Platform Tests]: https://github.com/web-platform-tests/wpt -[`git node wpt`]: https://github.com/nodejs/node-core-utils/blob/master/docs/git-node.md#git-node-wpt +[`git node wpt`]: https://github.com/nodejs/node-core-utils/blob/main/docs/git-node.md#git-node-wpt diff --git a/test/fixtures/wpt/interfaces/dom.idl b/test/fixtures/wpt/interfaces/dom.idl index bd8a17a379311b..4a57b2e1eb4bce 100644 --- a/test/fixtures/wpt/interfaces/dom.idl +++ b/test/fixtures/wpt/interfaces/dom.idl @@ -93,6 +93,8 @@ interface AbortController { [Exposed=(Window,Worker)] interface AbortSignal : EventTarget { + [NewObject] static AbortSignal abort(); + readonly attribute boolean aborted; attribute EventHandler onabort; @@ -331,11 +333,14 @@ interface DocumentFragment : Node { [Exposed=Window] interface ShadowRoot : DocumentFragment { readonly attribute ShadowRootMode mode; + readonly attribute boolean delegatesFocus; + readonly attribute SlotAssignmentMode slotAssignment; readonly attribute Element host; attribute EventHandler 
onslotchange; }; enum ShadowRootMode { "open", "closed" }; +enum SlotAssignmentMode { "manual", "named" }; [Exposed=Window] interface Element : Node { @@ -386,6 +391,7 @@ interface Element : Node { dictionary ShadowRootInit { required ShadowRootMode mode; boolean delegatesFocus = false; + SlotAssignmentMode slotAssignment = "named"; }; [Exposed=Window, @@ -621,3 +627,16 @@ interface XPathEvaluator { }; XPathEvaluator includes XPathEvaluatorBase; + +[Exposed=Window] +interface XSLTProcessor { + constructor(); + undefined importStylesheet(Node style); + [CEReactions] DocumentFragment transformToFragment(Node source, Document output); + [CEReactions] Document transformToDocument(Node source); + undefined setParameter([LegacyNullToEmptyString] DOMString namespaceURI, DOMString localName, any value); + any getParameter([LegacyNullToEmptyString] DOMString namespaceURI, DOMString localName); + undefined removeParameter([LegacyNullToEmptyString] DOMString namespaceURI, DOMString localName); + undefined clearParameters(); + undefined reset(); +}; diff --git a/test/fixtures/wpt/interfaces/html.idl b/test/fixtures/wpt/interfaces/html.idl index f6789644339b4c..d878cba3367cc6 100644 --- a/test/fixtures/wpt/interfaces/html.idl +++ b/test/fixtures/wpt/interfaces/html.idl @@ -120,6 +120,7 @@ interface HTMLElement : Element { [CEReactions] attribute DOMString autocapitalize; [CEReactions] attribute [LegacyNullToEmptyString] DOMString innerText; + [CEReactions] attribute [LegacyNullToEmptyString] DOMString outerText; ElementInternals attachInternals(); }; @@ -695,7 +696,7 @@ interface TextTrackCue : EventTarget { attribute DOMString id; attribute double startTime; - attribute double endTime; + attribute unrestricted double endTime; attribute boolean pauseOnExit; attribute EventHandler onenter; @@ -1231,6 +1232,7 @@ interface HTMLSlotElement : HTMLElement { [CEReactions] attribute DOMString name; sequence assignedNodes(optional AssignedNodesOptions options = {}); sequence 
assignedElements(optional AssignedNodesOptions options = {}); + undefined assign((Element or Text)... nodes); }; dictionary AssignedNodesOptions { @@ -1264,11 +1266,14 @@ typedef (HTMLOrSVGImageElement or ImageBitmap or OffscreenCanvas) CanvasImageSource; +enum PredefinedColorSpace { "srgb", "display-p3" }; + enum CanvasFillRule { "nonzero", "evenodd" }; dictionary CanvasRenderingContext2DSettings { boolean alpha = true; boolean desynchronized = false; + PredefinedColorSpace colorSpace = "srgb"; }; enum ImageSmoothingQuality { "low", "medium", "high" }; @@ -1301,6 +1306,7 @@ interface mixin CanvasState { // state undefined save(); // push state on state stack undefined restore(); // pop state stack and restore state + undefined reset(); // reset the rendering context to its default state }; interface mixin CanvasTransform { @@ -1336,6 +1342,7 @@ interface mixin CanvasFillStrokeStyles { attribute (DOMString or CanvasGradient or CanvasPattern) fillStyle; // (default black) CanvasGradient createLinearGradient(double x0, double y0, double x1, double y1); CanvasGradient createRadialGradient(double x0, double y0, double r0, double x1, double y1, double r1); + CanvasGradient createConicGradient(double startAngle, double x, double y); CanvasPattern? 
createPattern(CanvasImageSource image, [LegacyNullToEmptyString] DOMString repetition); }; @@ -1398,9 +1405,9 @@ interface mixin CanvasDrawImage { interface mixin CanvasImageData { // pixel manipulation - ImageData createImageData([EnforceRange] long sw, [EnforceRange] long sh); + ImageData createImageData([EnforceRange] long sw, [EnforceRange] long sh, optional ImageDataSettings settings = {}); ImageData createImageData(ImageData imagedata); - ImageData getImageData([EnforceRange] long sx, [EnforceRange] long sy, [EnforceRange] long sw, [EnforceRange] long sh); + ImageData getImageData([EnforceRange] long sx, [EnforceRange] long sy, [EnforceRange] long sw, [EnforceRange] long sh, optional ImageDataSettings settings = {}); undefined putImageData(ImageData imagedata, [EnforceRange] long dx, [EnforceRange] long dy); undefined putImageData(ImageData imagedata, [EnforceRange] long dx, [EnforceRange] long dy, [EnforceRange] long dirtyX, [EnforceRange] long dirtyY, [EnforceRange] long dirtyWidth, [EnforceRange] long dirtyHeight); }; @@ -1410,6 +1417,10 @@ enum CanvasLineJoin { "round", "bevel", "miter" }; enum CanvasTextAlign { "start", "end", "left", "right", "center" }; enum CanvasTextBaseline { "top", "hanging", "middle", "alphabetic", "ideographic", "bottom" }; enum CanvasDirection { "ltr", "rtl", "inherit" }; +enum CanvasFontKerning { "auto", "normal", "none" }; +enum CanvasFontStretch { "ultra-condensed", "extra-condensed", "condensed", "semi-condensed", "normal", "semi-expanded", "expanded", "extra-expanded", "ultra-expanded" }; +enum CanvasFontVariantCaps { "normal", "small-caps", "all-small-caps", "petite-caps", "all-petite-caps", "unicase", "titling-caps" }; +enum CanvasTextRendering { "auto", "optimizeSpeed", "optimizeLegibility", "geometricPrecision" }; interface mixin CanvasPathDrawingStyles { // line caps/joins @@ -1430,6 +1441,12 @@ interface mixin CanvasTextDrawingStyles { attribute CanvasTextAlign textAlign; // (default: "start") attribute 
CanvasTextBaseline textBaseline; // (default: "alphabetic") attribute CanvasDirection direction; // (default: "inherit") + attribute double textLetterSpacing; // (default: 0) + attribute double textWordSpacing; // (default: 0) + attribute CanvasFontKerning fontKerning; // (default: "auto") + attribute CanvasFontStretch fontStretch; // (default: "normal") + attribute CanvasFontVariantCaps fontVariantCaps; // (default: "normal") + attribute CanvasTextRendering textRendering; // (default: "normal") }; interface mixin CanvasPath { @@ -1476,15 +1493,20 @@ interface TextMetrics { readonly attribute double ideographicBaseline; }; +dictionary ImageDataSettings { + PredefinedColorSpace colorSpace; +}; + [Exposed=(Window,Worker), Serializable] interface ImageData { - constructor(unsigned long sw, unsigned long sh); - constructor(Uint8ClampedArray data, unsigned long sw, optional unsigned long sh); + constructor(unsigned long sw, unsigned long sh, optional ImageDataSettings settings = {}); + constructor(Uint8ClampedArray data, unsigned long sw, optional unsigned long sh, optional ImageDataSettings settings = {}); readonly attribute unsigned long width; readonly attribute unsigned long height; readonly attribute Uint8ClampedArray data; + readonly attribute PredefinedColorSpace colorSpace; }; [Exposed=(Window,Worker)] @@ -1738,8 +1760,8 @@ interface History { undefined go(optional long delta = 0); undefined back(); undefined forward(); - undefined pushState(any data, DOMString title, optional USVString? url = null); - undefined replaceState(any data, DOMString title, optional USVString? url = null); + undefined pushState(any data, DOMString unused, optional USVString? url = null); + undefined replaceState(any data, DOMString unused, optional USVString? 
url = null); }; [Exposed=Window] diff --git a/test/fixtures/wpt/interfaces/streams.idl b/test/fixtures/wpt/interfaces/streams.idl new file mode 100644 index 00000000000000..99c3a5dee91f69 --- /dev/null +++ b/test/fixtures/wpt/interfaces/streams.idl @@ -0,0 +1,226 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Streams Standard (https://streams.spec.whatwg.org/) + +[Exposed=(Window,Worker,Worklet), Transferable] +interface ReadableStream { + constructor(optional object underlyingSource, optional QueuingStrategy strategy = {}); + + readonly attribute boolean locked; + + Promise cancel(optional any reason); + ReadableStreamReader getReader(optional ReadableStreamGetReaderOptions options = {}); + ReadableStream pipeThrough(ReadableWritablePair transform, optional StreamPipeOptions options = {}); + Promise pipeTo(WritableStream destination, optional StreamPipeOptions options = {}); + sequence tee(); + + async iterable(optional ReadableStreamIteratorOptions options = {}); +}; + +typedef (ReadableStreamDefaultReader or ReadableStreamBYOBReader) ReadableStreamReader; + +enum ReadableStreamReaderMode { "byob" }; + +dictionary ReadableStreamGetReaderOptions { + ReadableStreamReaderMode mode; +}; + +dictionary ReadableStreamIteratorOptions { + boolean preventCancel = false; +}; + +dictionary ReadableWritablePair { + required ReadableStream readable; + required WritableStream writable; +}; + +dictionary StreamPipeOptions { + boolean preventClose = false; + boolean preventAbort = false; + boolean preventCancel = false; + AbortSignal signal; +}; + +dictionary UnderlyingSource { + UnderlyingSourceStartCallback start; + UnderlyingSourcePullCallback pull; + UnderlyingSourceCancelCallback cancel; + ReadableStreamType type; + [EnforceRange] unsigned long long autoAllocateChunkSize; +}; + +typedef (ReadableStreamDefaultController or ReadableByteStreamController) 
ReadableStreamController; + +callback UnderlyingSourceStartCallback = any (ReadableStreamController controller); +callback UnderlyingSourcePullCallback = Promise (ReadableStreamController controller); +callback UnderlyingSourceCancelCallback = Promise (optional any reason); + +enum ReadableStreamType { "bytes" }; + +interface mixin ReadableStreamGenericReader { + readonly attribute Promise closed; + + Promise cancel(optional any reason); +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableStreamDefaultReader { + constructor(ReadableStream stream); + + Promise read(); + undefined releaseLock(); +}; +ReadableStreamDefaultReader includes ReadableStreamGenericReader; + +dictionary ReadableStreamDefaultReadResult { + any value; + boolean done; +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableStreamBYOBReader { + constructor(ReadableStream stream); + + Promise read(ArrayBufferView view); + undefined releaseLock(); +}; +ReadableStreamBYOBReader includes ReadableStreamGenericReader; + +dictionary ReadableStreamBYOBReadResult { + ArrayBufferView value; + boolean done; +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableStreamDefaultController { + readonly attribute unrestricted double? desiredSize; + + undefined close(); + undefined enqueue(optional any chunk); + undefined error(optional any e); +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableByteStreamController { + readonly attribute ReadableStreamBYOBRequest? byobRequest; + readonly attribute unrestricted double? desiredSize; + + undefined close(); + undefined enqueue(ArrayBufferView chunk); + undefined error(optional any e); +}; + +[Exposed=(Window,Worker,Worklet)] +interface ReadableStreamBYOBRequest { + readonly attribute ArrayBufferView? 
view; + + undefined respond([EnforceRange] unsigned long long bytesWritten); + undefined respondWithNewView(ArrayBufferView view); +}; + +[Exposed=(Window,Worker,Worklet), Transferable] +interface WritableStream { + constructor(optional object underlyingSink, optional QueuingStrategy strategy = {}); + + readonly attribute boolean locked; + + Promise abort(optional any reason); + Promise close(); + WritableStreamDefaultWriter getWriter(); +}; + +dictionary UnderlyingSink { + UnderlyingSinkStartCallback start; + UnderlyingSinkWriteCallback write; + UnderlyingSinkCloseCallback close; + UnderlyingSinkAbortCallback abort; + any type; +}; + +callback UnderlyingSinkStartCallback = any (WritableStreamDefaultController controller); +callback UnderlyingSinkWriteCallback = Promise (any chunk, WritableStreamDefaultController controller); +callback UnderlyingSinkCloseCallback = Promise (); +callback UnderlyingSinkAbortCallback = Promise (optional any reason); + +[Exposed=(Window,Worker,Worklet)] +interface WritableStreamDefaultWriter { + constructor(WritableStream stream); + + readonly attribute Promise closed; + readonly attribute unrestricted double? 
desiredSize; + readonly attribute Promise ready; + + Promise abort(optional any reason); + Promise close(); + undefined releaseLock(); + Promise write(optional any chunk); +}; + +[Exposed=(Window,Worker,Worklet)] +interface WritableStreamDefaultController { + undefined error(optional any e); +}; + +[Exposed=(Window,Worker,Worklet), Transferable] +interface TransformStream { + constructor(optional object transformer, + optional QueuingStrategy writableStrategy = {}, + optional QueuingStrategy readableStrategy = {}); + + readonly attribute ReadableStream readable; + readonly attribute WritableStream writable; +}; + +dictionary Transformer { + TransformerStartCallback start; + TransformerTransformCallback transform; + TransformerFlushCallback flush; + any readableType; + any writableType; +}; + +callback TransformerStartCallback = any (TransformStreamDefaultController controller); +callback TransformerFlushCallback = Promise (TransformStreamDefaultController controller); +callback TransformerTransformCallback = Promise (any chunk, TransformStreamDefaultController controller); + +[Exposed=(Window,Worker,Worklet)] +interface TransformStreamDefaultController { + readonly attribute unrestricted double? 
desiredSize; + + undefined enqueue(optional any chunk); + undefined error(optional any reason); + undefined terminate(); +}; + +dictionary QueuingStrategy { + unrestricted double highWaterMark; + QueuingStrategySize size; +}; + +callback QueuingStrategySize = unrestricted double (optional any chunk); + +dictionary QueuingStrategyInit { + required unrestricted double highWaterMark; +}; + +[Exposed=(Window,Worker,Worklet)] +interface ByteLengthQueuingStrategy { + constructor(QueuingStrategyInit init); + + readonly attribute unrestricted double highWaterMark; + readonly attribute Function size; +}; + +[Exposed=(Window,Worker,Worklet)] +interface CountQueuingStrategy { + constructor(QueuingStrategyInit init); + + readonly attribute unrestricted double highWaterMark; + readonly attribute Function size; +}; + +interface mixin GenericTransformStream { + readonly attribute ReadableStream readable; + readonly attribute WritableStream writable; +}; diff --git a/test/fixtures/wpt/streams/META.yml b/test/fixtures/wpt/streams/META.yml new file mode 100644 index 00000000000000..1259a55cb5a99e --- /dev/null +++ b/test/fixtures/wpt/streams/META.yml @@ -0,0 +1,7 @@ +spec: https://streams.spec.whatwg.org/ +suggested_reviewers: + - domenic + - yutakahirano + - youennf + - wanderview + - ricea diff --git a/test/fixtures/wpt/streams/README.md b/test/fixtures/wpt/streams/README.md new file mode 100644 index 00000000000000..9ab6e1284ad50d --- /dev/null +++ b/test/fixtures/wpt/streams/README.md @@ -0,0 +1,3 @@ +# Streams Tests + +The work on the streams tests is closely tracked by the specification authors, who maintain a reference implementation intended to match the spec line-by-line while passing all of these tests. See [the whatwg/streams repository for details](https://github.com/whatwg/streams/tree/main/reference-implementation). Some tests may be in that repository while the spec sections they test are still undergoing heavy churn. 
diff --git a/test/fixtures/wpt/streams/idlharness.any.js b/test/fixtures/wpt/streams/idlharness.any.js new file mode 100644 index 00000000000000..42a17da58c5ae3 --- /dev/null +++ b/test/fixtures/wpt/streams/idlharness.any.js @@ -0,0 +1,79 @@ +// META: global=window,worker +// META: script=/resources/WebIDLParser.js +// META: script=/resources/idlharness.js +// META: timeout=long + +idl_test( + ['streams'], + ['dom'], // for AbortSignal + async idl_array => { + // Empty try/catches ensure that if something isn't implemented (e.g., readable byte streams, or writable streams) + // the harness still sets things up correctly. Note that the corresponding interface tests will still fail. + + try { + new ReadableStream({ + start(c) { + self.readableStreamDefaultController = c; + } + }); + } catch {} + + try { + new ReadableStream({ + start(c) { + self.readableByteStreamController = c; + }, + type: 'bytes' + }); + } catch {} + + try { + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + const stream = new ReadableStream({ + pull(c) { + self.readableStreamByobRequest = c.byobRequest; + resolvePullCalledPromise(); + }, + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + reader.read(new Uint8Array(1)); + await pullCalledPromise; + } catch {} + + try { + new WritableStream({ + start(c) { + self.writableStreamDefaultController = c; + } + }); + } catch {} + + try { + new TransformStream({ + start(c) { + self.transformStreamDefaultController = c; + } + }); + } catch {} + + idl_array.add_objects({ + ReadableStream: ["new ReadableStream()"], + ReadableStreamDefaultReader: ["(new ReadableStream()).getReader()"], + ReadableStreamBYOBReader: ["(new ReadableStream({ type: 'bytes' })).getReader({ mode: 'byob' })"], + ReadableStreamDefaultController: ["self.readableStreamDefaultController"], + ReadableByteStreamController: ["self.readableByteStreamController"], + 
ReadableStreamBYOBRequest: ["self.readableStreamByobRequest"], + WritableStream: ["new WritableStream()"], + WritableStreamDefaultWriter: ["(new WritableStream()).getWriter()"], + WritableStreamDefaultController: ["self.writableStreamDefaultController"], + TransformStream: ["new TransformStream()"], + TransformStreamDefaultController: ["self.transformStreamDefaultController"], + ByteLengthQueuingStrategy: ["new ByteLengthQueuingStrategy({ highWaterMark: 5 })"], + CountQueuingStrategy: ["new CountQueuingStrategy({ highWaterMark: 5 })"] + }); + } +); diff --git a/test/fixtures/wpt/streams/piping/abort.any.js b/test/fixtures/wpt/streams/piping/abort.any.js new file mode 100644 index 00000000000000..3fe029de95a1b8 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/abort.any.js @@ -0,0 +1,375 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/test-utils.js +'use strict'; + +// Tests for the use of pipeTo with AbortSignal. +// There is some extra complexity to avoid timeouts in environments where abort is not implemented. + +const error1 = new Error('error1'); +error1.name = 'error1'; +const error2 = new Error('error2'); +error2.name = 'error2'; + +const errorOnPull = { + pull(controller) { + // This will cause the test to error if pipeTo abort is not implemented. + controller.error('failed to abort'); + } +}; + +// To stop pull() being called immediately when the stream is created, we need to set highWaterMark to 0. 
+const hwm0 = { highWaterMark: 0 }; + +for (const invalidSignal of [null, 'AbortSignal', true, -1, Object.create(AbortSignal.prototype)]) { + promise_test(t => { + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = recordingWritableStream(); + return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { signal: invalidSignal }), 'pipeTo should reject') + .then(() => { + assert_equals(rs.events.length, 0, 'no ReadableStream methods should have been called'); + assert_equals(ws.events.length, 0, 'no WritableStream methods should have been called'); + }); + }, `a signal argument '${invalidSignal}' should cause pipeTo() to reject`); +} + +promise_test(t => { + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject') + .then(() => Promise.all([ + rs.getReader().closed, + promise_rejects_dom(t, 'AbortError', ws.getWriter().closed, 'writer.closed should reject') + ])) + .then(() => { + assert_equals(rs.events.length, 2, 'cancel should have been called'); + assert_equals(rs.events[0], 'cancel', 'first event should be cancel'); + assert_equals(rs.events[1].name, 'AbortError', 'the argument to cancel should be an AbortError'); + assert_equals(rs.events[1].constructor.name, 'DOMException', + 'the argument to cancel should be a DOMException'); + }); +}, 'an aborted signal should cause the writable stream to reject with an AbortError'); + +promise_test(() => { + let error; + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return rs.pipeTo(ws, { signal }) + .catch(e => { + error = e; + }) + .then(() => Promise.all([ + rs.getReader().closed, + 
ws.getWriter().closed.catch(e => { + assert_equals(e, error, 'the writable should be errored with the same object'); + }) + ])) + .then(() => { + assert_equals(rs.events.length, 2, 'cancel should have been called'); + assert_equals(rs.events[0], 'cancel', 'first event should be cancel'); + assert_equals(rs.events[1], error, 'the readable should be canceled with the same object'); + }); +}, 'all the AbortError objects should be the same object'); + +promise_test(t => { + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventCancel: true }), 'pipeTo should reject') + .then(() => assert_equals(rs.events.length, 0, 'cancel should not be called')); +}, 'preventCancel should prevent canceling the readable'); + +promise_test(t => { + const rs = new ReadableStream(errorOnPull, hwm0); + const ws = recordingWritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventAbort: true }), 'pipeTo should reject') + .then(() => { + assert_equals(ws.events.length, 0, 'writable should not have been aborted'); + return ws.getWriter().ready; + }); +}, 'preventAbort should prevent aborting the readable'); + +promise_test(t => { + const rs = recordingReadableStream(errorOnPull, hwm0); + const ws = recordingWritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal, preventCancel: true, preventAbort: true }), + 'pipeTo should reject') + .then(() => { + assert_equals(rs.events.length, 0, 'cancel should not be called'); + assert_equals(ws.events.length, 0, 'writable should 
not have been aborted'); + return ws.getWriter().ready; + }); +}, 'preventCancel and preventAbort should prevent canceling the readable and aborting the readable'); + +promise_test(t => { + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + const ws = recordingWritableStream({ + write() { + abortController.abort(); + } + }); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject') + .then(() => { + assert_equals(ws.events.length, 4, 'only chunk "a" should have been written'); + assert_array_equals(ws.events.slice(0, 3), ['write', 'a', 'abort'], 'events should match'); + assert_equals(ws.events[3].name, 'AbortError', 'abort reason should be an AbortError'); + }); +}, 'abort should prevent further reads'); + +promise_test(t => { + let readController; + const rs = new ReadableStream({ + start(c) { + readController = c; + c.enqueue('a'); + c.enqueue('b'); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + let resolveWrite; + const writePromise = new Promise(resolve => { + resolveWrite = resolve; + }); + const ws = recordingWritableStream({ + write() { + return writePromise; + } + }, new CountQueuingStrategy({ highWaterMark: Infinity })); + const pipeToPromise = rs.pipeTo(ws, { signal }); + return delay(0).then(() => { + abortController.abort(); + readController.close(); // Make sure the test terminates when signal is not implemented. 
+ resolveWrite(); + return promise_rejects_dom(t, 'AbortError', pipeToPromise, 'pipeTo should reject'); + }).then(() => { + assert_equals(ws.events.length, 6, 'chunks "a" and "b" should have been written'); + assert_array_equals(ws.events.slice(0, 5), ['write', 'a', 'write', 'b', 'abort'], 'events should match'); + assert_equals(ws.events[5].name, 'AbortError', 'abort reason should be an AbortError'); + }); +}, 'all pending writes should complete on abort'); + +promise_test(t => { + const rs = new ReadableStream({ + pull(controller) { + controller.error('failed to abort'); + }, + cancel() { + return Promise.reject(error1); + } + }, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { signal }), 'pipeTo should reject'); +}, 'a rejection from underlyingSource.cancel() should be returned by pipeTo()'); + +promise_test(t => { + const rs = new ReadableStream(errorOnPull, hwm0); + const ws = new WritableStream({ + abort() { + return Promise.reject(error1); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { signal }), 'pipeTo should reject'); +}, 'a rejection from underlyingSink.abort() should be returned by pipeTo()'); + +promise_test(t => { + const events = []; + const rs = new ReadableStream({ + pull(controller) { + controller.error('failed to abort'); + }, + cancel() { + events.push('cancel'); + return Promise.reject(error1); + } + }, hwm0); + const ws = new WritableStream({ + abort() { + events.push('abort'); + return Promise.reject(error2); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_exactly(t, error2, rs.pipeTo(ws, { signal }), 'pipeTo should reject') + .then(() 
=> assert_array_equals(events, ['abort', 'cancel'], 'abort() should be called before cancel()')); +}, 'a rejection from underlyingSink.abort() should be preferred to one from underlyingSource.cancel()'); + +promise_test(t => { + const rs = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject'); +}, 'abort signal takes priority over closed readable'); + +promise_test(t => { + const rs = new ReadableStream({ + start(controller) { + controller.error(error1); + } + }); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject'); +}, 'abort signal takes priority over errored readable'); + +promise_test(t => { + const rs = new ReadableStream({ + pull(controller) { + controller.error('failed to abort'); + } + }, hwm0); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + const writer = ws.getWriter(); + return writer.close().then(() => { + writer.releaseLock(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal }), 'pipeTo should reject'); + }); +}, 'abort signal takes priority over closed writable'); + +promise_test(t => { + const rs = new ReadableStream({ + pull(controller) { + controller.error('failed to abort'); + } + }, hwm0); + const ws = new WritableStream({ + start(controller) { + controller.error(error1); + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + abortController.abort(); + return promise_rejects_dom(t, 'AbortError', rs.pipeTo(ws, { signal 
}), 'pipeTo should reject'); +}, 'abort signal takes priority over errored writable'); + +promise_test(() => { + let readController; + const rs = new ReadableStream({ + start(c) { + readController = c; + } + }); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + const pipeToPromise = rs.pipeTo(ws, { signal, preventClose: true }); + readController.close(); + return Promise.resolve().then(() => { + abortController.abort(); + return pipeToPromise; + }).then(() => ws.getWriter().write('this should succeed')); +}, 'abort should do nothing after the readable is closed'); + +promise_test(t => { + let readController; + const rs = new ReadableStream({ + start(c) { + readController = c; + } + }); + const ws = new WritableStream(); + const abortController = new AbortController(); + const signal = abortController.signal; + const pipeToPromise = rs.pipeTo(ws, { signal, preventAbort: true }); + readController.error(error1); + return Promise.resolve().then(() => { + abortController.abort(); + return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject'); + }).then(() => ws.getWriter().write('this should succeed')); +}, 'abort should do nothing after the readable is errored'); + +promise_test(t => { + let readController; + const rs = new ReadableStream({ + start(c) { + readController = c; + } + }); + let resolveWrite; + const writePromise = new Promise(resolve => { + resolveWrite = resolve; + }); + const ws = new WritableStream({ + write() { + readController.error(error1); + return writePromise; + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + const pipeToPromise = rs.pipeTo(ws, { signal, preventAbort: true }); + readController.enqueue('a'); + return delay(0).then(() => { + abortController.abort(); + resolveWrite(); + return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject'); + }).then(() => 
ws.getWriter().write('this should succeed')); +}, 'abort should do nothing after the readable is errored, even with pending writes'); + +promise_test(t => { + const rs = recordingReadableStream({ + pull(controller) { + return delay(0).then(() => controller.close()); + } + }); + let writeController; + const ws = new WritableStream({ + start(c) { + writeController = c; + } + }); + const abortController = new AbortController(); + const signal = abortController.signal; + const pipeToPromise = rs.pipeTo(ws, { signal, preventCancel: true }); + return Promise.resolve().then(() => { + writeController.error(error1); + return Promise.resolve(); + }).then(() => { + abortController.abort(); + return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject'); + }).then(() => { + assert_array_equals(rs.events, ['pull'], 'cancel should not have been called'); + }); +}, 'abort should do nothing after the writable is errored'); diff --git a/test/fixtures/wpt/streams/piping/close-propagation-backward.any.js b/test/fixtures/wpt/streams/piping/close-propagation-backward.any.js new file mode 100644 index 00000000000000..bd1e9cb92657b1 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/close-propagation-backward.any.js @@ -0,0 +1,153 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +promise_test(() => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return rs.pipeTo(ws).then( + () => assert_unreached('the promise must not fulfill'), + err => { + assert_equals(err.name, 'TypeError', 'the promise must reject with a TypeError'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + } + ); + +}, 
'Closing must be propagated backward: starts closed; preventCancel omitted; fulfilled cancel promise'); + +promise_test(t => { + + // Our recording streams do not deal well with errors generated by the system, so give them some help + let recordedError; + const rs = recordingReadableStream({ + cancel(cancelErr) { + recordedError = cancelErr; + throw error1; + } + }); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_equals(recordedError.name, 'TypeError', 'the cancel reason must be a TypeError'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', recordedError]); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated backward: starts closed; preventCancel omitted; rejected cancel promise'); + +for (const falsy of [undefined, null, false, +0, -0, NaN, '']) { + const stringVersion = Object.is(falsy, -0) ? 
'-0' : String(falsy); + + promise_test(() => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return rs.pipeTo(ws, { preventCancel: falsy }).then( + () => assert_unreached('the promise must not fulfill'), + err => { + assert_equals(err.name, 'TypeError', 'the promise must reject with a TypeError'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + } + ); + + }, `Closing must be propagated backward: starts closed; preventCancel = ${stringVersion} (falsy); fulfilled cancel ` + + `promise`); +} + +for (const truthy of [true, 'a', 1, Symbol(), { }]) { + promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { preventCancel: truthy })).then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return ws.getWriter().closed; + }); + + }, `Closing must be propagated backward: starts closed; preventCancel = ${String(truthy)} (truthy)`); +} + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { preventCancel: true, preventAbort: true })) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return ws.getWriter().closed; + }); + +}, 'Closing must be propagated backward: starts closed; preventCancel = true, preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + const writer 
= ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, + rs.pipeTo(ws, { preventCancel: true, preventAbort: true, preventClose: true })) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return ws.getWriter().closed; + }); + +}, 'Closing must be propagated backward: starts closed; preventCancel = true, preventAbort = true, preventClose ' + + '= true'); diff --git a/test/fixtures/wpt/streams/piping/close-propagation-forward.any.js b/test/fixtures/wpt/streams/piping/close-propagation-forward.any.js new file mode 100644 index 00000000000000..fc3282eea74b40 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/close-propagation-forward.any.js @@ -0,0 +1,589 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated forward: starts closed; preventClose omitted; fulfilled close promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + 
rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed) + ]); + }); + +}, 'Closing must be propagated forward: starts closed; preventClose omitted; rejected close promise'); + +for (const falsy of [undefined, null, false, +0, -0, NaN, '']) { + const stringVersion = Object.is(falsy, -0) ? '-0' : String(falsy); + + promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws, { preventClose: falsy }).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + + }, `Closing must be propagated forward: starts closed; preventClose = ${stringVersion} (falsy); fulfilled close ` + + `promise`); +} + +for (const truthy of [true, 'a', 1, Symbol(), { }]) { + promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws, { preventClose: truthy }).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return rs.getReader().closed; + }); + + }, `Closing must be propagated forward: starts closed; preventClose = ${String(truthy)} (truthy)`); +} + +promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws, { preventClose: true, preventAbort: true }).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + 
+ return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: starts closed; preventClose = true, preventAbort = true'); + +promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.close(); + } + }); + + const ws = recordingWritableStream(); + + return rs.pipeTo(ws, { preventClose: true, preventAbort: true, preventCancel: true }).then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: starts closed; preventClose = true, preventAbort = true, preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated forward: becomes closed asynchronously; preventClose omitted; fulfilled close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed) + ]); + }); + +}, 'Closing must be propagated forward: becomes 
closed asynchronously; preventClose omitted; rejected close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws, { preventClose: true }); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + + return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: becomes closed asynchronously; preventClose = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = rs.pipeTo(ws); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' + + 'preventClose omitted; fulfilled close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed) + ]); + }); + +}, 
'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' + + 'preventClose omitted; rejected close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = rs.pipeTo(ws, { preventClose: true }); + + t.step_timeout(() => rs.controller.close()); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + + return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: becomes closed asynchronously; dest never desires chunks; ' + + 'preventClose = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.close()); + }, 10); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello', 'close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose omitted; fulfilled close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.close()); + }, 10); + + return pipePromise.then(() => { + 
assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello', 'close']); + + return Promise.all([ + rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed) + ]); + }); + +}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose omitted; rejected close promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws, { preventClose: true }); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.close()); + }, 10); + + return pipePromise.then(value => { + assert_equals(value, undefined, 'the promise must fulfill with undefined'); + }) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + + return rs.getReader().closed; + }); + +}, 'Closing must be propagated forward: becomes closed after one chunk; preventClose = true'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }); + + let pipeComplete = false; + const pipePromise = rs.pipeTo(ws).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.close(); + + // Flush async events and verify that no shutdown occurs. 
+ return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a']); // no 'close' + assert_equals(pipeComplete, false, 'the pipe must not be complete'); + + resolveWritePromise(); + + return pipePromise.then(() => { + assert_array_equals(ws.events, ['write', 'a', 'close']); + }); + }); + +}, 'Closing must be propagated forward: shutdown must not occur until the final write completes'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }); + + let pipeComplete = false; + const pipePromise = rs.pipeTo(ws, { preventClose: true }).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.close(); + + // Flush async events and verify that no shutdown occurs. + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the chunk must have been written, but close must not have happened'); + assert_equals(pipeComplete, false, 'the pipe must not be complete'); + + resolveWritePromise(); + + return pipePromise; + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the chunk must have been written, but close must not have happened'); + }); + +}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; preventClose = true'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }, new CountQueuingStrategy({ highWaterMark: 2 })); + + let pipeComplete = false; + const pipePromise = rs.pipeTo(ws).then(() => { + pipeComplete = 
true; + }); + + rs.controller.enqueue('a'); + rs.controller.enqueue('b'); + + return writeCalledPromise.then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the first chunk must have been written, but close must not have happened yet'); + assert_false(pipeComplete, 'the pipe should not complete while the first write is pending'); + + rs.controller.close(); + resolveWritePromise(); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'the second chunk must have been written, but close must not have happened yet'); + assert_false(pipeComplete, 'the pipe should not complete while the second write is pending'); + + resolveWritePromise(); + return pipePromise; + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close'], + 'all chunks must have been written and close must have happened'); + }); + +}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; becomes closed after first write'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }, new CountQueuingStrategy({ highWaterMark: 2 })); + + let pipeComplete = false; + const pipePromise = rs.pipeTo(ws, { preventClose: true }).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.enqueue('b'); + + return writeCalledPromise.then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the first chunk must have been written, but close must not have happened'); + assert_false(pipeComplete, 'the pipe should not complete while the first write is pending'); + + 
rs.controller.close(); + resolveWritePromise(); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'the second chunk must have been written, but close must not have happened'); + assert_false(pipeComplete, 'the pipe should not complete while the second write is pending'); + + resolveWritePromise(); + return pipePromise; + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'all chunks must have been written, but close must not have happened'); + }); + +}, 'Closing must be propagated forward: shutdown must not occur until the final write completes; becomes closed after first write; preventClose = true'); + + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + let rejectWritePromise; + const ws = recordingWritableStream({ + write() { + return new Promise((resolve, reject) => { + rejectWritePromise = reject; + }); + } + }, { highWaterMark: 3 }); + const pipeToPromise = rs.pipeTo(ws); + return delay(0).then(() => { + rejectWritePromise(error1); + return promise_rejects_exactly(t, error1, pipeToPromise, 'pipeTo should reject'); + }).then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['write', 'a']); + + return Promise.all([ + rs.getReader().closed, + promise_rejects_exactly(t, error1, ws.getWriter().closed, 'ws should be errored') + ]); + }); +}, 'Closing must be propagated forward: erroring the writable while flushing pending writes should error pipeTo'); diff --git a/test/fixtures/wpt/streams/piping/error-propagation-backward.any.js b/test/fixtures/wpt/streams/piping/error-propagation-backward.any.js new file mode 100644 index 00000000000000..6dc203066e3d7e --- /dev/null +++ b/test/fixtures/wpt/streams/piping/error-propagation-backward.any.js @@ -0,0 +1,630 @@ +// META: global=window,worker,jsshell +// META: 
script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +const error2 = new Error('error2!'); +error2.name = 'error2'; + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + start() { + return Promise.reject(error1); + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: starts errored; preventCancel omitted; fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + +}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel omitted; ' + + 'fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, 
error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + +}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel omitted; rejected ' + + 'cancel promise'); + +for (const falsy of [undefined, null, false, +0, -0, NaN, '']) { + const stringVersion = Object.is(falsy, -0) ? '-0' : String(falsy); + + promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: falsy }), + 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + + }, `Errors must be propagated backward: becomes errored before piping due to write; preventCancel = ` + + `${stringVersion} (falsy); fulfilled cancel promise`); +} + +for (const truthy of [true, 'a', 1, Symbol(), { }]) { + promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, 
writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: truthy }), + 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + + }, `Errors must be propagated backward: becomes errored before piping due to write; preventCancel = ` + + `${String(truthy)} (truthy)`); +} + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true, preventAbort: true }), + 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + +}, 'Errors must be propagated backward: becomes errored before piping due to write, preventCancel = true; ' + + 'preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + write() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('Hello'), 'writer.write() must reject with the write error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'writer.closed must reject with the write error')) + .then(() => { + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true, preventAbort: true, preventClose: 
true }), + 'pipeTo must reject with the write error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + }); + +}, 'Errors must be propagated backward: becomes errored before piping due to write; preventCancel = true, ' + + 'preventAbort = true, preventClose = true'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('Hello'); + } + }); + + const ws = recordingWritableStream({ + write() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel omitted; fulfilled ' + + 'cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('Hello'); + }, + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream({ + write() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel omitted; rejected ' + + 'cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('Hello'); + } + }); + + const ws = recordingWritableStream({ + write() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, 
['write', 'Hello']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write; preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + } + }); + + const ws = recordingWritableStream({ + write() { + if (ws.events.length > 2) { + return delay(0).then(() => { + throw error1; + }); + } + return undefined; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = ' + + 'false; fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + }, + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream({ + write() { + if (ws.events.length > 2) { + return delay(0).then(() => { + throw error1; + }); + } + return undefined; + } + }); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = ' + + 'false; rejected cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + } + }); + + const ws = recordingWritableStream({ + write() { + if (ws.events.length > 2) { + return delay(0).then(() => { + throw error1; + }); + } 
+ return undefined; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b']); + }); + +}, 'Errors must be propagated backward: becomes errored during piping due to write, but async; preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; preventCancel omitted; fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; preventCancel omitted; rejected cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + 
}); + +}, 'Errors must be propagated backward: becomes errored after piping; preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + controller.close(); + } + }); + + const ws = recordingWritableStream({ + write(chunk) { + if (chunk === 'c') { + return Promise.reject(error1); + } + return undefined; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error').then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c']); + }); + +}, 'Errors must be propagated backward: becomes errored after piping due to last write; source is closed; ' + + 'preventCancel omitted (but cancel is never called)'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + controller.close(); + } + }); + + const ws = recordingWritableStream({ + write(chunk) { + if (chunk === 'c') { + return Promise.reject(error1); + } + return undefined; + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c']); + }); + +}, 'Errors must be propagated backward: becomes errored after piping due to last write; source is closed; ' + + 'preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return 
pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' + + 'false; fulfilled cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' + + 'false; rejected cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => ws.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated backward: becomes errored after piping; dest never desires chunks; preventCancel = ' + + 'true'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + ws.abort(error1); + + return rs.pipeTo(ws).then( + () => assert_unreached('the promise must not fulfill'), + err => { + assert_equals(err, error1, 'the promise must reject with error1'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]); + 
assert_array_equals(ws.events, ['abort', error1]); + } + ); + +}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel omitted; fulfilled ' + + 'cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + cancel() { + throw error2; + } + }); + + const ws = recordingWritableStream(); + + ws.abort(error1); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the cancel error') + .then(() => { + return ws.getWriter().closed.then( + () => assert_unreached('the promise must not fulfill'), + err => { + assert_equals(err, error1, 'the promise must reject with error1'); + + assert_array_equals(rs.eventsWithoutPulls, ['cancel', err]); + assert_array_equals(ws.events, ['abort', error1]); + } + ); + }); + +}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel omitted; rejected ' + + 'cancel promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + ws.abort(error1); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventCancel: true })).then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated backward: becomes errored before piping via abort; preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + return flushAsyncEvents(); + } + }); + + const pipePromise = rs.pipeTo(ws); + + rs.controller.enqueue('a'); + + return writeCalledPromise.then(() => { + ws.controller.error(error1); + + return promise_rejects_exactly(t, error1, pipePromise); + }).then(() => { + assert_array_equals(rs.eventsWithoutPulls, ['cancel', error1]); + 
assert_array_equals(ws.events, ['write', 'a']); + }); + +}, 'Errors must be propagated backward: erroring via the controller errors once pending write completes'); diff --git a/test/fixtures/wpt/streams/piping/error-propagation-forward.any.js b/test/fixtures/wpt/streams/piping/error-propagation-forward.any.js new file mode 100644 index 00000000000000..f35ec665eec22f --- /dev/null +++ b/test/fixtures/wpt/streams/piping/error-propagation-forward.any.js @@ -0,0 +1,569 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +const error2 = new Error('error2!'); +error2.name = 'error2'; + +promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: starts errored; preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }); + + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: starts errored; preventAbort = false; rejected abort promise'); + +for (const falsy of [undefined, null, false, +0, -0, NaN, '']) { + const stringVersion = Object.is(falsy, -0) ? 
'-0' : String(falsy); + + promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: falsy }), 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + + }, `Errors must be propagated forward: starts errored; preventAbort = ${stringVersion} (falsy); fulfilled abort ` + + `promise`); +} + +for (const truthy of [true, 'a', 1, Symbol(), { }]) { + promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: truthy }), + 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + }); + + }, `Errors must be propagated forward: starts errored; preventAbort = ${String(truthy)} (truthy)`); +} + + +promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true, preventCancel: true }), + 'pipeTo must reject with the same error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: starts errored; preventAbort = true, preventCancel = true'); + +promise_test(t => { + + const rs = recordingReadableStream({ + start() { + return Promise.reject(error1); + } + }); + + const ws = recordingWritableStream(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true, preventCancel: true, preventClose: true }), + 'pipeTo must reject with the same error') + .then(() => { + 
assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: starts errored; preventAbort = true, preventCancel = true, preventClose = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = false; rejected abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new 
CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' + + 'preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' + + 'preventAbort = false; rejected abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => rs.controller.error(error1), 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: becomes errored while empty; dest never desires chunks; ' + + 'preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo 
must reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello', 'abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello', 'abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = false; rejected abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'Hello']); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must 
reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' + + 'preventAbort = false; fulfilled abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream({ + abort() { + throw error2; + } + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the abort error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['abort', error1]); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' + + 'preventAbort = false; rejected abort promise'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the same error'); + + t.step_timeout(() => { + rs.controller.enqueue('Hello'); + t.step_timeout(() => rs.controller.error(error1), 10); + }, 10); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }); + +}, 'Errors must be propagated forward: becomes errored after one chunk; dest never desires chunks; ' + + 'preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const 
writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }); + + let pipeComplete = false; + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws)).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + + return writeCalledPromise.then(() => { + rs.controller.error(error1); + + // Flush async events and verify that no shutdown occurs. + return flushAsyncEvents(); + }).then(() => { + assert_array_equals(ws.events, ['write', 'a']); // no 'abort' + assert_equals(pipeComplete, false, 'the pipe must not be complete'); + + resolveWritePromise(); + + return pipePromise.then(() => { + assert_array_equals(ws.events, ['write', 'a', 'abort', error1]); + }); + }); + +}, 'Errors must be propagated forward: shutdown must not occur until the final write completes'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }); + + let pipeComplete = false; + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true })).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + + return writeCalledPromise.then(() => { + rs.controller.error(error1); + + // Flush async events and verify that no shutdown occurs. 
+ return flushAsyncEvents(); + }).then(() => { + assert_array_equals(ws.events, ['write', 'a']); // no 'abort' + assert_equals(pipeComplete, false, 'the pipe must not be complete'); + + resolveWritePromise(); + return pipePromise; + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a']); // no 'abort' + }); + +}, 'Errors must be propagated forward: shutdown must not occur until the final write completes; preventAbort = true'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }, new CountQueuingStrategy({ highWaterMark: 2 })); + + let pipeComplete = false; + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws)).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.enqueue('b'); + + return writeCalledPromise.then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the first chunk must have been written, but abort must not have happened yet'); + assert_false(pipeComplete, 'the pipe should not complete while the first write is pending'); + + rs.controller.error(error1); + resolveWritePromise(); + return flushAsyncEvents(); + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'the second chunk must have been written, but abort must not have happened yet'); + assert_false(pipeComplete, 'the pipe should not complete while the second write is pending'); + + resolveWritePromise(); + return pipePromise; + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'abort', error1], + 'all chunks must have been written and abort must have happened'); + }); + +}, 'Errors must be 
propagated forward: shutdown must not occur until the final write completes; becomes errored after first write'); + +promise_test(t => { + + const rs = recordingReadableStream(); + + let resolveWriteCalled; + const writeCalledPromise = new Promise(resolve => { + resolveWriteCalled = resolve; + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + resolveWriteCalled(); + + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + }, new CountQueuingStrategy({ highWaterMark: 2 })); + + let pipeComplete = false; + const pipePromise = promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true })).then(() => { + pipeComplete = true; + }); + + rs.controller.enqueue('a'); + rs.controller.enqueue('b'); + + return writeCalledPromise.then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a'], + 'the first chunk must have been written, but abort must not have happened'); + assert_false(pipeComplete, 'the pipe should not complete while the first write is pending'); + + rs.controller.error(error1); + resolveWritePromise(); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'the second chunk must have been written, but abort must not have happened'); + assert_false(pipeComplete, 'the pipe should not complete while the second write is pending'); + + resolveWritePromise(); + return pipePromise; + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'all chunks must have been written, but abort must not have happened'); + }); + +}, 'Errors must be propagated forward: shutdown must not occur until the final write completes; becomes errored after first write; preventAbort = true'); diff --git a/test/fixtures/wpt/streams/piping/flow-control.any.js b/test/fixtures/wpt/streams/piping/flow-control.any.js new file mode 100644 index 00000000000000..db83c011f4a718 --- /dev/null 
+++ b/test/fixtures/wpt/streams/piping/flow-control.any.js @@ -0,0 +1,297 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +promise_test(t => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const ws = recordingWritableStream(undefined, new CountQueuingStrategy({ highWaterMark: 0 })); + + const pipePromise = rs.pipeTo(ws, { preventCancel: true }); + + // Wait and make sure it doesn't do any reading. + return flushAsyncEvents().then(() => { + ws.controller.error(error1); + }) + .then(() => promise_rejects_exactly(t, error1, pipePromise, 'pipeTo must reject with the same error')) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, []); + }) + .then(() => readableStreamToArray(rs)) + .then(chunksNotPreviouslyRead => { + assert_array_equals(chunksNotPreviouslyRead, ['a', 'b']); + }); + +}, 'Piping from a non-empty ReadableStream into a WritableStream that does not desire chunks'); + +promise_test(() => { + + const rs = recordingReadableStream({ + start(controller) { + controller.enqueue('b'); + controller.close(); + } + }); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + if (!resolveWritePromise) { + // first write + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + return undefined; + } + }); + + const writer = ws.getWriter(); + const firstWritePromise = writer.write('a'); + assert_equals(writer.desiredSize, 0, 'after writing the writer\'s desiredSize must be 0'); + writer.releaseLock(); + + // firstWritePromise won't settle until we call resolveWritePromise. 
+ + const pipePromise = rs.pipeTo(ws); + + return flushAsyncEvents().then(() => resolveWritePromise()) + .then(() => Promise.all([firstWritePromise, pipePromise])) + .then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close']); + }); + +}, 'Piping from a non-empty ReadableStream into a WritableStream that does not desire chunks, but then does'); + +promise_test(() => { + + const rs = recordingReadableStream(); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + if (!resolveWritePromise) { + // first write + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + return undefined; + } + }); + + const writer = ws.getWriter(); + writer.write('a'); + + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a']); + assert_equals(writer.desiredSize, 0, 'after writing the writer\'s desiredSize must be 0'); + writer.releaseLock(); + + const pipePromise = rs.pipeTo(ws); + + rs.controller.enqueue('b'); + resolveWritePromise(); + rs.controller.close(); + + return pipePromise.then(() => { + assert_array_equals(rs.eventsWithoutPulls, []); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'close']); + }); + }); + +}, 'Piping from an empty ReadableStream into a WritableStream that does not desire chunks, but then the readable ' + + 'stream becomes non-empty and the writable stream starts desiring chunks'); + +promise_test(() => { + const unreadChunks = ['b', 'c', 'd']; + + const rs = recordingReadableStream({ + pull(controller) { + controller.enqueue(unreadChunks.shift()); + if (unreadChunks.length === 0) { + controller.close(); + } + } + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + let resolveWritePromise; + const ws = recordingWritableStream({ + write() { + if (!resolveWritePromise) { + // first write + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + } + return undefined; + } + 
}, new CountQueuingStrategy({ highWaterMark: 3 })); + + const writer = ws.getWriter(); + const firstWritePromise = writer.write('a'); + assert_equals(writer.desiredSize, 2, 'after writing the writer\'s desiredSize must be 2'); + writer.releaseLock(); + + // firstWritePromise won't settle until we call resolveWritePromise. + + const pipePromise = rs.pipeTo(ws); + + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a']); + assert_equals(unreadChunks.length, 1, 'chunks should continue to be enqueued until the HWM is reached'); + }).then(() => resolveWritePromise()) + .then(() => Promise.all([firstWritePromise, pipePromise])) + .then(() => { + assert_array_equals(rs.events, ['pull', 'pull', 'pull']); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b','write', 'c','write', 'd', 'close']); + }); + +}, 'Piping from a ReadableStream to a WritableStream that desires more chunks before finishing with previous ones'); + +class StepTracker { + constructor() { + this.waiters = []; + this.wakers = []; + } + + // Returns promise which resolves when step `n` is reached. Also schedules step n + 1 to happen shortly after the + // promise is resolved. 
+ waitThenAdvance(n) { + if (this.waiters[n] === undefined) { + this.waiters[n] = new Promise(resolve => { + this.wakers[n] = resolve; + }); + this.waiters[n] + .then(() => flushAsyncEvents()) + .then(() => { + if (this.wakers[n + 1] !== undefined) { + this.wakers[n + 1](); + } + }); + } + if (n == 0) { + this.wakers[0](); + } + return this.waiters[n]; + } +} + +promise_test(() => { + const steps = new StepTracker(); + const desiredSizes = []; + const rs = recordingReadableStream({ + start(controller) { + steps.waitThenAdvance(1).then(() => enqueue('a')); + steps.waitThenAdvance(3).then(() => enqueue('b')); + steps.waitThenAdvance(5).then(() => enqueue('c')); + steps.waitThenAdvance(7).then(() => enqueue('d')); + steps.waitThenAdvance(11).then(() => controller.close()); + + function enqueue(chunk) { + controller.enqueue(chunk); + desiredSizes.push(controller.desiredSize); + } + } + }); + + const chunksFinishedWriting = []; + const writableStartPromise = Promise.resolve(); + let writeCalled = false; + const ws = recordingWritableStream({ + start() { + return writableStartPromise; + }, + write(chunk) { + const waitForStep = writeCalled ? 12 : 9; + writeCalled = true; + return steps.waitThenAdvance(waitForStep).then(() => { + chunksFinishedWriting.push(chunk); + }); + } + }); + + return writableStartPromise.then(() => { + const pipePromise = rs.pipeTo(ws); + steps.waitThenAdvance(0); + + return Promise.all([ + steps.waitThenAdvance(2).then(() => { + assert_array_equals(chunksFinishedWriting, [], 'at step 2, zero chunks must have finished writing'); + assert_array_equals(ws.events, ['write', 'a'], 'at step 2, one chunk must have been written'); + + // When 'a' (the very first chunk) was enqueued, it was immediately used to fulfill the outstanding read request + // promise, leaving the queue empty. 
+ assert_array_equals(desiredSizes, [1], + 'at step 2, the desiredSize at the last enqueue (step 1) must have been 1'); + assert_equals(rs.controller.desiredSize, 1, 'at step 2, the current desiredSize must be 1'); + }), + + steps.waitThenAdvance(4).then(() => { + assert_array_equals(chunksFinishedWriting, [], 'at step 4, zero chunks must have finished writing'); + assert_array_equals(ws.events, ['write', 'a'], 'at step 4, one chunk must have been written'); + + // When 'b' was enqueued at step 3, the queue was also empty, since immediately after enqueuing 'a' at + // step 1, it was dequeued in order to fulfill the read() call that was made at step 0. Thus the queue + // had size 1 (thus desiredSize of 0). + assert_array_equals(desiredSizes, [1, 0], + 'at step 4, the desiredSize at the last enqueue (step 3) must have been 0'); + assert_equals(rs.controller.desiredSize, 0, 'at step 4, the current desiredSize must be 0'); + }), + + steps.waitThenAdvance(6).then(() => { + assert_array_equals(chunksFinishedWriting, [], 'at step 6, zero chunks must have finished writing'); + assert_array_equals(ws.events, ['write', 'a'], 'at step 6, one chunk must have been written'); + + // When 'c' was enqueued at step 5, the queue was not empty; it had 'b' in it, since 'b' will not be read until + // the first write completes at step 9. Thus, the queue size is 2 after enqueuing 'c', giving a desiredSize of + // -1. 
+ assert_array_equals(desiredSizes, [1, 0, -1], + 'at step 6, the desiredSize at the last enqueue (step 5) must have been -1'); + assert_equals(rs.controller.desiredSize, -1, 'at step 6, the current desiredSize must be -1'); + }), + + steps.waitThenAdvance(8).then(() => { + assert_array_equals(chunksFinishedWriting, [], 'at step 8, zero chunks must have finished writing'); + assert_array_equals(ws.events, ['write', 'a'], 'at step 8, one chunk must have been written'); + + // When 'd' was enqueued at step 7, the situation is the same as before, leading to a queue containing 'b', 'c', + // and 'd'. + assert_array_equals(desiredSizes, [1, 0, -1, -2], + 'at step 8, the desiredSize at the last enqueue (step 7) must have been -2'); + assert_equals(rs.controller.desiredSize, -2, 'at step 8, the current desiredSize must be -2'); + }), + + steps.waitThenAdvance(10).then(() => { + assert_array_equals(chunksFinishedWriting, ['a'], 'at step 10, one chunk must have finished writing'); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b'], + 'at step 10, two chunks must have been written'); + + assert_equals(rs.controller.desiredSize, -1, 'at step 10, the current desiredSize must be -1'); + }), + + pipePromise.then(() => { + assert_array_equals(desiredSizes, [1, 0, -1, -2], 'backpressure must have been exerted at the source'); + assert_array_equals(chunksFinishedWriting, ['a', 'b', 'c', 'd'], 'all chunks finished writing'); + + assert_array_equals(rs.eventsWithoutPulls, [], 'nothing unexpected should happen to the ReadableStream'); + assert_array_equals(ws.events, ['write', 'a', 'write', 'b', 'write', 'c', 'write', 'd', 'close'], + 'all chunks were written (and the WritableStream closed)'); + }) + ]); + }); +}, 'Piping to a WritableStream that does not consume the writes fast enough exerts backpressure on the ReadableStream'); diff --git a/test/fixtures/wpt/streams/piping/general.any.js b/test/fixtures/wpt/streams/piping/general.any.js new file mode 100644 index 
00000000000000..2e02dfad78a0fa --- /dev/null +++ b/test/fixtures/wpt/streams/piping/general.any.js @@ -0,0 +1,211 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +test(() => { + + const rs = new ReadableStream(); + const ws = new WritableStream(); + + assert_false(rs.locked, 'sanity check: the ReadableStream must not start locked'); + assert_false(ws.locked, 'sanity check: the WritableStream must not start locked'); + + rs.pipeTo(ws); + + assert_true(rs.locked, 'the ReadableStream must become locked'); + assert_true(ws.locked, 'the WritableStream must become locked'); + +}, 'Piping must lock both the ReadableStream and WritableStream'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + const ws = new WritableStream(); + + return rs.pipeTo(ws).then(() => { + assert_false(rs.locked, 'the ReadableStream must become unlocked'); + assert_false(ws.locked, 'the WritableStream must become unlocked'); + }); + +}, 'Piping finishing must unlock both the ReadableStream and WritableStream'); + +promise_test(t => { + + const fakeRS = Object.create(ReadableStream.prototype); + const ws = new WritableStream(); + + return methodRejects(t, ReadableStream.prototype, 'pipeTo', fakeRS, [ws]); + +}, 'pipeTo must check the brand of its ReadableStream this value'); + +promise_test(t => { + + const rs = new ReadableStream(); + const fakeWS = Object.create(WritableStream.prototype); + + return methodRejects(t, ReadableStream.prototype, 'pipeTo', rs, [fakeWS]); + +}, 'pipeTo must check the brand of its WritableStream argument'); + +promise_test(t => { + + const rs = new ReadableStream(); + const ws = new WritableStream(); + + rs.getReader(); + + assert_true(rs.locked, 'sanity check: the ReadableStream starts locked'); + assert_false(ws.locked, 'sanity check: the WritableStream does not start locked'); + + return 
promise_rejects_js(t, TypeError, rs.pipeTo(ws)).then(() => { + assert_false(ws.locked, 'the WritableStream must still be unlocked'); + }); + +}, 'pipeTo must fail if the ReadableStream is locked, and not lock the WritableStream'); + +promise_test(t => { + + const rs = new ReadableStream(); + const ws = new WritableStream(); + + ws.getWriter(); + + assert_false(rs.locked, 'sanity check: the ReadableStream does not start locked'); + assert_true(ws.locked, 'sanity check: the WritableStream starts locked'); + + return promise_rejects_js(t, TypeError, rs.pipeTo(ws)).then(() => { + assert_false(rs.locked, 'the ReadableStream must still be unlocked'); + }); + +}, 'pipeTo must fail if the WritableStream is locked, and not lock the ReadableStream'); + +promise_test(() => { + + const CHUNKS = 10; + + const rs = new ReadableStream({ + start(c) { + for (let i = 0; i < CHUNKS; ++i) { + c.enqueue(i); + } + c.close(); + } + }); + + const written = []; + const ws = new WritableStream({ + write(chunk) { + written.push(chunk); + }, + close() { + written.push('closed'); + } + }, new CountQueuingStrategy({ highWaterMark: CHUNKS })); + + return rs.pipeTo(ws).then(() => { + const targetValues = []; + for (let i = 0; i < CHUNKS; ++i) { + targetValues.push(i); + } + targetValues.push('closed'); + + assert_array_equals(written, targetValues, 'the correct values must be written'); + + // Ensure both readable and writable are closed by the time the pipe finishes. + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + + // NOTE: no requirement on *when* the pipe finishes; that is left to implementations. 
+ +}, 'Piping from a ReadableStream from which lots of chunks are synchronously readable'); + +promise_test(t => { + + let controller; + const rs = recordingReadableStream({ + start(c) { + controller = c; + } + }); + + const ws = recordingWritableStream(); + + const pipePromise = rs.pipeTo(ws).then(() => { + assert_array_equals(ws.events, ['write', 'Hello', 'close']); + }); + + t.step_timeout(() => { + controller.enqueue('Hello'); + t.step_timeout(() => controller.close(), 10); + }, 10); + + return pipePromise; + +}, 'Piping from a ReadableStream for which a chunk becomes asynchronously readable after the pipeTo'); + +for (const preventAbort of [true, false]) { + promise_test(() => { + + const rs = new ReadableStream({ + pull() { + return Promise.reject(undefined); + } + }); + + return rs.pipeTo(new WritableStream(), { preventAbort }).then( + () => assert_unreached('pipeTo promise should be rejected'), + value => assert_equals(value, undefined, 'rejection value should be undefined')); + + }, `an undefined rejection from pull should cause pipeTo() to reject when preventAbort is ${preventAbort}`); +} + +for (const preventCancel of [true, false]) { + promise_test(() => { + + const rs = new ReadableStream({ + pull(controller) { + controller.enqueue(0); + } + }); + + const ws = new WritableStream({ + write() { + return Promise.reject(undefined); + } + }); + + return rs.pipeTo(ws, { preventCancel }).then( + () => assert_unreached('pipeTo promise should be rejected'), + value => assert_equals(value, undefined, 'rejection value should be undefined')); + + }, `an undefined rejection from write should cause pipeTo() to reject when preventCancel is ${preventCancel}`); +} + +promise_test(t => { + const rs = new ReadableStream(); + const ws = new WritableStream(); + return promise_rejects_js(t, TypeError, rs.pipeTo(ws, { + get preventAbort() { + ws.getWriter(); + } + }), 'pipeTo should reject'); +}, 'pipeTo() should reject if an option getter grabs a writer'); + +promise_test(t 
=> { + const rs = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + const ws = new WritableStream(); + + return rs.pipeTo(ws, null); +}, 'pipeTo() promise should resolve if null is passed'); diff --git a/test/fixtures/wpt/streams/piping/multiple-propagation.any.js b/test/fixtures/wpt/streams/piping/multiple-propagation.any.js new file mode 100644 index 00000000000000..c9a486f3f9ac2f --- /dev/null +++ b/test/fixtures/wpt/streams/piping/multiple-propagation.any.js @@ -0,0 +1,227 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1!'); +error1.name = 'error1'; + +const error2 = new Error('error2!'); +error2.name = 'error2'; + +function createErroredWritableStream(t) { + return Promise.resolve().then(() => { + const ws = recordingWritableStream({ + start(c) { + c.error(error2); + } + }); + + const writer = ws.getWriter(); + return promise_rejects_exactly(t, error2, writer.closed, 'the writable stream must be errored with error2') + .then(() => { + writer.releaseLock(); + assert_array_equals(ws.events, []); + return ws; + }); + }); +} + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + const ws = recordingWritableStream({ + start(c) { + c.error(error2); + } + }); + + // Trying to abort a stream that is erroring will give the writable's error + return promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return Promise.all([ + promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'), + promise_rejects_exactly(t, error2, ws.getWriter().closed, 'the writable stream must be errored with error2') + ]); + }); + +}, 'Piping from an errored readable stream to an erroring 
writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + + return createErroredWritableStream(t) + .then(ws => promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error')) + .then(() => { + assert_array_equals(rs.events, []); + + return promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'); + }); +}, 'Piping from an errored readable stream to an errored writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + const ws = recordingWritableStream({ + start(c) { + c.error(error2); + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the readable stream\'s error') + .then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return Promise.all([ + promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'), + promise_rejects_exactly(t, error2, ws.getWriter().closed, 'the writable stream must be errored with error2') + ]); + }); + +}, 'Piping from an errored readable stream to an erroring writable stream; preventAbort = true'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + return createErroredWritableStream(t) + .then(ws => promise_rejects_exactly(t, error1, rs.pipeTo(ws, { preventAbort: true }), + 'pipeTo must reject with the readable stream\'s error')) + .then(() => { + assert_array_equals(rs.events, []); + + return promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'); + }); + +}, 'Piping from an errored readable stream to an errored writable stream; preventAbort = true'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + 
}); + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + const closePromise = writer.close(); + writer.releaseLock(); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['abort', error1]); + + return Promise.all([ + promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'), + promise_rejects_exactly(t, error1, ws.getWriter().closed, + 'closed must reject with error1'), + promise_rejects_exactly(t, error1, closePromise, + 'close() must reject with error1') + ]); + }); + +}, 'Piping from an errored readable stream to a closing writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.error(error1); + } + }); + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + const closePromise = writer.close(); + writer.releaseLock(); + + return flushAsyncEvents().then(() => { + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the readable stream\'s error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + promise_rejects_exactly(t, error1, rs.getReader().closed, 'the readable stream must be errored with error1'), + ws.getWriter().closed, + closePromise + ]); + }); + }); + +}, 'Piping from an errored readable stream to a closed writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.close(); + } + }); + const ws = recordingWritableStream({ + start(c) { + c.error(error1); + } + }); + + return promise_rejects_exactly(t, error1, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error').then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, []); + + return Promise.all([ + rs.getReader().closed, + 
promise_rejects_exactly(t, error1, ws.getWriter().closed, 'the writable stream must be errored with error1') + ]); + }); + +}, 'Piping from a closed readable stream to an erroring writable stream'); + +promise_test(t => { + const rs = recordingReadableStream({ + start(c) { + c.close(); + } + }); + return createErroredWritableStream(t) + .then(ws => promise_rejects_exactly(t, error2, rs.pipeTo(ws), 'pipeTo must reject with the writable stream\'s error')) + .then(() => { + assert_array_equals(rs.events, []); + + return rs.getReader().closed; + }); + +}, 'Piping from a closed readable stream to an errored writable stream'); + +promise_test(() => { + const rs = recordingReadableStream({ + start(c) { + c.close(); + } + }); + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return rs.pipeTo(ws).then(() => { + assert_array_equals(rs.events, []); + assert_array_equals(ws.events, ['close']); + + return Promise.all([ + rs.getReader().closed, + ws.getWriter().closed + ]); + }); + +}, 'Piping from a closed readable stream to a closed writable stream'); diff --git a/test/fixtures/wpt/streams/piping/pipe-through.any.js b/test/fixtures/wpt/streams/piping/pipe-through.any.js new file mode 100644 index 00000000000000..35dbb456b3e2c6 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/pipe-through.any.js @@ -0,0 +1,268 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +function duckTypedPassThroughTransform() { + let enqueueInReadable; + let closeReadable; + + return { + writable: new WritableStream({ + write(chunk) { + enqueueInReadable(chunk); + }, + + close() { + closeReadable(); + } + }), + + readable: new ReadableStream({ + start(c) { + enqueueInReadable = c.enqueue.bind(c); + closeReadable = c.close.bind(c); + } + }) + }; +} + +function 
uninterestingReadableWritablePair() { + return { writable: new WritableStream(), readable: new ReadableStream() }; +} + +promise_test(() => { + const readableEnd = sequentialReadableStream(5).pipeThrough(duckTypedPassThroughTransform()); + + return readableStreamToArray(readableEnd).then(chunks => + assert_array_equals(chunks, [1, 2, 3, 4, 5]), 'chunks should match'); +}, 'Piping through a duck-typed pass-through transform stream should work'); + +promise_test(() => { + const transform = { + writable: new WritableStream({ + start(c) { + c.error(new Error('this rejection should not be reported as unhandled')); + } + }), + readable: new ReadableStream() + }; + + sequentialReadableStream(5).pipeThrough(transform); + + // The test harness should complain about unhandled rejections by then. + return flushAsyncEvents(); + +}, 'Piping through a transform errored on the writable end does not cause an unhandled promise rejection'); + +test(() => { + let calledPipeTo = false; + class BadReadableStream extends ReadableStream { + pipeTo() { + calledPipeTo = true; + } + } + + const brs = new BadReadableStream({ + start(controller) { + controller.close(); + } + }); + const readable = new ReadableStream(); + const writable = new WritableStream(); + const result = brs.pipeThrough({ readable, writable }); + + assert_false(calledPipeTo, 'the overridden pipeTo should not have been called'); + assert_equals(result, readable, 'return value should be the passed readable property'); +}, 'pipeThrough should not call pipeTo on this'); + +test(t => { + let calledFakePipeTo = false; + const realPipeTo = ReadableStream.prototype.pipeTo; + t.add_cleanup(() => { + ReadableStream.prototype.pipeTo = realPipeTo; + }); + ReadableStream.prototype.pipeTo = () => { + calledFakePipeTo = true; + }; + const rs = new ReadableStream(); + const readable = new ReadableStream(); + const writable = new WritableStream(); + const result = rs.pipeThrough({ readable, writable }); + + assert_false(calledFakePipeTo, 
'the monkey-patched pipeTo should not have been called'); + assert_equals(result, readable, 'return value should be the passed readable property'); + +}, 'pipeThrough should not call pipeTo on the ReadableStream prototype'); + +const badReadables = [null, undefined, 0, NaN, true, 'ReadableStream', Object.create(ReadableStream.prototype)]; +for (const readable of badReadables) { + test(() => { + assert_throws_js(TypeError, + ReadableStream.prototype.pipeThrough.bind(readable, uninterestingReadableWritablePair()), + 'pipeThrough should throw'); + }, `pipeThrough should brand-check this and not allow '${readable}'`); + + test(() => { + const rs = new ReadableStream(); + let writableGetterCalled = false; + assert_throws_js( + TypeError, + () => rs.pipeThrough({ + get writable() { + writableGetterCalled = true; + return new WritableStream(); + }, + readable + }), + 'pipeThrough should brand-check readable' + ); + assert_false(writableGetterCalled, 'writable should not have been accessed'); + }, `pipeThrough should brand-check readable and not allow '${readable}'`); +} + +const badWritables = [null, undefined, 0, NaN, true, 'WritableStream', Object.create(WritableStream.prototype)]; +for (const writable of badWritables) { + test(() => { + const rs = new ReadableStream({ + start(c) { + c.close(); + } + }); + let readableGetterCalled = false; + assert_throws_js(TypeError, () => rs.pipeThrough({ + get readable() { + readableGetterCalled = true; + return new ReadableStream(); + }, + writable + }), + 'pipeThrough should brand-check writable'); + assert_true(readableGetterCalled, 'readable should have been accessed'); + }, `pipeThrough should brand-check writable and not allow '${writable}'`); +} + +test(t => { + const error = new Error(); + error.name = 'custom'; + + const rs = new ReadableStream({ + pull: t.unreached_func('pull should not be called') + }, { highWaterMark: 0 }); + + const throwingWritable = { + readable: rs, + get writable() { + throw error; + } + }; + 
assert_throws_exactly(error, + () => ReadableStream.prototype.pipeThrough.call(rs, throwingWritable, {}), + 'pipeThrough should rethrow the error thrown by the writable getter'); + + const throwingReadable = { + get readable() { + throw error; + }, + writable: {} + }; + assert_throws_exactly(error, + () => ReadableStream.prototype.pipeThrough.call(rs, throwingReadable, {}), + 'pipeThrough should rethrow the error thrown by the readable getter'); + +}, 'pipeThrough should rethrow errors from accessing readable or writable'); + +const badSignals = [null, 0, NaN, true, 'AbortSignal', Object.create(AbortSignal.prototype)]; +for (const signal of badSignals) { + test(() => { + const rs = new ReadableStream(); + assert_throws_js(TypeError, () => rs.pipeThrough(uninterestingReadableWritablePair(), { signal }), + 'pipeThrough should throw'); + }, `invalid values of signal should throw; specifically '${signal}'`); +} + +test(() => { + const rs = new ReadableStream(); + const controller = new AbortController(); + const signal = controller.signal; + rs.pipeThrough(uninterestingReadableWritablePair(), { signal }); +}, 'pipeThrough should accept a real AbortSignal'); + +test(() => { + const rs = new ReadableStream(); + rs.getReader(); + assert_throws_js(TypeError, () => rs.pipeThrough(uninterestingReadableWritablePair()), + 'pipeThrough should throw'); +}, 'pipeThrough should throw if this is locked'); + +test(() => { + const rs = new ReadableStream(); + const writable = new WritableStream(); + const readable = new ReadableStream(); + writable.getWriter(); + assert_throws_js(TypeError, () => rs.pipeThrough({writable, readable}), + 'pipeThrough should throw'); +}, 'pipeThrough should throw if writable is locked'); + +test(() => { + const rs = new ReadableStream(); + const writable = new WritableStream(); + const readable = new ReadableStream(); + readable.getReader(); + assert_equals(rs.pipeThrough({ writable, readable }), readable, + 'pipeThrough should not throw'); +}, 
'pipeThrough should not care if readable is locked'); + +promise_test(() => { + const rs = recordingReadableStream(); + const writable = new WritableStream({ + start(controller) { + controller.error(); + } + }); + const readable = new ReadableStream(); + rs.pipeThrough({ writable, readable }, { preventCancel: true }); + return flushAsyncEvents(0).then(() => { + assert_array_equals(rs.events, ['pull'], 'cancel should not have been called'); + }); +}, 'preventCancel should work'); + +promise_test(() => { + const rs = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + const writable = recordingWritableStream(); + const readable = new ReadableStream(); + rs.pipeThrough({ writable, readable }, { preventClose: true }); + return flushAsyncEvents(0).then(() => { + assert_array_equals(writable.events, [], 'writable should not be closed'); + }); +}, 'preventClose should work'); + +promise_test(() => { + const rs = new ReadableStream({ + start(controller) { + controller.error(); + } + }); + const writable = recordingWritableStream(); + const readable = new ReadableStream(); + rs.pipeThrough({ writable, readable }, { preventAbort: true }); + return flushAsyncEvents(0).then(() => { + assert_array_equals(writable.events, [], 'writable should not be aborted'); + }); +}, 'preventAbort should work'); + +test(() => { + const rs = new ReadableStream(); + const readable = new ReadableStream(); + const writable = new WritableStream(); + assert_throws_js(TypeError, () => rs.pipeThrough({readable, writable}, { + get preventAbort() { + writable.getWriter(); + } + }), 'pipeThrough should throw'); +}, 'pipeThrough() should throw if an option getter grabs a writer'); diff --git a/test/fixtures/wpt/streams/piping/then-interception.any.js b/test/fixtures/wpt/streams/piping/then-interception.any.js new file mode 100644 index 00000000000000..9f772ea5841d8f --- /dev/null +++ b/test/fixtures/wpt/streams/piping/then-interception.any.js @@ -0,0 +1,68 @@ +// META: 
global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +function interceptThen() { + const intercepted = []; + let callCount = 0; + Object.prototype.then = function(resolver) { + if (!this.done) { + intercepted.push(this.value); + } + const retval = Object.create(null); + retval.done = ++callCount === 3; + retval.value = callCount; + resolver(retval); + if (retval.done) { + delete Object.prototype.then; + } + } + return intercepted; +} + +promise_test(async t => { + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.close(); + } + }); + const ws = recordingWritableStream(); + + const intercepted = interceptThen(); + t.add_cleanup(() => { + delete Object.prototype.then; + }); + + await rs.pipeTo(ws); + delete Object.prototype.then; + + + assert_array_equals(intercepted, [], 'nothing should have been intercepted'); + assert_array_equals(ws.events, ['write', 'a', 'close'], 'written chunk should be "a"'); +}, 'piping should not be observable'); + +promise_test(async t => { + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.close(); + } + }); + const ws = recordingWritableStream(); + + const [ branch1, branch2 ] = rs.tee(); + + const intercepted = interceptThen(); + t.add_cleanup(() => { + delete Object.prototype.then; + }); + + await branch1.pipeTo(ws); + delete Object.prototype.then; + branch2.cancel(); + + assert_array_equals(intercepted, [], 'nothing should have been intercepted'); + assert_array_equals(ws.events, ['write', 'a', 'close'], 'written chunk should be "a"'); +}, 'tee should not be observable'); diff --git a/test/fixtures/wpt/streams/piping/throwing-options.any.js b/test/fixtures/wpt/streams/piping/throwing-options.any.js new file mode 100644 index 00000000000000..bc1cf328da61e6 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/throwing-options.any.js @@ -0,0 +1,65 @@ +// META: 
global=window,worker,jsshell +'use strict'; + +class ThrowingOptions { + constructor(whatShouldThrow) { + this.whatShouldThrow = whatShouldThrow; + this.touched = []; + } + + get preventClose() { + this.maybeThrow('preventClose'); + return false; + } + + get preventAbort() { + this.maybeThrow('preventAbort'); + return false; + } + + get preventCancel() { + this.maybeThrow('preventCancel'); + return false; + } + + get signal() { + this.maybeThrow('signal'); + return undefined; + } + + maybeThrow(forWhat) { + this.touched.push(forWhat); + if (this.whatShouldThrow === forWhat) { + throw new Error(this.whatShouldThrow); + } + } +} + +const checkOrder = ['preventAbort', 'preventCancel', 'preventClose', 'signal']; + +for (let i = 0; i < checkOrder.length; ++i) { + const whatShouldThrow = checkOrder[i]; + const whatShouldBeTouched = checkOrder.slice(0, i + 1); + + promise_test(t => { + const options = new ThrowingOptions(whatShouldThrow); + return promise_rejects_js( + t, Error, + new ReadableStream().pipeTo(new WritableStream(), options), + 'pipeTo should reject') + .then(() => assert_array_equals( + options.touched, whatShouldBeTouched, + 'options should be touched in the right order')); + }, `pipeTo should stop after getting ${whatShouldThrow} throws`); + + test(() => { + const options = new ThrowingOptions(whatShouldThrow); + assert_throws_js( + Error, + () => new ReadableStream().pipeThrough(new TransformStream(), options), + 'pipeThrough should throw'); + assert_array_equals( + options.touched, whatShouldBeTouched, + 'options should be touched in the right order'); + }, `pipeThrough should stop after getting ${whatShouldThrow} throws`); +} diff --git a/test/fixtures/wpt/streams/piping/transform-streams.any.js b/test/fixtures/wpt/streams/piping/transform-streams.any.js new file mode 100644 index 00000000000000..a368fecd6f00d6 --- /dev/null +++ b/test/fixtures/wpt/streams/piping/transform-streams.any.js @@ -0,0 +1,22 @@ +// META: global=window,worker,jsshell +'use 
strict'; + +promise_test(() => { + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.enqueue('c'); + c.close(); + } + }); + + const ts = new TransformStream(); + + const ws = new WritableStream(); + + return rs.pipeThrough(ts).pipeTo(ws).then(() => { + const writer = ws.getWriter(); + return writer.closed; + }); +}, 'Piping through an identity transform stream should close the destination when the source closes'); diff --git a/test/fixtures/wpt/streams/queuing-strategies-size-function-per-global.window.js b/test/fixtures/wpt/streams/queuing-strategies-size-function-per-global.window.js new file mode 100644 index 00000000000000..0f869f13b348da --- /dev/null +++ b/test/fixtures/wpt/streams/queuing-strategies-size-function-per-global.window.js @@ -0,0 +1,14 @@ +const iframe = document.createElement('iframe'); +document.body.appendChild(iframe); + +for (const type of ['CountQueuingStrategy', 'ByteLengthQueuingStrategy']) { + test(() => { + const myQs = new window[type]({ highWaterMark: 1 }); + const yourQs = new iframe.contentWindow[type]({ highWaterMark: 1 }); + assert_not_equals(myQs.size, yourQs.size, + 'size should not be the same object'); + }, `${type} size should be different for objects in different realms`); +} + +// Cleanup the document to avoid messing up the result page. 
+iframe.remove(); diff --git a/test/fixtures/wpt/streams/queuing-strategies.any.js b/test/fixtures/wpt/streams/queuing-strategies.any.js new file mode 100644 index 00000000000000..1846ea63e35459 --- /dev/null +++ b/test/fixtures/wpt/streams/queuing-strategies.any.js @@ -0,0 +1,135 @@ +// META: global=window,worker,jsshell +'use strict'; + +const highWaterMarkConversions = new Map([ + [-Infinity, -Infinity], + [-5, -5], + [false, 0], + [true, 1], + [NaN, NaN], + ['foo', NaN], + ['0', 0], + [{}, NaN], + [() => {}, NaN] +]); + +for (const QueuingStrategy of [CountQueuingStrategy, ByteLengthQueuingStrategy]) { + test(() => { + new QueuingStrategy({ highWaterMark: 4 }); + }, `${QueuingStrategy.name}: Can construct a with a valid high water mark`); + + test(() => { + const highWaterMark = 1; + const highWaterMarkObjectGetter = { + get highWaterMark() { return highWaterMark; } + }; + const error = new Error('wow!'); + const highWaterMarkObjectGetterThrowing = { + get highWaterMark() { throw error; } + }; + + assert_throws_js(TypeError, () => new QueuingStrategy(), 'construction fails with undefined'); + assert_throws_js(TypeError, () => new QueuingStrategy(null), 'construction fails with null'); + assert_throws_js(TypeError, () => new QueuingStrategy(true), 'construction fails with true'); + assert_throws_js(TypeError, () => new QueuingStrategy(5), 'construction fails with 5'); + assert_throws_js(TypeError, () => new QueuingStrategy({}), 'construction fails with {}'); + assert_throws_exactly(error, () => new QueuingStrategy(highWaterMarkObjectGetterThrowing), + 'construction fails with an object with a throwing highWaterMark getter'); + + assert_equals((new QueuingStrategy(highWaterMarkObjectGetter)).highWaterMark, highWaterMark); + }, `${QueuingStrategy.name}: Constructor behaves as expected with strange arguments`); + + test(() => { + for (const [input, output] of highWaterMarkConversions.entries()) { + const strategy = new QueuingStrategy({ highWaterMark: input }); + 
assert_equals(strategy.highWaterMark, output, `${input} gets set correctly`); + } + }, `${QueuingStrategy.name}: highWaterMark constructor values are converted per the unrestricted double rules`); + + test(() => { + const size1 = (new QueuingStrategy({ highWaterMark: 5 })).size; + const size2 = (new QueuingStrategy({ highWaterMark: 10 })).size; + + assert_equals(size1, size2); + }, `${QueuingStrategy.name}: size is the same function across all instances`); + + test(() => { + const size = (new QueuingStrategy({ highWaterMark: 5 })).size; + assert_equals(size.name, 'size'); + }, `${QueuingStrategy.name}: size should have the right name`); + + test(() => { + class SubClass extends QueuingStrategy { + size() { + return 2; + } + + subClassMethod() { + return true; + } + } + + const sc = new SubClass({ highWaterMark: 77 }); + assert_equals(sc.constructor.name, 'SubClass', 'constructor.name should be correct'); + assert_equals(sc.highWaterMark, 77, 'highWaterMark should come from the parent class'); + assert_equals(sc.size(), 2, 'size() on the subclass should override the parent'); + assert_true(sc.subClassMethod(), 'subClassMethod() should work'); + }, `${QueuingStrategy.name}: subclassing should work correctly`); +} + +test(() => { + const size = (new CountQueuingStrategy({ highWaterMark: 5 })).size; + assert_equals(size.length, 0); +}, 'CountQueuingStrategy: size should have the right length'); + +test(() => { + const size = (new ByteLengthQueuingStrategy({ highWaterMark: 5 })).size; + assert_equals(size.length, 1); +}, 'ByteLengthQueuingStrategy: size should have the right length'); + +test(() => { + const size = 1024; + const chunk = { byteLength: size }; + const chunkGetter = { + get byteLength() { return size; } + }; + const error = new Error('wow!'); + const chunkGetterThrowing = { + get byteLength() { throw error; } + }; + + const sizeFunction = (new CountQueuingStrategy({ highWaterMark: 5 })).size; + + assert_equals(sizeFunction(), 1, 'size returns 1 with 
undefined'); + assert_equals(sizeFunction(null), 1, 'size returns 1 with null'); + assert_equals(sizeFunction('potato'), 1, 'size returns 1 with non-object type'); + assert_equals(sizeFunction({}), 1, 'size returns 1 with empty object'); + assert_equals(sizeFunction(chunk), 1, 'size returns 1 with a chunk'); + assert_equals(sizeFunction(chunkGetter), 1, 'size returns 1 with chunk getter'); + assert_equals(sizeFunction(chunkGetterThrowing), 1, + 'size returns 1 with chunk getter that throws'); +}, 'CountQueuingStrategy: size behaves as expected with strange arguments'); + +test(() => { + const size = 1024; + const chunk = { byteLength: size }; + const chunkGetter = { + get byteLength() { return size; } + }; + const error = new Error('wow!'); + const chunkGetterThrowing = { + get byteLength() { throw error; } + }; + + const sizeFunction = (new ByteLengthQueuingStrategy({ highWaterMark: 5 })).size; + + assert_throws_js(TypeError, () => sizeFunction(), 'size fails with undefined'); + assert_throws_js(TypeError, () => sizeFunction(null), 'size fails with null'); + assert_equals(sizeFunction('potato'), undefined, 'size succeeds with undefined with a random non-object type'); + assert_equals(sizeFunction({}), undefined, 'size succeeds with undefined with an object without hwm property'); + assert_equals(sizeFunction(chunk), size, 'size succeeds with the right amount with an object with a hwm'); + assert_equals(sizeFunction(chunkGetter), size, + 'size succeeds with the right amount with an object with a hwm getter'); + assert_throws_exactly(error, () => sizeFunction(chunkGetterThrowing), + 'size fails with the error thrown by the getter'); +}, 'ByteLengthQueuingStrategy: size behaves as expected with strange arguments'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/bad-buffers-and-views.any.js b/test/fixtures/wpt/streams/readable-byte-streams/bad-buffers-and-views.any.js new file mode 100644 index 00000000000000..eed3a5ed4f8d02 --- /dev/null +++ 
b/test/fixtures/wpt/streams/readable-byte-streams/bad-buffers-and-views.any.js @@ -0,0 +1,349 @@ +// META: global=window,worker,jsshell +'use strict'; + +promise_test(() => { + const stream = new ReadableStream({ + start(c) { + c.close(); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + const view = new Uint8Array([1, 2, 3]); + return reader.read(view).then(({ value, done }) => { + // Sanity checks + assert_true(value instanceof Uint8Array, 'The value read must be a Uint8Array'); + assert_not_equals(value, view, 'The value read must not be the *same* Uint8Array'); + assert_array_equals(value, [], 'The value read must be an empty Uint8Array, since the stream is closed'); + assert_true(done, 'done must be true, since the stream is closed'); + + // The important assertions + assert_not_equals(value.buffer, view.buffer, 'a different ArrayBuffer must underlie the value'); + assert_equals(view.buffer.byteLength, 0, 'the original buffer must be detached'); + }); +}, 'ReadableStream with byte source: read()ing from a closed stream still transfers the buffer'); + +promise_test(() => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array([1, 2, 3])); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + const view = new Uint8Array([4, 5, 6]); + return reader.read(view).then(({ value, done }) => { + // Sanity checks + assert_true(value instanceof Uint8Array, 'The value read must be a Uint8Array'); + assert_not_equals(value, view, 'The value read must not be the *same* Uint8Array'); + assert_array_equals(value, [1, 2, 3], 'The value read must be the enqueued Uint8Array, not the original values'); + assert_false(done, 'done must be false, since the stream is not closed'); + + // The important assertions + assert_not_equals(value.buffer, view.buffer, 'a different ArrayBuffer must underlie the value'); + assert_equals(view.buffer.byteLength, 0, 'the original buffer must be detached'); + }); 
+}, 'ReadableStream with byte source: read()ing from a stream with queued chunks still transfers the buffer'); + +test(() => { + new ReadableStream({ + start(c) { + const view = new Uint8Array([1, 2, 3]); + c.enqueue(view); + assert_throws_js(TypeError, () => c.enqueue(view)); + }, + type: 'bytes' + }); +}, 'ReadableStream with byte source: enqueuing an already-detached buffer throws'); + +test(() => { + new ReadableStream({ + start(c) { + const view = new Uint8Array([]); + assert_throws_js(TypeError, () => c.enqueue(view)); + }, + type: 'bytes' + }); +}, 'ReadableStream with byte source: enqueuing a zero-length buffer throws'); + +test(() => { + new ReadableStream({ + start(c) { + const view = new Uint8Array(new ArrayBuffer(10), 0, 0); + assert_throws_js(TypeError, () => c.enqueue(view)); + }, + type: 'bytes' + }); +}, 'ReadableStream with byte source: enqueuing a zero-length view on a non-zero-length buffer throws'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array([1, 2, 3])); + }, + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const view = new Uint8Array([4, 5, 6]); + return reader.read(view).then(() => { + // view is now detached + return promise_rejects_js(t, TypeError, reader.read(view)); + }); +}, 'ReadableStream with byte source: reading into an already-detached buffer rejects'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array([1, 2, 3])); + }, + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const view = new Uint8Array(); + return promise_rejects_js(t, TypeError, reader.read(view)); +}, 'ReadableStream with byte source: reading into a zero-length buffer rejects'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array([1, 2, 3])); + }, + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const view = new Uint8Array(new 
ArrayBuffer(10), 0, 0); + return promise_rejects_js(t, TypeError, reader.read(view)); +}, 'ReadableStream with byte source: reading into a zero-length view on a non-zero-length buffer rejects'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + // Detach it by reading into it + reader.read(c.byobRequest.view); + + assert_throws_js(TypeError, () => c.byobRequest.respond(1), + 'respond() must throw if the corresponding view has become detached'); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respond() throws if the BYOB request\'s buffer has been detached (in the ' + + 'readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + c.close(); + + // Detach it by reading into it + reader.read(c.byobRequest.view); + + assert_throws_js(TypeError, () => c.byobRequest.respond(0), + 'respond() must throw if the corresponding view has become detached'); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respond() throws if the BYOB request\'s buffer has been detached (in the ' + + 'closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + // Detach it by reading into it + const view = new Uint8Array([1, 2, 3]); + reader.read(view); + + assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has been detached ' + + '(in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(); + + 
assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer is zero-length ' + + '(in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 0); + + assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view is zero-length on a ' + + 'non-zero-length buffer (in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(new ArrayBuffer(10), 0, 3); + + assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has a ' + + 'different length (in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 4); + view[0] = 20; + view[1] = 21; + view[2] = 22; + view[3] = 23; + + assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const buffer = new ArrayBuffer(10); + const view = new Uint8Array(buffer, 0, 3); + view[0] = 10; + view[1] = 11; + view[2] = 12; + reader.read(view); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view 
has a larger length ' + + '(in the readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + c.close(); + + // Detach it by reading into it + const view = new Uint8Array([1, 2, 3]); + reader.read(view); + + assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer has been detached ' + + '(in the closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(); + + c.close(); + + assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view\'s buffer is zero-length ' + + '(in the closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 1); + + c.close(); + + assert_throws_js(TypeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: respondWithNewView() throws if the supplied view is non-zero-length ' + + '(in the closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + const view = new Uint8Array(new ArrayBuffer(10), 0, 0); + + c.close(); + + assert_throws_js(RangeError, () => c.byobRequest.respondWithNewView(view)); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream 
with byte source: respondWithNewView() throws if the supplied view\'s buffer has a ' + + 'different length (in the closed state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + // Detach it by reading into it + reader.read(c.byobRequest.view); + + assert_throws_js(TypeError, () => c.enqueue(new Uint8Array([1])), + 'enqueue() must throw if the BYOB request\'s buffer has become detached'); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: enqueue() throws if the BYOB request\'s buffer has been detached (in the ' + + 'readable state)'); + +async_test(t => { + const stream = new ReadableStream({ + pull: t.step_func_done(c => { + c.close(); + + // Detach it by reading into it + reader.read(c.byobRequest.view); + + assert_throws_js(TypeError, () => c.enqueue(new Uint8Array([1])), + 'enqueue() must throw if the BYOB request\'s buffer has become detached'); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + reader.read(new Uint8Array([4, 5, 6])); +}, 'ReadableStream with byte source: enqueue() throws if the BYOB request\'s buffer has been detached (in the ' + + 'closed state)'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/construct-byob-request.any.js b/test/fixtures/wpt/streams/readable-byte-streams/construct-byob-request.any.js new file mode 100644 index 00000000000000..1386d84599a4cd --- /dev/null +++ b/test/fixtures/wpt/streams/readable-byte-streams/construct-byob-request.any.js @@ -0,0 +1,53 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +'use strict'; + +// Prior to whatwg/stream#870 it was possible to construct a ReadableStreamBYOBRequest directly. This made it possible +// to construct requests that were out-of-sync with the state of the ReadableStream. 
They could then be used to call +// internal operations, resulting in asserts or bad behaviour. This file contains regression tests for the change. + +function getRealByteStreamController() { + let controller; + new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + return controller; +} + +// Create an object pretending to have prototype |prototype|, of type |type|. |type| is one of "undefined", "null", +// "fake", or "real". "real" will call the realObjectCreator function to get a real instance of the object. +function createDummyObject(prototype, type, realObjectCreator) { + switch (type) { + case 'undefined': + return undefined; + + case 'null': + return null; + + case 'fake': + return Object.create(prototype); + + case 'real': + return realObjectCreator(); + } + + throw new Error('not reached'); +} + +const dummyTypes = ['undefined', 'null', 'fake', 'real']; + +for (const controllerType of dummyTypes) { + const controller = createDummyObject(ReadableByteStreamController.prototype, controllerType, + getRealByteStreamController); + for (const viewType of dummyTypes) { + const view = createDummyObject(Uint8Array.prototype, viewType, () => new Uint8Array(16)); + test(() => { + assert_throws_js(TypeError, () => new ReadableStreamBYOBRequest(controller, view), + 'constructor should throw'); + }, `ReadableStreamBYOBRequest constructor should throw when passed a ${controllerType} ` + + `ReadableByteStreamController and a ${viewType} view`); + } +} diff --git a/test/fixtures/wpt/streams/readable-byte-streams/enqueue-with-detached-buffer.window.js b/test/fixtures/wpt/streams/readable-byte-streams/enqueue-with-detached-buffer.window.js new file mode 100644 index 00000000000000..15400f69340451 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-byte-streams/enqueue-with-detached-buffer.window.js @@ -0,0 +1,19 @@ +promise_test(async t => { + const error = new Error('cannot proceed'); + const rs = new ReadableStream({ + type: 'bytes', + pull: 
t.step_func((controller) => { + const buffer = controller.byobRequest.view.buffer; + // Detach the buffer. + postMessage(buffer, '*', [buffer]); + + // Try to enqueue with a new buffer. + assert_throws_js(TypeError, () => controller.enqueue(new Uint8Array([42]))); + + // If we got here the test passed. + controller.error(error); + }) + }); + const reader = rs.getReader({ mode: 'byob' }); + await promise_rejects_exactly(t, error, reader.read(new Uint8Array(1))); +}, 'enqueue after detaching byobRequest.view.buffer should throw'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/general.any.js b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js new file mode 100644 index 00000000000000..9aa508225865c8 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js @@ -0,0 +1,2336 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +test(() => { + assert_throws_js(TypeError, () => new ReadableStream().getReader({ mode: 'byob' })); +}, 'getReader({mode: "byob"}) throws on non-bytes streams'); + + +test(() => { + // Constructing ReadableStream with an empty underlying byte source object as parameter shouldn't throw. + new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }); + // Constructor must perform ToString(type). 
+ new ReadableStream({ type: { toString() {return 'bytes';} } }) + .getReader({ mode: 'byob' }); + new ReadableStream({ type: { toString: null, valueOf() {return 'bytes';} } }) + .getReader({ mode: 'byob' }); +}, 'ReadableStream with byte source can be constructed with no errors'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + const rs = new ReadableStream({ type: 'bytes' }); + + let reader = rs.getReader({ mode: { toString() { return 'byob'; } } }); + assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader'); + reader.releaseLock(); + + reader = rs.getReader({ mode: { toString: null, valueOf() {return 'byob';} } }); + assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader'); + reader.releaseLock(); + + reader = rs.getReader({ mode: 'byob', notmode: 'ignored' }); + assert_true(reader instanceof ReadableStreamBYOBReader, 'must give a BYOB reader'); +}, 'getReader({mode}) must perform ToString()'); + +promise_test(() => { + let startCalled = false; + let startCalledBeforePull = false; + let desiredSize; + let controller; + + let resolveTestPromise; + const testPromise = new Promise(resolve => { + resolveTestPromise = resolve; + }); + + new ReadableStream({ + start(c) { + controller = c; + startCalled = true; + }, + pull() { + startCalledBeforePull = startCalled; + desiredSize = controller.desiredSize; + resolveTestPromise(); + }, + type: 'bytes' + }, { + highWaterMark: 256 + }); + + return testPromise.then(() => { + assert_true(startCalledBeforePull, 'start should be called before pull'); + assert_equals(desiredSize, 256, 'desiredSize should equal highWaterMark'); + }); + +}, 'ReadableStream with byte source: Construct and expect start and pull being called'); + +promise_test(() => { + let pullCount = 0; + let checkedNoPull = false; + + let resolveTestPromise; + const testPromise = new Promise(resolve => { + resolveTestPromise = 
resolve; + }); + let resolveStartPromise; + + new ReadableStream({ + start() { + return new Promise(resolve => { + resolveStartPromise = resolve; + }); + }, + pull() { + if (checkedNoPull) { + resolveTestPromise(); + } + + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 256 + }); + + Promise.resolve().then(() => { + assert_equals(pullCount, 0); + checkedNoPull = true; + resolveStartPromise(); + }); + + return testPromise; + +}, 'ReadableStream with byte source: No automatic pull call if start doesn\'t finish'); + +test(() => { + assert_throws_js(Error, () => new ReadableStream({ start() { throw new Error(); }, type:'bytes' }), + 'start() can throw an exception with type: bytes'); +}, 'ReadableStream with byte source: start() throws an exception'); + +promise_test(t => { + new ReadableStream({ + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }, { + highWaterMark: 0 + }); + + return Promise.resolve(); +}, 'ReadableStream with byte source: Construct with highWaterMark of 0'); + +test(() => { + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 10, 'desiredSize must start at the highWaterMark'); + c.close(); + assert_equals(c.desiredSize, 0, 'after closing, desiredSize must be 0'); + }, + type: 'bytes' + }, { + highWaterMark: 10 + }); +}, 'ReadableStream with byte source: desiredSize when closed'); + +test(() => { + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 10, 'desiredSize must start at the highWaterMark'); + c.error(); + assert_equals(c.desiredSize, null, 'after erroring, desiredSize must be null'); + }, + type: 'bytes' + }, { + highWaterMark: 10 + }); +}, 'ReadableStream with byte source: desiredSize when errored'); + +promise_test(t => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader(); + reader.releaseLock(); + + return promise_rejects_js(t, TypeError, reader.closed, 'closed must reject'); +}, 'ReadableStream with byte source: getReader(), then 
releaseLock()'); + +promise_test(t => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + reader.releaseLock(); + + return promise_rejects_js(t, TypeError, reader.closed, 'closed must reject'); +}, 'ReadableStream with byte source: getReader() with mode set to byob, then releaseLock()'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.close(); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.closed.then(() => { + assert_throws_js(TypeError, () => stream.getReader(), 'getReader() must throw'); + }); +}, 'ReadableStream with byte source: Test that closing a stream does not release a reader automatically'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.close(); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.closed.then(() => { + assert_throws_js(TypeError, () => stream.getReader({ mode: 'byob' }), 'getReader() must throw'); + }); +}, 'ReadableStream with byte source: Test that closing a stream does not release a BYOB reader automatically'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.error(error1); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + return promise_rejects_exactly(t, error1, reader.closed, 'closed must reject').then(() => { + assert_throws_js(TypeError, () => stream.getReader(), 'getReader() must throw'); + }); +}, 'ReadableStream with byte source: Test that erroring a stream does not release a reader automatically'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.error(error1); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = 
stream.getReader({ mode: 'byob' }); + + return promise_rejects_exactly(t, error1, reader.closed, 'closed must reject').then(() => { + assert_throws_js(TypeError, () => stream.getReader({ mode: 'byob' }), 'getReader() must throw'); + }); +}, 'ReadableStream with byte source: Test that erroring a stream does not release a BYOB reader automatically'); + +test(() => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader(); + reader.read(); + assert_throws_js(TypeError, () => reader.releaseLock(), 'reader.releaseLock() must throw'); +}, 'ReadableStream with byte source: releaseLock() on ReadableStreamDefaultReader with pending read() must throw'); + +promise_test(() => { + let pullCount = 0; + + const stream = new ReadableStream({ + pull() { + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 8 + }); + + stream.getReader(); + + assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 1, 'pull must be invoked'); + }); +}, 'ReadableStream with byte source: Automatic pull() after start()'); + +promise_test(() => { + let pullCount = 0; + + const stream = new ReadableStream({ + pull() { + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + reader.read(); + + assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 1, 'pull must be invoked'); + }); +}, 'ReadableStream with byte source: Automatic pull() after start() and read()'); + +// View buffers are detached after pull() returns, so record the information at the time that pull() was called. 
+function extractViewInfo(view) { + return { + constructor: view.constructor, + bufferByteLength: view.buffer.byteLength, + byteOffset: view.byteOffset, + byteLength: view.byteLength + }; +} + +promise_test(() => { + let pullCount = 0; + let controller; + const byobRequests = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + const byobRequest = controller.byobRequest; + const view = byobRequest.view; + byobRequests[pullCount] = { + nonNull: byobRequest !== null, + viewNonNull: view !== null, + viewInfo: extractViewInfo(view) + }; + if (pullCount === 0) { + view[0] = 0x01; + byobRequest.respond(1); + } else if (pullCount === 1) { + view[0] = 0x02; + view[1] = 0x03; + byobRequest.respond(2); + } + + ++pullCount; + }, + type: 'bytes', + autoAllocateChunkSize: 16 + }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + const p0 = reader.read(); + const p1 = reader.read(); + + assert_equals(pullCount, 0, 'No pull() as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 1, 'pull() must have been invoked once'); + const byobRequest = byobRequests[0]; + assert_true(byobRequest.nonNull, 'first byobRequest must not be null'); + assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null'); + const viewInfo = byobRequest.viewInfo; + assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 16, 'first view.buffer.byteLength should be 16'); + assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 16, 'first view.byteLength should be 16'); + + return p0; + }).then(result => { + assert_equals(pullCount, 2, 'pull() must have been invoked twice'); + const value = result.value; + assert_not_equals(value, undefined, 'first read should have a value'); + 
assert_equals(value.constructor, Uint8Array, 'first value should be a Uint8Array'); + assert_equals(value.buffer.byteLength, 16, 'first value.buffer.byteLength should be 16'); + assert_equals(value.byteOffset, 0, 'first value.byteOffset should be 0'); + assert_equals(value.byteLength, 1, 'first value.byteLength should be 1'); + assert_equals(value[0], 0x01, 'first value[0] should be 0x01'); + const byobRequest = byobRequests[1]; + assert_true(byobRequest.nonNull, 'second byobRequest must not be null'); + assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null'); + const viewInfo = byobRequest.viewInfo; + assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 16, 'second view.buffer.byteLength should be 16'); + assert_equals(viewInfo.byteOffset, 0, 'second view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 16, 'second view.byteLength should be 16'); + + return p1; + }).then(result => { + assert_equals(pullCount, 2, 'pull() should only be invoked twice'); + const value = result.value; + assert_not_equals(value, undefined, 'second read should have a value'); + assert_equals(value.constructor, Uint8Array, 'second value should be a Uint8Array'); + assert_equals(value.buffer.byteLength, 16, 'second value.buffer.byteLength should be 16'); + assert_equals(value.byteOffset, 0, 'second value.byteOffset should be 0'); + assert_equals(value.byteLength, 2, 'second value.byteLength should be 2'); + assert_equals(value[0], 0x02, 'second value[0] should be 0x02'); + assert_equals(value[1], 0x03, 'second value[1] should be 0x03'); + }); +}, 'ReadableStream with byte source: autoAllocateChunkSize'); + +promise_test(() => { + let pullCount = 0; + let controller; + const byobRequests = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + const byobRequest = controller.byobRequest; + const view = byobRequest.view; + 
byobRequests[pullCount] = { + nonNull: byobRequest !== null, + viewNonNull: view !== null, + viewInfo: extractViewInfo(view) + }; + if (pullCount === 0) { + view[0] = 0x01; + byobRequest.respond(1); + } else if (pullCount === 1) { + view[0] = 0x02; + view[1] = 0x03; + byobRequest.respond(2); + } + + ++pullCount; + }, + type: 'bytes', + autoAllocateChunkSize: 16 + }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + return reader.read().then(result => { + const value = result.value; + assert_not_equals(value, undefined, 'first read should have a value'); + assert_equals(value.constructor, Uint8Array, 'first value should be a Uint8Array'); + assert_equals(value.buffer.byteLength, 16, 'first value.buffer.byteLength should be 16'); + assert_equals(value.byteOffset, 0, 'first value.byteOffset should be 0'); + assert_equals(value.byteLength, 1, 'first value.byteLength should be 1'); + assert_equals(value[0], 0x01, 'first value[0] should be 0x01'); + const byobRequest = byobRequests[0]; + assert_true(byobRequest.nonNull, 'first byobRequest must not be null'); + assert_true(byobRequest.viewNonNull, 'first byobRequest.view must not be null'); + const viewInfo = byobRequest.viewInfo; + assert_equals(viewInfo.constructor, Uint8Array, 'first view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 16, 'first view.buffer.byteLength should be 16'); + assert_equals(viewInfo.byteOffset, 0, 'first view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 16, 'first view.byteLength should be 16'); + + reader.releaseLock(); + const byobReader = stream.getReader({ mode: 'byob' }); + return byobReader.read(new Uint8Array(32)); + }).then(result => { + const value = result.value; + assert_not_equals(value, undefined, 'second read should have a value'); + assert_equals(value.constructor, Uint8Array, 'second value should be a Uint8Array'); + assert_equals(value.buffer.byteLength, 32, 'second value.buffer.byteLength should be 32'); + 
assert_equals(value.byteOffset, 0, 'second value.byteOffset should be 0'); + assert_equals(value.byteLength, 2, 'second value.byteLength should be 2'); + assert_equals(value[0], 0x02, 'second value[0] should be 0x02'); + assert_equals(value[1], 0x03, 'second value[1] should be 0x03'); + const byobRequest = byobRequests[1]; + assert_true(byobRequest.nonNull, 'second byobRequest must not be null'); + assert_true(byobRequest.viewNonNull, 'second byobRequest.view must not be null'); + const viewInfo = byobRequest.viewInfo; + assert_equals(viewInfo.constructor, Uint8Array, 'second view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 32, 'second view.buffer.byteLength should be 32'); + assert_equals(viewInfo.byteOffset, 0, 'second view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 32, 'second view.byteLength should be 32'); + assert_equals(pullCount, 2, 'pullCount should be 2'); + }); +}, 'ReadableStream with byte source: Mix of auto allocate and BYOB'); + +promise_test(() => { + let pullCount = 0; + + const stream = new ReadableStream({ + pull() { + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + reader.read(new Uint8Array(8)); + + assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 1, 'pull must be invoked'); + }); +}, 'ReadableStream with byte source: Automatic pull() after start() and read(view)'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let desiredSizeInStart; + let desiredSizeInPull; + + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array(16)); + desiredSizeInStart = c.desiredSize; + controller = c; + }, + pull() { + ++pullCount; + + if (pullCount === 1) { + desiredSizeInPull = controller.desiredSize; + } + }, + type: 'bytes' + }, { + highWaterMark: 8 + }); + + return 
Promise.resolve().then(() => { + assert_equals(pullCount, 0, 'No pull as the queue was filled by start()'); + assert_equals(desiredSizeInStart, -8, 'desiredSize after enqueue() in start()'); + + const reader = stream.getReader(); + + const promise = reader.read(); + assert_equals(pullCount, 1, 'The first pull() should be made on read()'); + assert_equals(desiredSizeInPull, 8, 'desiredSize in pull()'); + + return promise.then(result => { + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.constructor, Uint8Array, 'view.constructor'); + assert_equals(view.buffer.byteLength, 16, 'view.buffer'); + assert_equals(view.byteOffset, 0, 'view.byteOffset'); + assert_equals(view.byteLength, 16, 'view.byteLength'); + }); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read()'); + +promise_test(() => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + const promise = reader.read().then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + assert_equals(view.buffer.byteLength, 1); + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 1); + }); + + controller.enqueue(new Uint8Array(1)); + + return promise; +}, 'ReadableStream with byte source: Push source that doesn\'t understand pull signal'); + +test(() => { + assert_throws_js(TypeError, () => new ReadableStream({ + pull: 'foo', + type: 'bytes' + }), 'constructor should throw'); +}, 'ReadableStream with byte source: pull() function is not callable'); + +promise_test(() => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint16Array(16)); + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.read().then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + 
assert_equals(view.buffer.byteLength, 32); + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 32); + }); +}, 'ReadableStream with byte source: enqueue() with Uint16Array, getReader(), then read()'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(16); + view[0] = 0x01; + view[8] = 0x02; + c.enqueue(view); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const byobReader = stream.getReader({ mode: 'byob' }); + + return byobReader.read(new Uint8Array(8)).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.constructor, Uint8Array, 'value.constructor'); + assert_equals(view.buffer.byteLength, 8, 'value.buffer.byteLength'); + assert_equals(view.byteOffset, 0, 'value.byteOffset'); + assert_equals(view.byteLength, 8, 'value.byteLength'); + assert_equals(view[0], 0x01); + + byobReader.releaseLock(); + + const reader = stream.getReader(); + + return reader.read(); + }).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.constructor, Uint8Array, 'value.constructor'); + assert_equals(view.buffer.byteLength, 16, 'value.buffer.byteLength'); + assert_equals(view.byteOffset, 8, 'value.byteOffset'); + assert_equals(view.byteLength, 8, 'value.byteLength'); + assert_equals(view[0], 0x02); + }); +}, 'ReadableStream with byte source: enqueue(), read(view) partially, then read()'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + controller.enqueue(new Uint8Array(16)); + controller.close(); + + return reader.read().then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 
16, 'byteLength'); + + return reader.read(); + }).then(result => { + assert_true(result.done, 'done'); + assert_equals(result.value, undefined, 'value'); + }); +}, 'ReadableStream with byte source: getReader(), enqueue(), close(), then read()'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array(16)); + c.close(); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.read().then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 16, 'byteLength'); + + return reader.read(); + }).then(result => { + assert_true(result.done, 'done'); + assert_equals(result.value, undefined, 'value'); + }); +}, 'ReadableStream with byte source: enqueue(), close(), getReader(), then read()'); + +promise_test(() => { + let controller; + let byobRequest; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + controller.enqueue(new Uint8Array(16)); + byobRequest = controller.byobRequest; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.read().then(result => { + assert_false(result.done, 'done'); + assert_equals(result.value.byteLength, 16, 'byteLength'); + assert_equals(byobRequest, null, 'byobRequest must be null'); + }); +}, 'ReadableStream with byte source: Respond to pull() by enqueue()'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + const desiredSizes = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + desiredSizes.push(controller.desiredSize); + controller.enqueue(new Uint8Array(1)); + desiredSizes.push(controller.desiredSize); + controller.enqueue(new Uint8Array(1)); + desiredSizes.push(controller.desiredSize); + + ++pullCount; + }, + type: 'bytes' 
+ }, { + highWaterMark: 0 + }); + + const reader = stream.getReader(); + + const p0 = reader.read(); + const p1 = reader.read(); + const p2 = reader.read(); + + // Respond to the first pull call. + controller.enqueue(new Uint8Array(1)); + + assert_equals(pullCount, 0, 'pullCount after the enqueue() outside pull'); + + return Promise.all([p0, p1, p2]).then(result => { + assert_equals(pullCount, 1, 'pullCount after completion of all read()s'); + + assert_equals(result[0].done, false, 'result[0].done'); + assert_equals(result[0].value.byteLength, 1, 'result[0].value.byteLength'); + assert_equals(result[1].done, false, 'result[1].done'); + assert_equals(result[1].value.byteLength, 1, 'result[1].value.byteLength'); + assert_equals(result[2].done, false, 'result[2].done'); + assert_equals(result[2].value.byteLength, 1, 'result[2].value.byteLength'); + assert_equals(byobRequest, null, 'byobRequest should be null'); + assert_equals(desiredSizes[0], 0, 'desiredSize on pull should be 0'); + assert_equals(desiredSizes[1], 0, 'desiredSize after 1st enqueue() should be 0'); + assert_equals(desiredSizes[2], 0, 'desiredSize after 2nd enqueue() should be 0'); + assert_equals(pullCount, 1, 'pull() should only be called once'); + }); +}, 'ReadableStream with byte source: Respond to pull() by enqueue() asynchronously'); + +promise_test(() => { + let pullCount = 0; + + let byobRequest; + const desiredSizes = []; + + const stream = new ReadableStream({ + pull(c) { + byobRequest = c.byobRequest; + desiredSizes.push(c.desiredSize); + + if (pullCount < 3) { + c.enqueue(new Uint8Array(1)); + } else { + c.close(); + } + + ++pullCount; + }, + type: 'bytes' + }, { + highWaterMark: 256 + }); + + const reader = stream.getReader(); + + const p0 = reader.read(); + const p1 = reader.read(); + const p2 = reader.read(); + + assert_equals(pullCount, 0, 'No pull as start() just finished and is not yet reflected to the state of the stream'); + + return Promise.all([p0, p1, p2]).then(result => { + 
assert_equals(pullCount, 4, 'pullCount after completion of all read()s'); + + assert_equals(result[0].done, false, 'result[0].done'); + assert_equals(result[0].value.byteLength, 1, 'result[0].value.byteLength'); + assert_equals(result[1].done, false, 'result[1].done'); + assert_equals(result[1].value.byteLength, 1, 'result[1].value.byteLength'); + assert_equals(result[2].done, false, 'result[2].done'); + assert_equals(result[2].value.byteLength, 1, 'result[2].value.byteLength'); + assert_equals(byobRequest, null, 'byobRequest should be null'); + assert_equals(desiredSizes[0], 256, 'desiredSize on pull should be 256'); + assert_equals(desiredSizes[1], 256, 'desiredSize after 1st enqueue() should be 256'); + assert_equals(desiredSizes[2], 256, 'desiredSize after 2nd enqueue() should be 256'); + assert_equals(desiredSizes[3], 256, 'desiredSize after 3rd enqueue() should be 256'); + }); +}, 'ReadableStream with byte source: Respond to multiple pull() by separate enqueue()'); + +promise_test(() => { + let controller; + + let pullCount = 0; + const byobRequestDefined = []; + let byobRequestViewDefined; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequestDefined.push(controller.byobRequest !== null); + const initialByobRequest = controller.byobRequest; + + const view = controller.byobRequest.view; + view[0] = 0x01; + controller.byobRequest.respond(1); + + byobRequestDefined.push(controller.byobRequest !== null); + byobRequestViewDefined = initialByobRequest.view !== null; + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(1)).then(result => { + assert_false(result.done, 'result.done'); + assert_equals(result.value.byteLength, 1, 'result.value.byteLength'); + assert_equals(result.value[0], 0x01, 'result.value[0]'); + assert_equals(pullCount, 1, 'pull() should be called only once'); + assert_true(byobRequestDefined[0], 'byobRequest must not 
be null before respond()'); + assert_false(byobRequestDefined[1], 'byobRequest must be null after respond()'); + assert_false(byobRequestViewDefined, 'view of initial byobRequest must be null after respond()'); + }); +}, 'ReadableStream with byte source: read(view), then respond()'); + +promise_test(() => { + let controller; + + let pullCount = 0; + const byobRequestDefined = []; + let byobRequestViewDefined; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + async pull() { + byobRequestDefined.push(controller.byobRequest !== null); + const initialByobRequest = controller.byobRequest; + + const transferredView = await transferArrayBufferView(controller.byobRequest.view); + transferredView[0] = 0x01; + controller.byobRequest.respondWithNewView(transferredView); + + byobRequestDefined.push(controller.byobRequest !== null); + byobRequestViewDefined = initialByobRequest.view !== null; + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(1)).then(result => { + assert_false(result.done, 'result.done'); + assert_equals(result.value.byteLength, 1, 'result.value.byteLength'); + assert_equals(result.value[0], 0x01, 'result.value[0]'); + assert_equals(pullCount, 1, 'pull() should be called only once'); + assert_true(byobRequestDefined[0], 'byobRequest must not be null before respondWithNewView()'); + assert_false(byobRequestDefined[1], 'byobRequest must be null after respondWithNewView()'); + assert_false(byobRequestViewDefined, 'view of initial byobRequest must be null after respondWithNewView()'); + }); +}, 'ReadableStream with byte source: read(view), then respondWithNewView() with a transferred ArrayBuffer'); + +promise_test(() => { + let controller; + let byobRequestWasDefined; + let incorrectRespondException; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequestWasDefined = controller.byobRequest !== null; + + try 
{ + controller.byobRequest.respond(2); + } catch (e) { + incorrectRespondException = e; + } + + controller.byobRequest.respond(1); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(1)).then(() => { + assert_true(byobRequestWasDefined, 'byobRequest should be non-null'); + assert_not_equals(incorrectRespondException, undefined, 'respond() must throw'); + assert_equals(incorrectRespondException.name, 'RangeError', 'respond() must throw a RangeError'); + }); +}, 'ReadableStream with byte source: read(view), then respond() with too big value'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + let viewInfo; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + ++pullCount; + + byobRequest = controller.byobRequest; + const view = byobRequest.view; + viewInfo = extractViewInfo(view); + + view[0] = 0x01; + view[1] = 0x02; + view[2] = 0x03; + + controller.byobRequest.respond(3); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint16Array(2)).then(result => { + assert_equals(pullCount, 1); + + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 2, 'byteLength'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0102); + + return reader.read(new Uint8Array(1)); + }).then(result => { + assert_equals(pullCount, 1); + assert_not_equals(byobRequest, null, 'byobRequest must not be null'); + assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 4, 'view.buffer.byteLength should be 4'); + assert_equals(viewInfo.byteOffset, 0, 'view.byteOffset should be 0'); + assert_equals(viewInfo.byteLength, 4, 'view.byteLength should be 4'); + + 
assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 1, 'byteLength'); + + assert_equals(view[0], 0x03); + }); +}, 'ReadableStream with byte source: respond(3) to read(view) with 2 element Uint16Array enqueues the 1 byte ' + + 'remainder'); + +promise_test(t => { + const stream = new ReadableStream({ + start(controller) { + const view = new Uint8Array(16); + view[15] = 0x01; + controller.enqueue(view); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(16)).then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 16); + assert_equals(view[15], 0x01); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view)'); + +promise_test(t => { + let cancelCount = 0; + let reason; + + const passedReason = new TypeError('foo'); + + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array(16)); + }, + pull: t.unreached_func('pull() should not be called'), + cancel(r) { + if (cancelCount === 0) { + reason = r; + } + + ++cancelCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + return reader.cancel(passedReason).then(result => { + assert_equals(result, undefined); + assert_equals(cancelCount, 1); + assert_equals(reason, passedReason, 'reason should equal the passed reason'); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then cancel() (mode = not BYOB)'); + +promise_test(t => { + let cancelCount = 0; + let reason; + + const passedReason = new TypeError('foo'); + + const stream = new ReadableStream({ + start(c) { + c.enqueue(new Uint8Array(16)); + }, + pull: t.unreached_func('pull() should not be called'), + cancel(r) { + if (cancelCount === 0) { + reason = r; + } + + ++cancelCount; + 
}, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.cancel(passedReason).then(result => { + assert_equals(result, undefined); + assert_equals(cancelCount, 1); + assert_equals(reason, passedReason, 'reason should equal the passed reason'); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then cancel() (mode = BYOB)'); + +promise_test(t => { + let cancelCount = 0; + let reason; + + const passedReason = new TypeError('foo'); + + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + cancel(r) { + if (cancelCount === 0) { + reason = r; + } + + ++cancelCount; + + return 'bar'; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const readPromise = reader.read(new Uint8Array(1)).then(result => { + assert_true(result.done, 'result.done'); + assert_equals(result.value, undefined, 'result.value'); + }); + + const cancelPromise = reader.cancel(passedReason).then(result => { + assert_equals(result, undefined, 'cancel() return value should be fulfilled with undefined'); + assert_equals(cancelCount, 1, 'cancel() should be called only once'); + assert_equals(reason, passedReason, 'reason should equal the passed reason'); + }); + + return Promise.all([readPromise, cancelPromise]); +}, 'ReadableStream with byte source: getReader(), read(view), then cancel()'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + const viewInfos = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + + viewInfos.push(extractViewInfo(controller.byobRequest.view)); + controller.enqueue(new Uint8Array(1)); + viewInfos.push(extractViewInfo(controller.byobRequest.view)); + + ++pullCount; + }, + type: 'bytes' + }); + + return Promise.resolve().then(() => { + assert_equals(pullCount, 0, 'No pull() as no 
read(view) yet'); + + const reader = stream.getReader({ mode: 'byob' }); + + const promise = reader.read(new Uint16Array(1)).then(result => { + assert_true(result.done, 'result.done'); + assert_equals(result.value, undefined, 'result.value'); + }); + + assert_equals(pullCount, 1, '1 pull() should have been made in response to partial fill by enqueue()'); + assert_not_equals(byobRequest, null, 'byobRequest should not be null'); + assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() should be 2'); + assert_equals(viewInfos[1].byteLength, 1, 'byteLength after enqueue() should be 1'); + + reader.cancel(); + + assert_equals(pullCount, 1, 'pull() should only be called once'); + return promise; + }); +}, 'ReadableStream with byte source: cancel() with partially filled pending pull() request'); + +promise_test(() => { + let controller; + let pullCalled = false; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(8); + view[7] = 0x01; + c.enqueue(view); + + controller = c; + }, + pull() { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const buffer = new ArrayBuffer(16); + + return reader.read(new Uint8Array(buffer, 8, 8)).then(result => { + assert_false(result.done); + + assert_false(pullCalled, 'pull() must not have been called'); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + assert_equals(view.buffer.byteLength, 16); + assert_equals(view.byteOffset, 8); + assert_equals(view.byteLength, 8); + assert_equals(view[7], 0x01); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) where view.buffer is not fully ' + + 'covered by view'); + +promise_test(() => { + let controller; + let pullCalled = false; + + const stream = new ReadableStream({ + start(c) { + let view; + + view = new Uint8Array(16); + view[15] = 123; + c.enqueue(view); + + view = new Uint8Array(8); + view[7] = 111; + c.enqueue(view); + + 
controller = c; + }, + pull() { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(24)).then(result => { + assert_false(result.done, 'done'); + + assert_false(pullCalled, 'pull() must not have been called'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 24, 'byteLength'); + assert_equals(view[15], 123, 'Contents are set from the first chunk'); + assert_equals(view[23], 111, 'Contents are set from the second chunk'); + }); +}, 'ReadableStream with byte source: Multiple enqueue(), getReader(), then read(view)'); + +promise_test(() => { + let pullCalled = false; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(16); + view[15] = 0x01; + c.enqueue(view); + }, + pull() { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(24)).then(result => { + assert_false(result.done); + + assert_false(pullCalled, 'pull() must not have been called'); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 16); + assert_equals(view[15], 0x01); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) with a bigger view'); + +promise_test(() => { + let pullCalled = false; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(16); + view[7] = 0x01; + view[15] = 0x02; + c.enqueue(view); + }, + pull() { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(8)).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 8); + assert_equals(view[7], 0x01); + + return reader.read(new Uint8Array(8)); + }).then(result => { + 
assert_false(result.done, 'done'); + + assert_false(pullCalled, 'pull() must not have been called'); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 8); + assert_equals(view[7], 0x02); + }); +}, 'ReadableStream with byte source: enqueue(), getReader(), then read(view) with smaller views'); + +promise_test(() => { + let controller; + let viewInfo; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(1); + view[0] = 0xff; + c.enqueue(view); + + controller = c; + }, + pull() { + if (controller.byobRequest === null) { + return; + } + + const view = controller.byobRequest.view; + viewInfo = extractViewInfo(view); + + view[0] = 0xaa; + controller.byobRequest.respond(1); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint16Array(1)).then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 2); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0xffaa); + + assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2'); + assert_equals(viewInfo.byteOffset, 1, 'view.byteOffset should be 1'); + assert_equals(viewInfo.byteLength, 1, 'view.byteLength should be 1'); + }); +}, 'ReadableStream with byte source: enqueue() 1 byte, getReader(), then read(view) with Uint16Array'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + let viewInfo; + let desiredSize; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(3); + view[0] = 0x01; + view[2] = 0x02; + c.enqueue(view); + + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + + const view = controller.byobRequest.view; + + viewInfo = 
extractViewInfo(view); + + view[0] = 0x03; + controller.byobRequest.respond(1); + + desiredSize = controller.desiredSize; + + ++pullCount; + }, + type: 'bytes' + }); + + // Wait for completion of the start method to be reflected. + return Promise.resolve().then(() => { + const reader = stream.getReader({ mode: 'byob' }); + + const promise = reader.read(new Uint16Array(2)).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.constructor, Uint16Array, 'constructor'); + assert_equals(view.buffer.byteLength, 4, 'buffer.byteLength'); + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 2, 'byteLength'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0100, 'contents are set'); + + const p = reader.read(new Uint16Array(1)); + + assert_equals(pullCount, 1); + + return p; + }).then(result => { + assert_false(result.done, 'done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 2, 'buffer.byteLength'); + assert_equals(view.byteOffset, 0, 'byteOffset'); + assert_equals(view.byteLength, 2, 'byteLength'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0203, 'contents are set'); + + assert_not_equals(byobRequest, null, 'byobRequest must not be null'); + assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2'); + assert_equals(viewInfo.byteOffset, 1, 'view.byteOffset should be 1'); + assert_equals(viewInfo.byteLength, 1, 'view.byteLength should be 1'); + assert_equals(desiredSize, 0, 'desiredSize should be zero'); + }); + + assert_equals(pullCount, 0); + + return promise; + }); +}, 'ReadableStream with byte source: enqueue() 3 byte, getReader(), then read(view) with 2-element Uint16Array'); + +promise_test(t => 
{ + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(1); + view[0] = 0xff; + c.enqueue(view); + c.close(); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + + return promise_rejects_js(t, TypeError, reader.read(new Uint16Array(1)), 'read(view) must fail') + .then(() => promise_rejects_js(t, TypeError, reader.closed, 'reader.closed should reject')); +}, 'ReadableStream with byte source: read(view) with Uint16Array on close()-d stream with 1 byte enqueue()-d must ' + + 'fail'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + const view = new Uint8Array(1); + view[0] = 0xff; + c.enqueue(view); + + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const readPromise = reader.read(new Uint16Array(1)); + + assert_throws_js(TypeError, () => controller.close(), 'controller.close() must throw'); + + return promise_rejects_js(t, TypeError, readPromise, 'read(view) must fail') + .then(() => promise_rejects_js(t, TypeError, reader.closed, 'reader.closed must reject')); +}, 'ReadableStream with byte source: A stream must be errored if close()-d before fulfilling read(view) with ' + + 'Uint16Array'); + +test(() => { + let controller; + + new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + // Enqueue a chunk so that the stream doesn't get closed. This is to check duplicate close() calls are rejected + // even if the stream has not yet entered the closed state. 
+ const view = new Uint8Array(1); + controller.enqueue(view); + controller.close(); + + assert_throws_js(TypeError, () => controller.close(), 'controller.close() must throw'); +}, 'ReadableStream with byte source: Throw if close()-ed more than once'); + +test(() => { + let controller; + + new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + // Enqueue a chunk so that the stream doesn't get closed. This is to check enqueue() after close() is rejected + // even if the stream has not yet entered the closed state. + const view = new Uint8Array(1); + controller.enqueue(view); + controller.close(); + + assert_throws_js(TypeError, () => controller.enqueue(view), 'controller.close() must throw'); +}, 'ReadableStream with byte source: Throw on enqueue() after close()'); + +promise_test(() => { + let controller; + let byobRequest; + let viewInfo; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + const view = controller.byobRequest.view; + viewInfo = extractViewInfo(view); + + view[15] = 0x01; + controller.byobRequest.respond(16); + controller.close(); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint8Array(16)).then(result => { + assert_false(result.done); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 16); + assert_equals(view[15], 0x01); + + return reader.read(new Uint8Array(16)); + }).then(result => { + assert_true(result.done); + + const view = result.value; + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 0); + + assert_not_equals(byobRequest, null, 'byobRequest must not be null'); + assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfo.bufferByteLength, 16, 'view.buffer.byteLength should be 16'); + assert_equals(viewInfo.byteOffset, 0, 'view.byteOffset should be 
0'); + assert_equals(viewInfo.byteLength, 16, 'view.byteLength should be 16'); + }); +}, 'ReadableStream with byte source: read(view), then respond() and close() in pull()'); + +promise_test(() => { + let pullCount = 0; + + let controller; + const viewInfos = []; + const viewInfosAfterRespond = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + if (controller.byobRequest === null) { + return; + } + + for (let i = 0; i < 4; ++i) { + const view = controller.byobRequest.view; + viewInfos.push(extractViewInfo(view)); + + view[0] = 0x01; + controller.byobRequest.respond(1); + viewInfosAfterRespond.push(extractViewInfo(view)); + } + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint32Array(1)).then(result => { + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 4, 'result.value.byteLength'); + assert_equals(view[0], 0x01010101, 'result.value[0]'); + + assert_equals(pullCount, 1, 'pull() should only be called once'); + + for (let i = 0; i < 4; ++i) { + assert_equals(viewInfos[i].constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfos[i].bufferByteLength, 4, 'view.buffer.byteLength should be 4'); + + assert_equals(viewInfos[i].byteOffset, i, 'view.byteOffset should be i'); + assert_equals(viewInfos[i].byteLength, 4 - i, 'view.byteLength should be 4 - i'); + + assert_equals(viewInfosAfterRespond[i].bufferByteLength, 0, 'view.buffer should be transferred after respond()'); + } + }); +}, 'ReadableStream with byte source: read(view) with Uint32Array, then fill it by multiple respond() calls'); + +promise_test(() => { + let pullCount = 0; + + let controller; + const viewInfos = []; + const viewInfosAfterEnqueue = []; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + 
if (controller.byobRequest === null) { + return; + } + + for (let i = 0; i < 4; ++i) { + const view = controller.byobRequest.view; + viewInfos.push(extractViewInfo(view)); + + controller.enqueue(new Uint8Array([0x01])); + viewInfosAfterEnqueue.push(extractViewInfo(view)); + } + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return reader.read(new Uint32Array(1)).then(result => { + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 4, 'result.value.byteLength'); + assert_equals(view[0], 0x01010101, 'result.value[0]'); + + assert_equals(pullCount, 1, 'pull() should only be called once'); + + for (let i = 0; i < 4; ++i) { + assert_equals(viewInfos[i].constructor, Uint8Array, 'view.constructor should be Uint8Array'); + assert_equals(viewInfos[i].bufferByteLength, 4, 'view.buffer.byteLength should be 4'); + + assert_equals(viewInfos[i].byteOffset, i, 'view.byteOffset should be i'); + assert_equals(viewInfos[i].byteLength, 4 - i, 'view.byteLength should be 4 - i'); + + assert_equals(viewInfosAfterEnqueue[i].bufferByteLength, 0, 'view.buffer should be transferred after enqueue()'); + } + }); +}, 'ReadableStream with byte source: read(view) with Uint32Array, then fill it by multiple enqueue() calls'); + +promise_test(() => { + let pullCount = 0; + + let controller; + let byobRequest; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + + ++pullCount; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + const p0 = reader.read().then(result => { + assert_equals(pullCount, 1); + + controller.enqueue(new Uint8Array(2)); + + // Since the queue has data no less than HWM, no more pull. 
+ assert_equals(pullCount, 1); + + assert_false(result.done); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + assert_equals(view.buffer.byteLength, 1); + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 1); + }); + + assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled'); + + const p1 = reader.read().then(result => { + assert_equals(pullCount, 1); + + assert_false(result.done); + + const view = result.value; + assert_equals(view.constructor, Uint8Array); + assert_equals(view.buffer.byteLength, 2); + assert_equals(view.byteOffset, 0); + assert_equals(view.byteLength, 2); + + assert_equals(byobRequest, null, 'byobRequest must be null'); + }); + + assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled'); + + controller.enqueue(new Uint8Array(1)); + + assert_equals(pullCount, 0, 'No pull should have been made since the startPromise has not yet been handled'); + + return Promise.all([p0, p1]); +}, 'ReadableStream with byte source: read() twice, then enqueue() twice'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const p0 = reader.read(new Uint8Array(16)).then(result => { + assert_true(result.done, '1st read: done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 16, '1st read: buffer.byteLength'); + assert_equals(view.byteOffset, 0, '1st read: byteOffset'); + assert_equals(view.byteLength, 0, '1st read: byteLength'); + }); + + const p1 = reader.read(new Uint8Array(32)).then(result => { + assert_true(result.done, '2nd read: done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 32, '2nd read: buffer.byteLength'); + assert_equals(view.byteOffset, 0, '2nd read: 
byteOffset'); + assert_equals(view.byteLength, 0, '2nd read: byteLength'); + }); + + controller.close(); + controller.byobRequest.respond(0); + + return Promise.all([p0, p1]); +}, 'ReadableStream with byte source: Multiple read(view), close() and respond()'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const p0 = reader.read(new Uint8Array(16)).then(result => { + assert_false(result.done, '1st read: done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 16, '1st read: buffer.byteLength'); + assert_equals(view.byteOffset, 0, '1st read: byteOffset'); + assert_equals(view.byteLength, 16, '1st read: byteLength'); + }); + + const p1 = reader.read(new Uint8Array(16)).then(result => { + assert_false(result.done, '2nd read: done'); + + const view = result.value; + assert_equals(view.buffer.byteLength, 16, '2nd read: buffer.byteLength'); + assert_equals(view.byteOffset, 0, '2nd read: byteOffset'); + assert_equals(view.byteLength, 8, '2nd read: byteLength'); + }); + + controller.enqueue(new Uint8Array(24)); + + return Promise.all([p0, p1]); +}, 'ReadableStream with byte source: Multiple read(view), big enqueue()'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + let bytesRead = 0; + + function pump() { + return reader.read(new Uint8Array(7)).then(result => { + if (result.done) { + assert_equals(bytesRead, 1024); + return undefined; + } + + bytesRead += result.value.byteLength; + + return pump(); + }); + } + const promise = pump(); + + controller.enqueue(new Uint8Array(512)); + controller.enqueue(new Uint8Array(512)); + controller.close(); + 
+ return promise; +}, 'ReadableStream with byte source: Multiple read(view) and multiple enqueue()'); + +promise_test(t => { + let pullCalled = false; + const stream = new ReadableStream({ + pull(controller) { + pullCalled = true; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_js(t, TypeError, reader.read(), 'read() must fail') + .then(() => assert_false(pullCalled, 'pull() must not have been called')); +}, 'ReadableStream with byte source: read(view) with passing undefined as view must fail'); + +promise_test(t => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_js(t, TypeError, reader.read({}), 'read(view) must fail'); +}, 'ReadableStream with byte source: read(view) with passing an empty object as view must fail'); + +promise_test(t => { + const stream = new ReadableStream({ + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_js(t, TypeError, + reader.read({ buffer: new ArrayBuffer(10), byteOffset: 0, byteLength: 10 }), + 'read(view) must fail'); +}, 'ReadableStream with byte source: Even read(view) with passing ArrayBufferView like object as view must fail'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.error(error1); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader(); + + return promise_rejects_exactly(t, error1, reader.read(), 'read() must fail'); +}, 'ReadableStream with byte source: read() on an errored stream'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + const promise = promise_rejects_exactly(t, error1, reader.read(), 'read() must fail'); + + controller.error(error1); + + return promise; +}, 'ReadableStream with 
byte source: read(), then error()'); + +promise_test(t => { + const stream = new ReadableStream({ + start(c) { + c.error(error1); + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read() must fail'); +}, 'ReadableStream with byte source: read(view) on an errored stream'); + +promise_test(t => { + let controller; + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + const promise = promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read() must fail'); + + controller.error(error1); + + return promise; +}, 'ReadableStream with byte source: read(view), then error()'); + +promise_test(t => { + let controller; + let byobRequest; + + const testError = new TypeError('foo'); + + const stream = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + byobRequest = controller.byobRequest; + throw testError; + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + const promise = promise_rejects_exactly(t, testError, reader.read(), 'read() must fail'); + return promise_rejects_exactly(t, testError, promise.then(() => reader.closed)) + .then(() => assert_equals(byobRequest, null, 'byobRequest must be null')); +}, 'ReadableStream with byte source: Throwing in pull function must error the stream'); + +promise_test(t => { + let byobRequest; + + const stream = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + controller.error(error1); + throw new TypeError('foo'); + }, + type: 'bytes' + }); + + const reader = stream.getReader(); + + return promise_rejects_exactly(t, error1, reader.read(), 'read() must fail') + .then(() => promise_rejects_exactly(t, error1, reader.closed, 'closed must fail')) + .then(() => assert_equals(byobRequest, null, 
'byobRequest must be null')); +}, 'ReadableStream with byte source: Throwing in pull in response to read() must be ignored if the stream is ' + + 'errored in it'); + +promise_test(t => { + let byobRequest; + + const testError = new TypeError('foo'); + + const stream = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + throw testError; + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_exactly(t, testError, reader.read(new Uint8Array(1)), 'read(view) must fail') + .then(() => promise_rejects_exactly(t, testError, reader.closed, 'reader.closed must reject')) + .then(() => assert_not_equals(byobRequest, null, 'byobRequest must not be null')); +}, 'ReadableStream with byte source: Throwing in pull in response to read(view) function must error the stream'); + +promise_test(t => { + let byobRequest; + + const stream = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + controller.error(error1); + throw new TypeError('foo'); + }, + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + + return promise_rejects_exactly(t, error1, reader.read(new Uint8Array(1)), 'read(view) must fail') + .then(() => promise_rejects_exactly(t, error1, reader.closed, 'closed must fail')) + .then(() => assert_not_equals(byobRequest, null, 'byobRequest must not be null')); +}, 'ReadableStream with byte source: Throwing in pull in response to read(view) must be ignored if the stream is ' + + 'errored in it'); + +promise_test(() => { + let byobRequest; + const rs = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + byobRequest.respond(4); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + const view = new Uint8Array(16); + return reader.read(view).then(() => { + assert_throws_js(TypeError, () => byobRequest.respond(4), 'respond() should throw a TypeError'); + }); +}, 'calling respond() twice on 
the same byobRequest should throw'); + +promise_test(() => { + let byobRequest; + const newView = () => new Uint8Array(16); + const rs = new ReadableStream({ + pull(controller) { + byobRequest = controller.byobRequest; + byobRequest.respondWithNewView(newView()); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + return reader.read(newView()).then(() => { + assert_throws_js(TypeError, () => byobRequest.respondWithNewView(newView()), + 'respondWithNewView() should throw a TypeError'); + }); +}, 'calling respondWithNewView() twice on the same byobRequest should throw'); + +promise_test(() => { + let controller; + let byobRequest; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + let resolvePull; + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull(c) { + byobRequest = c.byobRequest; + resolvePullCalledPromise(); + return new Promise(resolve => { + resolvePull = resolve; + }); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + const readPromise = reader.read(new Uint8Array(16)); + return pullCalledPromise.then(() => { + controller.close(); + byobRequest.respond(0); + resolvePull(); + return readPromise.then(() => { + assert_throws_js(TypeError, () => byobRequest.respond(0), 'respond() should throw'); + }); + }); +}, 'calling respond(0) twice on the same byobRequest should throw even when closed'); + +promise_test(() => { + let controller; + let byobRequest; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + let resolvePull; + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull(c) { + byobRequest = c.byobRequest; + resolvePullCalledPromise(); + return new Promise(resolve => { + resolvePull = resolve; + }); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + const readPromise = 
reader.read(new Uint8Array(16)); + return pullCalledPromise.then(() => { + const cancelPromise = reader.cancel('meh'); + assert_throws_js(TypeError, () => byobRequest.respond(0), 'respond() should throw'); + resolvePull(); + return Promise.all([readPromise, cancelPromise]); + }); +}, 'calling respond() should throw when canceled'); + +promise_test(() => { + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + let resolvePull; + const rs = new ReadableStream({ + pull() { + resolvePullCalledPromise(); + return new Promise(resolve => { + resolvePull = resolve; + }); + }, + type: 'bytes' + }); + const reader = rs.getReader({ mode: 'byob' }); + reader.read(new Uint8Array(16)); + return pullCalledPromise.then(() => { + resolvePull(); + return delay(0).then(() => { + assert_throws_js(TypeError, () => reader.releaseLock(), 'releaseLock() should throw'); + }); + }); +}, 'pull() resolving should not make releaseLock() possible'); + +promise_test(() => { + // Tests https://github.com/whatwg/streams/issues/686 + + let controller; + const rs = new ReadableStream({ + autoAllocateChunkSize: 128, + start(c) { + controller = c; + }, + type: 'bytes' + }); + + const readPromise = rs.getReader().read(); + + const br = controller.byobRequest; + controller.close(); + + br.respond(0); + + return readPromise; +}, 'ReadableStream with byte source: default reader + autoAllocateChunkSize + byobRequest interaction'); + +test(() => { + assert_throws_js(TypeError, () => new ReadableStream({ autoAllocateChunkSize: 0, type: 'bytes' }), + 'controller cannot be setup with autoAllocateChunkSize = 0'); +}, 'ReadableStream with byte source: autoAllocateChunkSize cannot be 0'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + const stream = new ReadableStream({ type: 'bytes' }); + new ReadableStreamBYOBReader(stream); +}, 
'ReadableStreamBYOBReader can be constructed directly'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + assert_throws_js(TypeError, () => new ReadableStreamBYOBReader({}), 'constructor must throw'); +}, 'ReadableStreamBYOBReader constructor requires a ReadableStream argument'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + const stream = new ReadableStream({ type: 'bytes' }); + stream.getReader(); + assert_throws_js(TypeError, () => new ReadableStreamBYOBReader(stream), 'constructor must throw'); +}, 'ReadableStreamBYOBReader constructor requires an unlocked ReadableStream'); + +test(() => { + const ReadableStreamBYOBReader = new ReadableStream({ type: 'bytes' }).getReader({ mode: 'byob' }).constructor; + const stream = new ReadableStream(); + assert_throws_js(TypeError, () => new ReadableStreamBYOBReader(stream), 'constructor must throw'); +}, 'ReadableStreamBYOBReader constructor requires a ReadableStream with type "bytes"'); + +test(() => { + assert_throws_js(RangeError, () => new ReadableStream({ type: 'bytes' }, { + size() { + return 1; + } + }), 'constructor should throw for size function'); + + assert_throws_js(RangeError, + () => new ReadableStream({ type: 'bytes' }, new CountQueuingStrategy({ highWaterMark: 1 })), + 'constructor should throw when strategy is CountQueuingStrategy'); + + assert_throws_js(RangeError, + () => new ReadableStream({ type: 'bytes' }, new ByteLengthQueuingStrategy({ highWaterMark: 512 })), + 'constructor should throw when strategy is ByteLengthQueuingStrategy'); + + class HasSizeMethod { + size() {} + } + + assert_throws_js(RangeError, () => new ReadableStream({ type: 'bytes' }, new HasSizeMethod()), + 'constructor should throw when size on the prototype chain'); +}, 'ReadableStream constructor should not accept a strategy with a size defined if type is "bytes"'); 
+ +promise_test(async t => { + const stream = new ReadableStream({ + pull: t.step_func(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 1); + view[0] = 1; + + c.byobRequest.respondWithNewView(view); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const result = await reader.read(new Uint8Array([4, 5, 6])); + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 1, 'result.value.byteLength'); + assert_equals(view[0], 1, 'result.value[0]'); + assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength'); + assert_array_equals([...new Uint8Array(view.buffer)], [1, 5, 6], 'result.value.buffer'); +}, 'ReadableStream with byte source: respondWithNewView() with a smaller view'); + +promise_test(async t => { + const stream = new ReadableStream({ + pull: t.step_func(c => { + const view = new Uint8Array(c.byobRequest.view.buffer, 0, 0); + + c.close(); + + c.byobRequest.respondWithNewView(view); + }), + type: 'bytes' + }); + const reader = stream.getReader({ mode: 'byob' }); + + const result = await reader.read(new Uint8Array([4, 5, 6])); + assert_true(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 0, 'result.value.byteLength'); + assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength'); + assert_array_equals([...new Uint8Array(view.buffer)], [4, 5, 6], 'result.value.buffer'); +}, 'ReadableStream with byte source: respondWithNewView() with a zero-length view (in the closed state)'); + +promise_test(async t => { + let controller; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + const stream = new ReadableStream({ + start: t.step_func((c) => { + controller = c; + }), + pull: t.step_func(() 
=> { + resolvePullCalledPromise(); + }), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + const readPromise = reader.read(new Uint8Array([4, 5, 6])); + await pullCalledPromise; + + // Transfer the original BYOB request's buffer, and respond with a new view on that buffer + const transferredView = await transferArrayBufferView(controller.byobRequest.view); + const newView = transferredView.subarray(0, 1); + newView[0] = 42; + + controller.byobRequest.respondWithNewView(newView); + + const result = await readPromise; + assert_false(result.done, 'result.done'); + + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 1, 'result.value.byteLength'); + assert_equals(view[0], 42, 'result.value[0]'); + assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength'); + assert_array_equals([...new Uint8Array(view.buffer)], [42, 5, 6], 'result.value.buffer'); + +}, 'ReadableStream with byte source: respondWithNewView() with a transferred non-zero-length view ' + + '(in the readable state)'); + +promise_test(async t => { + let controller; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + const stream = new ReadableStream({ + start: t.step_func((c) => { + controller = c; + }), + pull: t.step_func(() => { + resolvePullCalledPromise(); + }), + type: 'bytes' + }); + + const reader = stream.getReader({ mode: 'byob' }); + const readPromise = reader.read(new Uint8Array([4, 5, 6])); + await pullCalledPromise; + + // Transfer the original BYOB request's buffer, and respond with an empty view on that buffer + const transferredView = await transferArrayBufferView(controller.byobRequest.view); + const newView = transferredView.subarray(0, 0); + + controller.close(); + controller.byobRequest.respondWithNewView(newView); + + const result = await readPromise; + assert_true(result.done, 'result.done'); 
+ + const view = result.value; + assert_equals(view.byteOffset, 0, 'result.value.byteOffset'); + assert_equals(view.byteLength, 0, 'result.value.byteLength'); + assert_equals(view.buffer.byteLength, 3, 'result.value.buffer.byteLength'); + assert_array_equals([...new Uint8Array(view.buffer)], [4, 5, 6], 'result.value.buffer'); + +}, 'ReadableStream with byte source: respondWithNewView() with a transferred zero-length view ' + + '(in the closed state)'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js new file mode 100644 index 00000000000000..7c0bffb78710fe --- /dev/null +++ b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js @@ -0,0 +1,58 @@ +// META: global=window,worker,jsshell +'use strict'; + +promise_test(async t => { + const rs = new ReadableStream({ + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const reader = rs.getReader({ mode: 'byob' }); + const memory = new WebAssembly.Memory({ initial: 1 }); + const view = new Uint8Array(memory.buffer, 0, 1); + await promise_rejects_js(t, TypeError, reader.read(view)); +}, 'ReadableStream with byte source: read() with a non-transferable buffer'); + +test(t => { + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull: t.unreached_func('pull() should not be called'), + type: 'bytes' + }); + + const memory = new WebAssembly.Memory({ initial: 1 }); + const view = new Uint8Array(memory.buffer, 0, 1); + assert_throws_js(TypeError, () => controller.enqueue(view)); +}, 'ReadableStream with byte source: enqueue() with a non-transferable buffer'); + +promise_test(async t => { + let byobRequest; + let resolvePullCalledPromise; + const pullCalledPromise = new Promise(resolve => { + resolvePullCalledPromise = resolve; + }); + const rs = new ReadableStream({ + pull(controller) { + byobRequest = 
controller.byobRequest; + resolvePullCalledPromise(); + }, + type: 'bytes' + }); + + const memory = new WebAssembly.Memory({ initial: 1 }); + // Make sure the backing buffers of both views have the same length + const byobView = new Uint8Array(new ArrayBuffer(memory.buffer.byteLength), 0, 1); + const newView = new Uint8Array(memory.buffer, byobView.byteOffset, byobView.byteLength); + + const reader = rs.getReader({ mode: 'byob' }); + reader.read(byobView).then( + t.unreached_func('read() should not resolve'), + t.unreached_func('read() should not reject') + ); + await pullCalledPromise; + + assert_throws_js(TypeError, () => byobRequest.respondWithNewView(newView)); +}, 'ReadableStream with byte source: respondWithNewView() with a non-transferable buffer'); diff --git a/test/fixtures/wpt/streams/readable-streams/async-iterator.any.js b/test/fixtures/wpt/streams/readable-streams/async-iterator.any.js new file mode 100644 index 00000000000000..7669a35d9a7a0c --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/async-iterator.any.js @@ -0,0 +1,650 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); + +function assert_iter_result(iterResult, value, done, message) { + const prefix = message === undefined ? 
'' : `${message} `; + assert_equals(typeof iterResult, 'object', `${prefix}type is object`); + assert_equals(Object.getPrototypeOf(iterResult), Object.prototype, `${prefix}[[Prototype]]`); + assert_array_equals(Object.getOwnPropertyNames(iterResult).sort(), ['done', 'value'], `${prefix}property names`); + assert_equals(iterResult.value, value, `${prefix}value`); + assert_equals(iterResult.done, done, `${prefix}done`); +} + +test(() => { + const s = new ReadableStream(); + const it = s.values(); + const proto = Object.getPrototypeOf(it); + + const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () {}).prototype); + assert_equals(Object.getPrototypeOf(proto), AsyncIteratorPrototype, 'prototype should extend AsyncIteratorPrototype'); + + const methods = ['next', 'return'].sort(); + assert_array_equals(Object.getOwnPropertyNames(proto).sort(), methods, 'should have all the correct methods'); + + for (const m of methods) { + const propDesc = Object.getOwnPropertyDescriptor(proto, m); + assert_true(propDesc.enumerable, 'method should be enumerable'); + assert_true(propDesc.configurable, 'method should be configurable'); + assert_true(propDesc.writable, 'method should be writable'); + assert_equals(typeof it[m], 'function', 'method should be a function'); + assert_equals(it[m].name, m, 'method should have the correct name'); + } + + assert_equals(it.next.length, 0, 'next should have no parameters'); + assert_equals(it.return.length, 1, 'return should have 1 parameter'); + assert_equals(typeof it.throw, 'undefined', 'throw should not exist'); +}, 'Async iterator instances should have the correct list of properties'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [1, 2, 3]); +}, 'Async-iterating a push source'); + 
+promise_test(async () => { + let i = 1; + const s = new ReadableStream({ + pull(c) { + c.enqueue(i); + if (i >= 3) { + c.close(); + } + i += 1; + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [1, 2, 3]); +}, 'Async-iterating a pull source'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(undefined); + c.enqueue(undefined); + c.enqueue(undefined); + c.close(); + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [undefined, undefined, undefined]); +}, 'Async-iterating a push source with undefined values'); + +promise_test(async () => { + let i = 1; + const s = new ReadableStream({ + pull(c) { + c.enqueue(undefined); + if (i >= 3) { + c.close(); + } + i += 1; + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [undefined, undefined, undefined]); +}, 'Async-iterating a pull source with undefined values'); + +promise_test(async () => { + let i = 1; + const s = recordingReadableStream({ + pull(c) { + c.enqueue(i); + if (i >= 3) { + c.close(); + } + i += 1; + }, + }, new CountQueuingStrategy({ highWaterMark: 0 })); + + const it = s.values(); + assert_array_equals(s.events, []); + + const read1 = await it.next(); + assert_iter_result(read1, 1, false); + assert_array_equals(s.events, ['pull']); + + const read2 = await it.next(); + assert_iter_result(read2, 2, false); + assert_array_equals(s.events, ['pull', 'pull']); + + const read3 = await it.next(); + assert_iter_result(read3, 3, false); + assert_array_equals(s.events, ['pull', 'pull', 'pull']); + + const read4 = await it.next(); + assert_iter_result(read4, undefined, true); + assert_array_equals(s.events, ['pull', 'pull', 'pull']); +}, 'Async-iterating a pull source manually'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.error('e'); + 
}, + }); + + try { + for await (const chunk of s) {} + assert_unreached(); + } catch (e) { + assert_equals(e, 'e'); + } +}, 'Async-iterating an errored stream throws'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.close(); + } + }); + + for await (const chunk of s) { + assert_unreached(); + } +}, 'Async-iterating a closed stream never executes the loop body, but works fine'); + +promise_test(async () => { + const s = new ReadableStream(); + + const loop = async () => { + for await (const chunk of s) { + assert_unreached(); + } + assert_unreached(); + }; + + await Promise.race([ + loop(), + flushAsyncEvents() + ]); +}, 'Async-iterating an empty but not closed/errored stream never executes the loop body and stalls the async function'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + }, + }); + + const reader = s.getReader(); + const readResult = await reader.read(); + assert_iter_result(readResult, 1, false); + reader.releaseLock(); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [2, 3]); +}, 'Async-iterating a partially consumed stream'); + +for (const type of ['throw', 'break', 'return']) { + for (const preventCancel of [false, true]) { + promise_test(async () => { + const s = recordingReadableStream({ + start(c) { + c.enqueue(0); + } + }); + + // use a separate function for the loop body so return does not stop the test + const loop = async () => { + for await (const c of s.values({ preventCancel })) { + if (type === 'throw') { + throw new Error(); + } else if (type === 'break') { + break; + } else if (type === 'return') { + return; + } + } + }; + + try { + await loop(); + } catch (e) {} + + if (preventCancel) { + assert_array_equals(s.events, ['pull'], `cancel() should not be called`); + } else { + assert_array_equals(s.events, ['pull', 'cancel', undefined], `cancel() should 
be called`); + } + }, `Cancellation behavior when ${type}ing inside loop body; preventCancel = ${preventCancel}`); + } +} + +for (const preventCancel of [false, true]) { + promise_test(async () => { + const s = recordingReadableStream({ + start(c) { + c.enqueue(0); + } + }); + + const it = s.values({ preventCancel }); + await it.return(); + + if (preventCancel) { + assert_array_equals(s.events, [], `cancel() should not be called`); + } else { + assert_array_equals(s.events, ['cancel', undefined], `cancel() should be called`); + } + }, `Cancellation behavior when manually calling return(); preventCancel = ${preventCancel}`); +} + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResult1 = await it.next(); + assert_iter_result(iterResult1, 0, false, '1st next()'); + + await promise_rejects_exactly(t, error1, it.next(), '2nd next()'); +}, 'next() rejects if the stream errors'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResult = await it.return('return value'); + assert_iter_result(iterResult, 'return value', true); +}, 'return() does not rejects if the stream has not errored yet'); + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + // Do not error in start() because doing so would prevent acquiring a reader/async iterator. 
+ c.error(error1); + } + }); + + const it = s[Symbol.asyncIterator](); + + await flushAsyncEvents(); + await promise_rejects_exactly(t, error1, it.return('return value')); +}, 'return() rejects if the stream has errored'); + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResult1 = await it.next(); + assert_iter_result(iterResult1, 0, false, '1st next()'); + + await promise_rejects_exactly(t, error1, it.next(), '2nd next()'); + + const iterResult3 = await it.next(); + assert_iter_result(iterResult3, undefined, true, '3rd next()'); +}, 'next() that succeeds; next() that reports an error; next()'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResults = await Promise.allSettled([it.next(), it.next(), it.next()]); + + assert_equals(iterResults[0].status, 'fulfilled', '1st next() promise status'); + assert_iter_result(iterResults[0].value, 0, false, '1st next()'); + + assert_equals(iterResults[1].status, 'rejected', '2nd next() promise status'); + assert_equals(iterResults[1].reason, error1, '2nd next() rejection reason'); + + assert_equals(iterResults[2].status, 'fulfilled', '3rd next() promise status'); + assert_iter_result(iterResults[2].value, undefined, true, '3rd next()'); +}, 'next() that succeeds; next() that reports an error(); next() [no awaiting]'); + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResult1 = await it.next(); + 
assert_iter_result(iterResult1, 0, false, '1st next()'); + + await promise_rejects_exactly(t, error1, it.next(), '2nd next()'); + + const iterResult3 = await it.return('return value'); + assert_iter_result(iterResult3, 'return value', true, 'return()'); +}, 'next() that succeeds; next() that reports an error(); return()'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator](); + + const iterResults = await Promise.allSettled([it.next(), it.next(), it.return('return value')]); + + assert_equals(iterResults[0].status, 'fulfilled', '1st next() promise status'); + assert_iter_result(iterResults[0].value, 0, false, '1st next()'); + + assert_equals(iterResults[1].status, 'rejected', '2nd next() promise status'); + assert_equals(iterResults[1].reason, error1, '2nd next() rejection reason'); + + assert_equals(iterResults[2].status, 'fulfilled', 'return() promise status'); + assert_iter_result(iterResults[2].value, 'return value', true, 'return()'); +}, 'next() that succeeds; next() that reports an error(); return() [no awaiting]'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + c.enqueue(timesPulled); + ++timesPulled; + } + }); + const it = s[Symbol.asyncIterator](); + + const iterResult1 = await it.next(); + assert_iter_result(iterResult1, 0, false, 'next()'); + + const iterResult2 = await it.return('return value'); + assert_iter_result(iterResult2, 'return value', true, 'return()'); + + assert_equals(timesPulled, 2); +}, 'next() that succeeds; return()'); + +promise_test(async () => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + c.enqueue(timesPulled); + ++timesPulled; + } + }); + const it = s[Symbol.asyncIterator](); + + const iterResults = await Promise.allSettled([it.next(), it.return('return value')]); + 
+ assert_equals(iterResults[0].status, 'fulfilled', 'next() promise status'); + assert_iter_result(iterResults[0].value, 0, false, 'next()'); + + assert_equals(iterResults[1].status, 'fulfilled', 'return() promise status'); + assert_iter_result(iterResults[1].value, 'return value', true, 'return()'); + + assert_equals(timesPulled, 2); +}, 'next() that succeeds; return() [no awaiting]'); + +promise_test(async () => { + const rs = new ReadableStream(); + const it = rs.values(); + + const iterResult1 = await it.return('return value'); + assert_iter_result(iterResult1, 'return value', true, 'return()'); + + const iterResult2 = await it.next(); + assert_iter_result(iterResult2, undefined, true, 'next()'); +}, 'return(); next()'); + +promise_test(async () => { + const rs = new ReadableStream(); + const it = rs.values(); + + const iterResults = await Promise.allSettled([it.return('return value'), it.next()]); + + assert_equals(iterResults[0].status, 'fulfilled', 'return() promise status'); + assert_iter_result(iterResults[0].value, 'return value', true, 'return()'); + + assert_equals(iterResults[1].status, 'fulfilled', 'next() promise status'); + assert_iter_result(iterResults[1].value, undefined, true, 'next()'); +}, 'return(); next() [no awaiting]'); + +promise_test(async () => { + const rs = new ReadableStream(); + const it = rs.values(); + + const iterResult1 = await it.return('return value 1'); + assert_iter_result(iterResult1, 'return value 1', true, '1st return()'); + + const iterResult2 = await it.return('return value 2'); + assert_iter_result(iterResult2, 'return value 2', true, '1st return()'); +}, 'return(); return()'); + +promise_test(async () => { + const rs = new ReadableStream(); + const it = rs.values(); + + const iterResults = await Promise.allSettled([it.return('return value 1'), it.return('return value 2')]); + + assert_equals(iterResults[0].status, 'fulfilled', '1st return() promise status'); + assert_iter_result(iterResults[0].value, 'return value 1', 
true, '1st return()'); + + assert_equals(iterResults[1].status, 'fulfilled', '2nd return() promise status'); + assert_iter_result(iterResults[1].value, 'return value 2', true, '1st return()'); +}, 'return(); return() [no awaiting]'); + +test(() => { + const s = new ReadableStream({ + start(c) { + c.enqueue(0); + c.close(); + }, + }); + s.values(); + assert_throws_js(TypeError, () => s.values(), 'values() should throw'); +}, 'values() throws if there\'s already a lock'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + } + }); + + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + } + assert_array_equals(chunks, [1, 2, 3]); + + const reader = s.getReader(); + await reader.closed; +}, 'Acquiring a reader after exhaustively async-iterating a stream'); + +promise_test(async t => { + let timesPulled = 0; + const s = new ReadableStream({ + pull(c) { + if (timesPulled === 0) { + c.enqueue(0); + ++timesPulled; + } else { + c.error(error1); + } + } + }); + + const it = s[Symbol.asyncIterator]({ preventCancel: true }); + + const iterResult1 = await it.next(); + assert_iter_result(iterResult1, 0, false, '1st next()'); + + await promise_rejects_exactly(t, error1, it.next(), '2nd next()'); + + const iterResult2 = await it.return('return value'); + assert_iter_result(iterResult2, 'return value', true, 'return()'); + + // i.e. it should not reject with a generic "this stream is locked" TypeError. 
+ const reader = s.getReader(); + await promise_rejects_exactly(t, error1, reader.closed, 'closed on the new reader should reject with the error'); +}, 'Acquiring a reader after return()ing from a stream that errors'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + }, + }); + + // read the first two chunks, then cancel + const chunks = []; + for await (const chunk of s) { + chunks.push(chunk); + if (chunk >= 2) { + break; + } + } + assert_array_equals(chunks, [1, 2]); + + const reader = s.getReader(); + await reader.closed; +}, 'Acquiring a reader after partially async-iterating a stream'); + +promise_test(async () => { + const s = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + }, + }); + + // read the first two chunks, then release lock + const chunks = []; + for await (const chunk of s.values({preventCancel: true})) { + chunks.push(chunk); + if (chunk >= 2) { + break; + } + } + assert_array_equals(chunks, [1, 2]); + + const reader = s.getReader(); + const readResult = await reader.read(); + assert_iter_result(readResult, 3, false); + await reader.closed; +}, 'Acquiring a reader and reading the remaining chunks after partially async-iterating a stream with preventCancel = true'); + +for (const preventCancel of [false, true]) { + test(() => { + const rs = new ReadableStream(); + rs.values({ preventCancel }).return(); + // The test passes if this line doesn't throw. + rs.getReader(); + }, `return() should unlock the stream synchronously when preventCancel = ${preventCancel}`); +} + +promise_test(async () => { + const rs = new ReadableStream({ + async start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.enqueue('c'); + await flushAsyncEvents(); + // At this point, the async iterator has a read request in the stream's queue for its pending next() promise. 
+ // Closing the stream now causes two things to happen *synchronously*: + // 1. ReadableStreamClose resolves reader.[[closedPromise]] with undefined. + // 2. ReadableStreamClose calls the read request's close steps, which calls ReadableStreamReaderGenericRelease, + // which replaces reader.[[closedPromise]] with a rejected promise. + c.close(); + } + }); + + const chunks = []; + for await (const chunk of rs) { + chunks.push(chunk); + } + assert_array_equals(chunks, ['a', 'b', 'c']); +}, 'close() while next() is pending'); diff --git a/test/fixtures/wpt/streams/readable-streams/bad-strategies.any.js b/test/fixtures/wpt/streams/readable-streams/bad-strategies.any.js new file mode 100644 index 00000000000000..b795360fb7c7bb --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/bad-strategies.any.js @@ -0,0 +1,159 @@ +// META: global=window,worker,jsshell +'use strict'; + +test(() => { + + const theError = new Error('a unique string'); + + assert_throws_exactly(theError, () => { + new ReadableStream({}, { + get size() { + throw theError; + }, + highWaterMark: 5 + }); + }, 'construction should re-throw the error'); + +}, 'Readable stream: throwing strategy.size getter'); + +promise_test(t => { + + const controllerError = { name: 'controller error' }; + const thrownError = { name: 'thrown error' }; + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + { + size() { + controller.error(controllerError); + throw thrownError; + }, + highWaterMark: 5 + } + ); + + assert_throws_exactly(thrownError, () => controller.enqueue('a'), 'enqueue should re-throw the error'); + + return promise_rejects_exactly(t, controllerError, rs.getReader().closed); + +}, 'Readable stream: strategy.size errors the stream and then throws'); + +promise_test(t => { + + const theError = { name: 'my error' }; + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + { + size() { + controller.error(theError); + 
return Infinity; + }, + highWaterMark: 5 + } + ); + + assert_throws_js(RangeError, () => controller.enqueue('a'), 'enqueue should throw a RangeError'); + + return promise_rejects_exactly(t, theError, rs.getReader().closed, 'closed should reject with the error'); + +}, 'Readable stream: strategy.size errors the stream and then returns Infinity'); + +promise_test(() => { + + const theError = new Error('a unique string'); + const rs = new ReadableStream( + { + start(c) { + assert_throws_exactly(theError, () => c.enqueue('a'), 'enqueue should throw the error'); + } + }, + { + size() { + throw theError; + }, + highWaterMark: 5 + } + ); + + return rs.getReader().closed.catch(e => { + assert_equals(e, theError, 'closed should reject with the error'); + }); + +}, 'Readable stream: throwing strategy.size method'); + +test(() => { + + const theError = new Error('a unique string'); + + assert_throws_exactly(theError, () => { + new ReadableStream({}, { + size() { + return 1; + }, + get highWaterMark() { + throw theError; + } + }); + }, 'construction should re-throw the error'); + +}, 'Readable stream: throwing strategy.highWaterMark getter'); + +test(() => { + + for (const highWaterMark of [-1, -Infinity, NaN, 'foo', {}]) { + assert_throws_js(RangeError, () => { + new ReadableStream({}, { + size() { + return 1; + }, + highWaterMark + }); + }, 'construction should throw a RangeError for ' + highWaterMark); + } + +}, 'Readable stream: invalid strategy.highWaterMark'); + +promise_test(() => { + + const promises = []; + for (const size of [NaN, -Infinity, Infinity, -1]) { + let theError; + const rs = new ReadableStream( + { + start(c) { + try { + c.enqueue('hi'); + assert_unreached('enqueue didn\'t throw'); + } catch (error) { + assert_equals(error.name, 'RangeError', 'enqueue should throw a RangeError for ' + size); + theError = error; + } + } + }, + { + size() { + return size; + }, + highWaterMark: 5 + } + ); + + promises.push(rs.getReader().closed.catch(e => { + 
assert_equals(e, theError, 'closed should reject with the error for ' + size); + })); + } + + return Promise.all(promises); + +}, 'Readable stream: invalid strategy.size return value'); diff --git a/test/fixtures/wpt/streams/readable-streams/bad-underlying-sources.any.js b/test/fixtures/wpt/streams/readable-streams/bad-underlying-sources.any.js new file mode 100644 index 00000000000000..6f59197a49b18b --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/bad-underlying-sources.any.js @@ -0,0 +1,400 @@ +// META: global=window,worker,jsshell +'use strict'; + + +test(() => { + + const theError = new Error('a unique string'); + + assert_throws_exactly(theError, () => { + new ReadableStream({ + get start() { + throw theError; + } + }); + }, 'constructing the stream should re-throw the error'); + +}, 'Underlying source start: throwing getter'); + + +test(() => { + + const theError = new Error('a unique string'); + + assert_throws_exactly(theError, () => { + new ReadableStream({ + start() { + throw theError; + } + }); + }, 'constructing the stream should re-throw the error'); + +}, 'Underlying source start: throwing method'); + + +test(() => { + + const theError = new Error('a unique string'); + assert_throws_exactly(theError, () => new ReadableStream({ + get pull() { + throw theError; + } + }), 'constructor should throw'); + +}, 'Underlying source: throwing pull getter (initial pull)'); + + +promise_test(t => { + + const theError = new Error('a unique string'); + const rs = new ReadableStream({ + pull() { + throw theError; + } + }); + + return promise_rejects_exactly(t, theError, rs.getReader().closed); + +}, 'Underlying source: throwing pull method (initial pull)'); + + +promise_test(t => { + + const theError = new Error('a unique string'); + + let counter = 0; + const rs = new ReadableStream({ + get pull() { + ++counter; + if (counter === 1) { + return c => c.enqueue('a'); + } + + throw theError; + } + }); + const reader = rs.getReader(); + + return 
Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'the first chunk read should be correct'); + }), + reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'the second chunk read should be correct'); + assert_equals(counter, 1, 'counter should be 1'); + }) + ]); + +}, 'Underlying source pull: throwing getter (second pull does not result in a second get)'); + +promise_test(t => { + + const theError = new Error('a unique string'); + + let counter = 0; + const rs = new ReadableStream({ + pull(c) { + ++counter; + if (counter === 1) { + c.enqueue('a'); + return; + } + + throw theError; + } + }); + const reader = rs.getReader(); + + return Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'the chunk read should be correct'); + }), + promise_rejects_exactly(t, theError, reader.closed) + ]); + +}, 'Underlying source pull: throwing method (second pull)'); + +test(() => { + + const theError = new Error('a unique string'); + assert_throws_exactly(theError, () => new ReadableStream({ + get cancel() { + throw theError; + } + }), 'constructor should throw'); + +}, 'Underlying source cancel: throwing getter'); + +promise_test(t => { + + const theError = new Error('a unique string'); + const rs = new ReadableStream({ + cancel() { + throw theError; + } + }); + + return promise_rejects_exactly(t, theError, rs.cancel()); + +}, 'Underlying source cancel: throwing method'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + rs.cancel(); + assert_throws_js(TypeError, () => controller.enqueue('a'), 'Calling enqueue after canceling should throw'); + + return rs.getReader().closed; + +}, 'Underlying source: calling enqueue on an empty canceled stream should throw'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + 
controller = c; + } + }); + + rs.cancel(); + assert_throws_js(TypeError, () => controller.enqueue('c'), 'Calling enqueue after canceling should throw'); + + return rs.getReader().closed; + +}, 'Underlying source: calling enqueue on a non-empty canceled stream should throw'); + +promise_test(() => { + + return new ReadableStream({ + start(c) { + c.close(); + assert_throws_js(TypeError, () => c.enqueue('a'), 'call to enqueue should throw a TypeError'); + } + }).getReader().closed; + +}, 'Underlying source: calling enqueue on a closed stream should throw'); + +promise_test(t => { + + const theError = new Error('boo'); + const closed = new ReadableStream({ + start(c) { + c.error(theError); + assert_throws_js(TypeError, () => c.enqueue('a'), 'call to enqueue should throw the error'); + } + }).getReader().closed; + + return promise_rejects_exactly(t, theError, closed); + +}, 'Underlying source: calling enqueue on an errored stream should throw'); + +promise_test(() => { + + return new ReadableStream({ + start(c) { + c.close(); + assert_throws_js(TypeError, () => c.close(), 'second call to close should throw a TypeError'); + } + }).getReader().closed; + +}, 'Underlying source: calling close twice on an empty stream should throw the second time'); + +promise_test(() => { + + let startCalled = false; + let readCalled = false; + const reader = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.close(); + assert_throws_js(TypeError, () => c.close(), 'second call to close should throw a TypeError'); + startCalled = true; + } + }).getReader(); + + return Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'read() should read the enqueued chunk'); + readCalled = true; + }), + reader.closed.then(() => { + assert_true(startCalled); + assert_true(readCalled); + }) + ]); + +}, 'Underlying source: calling close twice on a non-empty stream should throw the second time'); + +promise_test(() => { + + let controller; + let startCalled = 
false; + const rs = new ReadableStream({ + start(c) { + controller = c; + startCalled = true; + } + }); + + rs.cancel(); + assert_throws_js(TypeError, () => controller.close(), 'Calling close after canceling should throw'); + + return rs.getReader().closed.then(() => { + assert_true(startCalled); + }); + +}, 'Underlying source: calling close on an empty canceled stream should throw'); + +promise_test(() => { + + let controller; + let startCalled = false; + const rs = new ReadableStream({ + start(c) { + controller = c; + c.enqueue('a'); + startCalled = true; + } + }); + + rs.cancel(); + assert_throws_js(TypeError, () => controller.close(), 'Calling close after canceling should throw'); + + return rs.getReader().closed.then(() => { + assert_true(startCalled); + }); + +}, 'Underlying source: calling close on a non-empty canceled stream should throw'); + +promise_test(() => { + + const theError = new Error('boo'); + let startCalled = false; + + const closed = new ReadableStream({ + start(c) { + c.error(theError); + assert_throws_js(TypeError, () => c.close(), 'call to close should throw a TypeError'); + startCalled = true; + } + }).getReader().closed; + + return closed.catch(e => { + assert_true(startCalled); + assert_equals(e, theError, 'closed should reject with the error'); + }); + +}, 'Underlying source: calling close after error should throw'); + +promise_test(() => { + + const theError = new Error('boo'); + let startCalled = false; + + const closed = new ReadableStream({ + start(c) { + c.error(theError); + c.error(); + startCalled = true; + } + }).getReader().closed; + + return closed.catch(e => { + assert_true(startCalled); + assert_equals(e, theError, 'closed should reject with the error'); + }); + +}, 'Underlying source: calling error twice should not throw'); + +promise_test(() => { + + let startCalled = false; + + const closed = new ReadableStream({ + start(c) { + c.close(); + c.error(); + startCalled = true; + } + }).getReader().closed; + + return 
closed.then(() => assert_true(startCalled)); + +}, 'Underlying source: calling error after close should not throw'); + +promise_test(() => { + + let startCalled = false; + const firstError = new Error('1'); + const secondError = new Error('2'); + + const closed = new ReadableStream({ + start(c) { + c.error(firstError); + startCalled = true; + return Promise.reject(secondError); + } + }).getReader().closed; + + return closed.catch(e => { + assert_true(startCalled); + assert_equals(e, firstError, 'closed should reject with the first error'); + }); + +}, 'Underlying source: calling error and returning a rejected promise from start should cause the stream to error ' + + 'with the first error'); + +promise_test(() => { + + let startCalled = false; + const firstError = new Error('1'); + const secondError = new Error('2'); + + const closed = new ReadableStream({ + pull(c) { + c.error(firstError); + startCalled = true; + return Promise.reject(secondError); + } + }).getReader().closed; + + return closed.catch(e => { + assert_true(startCalled); + assert_equals(e, firstError, 'closed should reject with the first error'); + }); + +}, 'Underlying source: calling error and returning a rejected promise from pull should cause the stream to error ' + + 'with the first error'); + +const error1 = { name: 'error1' }; + +promise_test(t => { + + let pullShouldThrow = false; + const rs = new ReadableStream({ + pull(controller) { + if (pullShouldThrow) { + throw error1; + } + controller.enqueue(0); + } + }, new CountQueuingStrategy({highWaterMark: 1})); + const reader = rs.getReader(); + return Promise.resolve().then(() => { + pullShouldThrow = true; + return Promise.all([ + reader.read(), + promise_rejects_exactly(t, error1, reader.closed, '.closed promise should reject') + ]); + }); + +}, 'read should not error if it dequeues and pull() throws'); diff --git a/test/fixtures/wpt/streams/readable-streams/cancel.any.js b/test/fixtures/wpt/streams/readable-streams/cancel.any.js new file mode 
100644 index 00000000000000..c3723a465c9988 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/cancel.any.js @@ -0,0 +1,236 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-utils.js +'use strict'; + +promise_test(t => { + + const randomSource = new RandomPushSource(); + + let cancellationFinished = false; + const rs = new ReadableStream({ + start(c) { + randomSource.ondata = c.enqueue.bind(c); + randomSource.onend = c.close.bind(c); + randomSource.onerror = c.error.bind(c); + }, + + pull() { + randomSource.readStart(); + }, + + cancel() { + randomSource.readStop(); + + return new Promise(resolve => { + t.step_timeout(() => { + cancellationFinished = true; + resolve(); + }, 1); + }); + } + }); + + const reader = rs.getReader(); + + // We call delay multiple times to avoid cancelling too early for the + // source to enqueue at least one chunk. + const cancel = delay(5).then(() => delay(5)).then(() => delay(5)).then(() => { + const cancelPromise = reader.cancel(); + assert_false(cancellationFinished, 'cancellation in source should happen later'); + return cancelPromise; + }); + + return readableStreamToArray(rs, reader).then(chunks => { + assert_greater_than(chunks.length, 0, 'at least one chunk should be read'); + for (let i = 0; i < chunks.length; i++) { + assert_equals(chunks[i].length, 128, 'chunk ' + i + ' should have 128 bytes'); + } + return cancel; + }).then(() => { + assert_true(cancellationFinished, 'it returns a promise that is fulfilled when the cancellation finishes'); + }); + +}, 'ReadableStream cancellation: integration test on an infinite stream derived from a random push source'); + +test(() => { + + let recordedReason; + const rs = new ReadableStream({ + cancel(reason) { + recordedReason = reason; + } + }); + + const passedReason = new Error('Sorry, it just wasn\'t meant to be.'); + rs.cancel(passedReason); + + assert_equals(recordedReason, passedReason, + 'the error 
passed to the underlying source\'s cancel method should equal the one passed to the stream\'s cancel'); + +}, 'ReadableStream cancellation: cancel(reason) should pass through the given reason to the underlying source'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.close(); + }, + cancel() { + assert_unreached('underlying source cancel() should not have been called'); + } + }); + + const reader = rs.getReader(); + + return rs.cancel().then(() => { + assert_unreached('cancel() should be rejected'); + }, e => { + assert_equals(e.name, 'TypeError', 'cancel() should be rejected with a TypeError'); + }).then(() => { + return reader.read(); + }).then(result => { + assert_object_equals(result, { value: 'a', done: false }, 'read() should still work after the attempted cancel'); + return reader.closed; + }); + +}, 'ReadableStream cancellation: cancel() on a locked stream should fail and not call the underlying source cancel'); + +promise_test(() => { + + let cancelReceived = false; + const cancelReason = new Error('I am tired of this stream, I prefer to cancel it'); + const rs = new ReadableStream({ + cancel(reason) { + cancelReceived = true; + assert_equals(reason, cancelReason, 'cancellation reason given to the underlying source should be equal to the one passed'); + } + }); + + return rs.cancel(cancelReason).then(() => { + assert_true(cancelReceived); + }); + +}, 'ReadableStream cancellation: should fulfill promise when cancel callback went fine'); + +promise_test(() => { + + const rs = new ReadableStream({ + cancel() { + return 'Hello'; + } + }); + + return rs.cancel().then(v => { + assert_equals(v, undefined, 'cancel() return value should be fulfilled with undefined'); + }); + +}, 'ReadableStream cancellation: returning a value from the underlying source\'s cancel should not affect the fulfillment value of the promise returned by the stream\'s cancel'); + +promise_test(() => { + + const thrownError = new Error('test'); + 
let cancelCalled = false; + + const rs = new ReadableStream({ + cancel() { + cancelCalled = true; + throw thrownError; + } + }); + + return rs.cancel('test').then(() => { + assert_unreached('cancel should reject'); + }, e => { + assert_true(cancelCalled); + assert_equals(e, thrownError); + }); + +}, 'ReadableStream cancellation: should reject promise when cancel callback raises an exception'); + +promise_test(() => { + + const cancelReason = new Error('test'); + + const rs = new ReadableStream({ + cancel(error) { + assert_equals(error, cancelReason); + return delay(1); + } + }); + + return rs.cancel(cancelReason); + +}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should fulfill when that one does (1)'); + +promise_test(t => { + + let resolveSourceCancelPromise; + let sourceCancelPromiseHasFulfilled = false; + + const rs = new ReadableStream({ + cancel() { + const sourceCancelPromise = new Promise(resolve => resolveSourceCancelPromise = resolve); + + sourceCancelPromise.then(() => { + sourceCancelPromiseHasFulfilled = true; + }); + + return sourceCancelPromise; + } + }); + + t.step_timeout(() => resolveSourceCancelPromise('Hello'), 1); + + return rs.cancel().then(value => { + assert_true(sourceCancelPromiseHasFulfilled, 'cancel() return value should be fulfilled only after the promise returned by the underlying source\'s cancel'); + assert_equals(value, undefined, 'cancel() return value should be fulfilled with undefined'); + }); + +}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should fulfill when that one does (2)'); + +promise_test(t => { + + let rejectSourceCancelPromise; + let sourceCancelPromiseHasRejected = false; + + const rs = new ReadableStream({ + cancel() { + const sourceCancelPromise = new Promise((resolve, reject) => rejectSourceCancelPromise = reject); + + 
sourceCancelPromise.catch(() => { + sourceCancelPromiseHasRejected = true; + }); + + return sourceCancelPromise; + } + }); + + const errorInCancel = new Error('Sorry, it just wasn\'t meant to be.'); + + t.step_timeout(() => rejectSourceCancelPromise(errorInCancel), 1); + + return rs.cancel().then(() => { + assert_unreached('cancel() return value should be rejected'); + }, r => { + assert_true(sourceCancelPromiseHasRejected, 'cancel() return value should be rejected only after the promise returned by the underlying source\'s cancel'); + assert_equals(r, errorInCancel, 'cancel() return value should be rejected with the underlying source\'s rejection reason'); + }); + +}, 'ReadableStream cancellation: if the underlying source\'s cancel method returns a promise, the promise returned by the stream\'s cancel should reject when that one does'); + +promise_test(() => { + + const rs = new ReadableStream({ + start() { + return new Promise(() => {}); + }, + pull() { + assert_unreached('pull should not have been called'); + } + }); + + return Promise.all([rs.cancel(), rs.getReader().closed]); + +}, 'ReadableStream cancellation: cancelling before start finishes should prevent pull() from being called'); diff --git a/test/fixtures/wpt/streams/readable-streams/constructor.any.js b/test/fixtures/wpt/streams/readable-streams/constructor.any.js new file mode 100644 index 00000000000000..dcfd9e9c33861f --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/constructor.any.js @@ -0,0 +1,17 @@ +// META: global=window,worker,jsshell +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +test(() => { + const underlyingSource = { get start() { throw error1; } }; + const queuingStrategy = { highWaterMark: 0, get size() { throw error2; } }; + + // underlyingSource is converted in prose in the method body, whereas queuingStrategy is done at the IDL layer. 
+ // So the queuingStrategy exception should be encountered first. + assert_throws_exactly(error2, () => new ReadableStream(underlyingSource, queuingStrategy)); +}, 'underlyingSource argument should be converted after queuingStrategy argument'); diff --git a/test/fixtures/wpt/streams/readable-streams/count-queuing-strategy-integration.any.js b/test/fixtures/wpt/streams/readable-streams/count-queuing-strategy-integration.any.js new file mode 100644 index 00000000000000..78a25318b2dd5a --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/count-queuing-strategy-integration.any.js @@ -0,0 +1,208 @@ +// META: global=window,worker,jsshell +'use strict'; + +test(() => { + + new ReadableStream({}, new CountQueuingStrategy({ highWaterMark: 4 })); + +}, 'Can construct a readable stream with a valid CountQueuingStrategy'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + new CountQueuingStrategy({ highWaterMark: 0 }) + ); + const reader = rs.getReader(); + + assert_equals(controller.desiredSize, 0, '0 reads, 0 enqueues: desiredSize should be 0'); + controller.enqueue('a'); + assert_equals(controller.desiredSize, -1, '0 reads, 1 enqueue: desiredSize should be -1'); + controller.enqueue('b'); + assert_equals(controller.desiredSize, -2, '0 reads, 2 enqueues: desiredSize should be -2'); + controller.enqueue('c'); + assert_equals(controller.desiredSize, -3, '0 reads, 3 enqueues: desiredSize should be -3'); + controller.enqueue('d'); + assert_equals(controller.desiredSize, -4, '0 reads, 4 enqueues: desiredSize should be -4'); + + return reader.read() + .then(result => { + assert_object_equals(result, { value: 'a', done: false }, + '1st read gives back the 1st chunk enqueued (queue now contains 3 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'b', done: false }, + '2nd read gives back the 2nd chunk enqueued (queue now contains 2 chunks)'); + return 
reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'c', done: false }, + '3rd read gives back the 3rd chunk enqueued (queue now contains 1 chunk)'); + + assert_equals(controller.desiredSize, -1, '3 reads, 4 enqueues: desiredSize should be -1'); + controller.enqueue('e'); + assert_equals(controller.desiredSize, -2, '3 reads, 5 enqueues: desiredSize should be -2'); + + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'd', done: false }, + '4th read gives back the 4th chunk enqueued (queue now contains 1 chunks)'); + return reader.read(); + + }).then(result => { + assert_object_equals(result, { value: 'e', done: false }, + '5th read gives back the 5th chunk enqueued (queue now contains 0 chunks)'); + + assert_equals(controller.desiredSize, 0, '5 reads, 5 enqueues: desiredSize should be 0'); + controller.enqueue('f'); + assert_equals(controller.desiredSize, -1, '5 reads, 6 enqueues: desiredSize should be -1'); + controller.enqueue('g'); + assert_equals(controller.desiredSize, -2, '5 reads, 7 enqueues: desiredSize should be -2'); + }); + +}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 0)'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + new CountQueuingStrategy({ highWaterMark: 1 }) + ); + const reader = rs.getReader(); + + assert_equals(controller.desiredSize, 1, '0 reads, 0 enqueues: desiredSize should be 1'); + controller.enqueue('a'); + assert_equals(controller.desiredSize, 0, '0 reads, 1 enqueue: desiredSize should be 0'); + controller.enqueue('b'); + assert_equals(controller.desiredSize, -1, '0 reads, 2 enqueues: desiredSize should be -1'); + controller.enqueue('c'); + assert_equals(controller.desiredSize, -2, '0 reads, 3 enqueues: desiredSize should be -2'); + controller.enqueue('d'); + assert_equals(controller.desiredSize, -3, '0 reads, 4 enqueues: desiredSize should be -3'); + + return 
reader.read() + .then(result => { + assert_object_equals(result, { value: 'a', done: false }, + '1st read gives back the 1st chunk enqueued (queue now contains 3 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'b', done: false }, + '2nd read gives back the 2nd chunk enqueued (queue now contains 2 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'c', done: false }, + '3rd read gives back the 3rd chunk enqueued (queue now contains 1 chunk)'); + + assert_equals(controller.desiredSize, 0, '3 reads, 4 enqueues: desiredSize should be 0'); + controller.enqueue('e'); + assert_equals(controller.desiredSize, -1, '3 reads, 5 enqueues: desiredSize should be -1'); + + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'd', done: false }, + '4th read gives back the 4th chunk enqueued (queue now contains 1 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'e', done: false }, + '5th read gives back the 5th chunk enqueued (queue now contains 0 chunks)'); + + assert_equals(controller.desiredSize, 1, '5 reads, 5 enqueues: desiredSize should be 1'); + controller.enqueue('f'); + assert_equals(controller.desiredSize, 0, '5 reads, 6 enqueues: desiredSize should be 0'); + controller.enqueue('g'); + assert_equals(controller.desiredSize, -1, '5 reads, 7 enqueues: desiredSize should be -1'); + }); + +}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 1)'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream( + { + start(c) { + controller = c; + } + }, + new CountQueuingStrategy({ highWaterMark: 4 }) + ); + const reader = rs.getReader(); + + assert_equals(controller.desiredSize, 4, '0 reads, 0 enqueues: desiredSize should be 4'); + controller.enqueue('a'); + assert_equals(controller.desiredSize, 3, '0 reads, 1 enqueue: desiredSize should be 3'); + 
controller.enqueue('b'); + assert_equals(controller.desiredSize, 2, '0 reads, 2 enqueues: desiredSize should be 2'); + controller.enqueue('c'); + assert_equals(controller.desiredSize, 1, '0 reads, 3 enqueues: desiredSize should be 1'); + controller.enqueue('d'); + assert_equals(controller.desiredSize, 0, '0 reads, 4 enqueues: desiredSize should be 0'); + controller.enqueue('e'); + assert_equals(controller.desiredSize, -1, '0 reads, 5 enqueues: desiredSize should be -1'); + controller.enqueue('f'); + assert_equals(controller.desiredSize, -2, '0 reads, 6 enqueues: desiredSize should be -2'); + + + return reader.read() + .then(result => { + assert_object_equals(result, { value: 'a', done: false }, + '1st read gives back the 1st chunk enqueued (queue now contains 5 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'b', done: false }, + '2nd read gives back the 2nd chunk enqueued (queue now contains 4 chunks)'); + + assert_equals(controller.desiredSize, 0, '2 reads, 6 enqueues: desiredSize should be 0'); + controller.enqueue('g'); + assert_equals(controller.desiredSize, -1, '2 reads, 7 enqueues: desiredSize should be -1'); + + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'c', done: false }, + '3rd read gives back the 3rd chunk enqueued (queue now contains 4 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'd', done: false }, + '4th read gives back the 4th chunk enqueued (queue now contains 3 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'e', done: false }, + '5th read gives back the 5th chunk enqueued (queue now contains 2 chunks)'); + return reader.read(); + }) + .then(result => { + assert_object_equals(result, { value: 'f', done: false }, + '6th read gives back the 6th chunk enqueued (queue now contains 0 chunks)'); + + assert_equals(controller.desiredSize, 3, '6 reads, 7 
enqueues: desiredSize should be 3'); + controller.enqueue('h'); + assert_equals(controller.desiredSize, 2, '6 reads, 8 enqueues: desiredSize should be 2'); + controller.enqueue('i'); + assert_equals(controller.desiredSize, 1, '6 reads, 9 enqueues: desiredSize should be 1'); + controller.enqueue('j'); + assert_equals(controller.desiredSize, 0, '6 reads, 10 enqueues: desiredSize should be 0'); + controller.enqueue('k'); + assert_equals(controller.desiredSize, -1, '6 reads, 11 enqueues: desiredSize should be -1'); + }); + +}, 'Correctly governs a ReadableStreamController\'s desiredSize property (HWM = 4)'); diff --git a/test/fixtures/wpt/streams/readable-streams/default-reader.any.js b/test/fixtures/wpt/streams/readable-streams/default-reader.any.js new file mode 100644 index 00000000000000..60c740a8288631 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/default-reader.any.js @@ -0,0 +1,514 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +'use strict'; + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStreamDefaultReader('potato')); + assert_throws_js(TypeError, () => new ReadableStreamDefaultReader({})); + assert_throws_js(TypeError, () => new ReadableStreamDefaultReader()); + +}, 'ReadableStreamDefaultReader constructor should get a ReadableStream object as argument'); + +test(() => { + + const rsReader = new ReadableStreamDefaultReader(new ReadableStream()); + assert_equals(rsReader.closed, rsReader.closed, 'closed should return the same promise'); + +}, 'ReadableStreamDefaultReader closed should always return the same promise object'); + +test(() => { + + const rs = new ReadableStream(); + new ReadableStreamDefaultReader(rs); // Constructing directly the first time should be fine. 
+ assert_throws_js(TypeError, () => new ReadableStreamDefaultReader(rs), + 'constructing directly the second time should fail'); + +}, 'Constructing a ReadableStreamDefaultReader directly should fail if the stream is already locked (via direct ' + + 'construction)'); + +test(() => { + + const rs = new ReadableStream(); + new ReadableStreamDefaultReader(rs); // Constructing directly should be fine. + assert_throws_js(TypeError, () => rs.getReader(), 'getReader() should fail'); + +}, 'Getting a ReadableStreamDefaultReader via getReader should fail if the stream is already locked (via direct ' + + 'construction)'); + +test(() => { + + const rs = new ReadableStream(); + rs.getReader(); // getReader() should be fine. + assert_throws_js(TypeError, () => new ReadableStreamDefaultReader(rs), 'constructing directly should fail'); + +}, 'Constructing a ReadableStreamDefaultReader directly should fail if the stream is already locked (via getReader)'); + +test(() => { + + const rs = new ReadableStream(); + rs.getReader(); // getReader() should be fine. + assert_throws_js(TypeError, () => rs.getReader(), 'getReader() should fail'); + +}, 'Getting a ReadableStreamDefaultReader via getReader should fail if the stream is already locked (via getReader)'); + +test(() => { + + const rs = new ReadableStream({ + start(c) { + c.close(); + } + }); + + new ReadableStreamDefaultReader(rs); // Constructing directly should not throw. + +}, 'Constructing a ReadableStreamDefaultReader directly should be OK if the stream is closed'); + +test(() => { + + const theError = new Error('don\'t say i didn\'t warn ya'); + const rs = new ReadableStream({ + start(c) { + c.error(theError); + } + }); + + new ReadableStreamDefaultReader(rs); // Constructing directly should not throw. 
+ +}, 'Constructing a ReadableStreamDefaultReader directly should be OK if the stream is errored'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + const reader = rs.getReader(); + + const promise = reader.read().then(result => { + assert_object_equals(result, { value: 'a', done: false }, 'read() should fulfill with the enqueued chunk'); + }); + + controller.enqueue('a'); + return promise; + +}, 'Reading from a reader for an empty stream will wait until a chunk is available'); + +promise_test(() => { + + let cancelCalled = false; + const passedReason = new Error('it wasn\'t the right time, sorry'); + const rs = new ReadableStream({ + cancel(reason) { + assert_true(rs.locked, 'the stream should still be locked'); + assert_throws_js(TypeError, () => rs.getReader(), 'should not be able to get another reader'); + assert_equals(reason, passedReason, 'the cancellation reason is passed through to the underlying source'); + cancelCalled = true; + } + }); + + const reader = rs.getReader(); + return reader.cancel(passedReason).then(() => assert_true(cancelCalled)); + +}, 'cancel() on a reader does not release the reader'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader = rs.getReader(); + const promise = reader.closed; + + controller.close(); + return promise; + +}, 'closed should be fulfilled after stream is closed (.closed access before acquiring)'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader1 = rs.getReader(); + + reader1.releaseLock(); + + const reader2 = rs.getReader(); + controller.close(); + + return Promise.all([ + promise_rejects_js(t, TypeError, reader1.closed), + reader2.closed + ]); + +}, 'closed should be rejected after reader releases its lock (multiple stream locks)'); + +promise_test(t => { + + let controller; 
+ const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader = rs.getReader(); + const promise1 = reader.closed; + + controller.close(); + + reader.releaseLock(); + const promise2 = reader.closed; + + assert_not_equals(promise1, promise2, '.closed should be replaced'); + return Promise.all([ + promise1, + promise_rejects_js(t, TypeError, promise2, '.closed after releasing lock'), + ]); + +}, 'closed is replaced when stream closes and reader releases its lock'); + +promise_test(t => { + + const theError = { name: 'unique error' }; + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader = rs.getReader(); + const promise1 = reader.closed; + + controller.error(theError); + + reader.releaseLock(); + const promise2 = reader.closed; + + assert_not_equals(promise1, promise2, '.closed should be replaced'); + return Promise.all([ + promise_rejects_exactly(t, theError, promise1, '.closed before releasing lock'), + promise_rejects_js(t, TypeError, promise2, '.closed after releasing lock') + ]); + +}, 'closed is replaced when stream errors and reader releases its lock'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + + const reader1 = rs.getReader(); + const promise1 = reader1.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'reading the first chunk from reader1 works'); + }); + reader1.releaseLock(); + + const reader2 = rs.getReader(); + const promise2 = reader2.read().then(r => { + assert_object_equals(r, { value: 'b', done: false }, 'reading the second chunk from reader2 works'); + }); + reader2.releaseLock(); + + return Promise.all([promise1, promise2]); + +}, 'Multiple readers can access the stream in sequence'); + +promise_test(() => { + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + } + }); + + const reader1 = rs.getReader(); + reader1.releaseLock(); + + const 
reader2 = rs.getReader(); + + // Should be a no-op + reader1.releaseLock(); + + return reader2.read().then(result => { + assert_object_equals(result, { value: 'a', done: false }, + 'read() should still work on reader2 even after reader1 is released'); + }); + +}, 'Cannot use an already-released reader to unlock a stream again'); + +promise_test(t => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + }, + cancel() { + assert_unreached('underlying source cancel should not be called'); + } + }); + + const reader = rs.getReader(); + reader.releaseLock(); + const cancelPromise = reader.cancel(); + + const reader2 = rs.getReader(); + const readPromise = reader2.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'a new reader should be able to read a chunk'); + }); + + return Promise.all([ + promise_rejects_js(t, TypeError, cancelPromise), + readPromise + ]); + +}, 'cancel() on a released reader is a no-op and does not pass through'); + +promise_test(t => { + + const promiseAsserts = []; + + let controller; + const theError = { name: 'unique error' }; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const reader1 = rs.getReader(); + + promiseAsserts.push( + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader1.read()) + ); + + assert_throws_js(TypeError, () => rs.getReader(), 'trying to get another reader before erroring should throw'); + + controller.error(theError); + + reader1.releaseLock(); + + const reader2 = rs.getReader(); + + promiseAsserts.push( + promise_rejects_exactly(t, theError, reader2.closed), + promise_rejects_exactly(t, theError, reader2.read()) + ); + + return Promise.all(promiseAsserts); + +}, 'Getting a second reader after erroring the stream and releasing the reader should succeed'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const promise = 
rs.getReader().closed.then( + t.unreached_func('closed promise should not be fulfilled when stream is errored'), + err => { + assert_equals(err, undefined, 'passed error should be undefined as it was'); + } + ); + + controller.error(); + return promise; + +}, 'ReadableStreamDefaultReader closed promise should be rejected with undefined if that is the error'); + + +promise_test(t => { + + const rs = new ReadableStream({ + start() { + return Promise.reject(); + } + }); + + return rs.getReader().read().then( + t.unreached_func('read promise should not be fulfilled when stream is errored'), + err => { + assert_equals(err, undefined, 'passed error should be undefined as it was'); + } + ); + +}, 'ReadableStreamDefaultReader: if start rejects with no parameter, it should error the stream with an undefined ' + + 'error'); + +promise_test(t => { + + const theError = { name: 'unique string' }; + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const promise = promise_rejects_exactly(t, theError, rs.getReader().closed); + + controller.error(theError); + return promise; + +}, 'Erroring a ReadableStream after checking closed should reject ReadableStreamDefaultReader closed promise'); + +promise_test(t => { + + const theError = { name: 'unique string' }; + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + controller.error(theError); + + // Let's call getReader twice for extra test coverage of this code path. 
+ rs.getReader().releaseLock(); + + return promise_rejects_exactly(t, theError, rs.getReader().closed); + +}, 'Erroring a ReadableStream before checking closed should reject ReadableStreamDefaultReader closed promise'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + const reader = rs.getReader(); + + const promise = Promise.all([ + reader.read().then(result => { + assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (1)'); + }), + reader.read().then(result => { + assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (2)'); + }), + reader.closed + ]); + + controller.close(); + return promise; + +}, 'Reading twice on a stream that gets closed'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + controller.close(); + const reader = rs.getReader(); + + return Promise.all([ + reader.read().then(result => { + assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (1)'); + }), + reader.read().then(result => { + assert_object_equals(result, { value: undefined, done: true }, 'read() should fulfill with close (2)'); + }), + reader.closed + ]); + +}, 'Reading twice on a closed stream'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const myError = { name: 'mashed potatoes' }; + controller.error(myError); + + const reader = rs.getReader(); + + return Promise.all([ + promise_rejects_exactly(t, myError, reader.read()), + promise_rejects_exactly(t, myError, reader.read()), + promise_rejects_exactly(t, myError, reader.closed) + ]); + +}, 'Reading twice on an errored stream'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const myError = { name: 'mashed 
potatoes' }; + const reader = rs.getReader(); + + const promise = Promise.all([ + promise_rejects_exactly(t, myError, reader.read()), + promise_rejects_exactly(t, myError, reader.read()), + promise_rejects_exactly(t, myError, reader.closed) + ]); + + controller.error(myError); + return promise; + +}, 'Reading twice on a stream that gets errored'); + +test(() => { + const rs = new ReadableStream(); + let toStringCalled = false; + const mode = { + toString() { + toStringCalled = true; + return ''; + } + }; + assert_throws_js(TypeError, () => rs.getReader({ mode }), 'getReader() should throw'); + assert_true(toStringCalled, 'toString() should be called'); +}, 'getReader() should call ToString() on mode'); + +promise_test(() => { + const rs = new ReadableStream({ + pull(controller) { + controller.close(); + } + }); + + const reader = rs.getReader(); + return reader.read().then(() => { + // The test passes if releaseLock() does not throw. + reader.releaseLock(); + }); +}, 'controller.close() should clear the list of pending read requests'); diff --git a/test/fixtures/wpt/streams/readable-streams/floating-point-total-queue-size.any.js b/test/fixtures/wpt/streams/readable-streams/floating-point-total-queue-size.any.js new file mode 100644 index 00000000000000..400482a450cad2 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/floating-point-total-queue-size.any.js @@ -0,0 +1,116 @@ +// META: global=window,worker,jsshell +'use strict'; + +// Due to the limitations of floating-point precision, the calculation of desiredSize sometimes gives different answers +// than adding up the items in the queue would. It is important that implementations give the same result in these edge +// cases so that developers do not come to depend on non-standard behaviour. See +// https://github.com/whatwg/streams/issues/582 and linked issues for further discussion. 
+ +promise_test(() => { + const { reader, controller } = setupTestStream(); + + controller.enqueue(2); + assert_equals(controller.desiredSize, 0 - 2, 'desiredSize must be -2 after enqueueing such a chunk'); + + controller.enqueue(Number.MAX_SAFE_INTEGER); + assert_equals(controller.desiredSize, 0 - Number.MAX_SAFE_INTEGER - 2, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)'); + + return reader.read().then(() => { + assert_equals(controller.desiredSize, 0 - Number.MAX_SAFE_INTEGER - 2 + 2, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative'); + }); +}, 'Floating point arithmetic must manifest near NUMBER.MAX_SAFE_INTEGER (total ends up positive)'); + +promise_test(() => { + const { reader, controller } = setupTestStream(); + + controller.enqueue(1e-16); + assert_equals(controller.desiredSize, 0 - 1e-16, 'desiredSize must be -1e16 after enqueueing such a chunk'); + + controller.enqueue(1); + assert_equals(controller.desiredSize, 0 - 1e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)'); + + return reader.read().then(() => { + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 + 1e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up positive, but clamped)'); + +promise_test(() => { + const { reader, controller } = setupTestStream(); + + controller.enqueue(1e-16); + assert_equals(controller.desiredSize, 0 - 1e-16, 'desiredSize must be -2e16 after enqueueing such a 
chunk'); + + controller.enqueue(1); + assert_equals(controller.desiredSize, 0 - 1e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)'); + + controller.enqueue(2e-16); + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a third chunk)'); + + return reader.read().then(() => { + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a second chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1 + 2e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a third chunk)'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up positive, and not clamped)'); + +promise_test(() => { + const { reader, controller } = setupTestStream(); + + controller.enqueue(2e-16); + assert_equals(controller.desiredSize, 0 - 2e-16, 'desiredSize must be -2e16 after enqueueing such a chunk'); + + controller.enqueue(1); + assert_equals(controller.desiredSize, 0 - 2e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (adding a second chunk)'); + + return reader.read().then(() => { + assert_equals(controller.desiredSize, 0 - 2e-16 - 1 + 2e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (subtracting a chunk)'); + + return reader.read(); + }).then(() => { + assert_equals(controller.desiredSize, 0, + 'desiredSize must be calculated using double-precision floating-point arithmetic 
(subtracting a second chunk)'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up zero)'); + +function setupTestStream() { + const strategy = { + size(x) { + return x; + }, + highWaterMark: 0 + }; + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, strategy); + + return { reader: rs.getReader(), controller }; +} diff --git a/test/fixtures/wpt/streams/readable-streams/garbage-collection.any.js b/test/fixtures/wpt/streams/readable-streams/garbage-collection.any.js new file mode 100644 index 00000000000000..dad0ad1535a5bf --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/garbage-collection.any.js @@ -0,0 +1,70 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +'use strict'; + +promise_test(() => { + + let controller; + new ReadableStream({ + start(c) { + controller = c; + } + }); + + garbageCollect(); + + return delay(50).then(() => { + controller.close(); + assert_throws_js(TypeError, () => controller.close(), 'close should throw a TypeError the second time'); + controller.error(); + }); + +}, 'ReadableStreamController methods should continue working properly when scripts lose their reference to the ' + + 'readable stream'); + +promise_test(() => { + + let controller; + + const closedPromise = new ReadableStream({ + start(c) { + controller = c; + } + }).getReader().closed; + + garbageCollect(); + + return delay(50).then(() => controller.close()).then(() => closedPromise); + +}, 'ReadableStream closed promise should fulfill even if the stream and reader JS references are lost'); + +promise_test(t => { + + const theError = new Error('boo'); + let controller; + + const closedPromise = new ReadableStream({ + start(c) { + controller = c; + } + }).getReader().closed; + + garbageCollect(); + + return delay(50).then(() => controller.error(theError)) + .then(() => promise_rejects_exactly(t, theError, closedPromise)); + +}, 'ReadableStream closed promise should 
reject even if stream and reader JS references are lost'); + +promise_test(() => { + + const rs = new ReadableStream({}); + + rs.getReader(); + + garbageCollect(); + + return delay(50).then(() => assert_throws_js(TypeError, () => rs.getReader(), + 'old reader should still be locking the stream even after garbage collection')); + +}, 'Garbage-collecting a ReadableStreamDefaultReader should not unlock its stream'); diff --git a/test/fixtures/wpt/streams/readable-streams/general.any.js b/test/fixtures/wpt/streams/readable-streams/general.any.js new file mode 100644 index 00000000000000..4e54990823ab01 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/general.any.js @@ -0,0 +1,840 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-utils.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +test(() => { + + new ReadableStream(); // ReadableStream constructed with no parameters + new ReadableStream({ }); // ReadableStream constructed with an empty object as parameter + new ReadableStream({ type: undefined }); // ReadableStream constructed with undefined type + new ReadableStream(undefined); // ReadableStream constructed with undefined as parameter + + let x; + new ReadableStream(x); // ReadableStream constructed with an undefined variable as parameter + +}, 'ReadableStream can be constructed with no errors'); + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStream(null), 'constructor should throw when the source is null'); + +}, 'ReadableStream can\'t be constructed with garbage'); + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStream({ type: null }), + 'constructor should throw when the type is null'); + assert_throws_js(TypeError, () => new ReadableStream({ type: '' }), + 'constructor should throw when the type is empty string'); + assert_throws_js(TypeError, () => new ReadableStream({ type: 'asdf' }), + 'constructor should 
throw when the type is asdf'); + assert_throws_exactly( + error1, + () => new ReadableStream({ type: { get toString() { throw error1; } } }), + 'constructor should throw when ToString() throws' + ); + assert_throws_exactly( + error1, + () => new ReadableStream({ type: { toString() { throw error1; } } }), + 'constructor should throw when ToString() throws' + ); + +}, 'ReadableStream can\'t be constructed with an invalid type'); + +test(() => { + + assert_throws_js(TypeError, () => { + new ReadableStream({ start: 'potato' }); + }, 'constructor should throw when start is not a function'); + +}, 'ReadableStream constructor should throw for non-function start arguments'); + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStream({ cancel: '2' }), 'constructor should throw'); + +}, 'ReadableStream constructor will not tolerate initial garbage as cancel argument'); + +test(() => { + + assert_throws_js(TypeError, () => new ReadableStream({ pull: { } }), 'constructor should throw'); + +}, 'ReadableStream constructor will not tolerate initial garbage as pull argument'); + +test(() => { + + let startCalled = false; + + const source = { + start() { + assert_equals(this, source, 'source is this during start'); + startCalled = true; + } + }; + + new ReadableStream(source); + assert_true(startCalled); + +}, 'ReadableStream start should be called with the proper thisArg'); + +test(() => { + + let startCalled = false; + const source = { + start(controller) { + const properties = ['close', 'constructor', 'desiredSize', 'enqueue', 'error']; + assert_array_equals(Object.getOwnPropertyNames(Object.getPrototypeOf(controller)).sort(), properties, + 'prototype should have the right properties'); + + controller.test = ''; + assert_array_equals(Object.getOwnPropertyNames(Object.getPrototypeOf(controller)).sort(), properties, + 'prototype should still have the right properties'); + assert_not_equals(Object.getOwnPropertyNames(controller).indexOf('test'), -1, + '"test" should 
be a property of the controller'); + + startCalled = true; + } + }; + + new ReadableStream(source); + assert_true(startCalled); + +}, 'ReadableStream start controller parameter should be extensible'); + +test(() => { + (new ReadableStream()).getReader(undefined); + (new ReadableStream()).getReader({}); + (new ReadableStream()).getReader({ mode: undefined, notmode: 'ignored' }); + assert_throws_js(TypeError, () => (new ReadableStream()).getReader({ mode: 'potato' })); +}, 'default ReadableStream getReader() should only accept mode:undefined'); + +promise_test(() => { + + function SimpleStreamSource() {} + let resolve; + const promise = new Promise(r => resolve = r); + SimpleStreamSource.prototype = { + start: resolve + }; + + new ReadableStream(new SimpleStreamSource()); + return promise; + +}, 'ReadableStream should be able to call start method within prototype chain of its source'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + return delay(5).then(() => { + c.enqueue('a'); + c.close(); + }); + } + }); + + const reader = rs.getReader(); + return reader.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'value read should be the one enqueued'); + return reader.closed; + }); + +}, 'ReadableStream start should be able to return a promise'); + +promise_test(() => { + + const theError = new Error('rejected!'); + const rs = new ReadableStream({ + start() { + return delay(1).then(() => { + throw theError; + }); + } + }); + + return rs.getReader().closed.then(() => { + assert_unreached('closed promise should be rejected'); + }, e => { + assert_equals(e, theError, 'promise should be rejected with the same error'); + }); + +}, 'ReadableStream start should be able to return a promise and reject it'); + +promise_test(() => { + + const objects = [ + { potato: 'Give me more!' 
}, + 'test', + 1 + ]; + + const rs = new ReadableStream({ + start(c) { + for (const o of objects) { + c.enqueue(o); + } + c.close(); + } + }); + + const reader = rs.getReader(); + + return Promise.all([reader.read(), reader.read(), reader.read(), reader.closed]).then(r => { + assert_object_equals(r[0], { value: objects[0], done: false }, 'value read should be the one enqueued'); + assert_object_equals(r[1], { value: objects[1], done: false }, 'value read should be the one enqueued'); + assert_object_equals(r[2], { value: objects[2], done: false }, 'value read should be the one enqueued'); + }); + +}, 'ReadableStream should be able to enqueue different objects.'); + +promise_test(() => { + + const error = new Error('pull failure'); + const rs = new ReadableStream({ + pull() { + return Promise.reject(error); + } + }); + + const reader = rs.getReader(); + + let closed = false; + let read = false; + + return Promise.all([ + reader.closed.then(() => { + assert_unreached('closed should be rejected'); + }, e => { + closed = true; + assert_false(read); + assert_equals(e, error, 'closed should be rejected with the thrown error'); + }), + reader.read().then(() => { + assert_unreached('read() should be rejected'); + }, e => { + read = true; + assert_true(closed); + assert_equals(e, error, 'read() should be rejected with the thrown error'); + }) + ]); + +}, 'ReadableStream: if pull rejects, it should error the stream'); + +promise_test(() => { + + let pullCount = 0; + + new ReadableStream({ + pull() { + pullCount++; + } + }); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should be called once start finishes'); + return delay(10); + }).then(() => { + assert_equals(pullCount, 1, 'pull should be called exactly once'); + }); + +}, 'ReadableStream: should only call pull once upon starting the stream'); + +promise_test(() => { + + let pullCount = 0; + + const rs = new ReadableStream({ + pull(c) { + // Don't enqueue immediately after start. 
We want the stream to be empty when we call .read() on it. + if (pullCount > 0) { + c.enqueue(pullCount); + } + ++pullCount; + } + }); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should be called once start finishes'); + }).then(() => { + const reader = rs.getReader(); + const read = reader.read(); + assert_equals(pullCount, 2, 'pull should be called when read is called'); + return read; + }).then(result => { + assert_equals(pullCount, 3, 'pull should be called again in reaction to calling read'); + assert_object_equals(result, { value: 1, done: false }, 'the result read should be the one enqueued'); + }); + +}, 'ReadableStream: should call pull when trying to read from a started, empty stream'); + +promise_test(() => { + + let pullCount = 0; + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + }, + pull() { + pullCount++; + } + }); + + const read = rs.getReader().read(); + assert_equals(pullCount, 0, 'calling read() should not cause pull to be called yet'); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should be called once start finishes'); + return read; + }).then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'first read() should return first chunk'); + assert_equals(pullCount, 1, 'pull should not have been called again'); + return delay(10); + }).then(() => { + assert_equals(pullCount, 1, 'pull should be called exactly once'); + }); + +}, 'ReadableStream: should only call pull once on a non-empty stream read from before start fulfills'); + +promise_test(() => { + + let pullCount = 0; + const startPromise = Promise.resolve(); + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + }, + pull() { + pullCount++; + } + }); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 0, 'pull should not be called once start finishes, since the queue is full'); + + const read = rs.getReader().read(); + assert_equals(pullCount, 1, 'calling read() 
should cause pull to be called immediately'); + return read; + }).then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'first read() should return first chunk'); + return delay(10); + }).then(() => { + assert_equals(pullCount, 1, 'pull should be called exactly once'); + }); + +}, 'ReadableStream: should only call pull once on a non-empty stream read from after start fulfills'); + +promise_test(() => { + + let pullCount = 0; + let controller; + + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + ++pullCount; + } + }); + + const reader = rs.getReader(); + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should have been called once by the time the stream starts'); + + controller.enqueue('a'); + assert_equals(pullCount, 1, 'pull should not have been called again after enqueue'); + + return reader.read(); + }).then(() => { + assert_equals(pullCount, 2, 'pull should have been called again after read'); + + return delay(10); + }).then(() => { + assert_equals(pullCount, 2, 'pull should be called exactly twice'); + }); +}, 'ReadableStream: should call pull in reaction to read()ing the last chunk, if not draining'); + +promise_test(() => { + + let pullCount = 0; + let controller; + + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + pull() { + ++pullCount; + } + }); + + const reader = rs.getReader(); + + return flushAsyncEvents().then(() => { + assert_equals(pullCount, 1, 'pull should have been called once by the time the stream starts'); + + controller.enqueue('a'); + assert_equals(pullCount, 1, 'pull should not have been called again after enqueue'); + + controller.close(); + + return reader.read(); + }).then(() => { + assert_equals(pullCount, 1, 'pull should not have been called a second time after read'); + + return delay(10); + }).then(() => { + assert_equals(pullCount, 1, 'pull should be called exactly once'); + }); + +}, 'ReadableStream: should not call pull() in reaction to 
read()ing the last chunk, if draining'); + +promise_test(() => { + + let resolve; + let returnedPromise; + let timesCalled = 0; + + const rs = new ReadableStream({ + pull(c) { + c.enqueue(++timesCalled); + returnedPromise = new Promise(r => resolve = r); + return returnedPromise; + } + }); + const reader = rs.getReader(); + + return reader.read() + .then(result1 => { + assert_equals(timesCalled, 1, + 'pull should have been called once after start, but not yet have been called a second time'); + assert_object_equals(result1, { value: 1, done: false }, 'read() should fulfill with the enqueued value'); + + return delay(10); + }).then(() => { + assert_equals(timesCalled, 1, 'after 10 ms, pull should still only have been called once'); + + resolve(); + return returnedPromise; + }).then(() => { + assert_equals(timesCalled, 2, + 'after the promise returned by pull is fulfilled, pull should be called a second time'); + }); + +}, 'ReadableStream: should not call pull until the previous pull call\'s promise fulfills'); + +promise_test(() => { + + let timesCalled = 0; + + const rs = new ReadableStream( + { + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.enqueue('c'); + }, + pull() { + ++timesCalled; + } + }, + { + size() { + return 1; + }, + highWaterMark: Infinity + } + ); + const reader = rs.getReader(); + + return flushAsyncEvents().then(() => { + return reader.read(); + }).then(result1 => { + assert_object_equals(result1, { value: 'a', done: false }, 'first chunk should be as expected'); + + return reader.read(); + }).then(result2 => { + assert_object_equals(result2, { value: 'b', done: false }, 'second chunk should be as expected'); + + return reader.read(); + }).then(result3 => { + assert_object_equals(result3, { value: 'c', done: false }, 'third chunk should be as expected'); + + return delay(10); + }).then(() => { + // Once for after start, and once for every read. 
+ assert_equals(timesCalled, 4, 'pull() should be called exactly four times'); + }); + +}, 'ReadableStream: should pull after start, and after every read'); + +promise_test(() => { + + let timesCalled = 0; + const startPromise = Promise.resolve(); + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.close(); + return startPromise; + }, + pull() { + ++timesCalled; + } + }); + + const reader = rs.getReader(); + return startPromise.then(() => { + assert_equals(timesCalled, 0, 'after start finishes, pull should not have been called'); + + return reader.read(); + }).then(() => { + assert_equals(timesCalled, 0, 'reading should not have triggered a pull call'); + + return reader.closed; + }).then(() => { + assert_equals(timesCalled, 0, 'stream should have closed with still no calls to pull'); + }); + +}, 'ReadableStream: should not call pull after start if the stream is now closed'); + +promise_test(() => { + + let timesCalled = 0; + let resolve; + const ready = new Promise(r => resolve = r); + + new ReadableStream( + { + start() {}, + pull(c) { + c.enqueue(++timesCalled); + + if (timesCalled === 4) { + resolve(); + } + } + }, + { + size() { + return 1; + }, + highWaterMark: 4 + } + ); + + return ready.then(() => { + // after start: size = 0, pull() + // after enqueue(1): size = 1, pull() + // after enqueue(2): size = 2, pull() + // after enqueue(3): size = 3, pull() + // after enqueue(4): size = 4, do not pull + assert_equals(timesCalled, 4, 'pull() should have been called four times'); + }); + +}, 'ReadableStream: should call pull after enqueueing from inside pull (with no read requests), if strategy allows'); + +promise_test(() => { + + let pullCalled = false; + + const rs = new ReadableStream({ + pull(c) { + pullCalled = true; + c.close(); + } + }); + + const reader = rs.getReader(); + return reader.closed.then(() => { + assert_true(pullCalled); + }); + +}, 'ReadableStream pull should be able to close a stream.'); + +promise_test(t => { + + const 
controllerError = { name: 'controller error' }; + + const rs = new ReadableStream({ + pull(c) { + c.error(controllerError); + } + }); + + return promise_rejects_exactly(t, controllerError, rs.getReader().closed); + +}, 'ReadableStream pull should be able to error a stream.'); + +promise_test(t => { + + const controllerError = { name: 'controller error' }; + const thrownError = { name: 'thrown error' }; + + const rs = new ReadableStream({ + pull(c) { + c.error(controllerError); + throw thrownError; + } + }); + + return promise_rejects_exactly(t, controllerError, rs.getReader().closed); + +}, 'ReadableStream pull should be able to error a stream and throw.'); + +test(() => { + + let startCalled = false; + + new ReadableStream({ + start(c) { + assert_equals(c.enqueue('a'), undefined, 'the first enqueue should return undefined'); + c.close(); + + assert_throws_js(TypeError, () => c.enqueue('b'), 'enqueue after close should throw a TypeError'); + startCalled = true; + } + }); + + assert_true(startCalled); + +}, 'ReadableStream: enqueue should throw when the stream is readable but draining'); + +test(() => { + + let startCalled = false; + + new ReadableStream({ + start(c) { + c.close(); + + assert_throws_js(TypeError, () => c.enqueue('a'), 'enqueue after close should throw a TypeError'); + startCalled = true; + } + }); + + assert_true(startCalled); + +}, 'ReadableStream: enqueue should throw when the stream is closed'); + +promise_test(() => { + + let startCalled = 0; + let pullCalled = 0; + let cancelCalled = 0; + + /* eslint-disable no-use-before-define */ + class Source { + start(c) { + startCalled++; + assert_equals(this, theSource, 'start() should be called with the correct this'); + c.enqueue('a'); + } + + pull() { + pullCalled++; + assert_equals(this, theSource, 'pull() should be called with the correct this'); + } + + cancel() { + cancelCalled++; + assert_equals(this, theSource, 'cancel() should be called with the correct this'); + } + } + /* eslint-enable 
no-use-before-define */ + + const theSource = new Source(); + theSource.debugName = 'the source object passed to the constructor'; // makes test failures easier to diagnose + + const rs = new ReadableStream(theSource); + const reader = rs.getReader(); + + return reader.read().then(() => { + reader.releaseLock(); + rs.cancel(); + assert_equals(startCalled, 1); + assert_equals(pullCalled, 1); + assert_equals(cancelCalled, 1); + return rs.getReader().closed; + }); + +}, 'ReadableStream: should call underlying source methods as methods'); + +test(() => { + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 10, 'desiredSize must start at highWaterMark'); + c.close(); + assert_equals(c.desiredSize, 0, 'after closing, desiredSize must be 0'); + } + }, { + highWaterMark: 10 + }); +}, 'ReadableStream: desiredSize when closed'); + +test(() => { + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 10, 'desiredSize must start at highWaterMark'); + c.error(); + assert_equals(c.desiredSize, null, 'after erroring, desiredSize must be null'); + } + }, { + highWaterMark: 10 + }); +}, 'ReadableStream: desiredSize when errored'); + +test(() => { + class Subclass extends ReadableStream { + extraFunction() { + return true; + } + } + assert_equals( + Object.getPrototypeOf(Subclass.prototype), ReadableStream.prototype, + 'Subclass.prototype\'s prototype should be ReadableStream.prototype'); + assert_equals(Object.getPrototypeOf(Subclass), ReadableStream, + 'Subclass\'s prototype should be ReadableStream'); + const sub = new Subclass(); + assert_true(sub instanceof ReadableStream, + 'Subclass object should be an instance of ReadableStream'); + assert_true(sub instanceof Subclass, + 'Subclass object should be an instance of Subclass'); + const lockedGetter = Object.getOwnPropertyDescriptor( + ReadableStream.prototype, 'locked').get; + assert_equals(lockedGetter.call(sub), sub.locked, + 'Subclass object should pass brand check'); + 
assert_true(sub.extraFunction(), + 'extraFunction() should be present on Subclass object'); +}, 'Subclassing ReadableStream should work'); + +test(() => { + + let startCalled = false; + new ReadableStream({ + start(c) { + assert_equals(c.desiredSize, 1); + c.enqueue('a'); + assert_equals(c.desiredSize, 0); + c.enqueue('b'); + assert_equals(c.desiredSize, -1); + c.enqueue('c'); + assert_equals(c.desiredSize, -2); + c.enqueue('d'); + assert_equals(c.desiredSize, -3); + c.enqueue('e'); + startCalled = true; + } + }); + + assert_true(startCalled); + +}, 'ReadableStream strategies: the default strategy should give desiredSize of 1 to start, decreasing by 1 per enqueue'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + const reader = rs.getReader(); + + assert_equals(controller.desiredSize, 1, 'desiredSize should start at 1'); + controller.enqueue('a'); + assert_equals(controller.desiredSize, 0, 'desiredSize should decrease to 0 after first enqueue'); + + return reader.read().then(result1 => { + assert_object_equals(result1, { value: 'a', done: false }, 'first chunk read should be correct'); + + assert_equals(controller.desiredSize, 1, 'desiredSize should go up to 1 after the first read'); + controller.enqueue('b'); + assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the second enqueue'); + + return reader.read(); + }).then(result2 => { + assert_object_equals(result2, { value: 'b', done: false }, 'second chunk read should be correct'); + + assert_equals(controller.desiredSize, 1, 'desiredSize should go up to 1 after the second read'); + controller.enqueue('c'); + assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the third enqueue'); + + return reader.read(); + }).then(result3 => { + assert_object_equals(result3, { value: 'c', done: false }, 'third chunk read should be correct'); + + assert_equals(controller.desiredSize, 1, 'desiredSize 
should go up to 1 after the third read'); + controller.enqueue('d'); + assert_equals(controller.desiredSize, 0, 'desiredSize should go down to 0 after the fourth enqueue'); + }); + +}, 'ReadableStream strategies: the default strategy should continue giving desiredSize of 1 if the chunks are read immediately'); + +promise_test(t => { + + const randomSource = new RandomPushSource(8); + + const rs = new ReadableStream({ + start(c) { + assert_equals(typeof c, 'object', 'c should be an object in start'); + assert_equals(typeof c.enqueue, 'function', 'enqueue should be a function in start'); + assert_equals(typeof c.close, 'function', 'close should be a function in start'); + assert_equals(typeof c.error, 'function', 'error should be a function in start'); + + randomSource.ondata = t.step_func(chunk => { + if (!c.enqueue(chunk) <= 0) { + randomSource.readStop(); + } + }); + + randomSource.onend = c.close.bind(c); + randomSource.onerror = c.error.bind(c); + }, + + pull(c) { + assert_equals(typeof c, 'object', 'c should be an object in pull'); + assert_equals(typeof c.enqueue, 'function', 'enqueue should be a function in pull'); + assert_equals(typeof c.close, 'function', 'close should be a function in pull'); + + randomSource.readStart(); + } + }); + + return readableStreamToArray(rs).then(chunks => { + assert_equals(chunks.length, 8, '8 chunks should be read'); + for (const chunk of chunks) { + assert_equals(chunk.length, 128, 'chunk should have 128 bytes'); + } + }); + +}, 'ReadableStream integration test: adapting a random push source'); + +promise_test(() => { + + const rs = sequentialReadableStream(10); + + return readableStreamToArray(rs).then(chunks => { + assert_true(rs.source.closed, 'source should be closed after all chunks are read'); + assert_array_equals(chunks, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 'the expected 10 chunks should be read'); + }); + +}, 'ReadableStream integration test: adapting a sync pull source'); + +promise_test(() => { + + const rs = 
sequentialReadableStream(10, { async: true }); + + return readableStreamToArray(rs).then(chunks => { + assert_true(rs.source.closed, 'source should be closed after all chunks are read'); + assert_array_equals(chunks, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 'the expected 10 chunks should be read'); + }); + +}, 'ReadableStream integration test: adapting an async pull source'); diff --git a/test/fixtures/wpt/streams/readable-streams/patched-global.any.js b/test/fixtures/wpt/streams/readable-streams/patched-global.any.js new file mode 100644 index 00000000000000..576a39f6777e2b --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/patched-global.any.js @@ -0,0 +1,142 @@ +// META: global=window,worker,jsshell +'use strict'; + +// Tests which patch the global environment are kept separate to avoid +// interfering with other tests. + +const ReadableStream_prototype_locked_get = + Object.getOwnPropertyDescriptor(ReadableStream.prototype, 'locked').get; + +// Verify that |rs| passes the brand check as a readable stream. 
+function isReadableStream(rs) { + try { + ReadableStream_prototype_locked_get.call(rs); + return true; + } catch (e) { + return false; + } +} + +test(t => { + const rs = new ReadableStream(); + + const trappedProperties = ['highWaterMark', 'size', 'start', 'type', 'mode']; + for (const property of trappedProperties) { + // eslint-disable-next-line no-extend-native, accessor-pairs + Object.defineProperty(Object.prototype, property, { + get() { throw new Error(`${property} getter called`); }, + configurable: true + }); + } + t.add_cleanup(() => { + for (const property of trappedProperties) { + delete Object.prototype[property]; + } + }); + + const [branch1, branch2] = rs.tee(); + assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream'); + assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream'); +}, 'ReadableStream tee() should not touch Object.prototype properties'); + +test(t => { + const rs = new ReadableStream(); + + const oldReadableStream = self.ReadableStream; + + self.ReadableStream = function() { + throw new Error('ReadableStream called on global object'); + }; + + t.add_cleanup(() => { + self.ReadableStream = oldReadableStream; + }); + + const [branch1, branch2] = rs.tee(); + + assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream'); + assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream'); +}, 'ReadableStream tee() should not call the global ReadableStream'); + +promise_test(async t => { + const rs = new ReadableStream({ + start(c) { + c.enqueue(1); + c.enqueue(2); + c.enqueue(3); + c.close(); + } + }); + + const oldReadableStreamGetReader = ReadableStream.prototype.getReader; + + const ReadableStreamDefaultReader = (new ReadableStream()).getReader().constructor; + const oldDefaultReaderRead = ReadableStreamDefaultReader.prototype.read; + const oldDefaultReaderCancel = ReadableStreamDefaultReader.prototype.cancel; + const oldDefaultReaderReleaseLock = 
ReadableStreamDefaultReader.prototype.releaseLock; + + self.ReadableStream.prototype.getReader = function() { + throw new Error('patched getReader() called'); + }; + + ReadableStreamDefaultReader.prototype.read = function() { + throw new Error('patched read() called'); + }; + ReadableStreamDefaultReader.prototype.cancel = function() { + throw new Error('patched cancel() called'); + }; + ReadableStreamDefaultReader.prototype.releaseLock = function() { + throw new Error('patched releaseLock() called'); + }; + + t.add_cleanup(() => { + self.ReadableStream.prototype.getReader = oldReadableStreamGetReader; + + ReadableStreamDefaultReader.prototype.read = oldDefaultReaderRead; + ReadableStreamDefaultReader.prototype.cancel = oldDefaultReaderCancel; + ReadableStreamDefaultReader.prototype.releaseLock = oldDefaultReaderReleaseLock; + }); + + // read the first chunk, then cancel + for await (const chunk of rs) { + break; + } + + // should be able to acquire a new reader + const reader = oldReadableStreamGetReader.call(rs); + // stream should be cancelled + await reader.closed; +}, 'ReadableStream async iterator should use the original values of getReader() and ReadableStreamDefaultReader ' + + 'methods'); + +test(t => { + const oldPromiseThen = Promise.prototype.then; + Promise.prototype.then = () => { + throw new Error('patched then() called'); + }; + t.add_cleanup(() => { + Promise.prototype.then = oldPromiseThen; + }); + const [branch1, branch2] = new ReadableStream().tee(); + assert_true(isReadableStream(branch1), 'branch1 should be a ReadableStream'); + assert_true(isReadableStream(branch2), 'branch2 should be a ReadableStream'); +}, 'tee() should not call Promise.prototype.then()'); + +test(t => { + const oldPromiseThen = Promise.prototype.then; + Promise.prototype.then = () => { + throw new Error('patched then() called'); + }; + t.add_cleanup(() => { + Promise.prototype.then = oldPromiseThen; + }); + let readableController; + const rs = new ReadableStream({ + 
start(c) { + readableController = c; + } + }); + const ws = new WritableStream(); + rs.pipeTo(ws); + readableController.close(); +}, 'pipeTo() should not call Promise.prototype.then()'); diff --git a/test/fixtures/wpt/streams/readable-streams/reentrant-strategies.any.js b/test/fixtures/wpt/streams/readable-streams/reentrant-strategies.any.js new file mode 100644 index 00000000000000..a02d08b0acc50a --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/reentrant-strategies.any.js @@ -0,0 +1,264 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +'use strict'; + +// The size() function of the readable strategy can re-entrantly call back into the ReadableStream implementation. This +// makes it risky to cache state across the call to ReadableStreamDefaultControllerEnqueue. These tests attempt to catch +// such errors. They are separated from the other strategy tests because no real user code should ever do anything like +// this. + +const error1 = new Error('error1'); +error1.name = 'error1'; + +promise_test(() => { + let controller; + let calls = 0; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + ++calls; + if (calls < 2) { + controller.enqueue('b'); + } + return 1; + } + }); + controller.enqueue('a'); + controller.close(); + return readableStreamToArray(rs) + .then(array => assert_array_equals(array, ['b', 'a'], 'array should contain two chunks')); +}, 'enqueue() inside size() should work'); + +promise_test(() => { + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + // The queue is empty. + controller.close(); + // The state has gone from "readable" to "closed". + return 1; + // This chunk will be enqueued, but will be impossible to read because the state is already "closed". 
+ } + }); + controller.enqueue('a'); + return readableStreamToArray(rs) + .then(array => assert_array_equals(array, [], 'array should contain no chunks')); + // The chunk 'a' is still in rs's queue. It is closed so 'a' cannot be read. +}, 'close() inside size() should not crash'); + +promise_test(() => { + let controller; + let calls = 0; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + ++calls; + if (calls === 2) { + // The queue contains one chunk. + controller.close(); + // The state is still "readable", but closeRequest is now true. + } + return 1; + } + }); + controller.enqueue('a'); + controller.enqueue('b'); + return readableStreamToArray(rs) + .then(array => assert_array_equals(array, ['a', 'b'], 'array should contain two chunks')); +}, 'close request inside size() should work'); + +promise_test(t => { + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + controller.error(error1); + return 1; + } + }); + controller.enqueue('a'); + return promise_rejects_exactly(t, error1, rs.getReader().read(), 'read() should reject'); +}, 'error() inside size() should work'); + +promise_test(() => { + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + assert_equals(controller.desiredSize, 1, 'desiredSize should be 1'); + return 1; + }, + highWaterMark: 1 + }); + controller.enqueue('a'); + controller.close(); + return readableStreamToArray(rs) + .then(array => assert_array_equals(array, ['a'], 'array should contain one chunk')); +}, 'desiredSize inside size() should work'); + +promise_test(t => { + let cancelPromise; + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + }, + cancel: t.step_func(reason => { + assert_equals(reason, error1, 'reason should be error1'); + assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue() should throw'); + }) + }, { + size() { + cancelPromise = 
rs.cancel(error1); + return 1; + }, + highWaterMark: Infinity + }); + controller.enqueue('a'); + const reader = rs.getReader(); + return Promise.all([ + reader.closed, + cancelPromise + ]); +}, 'cancel() inside size() should work'); + +promise_test(() => { + let controller; + let pipeToPromise; + const ws = recordingWritableStream(); + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + if (!pipeToPromise) { + pipeToPromise = rs.pipeTo(ws); + } + return 1; + }, + highWaterMark: 1 + }); + controller.enqueue('a'); + assert_not_equals(pipeToPromise, undefined); + + // Some pipeTo() implementations need an additional chunk enqueued in order for the first one to be processed. See + // https://github.com/whatwg/streams/issues/794 for background. + controller.enqueue('a'); + + // Give pipeTo() a chance to process the queued chunks. + return delay(0).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'a'], 'ws should contain two chunks'); + controller.close(); + return pipeToPromise; + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'a', 'close'], 'target should have been closed'); + }); +}, 'pipeTo() inside size() should behave as expected'); + +promise_test(() => { + let controller; + let readPromise; + let calls = 0; + let readResolved = false; + let reader; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + // This is triggered by controller.enqueue(). The queue is empty and there are no pending reads. This read is + // added to the list of pending reads. 
+ readPromise = reader.read(); + ++calls; + return 1; + }, + highWaterMark: 0 + }); + reader = rs.getReader(); + controller.enqueue('a'); + readPromise.then(() => { + readResolved = true; + }); + return flushAsyncEvents().then(() => { + assert_false(readResolved); + controller.enqueue('b'); + assert_equals(calls, 1, 'size() should have been called once'); + return delay(0); + }).then(() => { + assert_true(readResolved); + assert_equals(calls, 1, 'size() should only be called once'); + return readPromise; + }).then(({ value, done }) => { + assert_false(done, 'done should be false'); + // See https://github.com/whatwg/streams/issues/794 for why this chunk is not 'a'. + assert_equals(value, 'b', 'chunk should have been read'); + assert_equals(calls, 1, 'calls should still be 1'); + return reader.read(); + }).then(({ value, done }) => { + assert_false(done, 'done should be false again'); + assert_equals(value, 'a', 'chunk a should come after b'); + }); +}, 'read() inside of size() should behave as expected'); + +promise_test(() => { + let controller; + let reader; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + reader = rs.getReader(); + return 1; + } + }); + controller.enqueue('a'); + return reader.read().then(({ value, done }) => { + assert_false(done, 'done should be false'); + assert_equals(value, 'a', 'value should be a'); + }); +}, 'getReader() inside size() should work'); + +promise_test(() => { + let controller; + let branch1; + let branch2; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }, { + size() { + [branch1, branch2] = rs.tee(); + return 1; + } + }); + controller.enqueue('a'); + assert_true(rs.locked, 'rs should be locked'); + controller.close(); + return Promise.all([ + readableStreamToArray(branch1).then(array => assert_array_equals(array, ['a'], 'branch1 should have one chunk')), + readableStreamToArray(branch2).then(array => assert_array_equals(array, ['a'], 'branch2 should have one 
chunk')) + ]); +}, 'tee() inside size() should work'); diff --git a/test/fixtures/wpt/streams/readable-streams/tee.any.js b/test/fixtures/wpt/streams/readable-streams/tee.any.js new file mode 100644 index 00000000000000..761f6e9c3599c1 --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/tee.any.js @@ -0,0 +1,541 @@ +// META: global=window,worker,jsshell +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +test(() => { + + const rs = new ReadableStream(); + const result = rs.tee(); + + assert_true(Array.isArray(result), 'return value should be an array'); + assert_equals(result.length, 2, 'array should have length 2'); + assert_equals(result[0].constructor, ReadableStream, '0th element should be a ReadableStream'); + assert_equals(result[1].constructor, ReadableStream, '1st element should be a ReadableStream'); + +}, 'ReadableStream teeing: rs.tee() returns an array of two ReadableStreams'); + +promise_test(t => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + const reader1 = branch1.getReader(); + const reader2 = branch2.getReader(); + + reader2.closed.then(t.unreached_func('branch2 should not be closed')); + + return Promise.all([ + reader1.closed, + reader1.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'first chunk from branch1 should be correct'); + }), + reader1.read().then(r => { + assert_object_equals(r, { value: 'b', done: false }, 'second chunk from branch1 should be correct'); + }), + reader1.read().then(r => { + assert_object_equals(r, { value: undefined, done: true }, 'third read() from branch1 should be done'); + }), + reader2.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'first chunk from branch2 should be correct'); + }) + ]); + +}, 
'ReadableStream teeing: should be able to read one branch to the end without affecting the other'); + +promise_test(() => { + + const theObject = { the: 'test object' }; + const rs = new ReadableStream({ + start(c) { + c.enqueue(theObject); + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + const reader1 = branch1.getReader(); + const reader2 = branch2.getReader(); + + return Promise.all([reader1.read(), reader2.read()]).then(values => { + assert_object_equals(values[0], values[1], 'the values should be equal'); + }); + +}, 'ReadableStream teeing: values should be equal across each branch'); + +promise_test(t => { + + const theError = { name: 'boo!' }; + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + }, + pull() { + throw theError; + } + }); + + const branches = rs.tee(); + const reader1 = branches[0].getReader(); + const reader2 = branches[1].getReader(); + + reader1.label = 'reader1'; + reader2.label = 'reader2'; + + return Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed), + reader1.read().then(r => { + assert_object_equals(r, { value: 'a', done: false }, 'should be able to read the first chunk in branch1'); + }), + reader1.read().then(r => { + assert_object_equals(r, { value: 'b', done: false }, 'should be able to read the second chunk in branch1'); + + return promise_rejects_exactly(t, theError, reader2.read()); + }) + .then(() => promise_rejects_exactly(t, theError, reader1.read())) + ]); + +}, 'ReadableStream teeing: errors in the source should propagate to both branches'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + + const branches = rs.tee(); + const branch1 = branches[0]; + const branch2 = branches[1]; + branch1.cancel(); + + return Promise.all([ + readableStreamToArray(branch1).then(chunks => { + 
assert_array_equals(chunks, [], 'branch1 should have no chunks'); + }), + readableStreamToArray(branch2).then(chunks => { + assert_array_equals(chunks, ['a', 'b'], 'branch2 should have two chunks'); + }) + ]); + +}, 'ReadableStream teeing: canceling branch1 should not impact branch2'); + +promise_test(() => { + + const rs = new ReadableStream({ + start(c) { + c.enqueue('a'); + c.enqueue('b'); + c.close(); + } + }); + + const branches = rs.tee(); + const branch1 = branches[0]; + const branch2 = branches[1]; + branch2.cancel(); + + return Promise.all([ + readableStreamToArray(branch1).then(chunks => { + assert_array_equals(chunks, ['a', 'b'], 'branch1 should have two chunks'); + }), + readableStreamToArray(branch2).then(chunks => { + assert_array_equals(chunks, [], 'branch2 should have no chunks'); + }) + ]); + +}, 'ReadableStream teeing: canceling branch2 should not impact branch1'); + +promise_test(() => { + + const reason1 = new Error('We\'re wanted men.'); + const reason2 = new Error('I have the death sentence on twelve systems.'); + + let resolve; + const promise = new Promise(r => resolve = r); + const rs = new ReadableStream({ + cancel(reason) { + assert_array_equals(reason, [reason1, reason2], + 'the cancel reason should be an array containing those from the branches'); + resolve(); + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + branch1.cancel(reason1); + branch2.cancel(reason2); + + return promise; + +}, 'ReadableStream teeing: canceling both branches should aggregate the cancel reasons into an array'); + +promise_test(() => { + + const reason1 = new Error('This little one\'s not worth the effort.'); + const reason2 = new Error('Come, let me get you something.'); + + let resolve; + const promise = new Promise(r => resolve = r); + const rs = new ReadableStream({ + cancel(reason) { + assert_array_equals(reason, [reason1, reason2], + 'the cancel reason should be an array containing those from the branches'); + 
resolve(); + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + return Promise.all([ + branch2.cancel(reason2), + branch1.cancel(reason1), + promise + ]); + +}, 'ReadableStream teeing: canceling both branches in reverse order should aggregate the cancel reasons into an array'); + +promise_test(t => { + + const theError = { name: 'I\'ll be careful.' }; + const rs = new ReadableStream({ + cancel() { + throw theError; + } + }); + + const branch = rs.tee(); + const branch1 = branch[0]; + const branch2 = branch[1]; + + return Promise.all([ + promise_rejects_exactly(t, theError, branch1.cancel()), + promise_rejects_exactly(t, theError, branch2.cancel()) + ]); + +}, 'ReadableStream teeing: failing to cancel the original stream should cause cancel() to reject on branches'); + +promise_test(t => { + + const theError = { name: 'You just watch yourself!' }; + let controller; + const stream = new ReadableStream({ start(c) { controller = c; } }); + const [branch1, branch2] = stream.tee(); + + controller.error(theError); + + return Promise.all([ + promise_rejects_exactly(t, theError, branch1.cancel()), + promise_rejects_exactly(t, theError, branch2.cancel()) + ]); + +}, 'ReadableStream teeing: erroring a teed stream should properly handle canceled branches'); + +promise_test(t => { + + let controller; + const stream = new ReadableStream({ start(c) { controller = c; } }); + const [branch1, branch2] = stream.tee(); + + const error = new Error(); + error.name = 'distinctive'; + + // Ensure neither branch is waiting in ReadableStreamDefaultReaderRead(). + controller.enqueue(); + controller.enqueue(); + + return delay(0).then(() => { + // This error will have to be detected via [[closedPromise]]. 
+ controller.error(error); + + const reader1 = branch1.getReader(); + const reader2 = branch2.getReader(); + + return Promise.all([ + promise_rejects_exactly(t, error, reader1.closed, 'reader1.closed should reject'), + promise_rejects_exactly(t, error, reader2.closed, 'reader2.closed should reject') + ]); + }); + +}, 'ReadableStream teeing: erroring a teed stream should error both branches'); + +promise_test(() => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const branches = rs.tee(); + const reader1 = branches[0].getReader(); + const reader2 = branches[1].getReader(); + + const promise = Promise.all([reader1.closed, reader2.closed]); + + controller.close(); + return promise; + +}, 'ReadableStream teeing: closing the original should immediately close the branches'); + +promise_test(t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const branches = rs.tee(); + const reader1 = branches[0].getReader(); + const reader2 = branches[1].getReader(); + + const theError = { name: 'boo!' 
}; + const promise = Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed) + ]); + + controller.error(theError); + return promise; + +}, 'ReadableStream teeing: erroring the original should immediately error the branches'); + +promise_test(async t => { + + let controller; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + const cancelPromise = reader2.cancel(); + + controller.enqueue('a'); + + const read1 = await reader1.read(); + assert_object_equals(read1, { value: 'a', done: false }, 'first read() from branch1 should fulfill with the chunk'); + + controller.close(); + + const read2 = await reader1.read(); + assert_object_equals(read2, { value: undefined, done: true }, 'second read() from branch1 should be done'); + + await Promise.all([ + reader1.closed, + cancelPromise + ]); + +}, 'ReadableStream teeing: canceling branch1 should finish when branch2 reads until end of stream'); + +promise_test(async t => { + + let controller; + const theError = { name: 'boo!' }; + const rs = new ReadableStream({ + start(c) { + controller = c; + } + }); + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + const cancelPromise = reader2.cancel(); + + controller.error(theError); + + await Promise.all([ + promise_rejects_exactly(t, theError, reader1.read()), + cancelPromise + ]); + +}, 'ReadableStream teeing: canceling branch1 should finish when original stream errors'); + +promise_test(async () => { + + const rs = new ReadableStream({}); + + const [branch1, branch2] = rs.tee(); + + const cancel1 = branch1.cancel(); + await flushAsyncEvents(); + const cancel2 = branch2.cancel(); + + await Promise.all([cancel1, cancel2]); + +}, 'ReadableStream teeing: canceling both branches in sequence with delay'); + +promise_test(async t => { + + const theError = { name: 'boo!' 
}; + const rs = new ReadableStream({ + cancel() { + throw theError; + } + }); + + const [branch1, branch2] = rs.tee(); + + const cancel1 = branch1.cancel(); + await flushAsyncEvents(); + const cancel2 = branch2.cancel(); + + await Promise.all([ + promise_rejects_exactly(t, theError, cancel1), + promise_rejects_exactly(t, theError, cancel2) + ]); + +}, 'ReadableStream teeing: failing to cancel when canceling both branches in sequence with delay'); + +test(t => { + + // Copy original global. + const oldReadableStream = ReadableStream; + const getReader = ReadableStream.prototype.getReader; + + const origRS = new ReadableStream(); + + // Replace the global ReadableStream constructor with one that doesn't work. + ReadableStream = function() { + throw new Error('global ReadableStream constructor called'); + }; + t.add_cleanup(() => { + ReadableStream = oldReadableStream; + }); + + // This will probably fail if the global ReadableStream constructor was used. + const [rs1, rs2] = origRS.tee(); + + // These will definitely fail if the global ReadableStream constructor was used. + assert_not_equals(getReader.call(rs1), undefined, 'getReader should work on rs1'); + assert_not_equals(getReader.call(rs2), undefined, 'getReader should work on rs2'); + +}, 'ReadableStreamTee should not use a modified ReadableStream constructor from the global object'); + +promise_test(t => { + + const rs = recordingReadableStream({}, { highWaterMark: 0 }); + + // Create two branches, each with a HWM of 1. This should result in one + // chunk being pulled, not two. 
+ rs.tee(); + return flushAsyncEvents().then(() => { + assert_array_equals(rs.events, ['pull'], 'pull should only be called once'); + }); + +}, 'ReadableStreamTee should not pull more chunks than can fit in the branch queue'); + +promise_test(t => { + + const rs = recordingReadableStream({ + pull(controller) { + controller.enqueue('a'); + } + }, { highWaterMark: 0 }); + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + return Promise.all([reader1.read(), reader2.read()]) + .then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + }); + +}, 'ReadableStreamTee should only pull enough to fill the emptiest queue'); + +promise_test(t => { + + const rs = recordingReadableStream({}, { highWaterMark: 0 }); + const theError = { name: 'boo!' }; + + rs.controller.error(theError); + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + + return flushAsyncEvents().then(() => { + assert_array_equals(rs.events, [], 'pull should not be called'); + + return Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed) + ]); + }); + +}, 'ReadableStreamTee should not pull when original is already errored'); + +for (const branch of [1, 2]) { + promise_test(t => { + + const rs = recordingReadableStream({}, { highWaterMark: 0 }); + const theError = { name: 'boo!' }; + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + + return flushAsyncEvents().then(() => { + assert_array_equals(rs.events, ['pull'], 'pull should be called once'); + + rs.controller.enqueue('a'); + + const reader = (branch === 1) ? 
reader1 : reader2; + return reader.read(); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + + rs.controller.error(theError); + + return Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed) + ]); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + }); + + }, `ReadableStreamTee stops pulling when original stream errors while branch ${branch} is reading`); +} + +promise_test(t => { + + const rs = recordingReadableStream({}, { highWaterMark: 0 }); + const theError = { name: 'boo!' }; + + const [reader1, reader2] = rs.tee().map(branch => branch.getReader()); + + return flushAsyncEvents().then(() => { + assert_array_equals(rs.events, ['pull'], 'pull should be called once'); + + rs.controller.enqueue('a'); + + return Promise.all([reader1.read(), reader2.read()]); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + + rs.controller.error(theError); + + return Promise.all([ + promise_rejects_exactly(t, theError, reader1.closed), + promise_rejects_exactly(t, theError, reader2.closed) + ]); + }).then(() => flushAsyncEvents()).then(() => { + assert_array_equals(rs.events, ['pull', 'pull'], 'pull should be called twice'); + }); + +}, 'ReadableStreamTee stops pulling when original stream errors while both branches are reading'); diff --git a/test/fixtures/wpt/streams/readable-streams/templated.any.js b/test/fixtures/wpt/streams/readable-streams/templated.any.js new file mode 100644 index 00000000000000..4d524e69fee19e --- /dev/null +++ b/test/fixtures/wpt/streams/readable-streams/templated.any.js @@ -0,0 +1,143 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-test-templates.js +'use strict'; + 
+// Run the readable stream test templates against readable streams created directly using the constructor + +const theError = { name: 'boo!' }; +const chunks = ['a', 'b']; + +templatedRSEmpty('ReadableStream (empty)', () => { + return new ReadableStream(); +}); + +templatedRSEmptyReader('ReadableStream (empty) reader', () => { + return streamAndDefaultReader(new ReadableStream()); +}); + +templatedRSClosed('ReadableStream (closed via call in start)', () => { + return new ReadableStream({ + start(c) { + c.close(); + } + }); +}); + +templatedRSClosedReader('ReadableStream reader (closed before getting reader)', () => { + let controller; + const stream = new ReadableStream({ + start(c) { + controller = c; + } + }); + controller.close(); + const result = streamAndDefaultReader(stream); + return result; +}); + +templatedRSClosedReader('ReadableStream reader (closed after getting reader)', () => { + let controller; + const stream = new ReadableStream({ + start(c) { + controller = c; + } + }); + const result = streamAndDefaultReader(stream); + controller.close(); + return result; +}); + +templatedRSClosed('ReadableStream (closed via cancel)', () => { + const stream = new ReadableStream(); + stream.cancel(); + return stream; +}); + +templatedRSClosedReader('ReadableStream reader (closed via cancel after getting reader)', () => { + const stream = new ReadableStream(); + const result = streamAndDefaultReader(stream); + result.reader.cancel(); + return result; +}); + +templatedRSErrored('ReadableStream (errored via call in start)', () => { + return new ReadableStream({ + start(c) { + c.error(theError); + } + }); +}, theError); + +templatedRSErroredSyncOnly('ReadableStream (errored via call in start)', () => { + return new ReadableStream({ + start(c) { + c.error(theError); + } + }); +}, theError); + +templatedRSErrored('ReadableStream (errored via returning a rejected promise in start)', () => { + return new ReadableStream({ + start() { + return Promise.reject(theError); + } 
+ }); +}, theError); + +templatedRSErroredReader('ReadableStream (errored via returning a rejected promise in start) reader', () => { + return streamAndDefaultReader(new ReadableStream({ + start() { + return Promise.reject(theError); + } + })); +}, theError); + +templatedRSErroredReader('ReadableStream reader (errored before getting reader)', () => { + let controller; + const stream = new ReadableStream({ + start(c) { + controller = c; + } + }); + controller.error(theError); + return streamAndDefaultReader(stream); +}, theError); + +templatedRSErroredReader('ReadableStream reader (errored after getting reader)', () => { + let controller; + const result = streamAndDefaultReader(new ReadableStream({ + start(c) { + controller = c; + } + })); + controller.error(theError); + return result; +}, theError); + +templatedRSTwoChunksOpenReader('ReadableStream (two chunks enqueued, still open) reader', () => { + return streamAndDefaultReader(new ReadableStream({ + start(c) { + c.enqueue(chunks[0]); + c.enqueue(chunks[1]); + } + })); +}, chunks); + +templatedRSTwoChunksClosedReader('ReadableStream (two chunks enqueued, then closed) reader', () => { + let doClose; + const stream = new ReadableStream({ + start(c) { + c.enqueue(chunks[0]); + c.enqueue(chunks[1]); + doClose = c.close.bind(c); + } + }); + const result = streamAndDefaultReader(stream); + doClose(); + return result; +}, chunks); + +function streamAndDefaultReader(stream) { + return { stream, reader: stream.getReader() }; +} diff --git a/test/fixtures/wpt/streams/resources/recording-streams.js b/test/fixtures/wpt/streams/resources/recording-streams.js new file mode 100644 index 00000000000000..34d02a143dccdb --- /dev/null +++ b/test/fixtures/wpt/streams/resources/recording-streams.js @@ -0,0 +1,130 @@ +'use strict'; + +self.recordingReadableStream = (extras = {}, strategy) => { + let controllerToCopyOver; + const stream = new ReadableStream({ + start(controller) { + controllerToCopyOver = controller; + + if 
(extras.start) { + return extras.start(controller); + } + + return undefined; + }, + pull(controller) { + stream.events.push('pull'); + + if (extras.pull) { + return extras.pull(controller); + } + + return undefined; + }, + cancel(reason) { + stream.events.push('cancel', reason); + stream.eventsWithoutPulls.push('cancel', reason); + + if (extras.cancel) { + return extras.cancel(reason); + } + + return undefined; + } + }, strategy); + + stream.controller = controllerToCopyOver; + stream.events = []; + stream.eventsWithoutPulls = []; + + return stream; +}; + +self.recordingWritableStream = (extras = {}, strategy) => { + let controllerToCopyOver; + const stream = new WritableStream({ + start(controller) { + controllerToCopyOver = controller; + + if (extras.start) { + return extras.start(controller); + } + + return undefined; + }, + write(chunk, controller) { + stream.events.push('write', chunk); + + if (extras.write) { + return extras.write(chunk, controller); + } + + return undefined; + }, + close() { + stream.events.push('close'); + + if (extras.close) { + return extras.close(); + } + + return undefined; + }, + abort(e) { + stream.events.push('abort', e); + + if (extras.abort) { + return extras.abort(e); + } + + return undefined; + } + }, strategy); + + stream.controller = controllerToCopyOver; + stream.events = []; + + return stream; +}; + +self.recordingTransformStream = (extras = {}, writableStrategy, readableStrategy) => { + let controllerToCopyOver; + const stream = new TransformStream({ + start(controller) { + controllerToCopyOver = controller; + + if (extras.start) { + return extras.start(controller); + } + + return undefined; + }, + + transform(chunk, controller) { + stream.events.push('transform', chunk); + + if (extras.transform) { + return extras.transform(chunk, controller); + } + + controller.enqueue(chunk); + + return undefined; + }, + + flush(controller) { + stream.events.push('flush'); + + if (extras.flush) { + return extras.flush(controller); + } + 
+ return undefined; + } + }, writableStrategy, readableStrategy); + + stream.controller = controllerToCopyOver; + stream.events = []; + + return stream; +}; diff --git a/test/fixtures/wpt/streams/resources/rs-test-templates.js b/test/fixtures/wpt/streams/resources/rs-test-templates.js new file mode 100644 index 00000000000000..700bd9c3ca9f9e --- /dev/null +++ b/test/fixtures/wpt/streams/resources/rs-test-templates.js @@ -0,0 +1,638 @@ +'use strict'; + +// These tests can be run against any readable stream produced by the web platform that meets the given descriptions. +// For readable stream tests, the factory should return the stream. For reader tests, the factory should return a +// { stream, reader } object. (You can use this to vary the time at which you acquire a reader.) + +self.templatedRSEmpty = (label, factory) => { + test(() => {}, 'Running templatedRSEmpty with ' + label); + + test(() => { + + const rs = factory(); + + assert_equals(typeof rs.locked, 'boolean', 'has a boolean locked getter'); + assert_equals(typeof rs.cancel, 'function', 'has a cancel method'); + assert_equals(typeof rs.getReader, 'function', 'has a getReader method'); + assert_equals(typeof rs.pipeThrough, 'function', 'has a pipeThrough method'); + assert_equals(typeof rs.pipeTo, 'function', 'has a pipeTo method'); + assert_equals(typeof rs.tee, 'function', 'has a tee method'); + + }, label + ': instances have the correct methods and properties'); + + test(() => { + const rs = factory(); + + assert_throws_js(TypeError, () => rs.getReader({ mode: '' }), 'empty string mode should throw'); + assert_throws_js(TypeError, () => rs.getReader({ mode: null }), 'null mode should throw'); + assert_throws_js(TypeError, () => rs.getReader({ mode: 'asdf' }), 'asdf mode should throw'); + assert_throws_js(TypeError, () => rs.getReader(5), '5 should throw'); + + // Should not throw + rs.getReader(null); + + }, label + ': calling getReader with invalid arguments should throw appropriate errors'); +}; + 
+self.templatedRSClosed = (label, factory) => { + test(() => {}, 'Running templatedRSClosed with ' + label); + + promise_test(() => { + + const rs = factory(); + const cancelPromise1 = rs.cancel(); + const cancelPromise2 = rs.cancel(); + + assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises'); + + return Promise.all([ + cancelPromise1.then(v => assert_equals(v, undefined, 'first cancel() call should fulfill with undefined')), + cancelPromise2.then(v => assert_equals(v, undefined, 'second cancel() call should fulfill with undefined')) + ]); + + }, label + ': cancel() should return a distinct fulfilled promise each time'); + + test(() => { + + const rs = factory(); + assert_false(rs.locked, 'locked getter should return false'); + + }, label + ': locked should be false'); + + test(() => { + + const rs = factory(); + rs.getReader(); // getReader() should not throw. + + }, label + ': getReader() should be OK'); + + test(() => { + + const rs = factory(); + + const reader = rs.getReader(); + reader.releaseLock(); + + const reader2 = rs.getReader(); // Getting a second reader should not throw. + reader2.releaseLock(); + + rs.getReader(); // Getting a third reader should not throw. 
+ + }, label + ': should be able to acquire multiple readers if they are released in succession'); + + test(() => { + + const rs = factory(); + + rs.getReader(); + + assert_throws_js(TypeError, () => rs.getReader(), 'getting a second reader should throw'); + assert_throws_js(TypeError, () => rs.getReader(), 'getting a third reader should throw'); + + }, label + ': should not be able to acquire a second reader if we don\'t release the first one'); +}; + +self.templatedRSErrored = (label, factory, error) => { + test(() => {}, 'Running templatedRSErrored with ' + label); + + promise_test(t => { + + const rs = factory(); + const reader = rs.getReader(); + + return Promise.all([ + promise_rejects_exactly(t, error, reader.closed), + promise_rejects_exactly(t, error, reader.read()) + ]); + + }, label + ': getReader() should return a reader that acts errored'); + + promise_test(t => { + + const rs = factory(); + const reader = rs.getReader(); + + return Promise.all([ + promise_rejects_exactly(t, error, reader.read()), + promise_rejects_exactly(t, error, reader.read()), + promise_rejects_exactly(t, error, reader.closed) + ]); + + }, label + ': read() twice should give the error each time'); + + test(() => { + const rs = factory(); + + assert_false(rs.locked, 'locked getter should return false'); + }, label + ': locked should be false'); +}; + +self.templatedRSErroredSyncOnly = (label, factory, error) => { + test(() => {}, 'Running templatedRSErroredSyncOnly with ' + label); + + promise_test(t => { + + const rs = factory(); + rs.getReader().releaseLock(); + const reader = rs.getReader(); // Calling getReader() twice does not throw (the stream is not locked). 
+ + return promise_rejects_exactly(t, error, reader.closed); + + }, label + ': should be able to obtain a second reader, with the correct closed promise'); + + test(() => { + + const rs = factory(); + rs.getReader(); + + assert_throws_js(TypeError, () => rs.getReader(), 'getting a second reader should throw a TypeError'); + assert_throws_js(TypeError, () => rs.getReader(), 'getting a third reader should throw a TypeError'); + + }, label + ': should not be able to obtain additional readers if we don\'t release the first lock'); + + promise_test(t => { + + const rs = factory(); + const cancelPromise1 = rs.cancel(); + const cancelPromise2 = rs.cancel(); + + assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises'); + + return Promise.all([ + promise_rejects_exactly(t, error, cancelPromise1), + promise_rejects_exactly(t, error, cancelPromise2) + ]); + + }, label + ': cancel() should return a distinct rejected promise each time'); + + promise_test(t => { + + const rs = factory(); + const reader = rs.getReader(); + const cancelPromise1 = reader.cancel(); + const cancelPromise2 = reader.cancel(); + + assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct promises'); + + return Promise.all([ + promise_rejects_exactly(t, error, cancelPromise1), + promise_rejects_exactly(t, error, cancelPromise2) + ]); + + }, label + ': reader cancel() should return a distinct rejected promise each time'); +}; + +self.templatedRSEmptyReader = (label, factory) => { + test(() => {}, 'Running templatedRSEmptyReader with ' + label); + + test(() => { + + const reader = factory().reader; + + assert_true('closed' in reader, 'has a closed property'); + assert_equals(typeof reader.closed.then, 'function', 'closed property is thenable'); + + assert_equals(typeof reader.cancel, 'function', 'has a cancel method'); + assert_equals(typeof reader.read, 'function', 'has a read method'); + assert_equals(typeof reader.releaseLock, 
'function', 'has a releaseLock method'); + + }, label + ': instances have the correct methods and properties'); + + test(() => { + + const stream = factory().stream; + + assert_true(stream.locked, 'locked getter should return true'); + + }, label + ': locked should be true'); + + promise_test(t => { + + const reader = factory().reader; + + reader.read().then( + t.unreached_func('read() should not fulfill'), + t.unreached_func('read() should not reject') + ); + + return delay(500); + + }, label + ': read() should never settle'); + + promise_test(t => { + + const reader = factory().reader; + + reader.read().then( + t.unreached_func('read() should not fulfill'), + t.unreached_func('read() should not reject') + ); + + reader.read().then( + t.unreached_func('read() should not fulfill'), + t.unreached_func('read() should not reject') + ); + + return delay(500); + + }, label + ': two read()s should both never settle'); + + test(() => { + + const reader = factory().reader; + assert_not_equals(reader.read(), reader.read(), 'the promises returned should be distinct'); + + }, label + ': read() should return distinct promises each time'); + + test(() => { + + const stream = factory().stream; + assert_throws_js(TypeError, () => stream.getReader(), 'stream.getReader() should throw a TypeError'); + + }, label + ': getReader() again on the stream should fail'); + + promise_test(t => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + reader.read().then( + t.unreached_func('first read() should not fulfill'), + t.unreached_func('first read() should not reject') + ); + + reader.read().then( + t.unreached_func('second read() should not fulfill'), + t.unreached_func('second read() should not reject') + ); + + reader.closed.then( + t.unreached_func('closed should not fulfill'), + t.unreached_func('closed should not reject') + ); + + assert_throws_js(TypeError, () => reader.releaseLock(), 'releaseLock should throw 
a TypeError'); + + assert_true(stream.locked, 'the stream should still be locked'); + + return delay(500); + + }, label + ': releasing the lock with pending read requests should throw but the read requests should stay pending'); + + promise_test(t => { + + const reader = factory().reader; + reader.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, reader.read()), + promise_rejects_js(t, TypeError, reader.read()) + ]); + + }, label + ': releasing the lock should cause further read() calls to reject with a TypeError'); + + promise_test(t => { + + const reader = factory().reader; + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + + assert_equals(closedBefore, closedAfter, 'the closed promise should not change identity'); + + return promise_rejects_js(t, TypeError, closedBefore); + + }, label + ': releasing the lock should cause closed calls to reject with a TypeError'); + + test(() => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + reader.releaseLock(); + assert_false(stream.locked, 'locked getter should return false'); + + }, label + ': releasing the lock should cause locked to become false'); + + promise_test(() => { + + const reader = factory().reader; + reader.cancel(); + + return reader.read().then(r => { + assert_object_equals(r, { value: undefined, done: true }, 'read()ing from the reader should give a done result'); + }); + + }, label + ': canceling via the reader should cause the reader to act closed'); + + promise_test(t => { + + const stream = factory().stream; + return promise_rejects_js(t, TypeError, stream.cancel()); + + }, label + ': canceling via the stream should fail'); +}; + +self.templatedRSClosedReader = (label, factory) => { + test(() => {}, 'Running templatedRSClosedReader with ' + label); + + promise_test(() => { + + const reader = factory().reader; + + return reader.read().then(v => { + 
assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly'); + }); + + }, label + ': read() should fulfill with { value: undefined, done: true }'); + + promise_test(() => { + + const reader = factory().reader; + + return Promise.all([ + reader.read().then(v => { + assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly'); + }), + reader.read().then(v => { + assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly'); + }) + ]); + + }, label + ': read() multiple times should fulfill with { value: undefined, done: true }'); + + promise_test(() => { + + const reader = factory().reader; + + return reader.read().then(() => reader.read()).then(v => { + assert_object_equals(v, { value: undefined, done: true }, 'read() should fulfill correctly'); + }); + + }, label + ': read() should work when used within another read() fulfill callback'); + + promise_test(() => { + + const reader = factory().reader; + + return reader.closed.then(v => assert_equals(v, undefined, 'reader closed should fulfill with undefined')); + + }, label + ': closed should fulfill with undefined'); + + promise_test(t => { + + const reader = factory().reader; + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + + assert_not_equals(closedBefore, closedAfter, 'the closed promise should change identity'); + + return Promise.all([ + closedBefore.then(v => assert_equals(v, undefined, 'reader.closed acquired before release should fulfill')), + promise_rejects_js(t, TypeError, closedAfter) + ]); + + }, label + ': releasing the lock should cause closed to reject and change identity'); + + promise_test(() => { + + const reader = factory().reader; + const cancelPromise1 = reader.cancel(); + const cancelPromise2 = reader.cancel(); + const closedReaderPromise = reader.closed; + + assert_not_equals(cancelPromise1, cancelPromise2, 'cancel() calls should return distinct 
promises'); + assert_not_equals(cancelPromise1, closedReaderPromise, 'cancel() promise 1 should be distinct from reader.closed'); + assert_not_equals(cancelPromise2, closedReaderPromise, 'cancel() promise 2 should be distinct from reader.closed'); + + return Promise.all([ + cancelPromise1.then(v => assert_equals(v, undefined, 'first cancel() should fulfill with undefined')), + cancelPromise2.then(v => assert_equals(v, undefined, 'second cancel() should fulfill with undefined')) + ]); + + }, label + ': cancel() should return a distinct fulfilled promise each time'); +}; + +self.templatedRSErroredReader = (label, factory, error) => { + test(() => {}, 'Running templatedRSErroredReader with ' + label); + + promise_test(t => { + + const reader = factory().reader; + return promise_rejects_exactly(t, error, reader.closed); + + }, label + ': closed should reject with the error'); + + promise_test(t => { + + const reader = factory().reader; + const closedBefore = reader.closed; + + return promise_rejects_exactly(t, error, closedBefore).then(() => { + reader.releaseLock(); + + const closedAfter = reader.closed; + assert_not_equals(closedBefore, closedAfter, 'the closed promise should change identity'); + + return promise_rejects_js(t, TypeError, closedAfter); + }); + + }, label + ': releasing the lock should cause closed to reject and change identity'); + + promise_test(t => { + + const reader = factory().reader; + return promise_rejects_exactly(t, error, reader.read()); + + }, label + ': read() should reject with the error'); +}; + +self.templatedRSTwoChunksOpenReader = (label, factory, chunks) => { + test(() => {}, 'Running templatedRSTwoChunksOpenReader with ' + label); + + promise_test(() => { + + const reader = factory().reader; + + return Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct'); + }), + reader.read().then(r => { + assert_object_equals(r, { value: chunks[1], done: false }, 
'second result should be correct'); + }) + ]); + + }, label + ': calling read() twice without waiting will eventually give both chunks (sequential)'); + + promise_test(() => { + + const reader = factory().reader; + + return reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct'); + + return reader.read().then(r2 => { + assert_object_equals(r2, { value: chunks[1], done: false }, 'second result should be correct'); + }); + }); + + }, label + ': calling read() twice without waiting will eventually give both chunks (nested)'); + + test(() => { + + const reader = factory().reader; + assert_not_equals(reader.read(), reader.read(), 'the promises returned should be distinct'); + + }, label + ': read() should return distinct promises each time'); + + promise_test(() => { + + const reader = factory().reader; + + const promise1 = reader.closed.then(v => { + assert_equals(v, undefined, 'reader closed should fulfill with undefined'); + }); + + const promise2 = reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, + 'promise returned before cancellation should fulfill with a chunk'); + }); + + reader.cancel(); + + const promise3 = reader.read().then(r => { + assert_object_equals(r, { value: undefined, done: true }, + 'promise returned after cancellation should fulfill with an end-of-stream signal'); + }); + + return Promise.all([promise1, promise2, promise3]); + + }, label + ': cancel() after a read() should still give that single read result'); +}; + +self.templatedRSTwoChunksClosedReader = function (label, factory, chunks) { + test(() => {}, 'Running templatedRSTwoChunksClosedReader with ' + label); + + promise_test(() => { + + const reader = factory().reader; + + return Promise.all([ + reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct'); + }), + reader.read().then(r => { + assert_object_equals(r, { value: 
chunks[1], done: false }, 'second result should be correct'); + }), + reader.read().then(r => { + assert_object_equals(r, { value: undefined, done: true }, 'third result should be correct'); + }) + ]); + + }, label + ': third read(), without waiting, should give { value: undefined, done: true } (sequential)'); + + promise_test(() => { + + const reader = factory().reader; + + return reader.read().then(r => { + assert_object_equals(r, { value: chunks[0], done: false }, 'first result should be correct'); + + return reader.read().then(r2 => { + assert_object_equals(r2, { value: chunks[1], done: false }, 'second result should be correct'); + + return reader.read().then(r3 => { + assert_object_equals(r3, { value: undefined, done: true }, 'third result should be correct'); + }); + }); + }); + + }, label + ': third read(), without waiting, should give { value: undefined, done: true } (nested)'); + + promise_test(() => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + assert_true(stream.locked, 'stream should start locked'); + + const promise = reader.closed.then(v => { + assert_equals(v, undefined, 'reader closed should fulfill with undefined'); + assert_true(stream.locked, 'stream should remain locked'); + }); + + reader.read(); + reader.read(); + + return promise; + + }, label + + ': draining the stream via read() should cause the reader closed promise to fulfill, but locked stays true'); + + promise_test(() => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + const promise = reader.closed.then(() => { + assert_true(stream.locked, 'the stream should start locked'); + reader.releaseLock(); // Releasing the lock after reader closed should not throw. 
+ assert_false(stream.locked, 'the stream should end unlocked'); + }); + + reader.read(); + reader.read(); + + return promise; + + }, label + ': releasing the lock after the stream is closed should cause locked to become false'); + + promise_test(t => { + + const reader = factory().reader; + + reader.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, reader.read()), + promise_rejects_js(t, TypeError, reader.read()), + promise_rejects_js(t, TypeError, reader.read()) + ]); + + }, label + ': releasing the lock should cause further read() calls to reject with a TypeError'); + + promise_test(() => { + + const streamAndReader = factory(); + const stream = streamAndReader.stream; + const reader = streamAndReader.reader; + + const readerClosed = reader.closed; + + assert_equals(reader.closed, readerClosed, 'accessing reader.closed twice in succession gives the same value'); + + const promise = reader.read().then(() => { + assert_equals(reader.closed, readerClosed, 'reader.closed is the same after read() fulfills'); + + reader.releaseLock(); + + assert_equals(reader.closed, readerClosed, 'reader.closed is the same after releasing the lock'); + + const newReader = stream.getReader(); + return newReader.read(); + }); + + assert_equals(reader.closed, readerClosed, 'reader.closed is the same after calling read()'); + + return promise; + + }, label + ': reader\'s closed property always returns the same promise'); +}; diff --git a/test/fixtures/wpt/streams/resources/rs-utils.js b/test/fixtures/wpt/streams/resources/rs-utils.js new file mode 100644 index 00000000000000..f1a014275a2fbc --- /dev/null +++ b/test/fixtures/wpt/streams/resources/rs-utils.js @@ -0,0 +1,197 @@ +'use strict'; +(function () { + + class RandomPushSource { + constructor(toPush) { + this.pushed = 0; + this.toPush = toPush; + this.started = false; + this.paused = false; + this.closed = false; + + this._intervalHandle = null; + } + + readStart() { + if (this.closed) { + return; + } + + if 
(!this.started) { + this._intervalHandle = setInterval(writeChunk, 2); + this.started = true; + } + + if (this.paused) { + this._intervalHandle = setInterval(writeChunk, 2); + this.paused = false; + } + + const source = this; + function writeChunk() { + if (source.paused) { + return; + } + + source.pushed++; + + if (source.toPush > 0 && source.pushed > source.toPush) { + if (source._intervalHandle) { + clearInterval(source._intervalHandle); + source._intervalHandle = undefined; + } + source.closed = true; + source.onend(); + } else { + source.ondata(randomChunk(128)); + } + } + } + + readStop() { + if (this.paused) { + return; + } + + if (this.started) { + this.paused = true; + clearInterval(this._intervalHandle); + this._intervalHandle = undefined; + } else { + throw new Error('Can\'t pause reading an unstarted source.'); + } + } + } + + function randomChunk(size) { + let chunk = ''; + + for (let i = 0; i < size; ++i) { + // Add a random character from the basic printable ASCII set. + chunk += String.fromCharCode(Math.round(Math.random() * 84) + 32); + } + + return chunk; + } + + function readableStreamToArray(readable, reader) { + if (reader === undefined) { + reader = readable.getReader(); + } + + const chunks = []; + + return pump(); + + function pump() { + return reader.read().then(result => { + if (result.done) { + return chunks; + } + + chunks.push(result.value); + return pump(); + }); + } + } + + class SequentialPullSource { + constructor(limit, options) { + const async = options && options.async; + + this.current = 0; + this.limit = limit; + this.opened = false; + this.closed = false; + + this._exec = f => f(); + if (async) { + this._exec = f => step_timeout(f, 0); + } + } + + open(cb) { + this._exec(() => { + this.opened = true; + cb(); + }); + } + + read(cb) { + this._exec(() => { + if (++this.current <= this.limit) { + cb(null, false, this.current); + } else { + cb(null, true, null); + } + }); + } + + close(cb) { + this._exec(() => { + this.closed = 
true; + cb(); + }); + } + } + + function sequentialReadableStream(limit, options) { + const sequentialSource = new SequentialPullSource(limit, options); + + const stream = new ReadableStream({ + start() { + return new Promise((resolve, reject) => { + sequentialSource.open(err => { + if (err) { + reject(err); + } + resolve(); + }); + }); + }, + + pull(c) { + return new Promise((resolve, reject) => { + sequentialSource.read((err, done, chunk) => { + if (err) { + reject(err); + } else if (done) { + sequentialSource.close(err2 => { + if (err2) { + reject(err2); + } + c.close(); + resolve(); + }); + } else { + c.enqueue(chunk); + resolve(); + } + }); + }); + } + }); + + stream.source = sequentialSource; + + return stream; + } + + function transferArrayBufferView(view) { + const noopByteStream = new ReadableStream({ + type: 'bytes', + pull(c) { + c.byobRequest.respond(c.byobRequest.view.byteLength); + c.close(); + } + }); + const reader = noopByteStream.getReader({ mode: 'byob' }); + return reader.read(view).then((result) => result.value); + } + + self.RandomPushSource = RandomPushSource; + self.readableStreamToArray = readableStreamToArray; + self.sequentialReadableStream = sequentialReadableStream; + self.transferArrayBufferView = transferArrayBufferView; + +}()); diff --git a/test/fixtures/wpt/streams/resources/test-utils.js b/test/fixtures/wpt/streams/resources/test-utils.js new file mode 100644 index 00000000000000..0593980e1055b5 --- /dev/null +++ b/test/fixtures/wpt/streams/resources/test-utils.js @@ -0,0 +1,74 @@ +'use strict'; + +self.getterRejects = (t, obj, getterName, target) => { + const getter = Object.getOwnPropertyDescriptor(obj, getterName).get; + + return promise_rejects_js(t, TypeError, getter.call(target), getterName + ' should reject with a TypeError'); +}; + +self.getterRejectsForAll = (t, obj, getterName, targets) => { + return Promise.all(targets.map(target => self.getterRejects(t, obj, getterName, target))); +}; + +self.methodRejects = (t, obj, 
methodName, target, args) => { + const method = obj[methodName]; + + return promise_rejects_js(t, TypeError, method.apply(target, args), + methodName + ' should reject with a TypeError'); +}; + +self.methodRejectsForAll = (t, obj, methodName, targets, args) => { + return Promise.all(targets.map(target => self.methodRejects(t, obj, methodName, target, args))); +}; + +self.getterThrows = (obj, getterName, target) => { + const getter = Object.getOwnPropertyDescriptor(obj, getterName).get; + + assert_throws_js(TypeError, () => getter.call(target), getterName + ' should throw a TypeError'); +}; + +self.getterThrowsForAll = (obj, getterName, targets) => { + targets.forEach(target => self.getterThrows(obj, getterName, target)); +}; + +self.methodThrows = (obj, methodName, target, args) => { + const method = obj[methodName]; + assert_equals(typeof method, 'function', methodName + ' should exist'); + + assert_throws_js(TypeError, () => method.apply(target, args), methodName + ' should throw a TypeError'); +}; + +self.methodThrowsForAll = (obj, methodName, targets, args) => { + targets.forEach(target => self.methodThrows(obj, methodName, target, args)); +}; + +self.constructorThrowsForAll = (constructor, firstArgs) => { + firstArgs.forEach(firstArg => assert_throws_js(TypeError, () => new constructor(firstArg), + 'constructor should throw a TypeError')); +}; + +self.garbageCollect = () => { + if (self.gc) { + // Use --expose_gc for V8 (and Node.js) + // to pass this flag at chrome launch use: --js-flags="--expose-gc" + // Exposed in SpiderMonkey shell as well + self.gc(); + } else if (self.GCController) { + // Present in some WebKit development environments + GCController.collect(); + } else { + /* eslint-disable no-console */ + console.warn('Tests are running without the ability to do manual garbage collection. 
They will still work, but ' + + 'coverage will be suboptimal.'); + /* eslint-enable no-console */ + } +}; + +self.delay = ms => new Promise(resolve => step_timeout(resolve, ms)); + +// For tests which verify that the implementation doesn't do something it shouldn't, it's better not to use a +// timeout. Instead, assume that any reasonable implementation is going to finish work after 2 times around the event +// loop, and use flushAsyncEvents().then(() => assert_array_equals(...)); +// Some tests include promise resolutions which may mean the test code takes a couple of event loop visits itself. So go +// around an extra 2 times to avoid complicating those tests. +self.flushAsyncEvents = () => delay(0).then(() => delay(0)).then(() => delay(0)).then(() => delay(0)); diff --git a/test/fixtures/wpt/streams/transferable/deserialize-error.window.js b/test/fixtures/wpt/streams/transferable/deserialize-error.window.js new file mode 100644 index 00000000000000..64cf2bbfb1293e --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/deserialize-error.window.js @@ -0,0 +1,39 @@ +// META: script=/common/get-host-info.sub.js +// META: script=resources/create-wasm-module.js +// META: timeout=long + +const { HTTPS_NOTSAMESITE_ORIGIN } = get_host_info(); +const iframe = document.createElement('iframe'); +iframe.src = `${HTTPS_NOTSAMESITE_ORIGIN}/streams/transferable/resources/deserialize-error-frame.html`; + +window.addEventListener('message', async evt => { + // Tests are serialized to make the results deterministic. 
+ switch (evt.data) { + case 'init done': { + const ws = new WritableStream(); + iframe.contentWindow.postMessage(ws, '*', [ws]); + return; + } + + case 'ws done': { + const module = await createWasmModule(); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(module); + } + }); + iframe.contentWindow.postMessage(rs, '*', [rs]); + return; + } + + case 'rs done': { + iframe.remove(); + } + } +}); + +// Need to do this after adding the listener to ensure we catch the first +// message. +document.body.appendChild(iframe); + +fetch_tests_from_window(iframe.contentWindow); diff --git a/test/fixtures/wpt/streams/transferable/readable-stream.html b/test/fixtures/wpt/streams/transferable/readable-stream.html new file mode 100644 index 00000000000000..59b57ce6723c10 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/readable-stream.html @@ -0,0 +1,255 @@ + + + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/reason.html b/test/fixtures/wpt/streams/transferable/reason.html new file mode 100644 index 00000000000000..4251aa85b816bb --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/reason.html @@ -0,0 +1,132 @@ + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/resources/create-wasm-module.js b/test/fixtures/wpt/streams/transferable/resources/create-wasm-module.js new file mode 100644 index 00000000000000..37064af95c55c0 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/create-wasm-module.js @@ -0,0 +1,11 @@ +// There aren't many cloneable types that will cause an error on +// deserialization. WASM modules have the property that it's an error to +// deserialize them cross-site, which works for our purposes. +async function createWasmModule() { + // It doesn't matter what the module is, so we use one from another + // test. 
+ const response = + await fetch("/wasm/serialization/module/resources/incrementer.wasm"); + const ab = await response.arrayBuffer(); + return WebAssembly.compile(ab); +} diff --git a/test/fixtures/wpt/streams/transferable/resources/deserialize-error-frame.html b/test/fixtures/wpt/streams/transferable/resources/deserialize-error-frame.html new file mode 100644 index 00000000000000..5ec2fcda2cdd8d --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/deserialize-error-frame.html @@ -0,0 +1,39 @@ + + + + diff --git a/test/fixtures/wpt/streams/transferable/resources/echo-iframe.html b/test/fixtures/wpt/streams/transferable/resources/echo-iframe.html new file mode 100644 index 00000000000000..68f68503439fdb --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/echo-iframe.html @@ -0,0 +1,7 @@ + + + diff --git a/test/fixtures/wpt/streams/transferable/resources/echo-worker.js b/test/fixtures/wpt/streams/transferable/resources/echo-worker.js new file mode 100644 index 00000000000000..806c2371083399 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/echo-worker.js @@ -0,0 +1,2 @@ +// A worker that just transfers back any message that is sent to it. +onmessage = evt => postMessage(evt.data, [evt.data]); diff --git a/test/fixtures/wpt/streams/transferable/resources/helpers.js b/test/fixtures/wpt/streams/transferable/resources/helpers.js new file mode 100644 index 00000000000000..12504537f91eab --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/helpers.js @@ -0,0 +1,132 @@ +'use strict'; + +(() => { + // Create a ReadableStream that will pass the tests in + // testTransferredReadableStream(), below. + function createOriginalReadableStream() { + return new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.close(); + } + }); + } + + // Common tests to roughly determine that |rs| is a correctly transferred + // version of a stream created by createOriginalReadableStream(). 
+ function testTransferredReadableStream(rs) { + assert_equals(rs.constructor, ReadableStream, + 'rs should be a ReadableStream in this realm'); + assert_true(rs instanceof ReadableStream, + 'instanceof check should pass'); + + // Perform a brand-check on |rs| in the process of calling getReader(). + const reader = ReadableStream.prototype.getReader.call(rs); + + return reader.read().then(({value, done}) => { + assert_false(done, 'done should be false'); + assert_equals(value, 'a', 'value should be "a"'); + return reader.read(); + }).then(({done}) => { + assert_true(done, 'done should be true'); + }); + } + + function testMessage(msg) { + assert_array_equals(msg.ports, [], 'there should be no ports in the event'); + return testTransferredReadableStream(msg.data); + } + + function testMessageEvent(target) { + return new Promise((resolve, reject) => { + target.addEventListener('message', ev => { + try { + resolve(testMessage(ev)); + } catch (e) { + reject(e); + } + }, {once: true}); + }); + } + + function testMessageEventOrErrorMessage(target) { + return new Promise((resolve, reject) => { + target.addEventListener('message', ev => { + if (typeof ev.data === 'string') { + // Assume it's an error message and reject with it. + reject(ev.data); + return; + } + + try { + resolve(testMessage(ev)); + } catch (e) { + reject(e); + } + }, {once: true}); + }); + } + + function checkTestResults(target) { + return new Promise((resolve, reject) => { + target.onmessage = msg => { + // testharness.js sends us objects which we need to ignore. + if (typeof msg.data !== 'string') + return; + + if (msg.data === 'OK') { + resolve(); + } else { + reject(msg.data); + } + }; + }); + } + + // These tests assume that a transferred ReadableStream will behave the same + // regardless of how it was transferred. This enables us to simply transfer the + // stream to ourselves. 
+ function createTransferredReadableStream(underlyingSource) { + const original = new ReadableStream(underlyingSource); + const promise = new Promise((resolve, reject) => { + addEventListener('message', msg => { + const rs = msg.data; + if (rs instanceof ReadableStream) { + resolve(rs); + } else { + reject(new Error(`what is this thing: "${rs}"?`)); + } + }, {once: true}); + }); + postMessage(original, '*', [original]); + return promise; + } + + function recordingTransferredReadableStream(underlyingSource, strategy) { + const original = recordingReadableStream(underlyingSource, strategy); + const promise = new Promise((resolve, reject) => { + addEventListener('message', msg => { + const rs = msg.data; + if (rs instanceof ReadableStream) { + rs.events = original.events; + rs.eventsWithoutPulls = original.eventsWithoutPulls; + rs.controller = original.controller; + resolve(rs); + } else { + reject(new Error(`what is this thing: "${rs}"?`)); + } + }, {once: true}); + }); + postMessage(original, '*', [original]); + return promise; + } + + self.createOriginalReadableStream = createOriginalReadableStream; + self.testMessage = testMessage; + self.testMessageEvent = testMessageEvent; + self.testMessageEventOrErrorMessage = testMessageEventOrErrorMessage; + self.checkTestResults = checkTestResults; + self.createTransferredReadableStream = createTransferredReadableStream; + self.recordingTransferredReadableStream = recordingTransferredReadableStream; + +})(); diff --git a/test/fixtures/wpt/streams/transferable/resources/receiving-shared-worker.js b/test/fixtures/wpt/streams/transferable/resources/receiving-shared-worker.js new file mode 100644 index 00000000000000..84f779c3db6e13 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/receiving-shared-worker.js @@ -0,0 +1,11 @@ +'use strict'; +importScripts('/resources/testharness.js', 'helpers.js'); + +onconnect = evt => { + const port = evt.source; + const promise = testMessageEvent(port); + port.start(); + 
promise + .then(() => port.postMessage('OK')) + .catch(err => port.postMessage(`BAD: ${err}`)); +}; diff --git a/test/fixtures/wpt/streams/transferable/resources/receiving-worker.js b/test/fixtures/wpt/streams/transferable/resources/receiving-worker.js new file mode 100644 index 00000000000000..4ebb9c5f8fcec2 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/receiving-worker.js @@ -0,0 +1,7 @@ +'use strict'; +importScripts('/resources/testharness.js', 'helpers.js'); + +const promise = testMessageEvent(self); +promise + .then(() => postMessage('OK')) + .catch(err => postMessage(`BAD: ${err}`)); diff --git a/test/fixtures/wpt/streams/transferable/resources/sending-shared-worker.js b/test/fixtures/wpt/streams/transferable/resources/sending-shared-worker.js new file mode 100644 index 00000000000000..e579077894d5b9 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/sending-shared-worker.js @@ -0,0 +1,12 @@ +'use strict'; +importScripts('helpers.js'); + +onconnect = msg => { + const port = msg.source; + const orig = createOriginalReadableStream(); + try { + port.postMessage(orig, [orig]); + } catch (e) { + port.postMessage(e.message); + } +}; diff --git a/test/fixtures/wpt/streams/transferable/resources/sending-worker.js b/test/fixtures/wpt/streams/transferable/resources/sending-worker.js new file mode 100644 index 00000000000000..0b79733f74d97b --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/sending-worker.js @@ -0,0 +1,5 @@ +'use strict'; +importScripts('helpers.js'); + +const orig = createOriginalReadableStream(); +postMessage(orig, [orig]); diff --git a/test/fixtures/wpt/streams/transferable/resources/service-worker-iframe.html b/test/fixtures/wpt/streams/transferable/resources/service-worker-iframe.html new file mode 100644 index 00000000000000..348d067c926f58 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/service-worker-iframe.html @@ -0,0 +1,39 @@ + + + + + diff --git 
a/test/fixtures/wpt/streams/transferable/resources/service-worker.js b/test/fixtures/wpt/streams/transferable/resources/service-worker.js new file mode 100644 index 00000000000000..af76b6c11b4ed1 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/resources/service-worker.js @@ -0,0 +1,30 @@ +'use strict'; +importScripts('/resources/testharness.js', 'helpers.js'); + +onmessage = msg => { + const client = msg.source; + if (msg.data === 'SEND') { + sendingTest(client); + } else { + receivingTest(msg, client); + } +}; + +function sendingTest(client) { + const orig = createOriginalReadableStream(); + try { + client.postMessage(orig, [orig]); + } catch (e) { + client.postMessage(e.message); + } +} + +function receivingTest(msg, client) { + try { + msg.waitUntil(testMessage(msg) + .then(() => client.postMessage('OK')) + .catch(e => client.postMessage(`BAD: ${e}`))); + } catch (e) { + client.postMessage(`BAD: ${e}`); + } +} diff --git a/test/fixtures/wpt/streams/transferable/service-worker.https.html b/test/fixtures/wpt/streams/transferable/service-worker.https.html new file mode 100644 index 00000000000000..2ca7f19c910f76 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/service-worker.https.html @@ -0,0 +1,28 @@ + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/shared-worker.html b/test/fixtures/wpt/streams/transferable/shared-worker.html new file mode 100644 index 00000000000000..cd0415402d5018 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/shared-worker.html @@ -0,0 +1,25 @@ + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/transform-stream.html b/test/fixtures/wpt/streams/transferable/transform-stream.html new file mode 100644 index 00000000000000..fbfbfe8fc1347a --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/transform-stream.html @@ -0,0 +1,104 @@ + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/window.html b/test/fixtures/wpt/streams/transferable/window.html new file mode 100644 
index 00000000000000..beaf548fe641c5 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/window.html @@ -0,0 +1,60 @@ + + + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/worker.html b/test/fixtures/wpt/streams/transferable/worker.html new file mode 100644 index 00000000000000..c5dc9fc62f8cf2 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/worker.html @@ -0,0 +1,76 @@ + + + + + + + diff --git a/test/fixtures/wpt/streams/transferable/writable-stream.html b/test/fixtures/wpt/streams/transferable/writable-stream.html new file mode 100644 index 00000000000000..adc6f457c27e87 --- /dev/null +++ b/test/fixtures/wpt/streams/transferable/writable-stream.html @@ -0,0 +1,136 @@ + + + + + + + + diff --git a/test/fixtures/wpt/streams/transform-streams/backpressure.any.js b/test/fixtures/wpt/streams/transform-streams/backpressure.any.js new file mode 100644 index 00000000000000..64c9d0930ed2f2 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/backpressure.any.js @@ -0,0 +1,195 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/test-utils.js +'use strict'; + +const error1 = new Error('error1 message'); +error1.name = 'error1'; + +promise_test(() => { + const ts = recordingTransformStream(); + const writer = ts.writable.getWriter(); + // This call never resolves. + writer.write('a'); + return flushAsyncEvents().then(() => { + assert_array_equals(ts.events, [], 'transform should not be called'); + }); +}, 'backpressure allows no transforms with a default identity transform and no reader'); + +promise_test(() => { + const ts = recordingTransformStream({}, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + // This call to write() resolves asynchronously. + writer.write('a'); + // This call to write() waits for backpressure that is never relieved and never calls transform(). 
+ writer.write('b'); + return flushAsyncEvents().then(() => { + assert_array_equals(ts.events, ['transform', 'a'], 'transform should be called once'); + }); +}, 'backpressure only allows one transform() with a identity transform with a readable HWM of 1 and no reader'); + +promise_test(() => { + // Without a transform() implementation, recordingTransformStream() never enqueues anything. + const ts = recordingTransformStream({ + transform() { + // Discard all chunks. As a result, the readable side is never full enough to exert backpressure and transform() + // keeps being called. + } + }, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + const writePromises = []; + for (let i = 0; i < 4; ++i) { + writePromises.push(writer.write(i)); + } + return Promise.all(writePromises).then(() => { + assert_array_equals(ts.events, ['transform', 0, 'transform', 1, 'transform', 2, 'transform', 3], + 'all 4 events should be transformed'); + }); +}, 'transform() should keep being called as long as there is no backpressure'); + +promise_test(() => { + const ts = new TransformStream({}, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + const events = []; + const writerPromises = [ + writer.write('a').then(() => events.push('a')), + writer.write('b').then(() => events.push('b')), + writer.close().then(() => events.push('closed'))]; + return delay(0).then(() => { + assert_array_equals(events, ['a'], 'the first write should have resolved'); + return reader.read(); + }).then(({ value, done }) => { + assert_false(done, 'done should not be true'); + assert_equals('a', value, 'value should be "a"'); + return delay(0); + }).then(() => { + assert_array_equals(events, ['a', 'b', 'closed'], 'both writes and close() should have resolved'); + return reader.read(); + }).then(({ value, done }) => { + assert_false(done, 'done should still not be true'); + assert_equals('b', value, 'value should be "b"'); 
+ return reader.read(); + }).then(({ done }) => { + assert_true(done, 'done should be true'); + return writerPromises; + }); +}, 'writes should resolve as soon as transform completes'); + +promise_test(() => { + const ts = new TransformStream(undefined, undefined, { highWaterMark: 0 }); + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + const readPromise = reader.read(); + writer.write('a'); + return readPromise.then(({ value, done }) => { + assert_false(done, 'not done'); + assert_equals(value, 'a', 'value should be "a"'); + }); +}, 'calling pull() before the first write() with backpressure should work'); + +promise_test(() => { + let reader; + const ts = recordingTransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk); + return reader.read(); + } + }, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + reader = ts.readable.getReader(); + return writer.write('a'); +}, 'transform() should be able to read the chunk it just enqueued'); + +promise_test(() => { + let resolveTransform; + const transformPromise = new Promise(resolve => { + resolveTransform = resolve; + }); + const ts = recordingTransformStream({ + transform() { + return transformPromise; + } + }, undefined, new CountQueuingStrategy({ highWaterMark: Infinity })); + const writer = ts.writable.getWriter(); + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + return delay(0).then(() => { + writer.write('a'); + assert_array_equals(ts.events, ['transform', 'a']); + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0'); + return flushAsyncEvents(); + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should still be 0'); + resolveTransform(); + return delay(0); + }).then(() => { + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + }); +}, 'blocking transform() should cause backpressure'); + +promise_test(t => { + const ts = new TransformStream(); + 
ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject'); +}, 'writer.closed should resolve after readable is canceled during start'); + +promise_test(t => { + const ts = new TransformStream({}, undefined, { highWaterMark: 0 }); + return delay(0).then(() => { + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject'); + }); +}, 'writer.closed should resolve after readable is canceled with backpressure'); + +promise_test(t => { + const ts = new TransformStream({}, undefined, { highWaterMark: 1 }); + return delay(0).then(() => { + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, ts.writable.getWriter().closed, 'closed should reject'); + }); +}, 'writer.closed should resolve after readable is canceled with no backpressure'); + +promise_test(() => { + const ts = new TransformStream({}, undefined, { highWaterMark: 1 }); + const writer = ts.writable.getWriter(); + return delay(0).then(() => { + const writePromise = writer.write('a'); + ts.readable.cancel(error1); + return writePromise; + }); +}, 'cancelling the readable should cause a pending write to resolve'); + +promise_test(t => { + const rs = new ReadableStream(); + const ts = new TransformStream(); + const pipePromise = rs.pipeTo(ts.writable); + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected'); +}, 'cancelling the readable side of a TransformStream should abort an empty pipe'); + +promise_test(t => { + const rs = new ReadableStream(); + const ts = new TransformStream(); + const pipePromise = rs.pipeTo(ts.writable); + return delay(0).then(() => { + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected'); + }); +}, 'cancelling the readable side of a TransformStream should abort an empty 
pipe after startup'); + +promise_test(t => { + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + } + }); + const ts = new TransformStream(); + const pipePromise = rs.pipeTo(ts.writable); + // Allow data to flow into the pipe. + return delay(0).then(() => { + ts.readable.cancel(error1); + return promise_rejects_exactly(t, error1, pipePromise, 'promise returned from pipeTo() should be rejected'); + }); +}, 'cancelling the readable side of a TransformStream should abort a full pipe'); diff --git a/test/fixtures/wpt/streams/transform-streams/errors.any.js b/test/fixtures/wpt/streams/transform-streams/errors.any.js new file mode 100644 index 00000000000000..ba26b32b75a6a4 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/errors.any.js @@ -0,0 +1,341 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +'use strict'; + +const thrownError = new Error('bad things are happening!'); +thrownError.name = 'error1'; + +promise_test(t => { + const ts = new TransformStream({ + transform() { + throw thrownError; + } + }); + + const reader = ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + + return Promise.all([ + promise_rejects_exactly(t, thrownError, writer.write('a'), + 'writable\'s write should reject with the thrown error'), + promise_rejects_exactly(t, thrownError, reader.read(), + 'readable\'s read should reject with the thrown error'), + promise_rejects_exactly(t, thrownError, reader.closed, + 'readable\'s closed should be rejected with the thrown error'), + promise_rejects_exactly(t, thrownError, writer.closed, + 'writable\'s closed should be rejected with the thrown error') + ]); +}, 'TransformStream errors thrown in transform put the writable and readable in an errored state'); + +promise_test(t => { + const ts = new TransformStream({ + transform() { + }, + flush() { + throw thrownError; + } + }); + + const reader = 
ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + + return Promise.all([ + writer.write('a'), + promise_rejects_exactly(t, thrownError, writer.close(), + 'writable\'s close should reject with the thrown error'), + promise_rejects_exactly(t, thrownError, reader.read(), + 'readable\'s read should reject with the thrown error'), + promise_rejects_exactly(t, thrownError, reader.closed, + 'readable\'s closed should be rejected with the thrown error'), + promise_rejects_exactly(t, thrownError, writer.closed, + 'writable\'s closed should be rejected with the thrown error') + ]); +}, 'TransformStream errors thrown in flush put the writable and readable in an errored state'); + +test(() => { + new TransformStream({ + start(c) { + c.enqueue('a'); + c.error(new Error('generic error')); + assert_throws_js(TypeError, () => c.enqueue('b'), 'enqueue() should throw'); + } + }); +}, 'errored TransformStream should not enqueue new chunks'); + +promise_test(t => { + const ts = new TransformStream({ + start() { + return flushAsyncEvents().then(() => { + throw thrownError; + }); + }, + transform: t.unreached_func('transform should not be called'), + flush: t.unreached_func('flush should not be called') + }); + + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + return Promise.all([ + promise_rejects_exactly(t, thrownError, writer.write('a'), 'writer should reject with thrownError'), + promise_rejects_exactly(t, thrownError, writer.close(), 'close() should reject with thrownError'), + promise_rejects_exactly(t, thrownError, reader.read(), 'reader should reject with thrownError') + ]); +}, 'TransformStream transformer.start() rejected promise should error the stream'); + +promise_test(t => { + const controllerError = new Error('start failure'); + controllerError.name = 'controllerError'; + const ts = new TransformStream({ + start(c) { + return flushAsyncEvents() + .then(() => { + c.error(controllerError); + throw new Error('ignored 
error'); + }); + }, + transform: t.unreached_func('transform should never be called if start() fails'), + flush: t.unreached_func('flush should never be called if start() fails') + }); + + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + return Promise.all([ + promise_rejects_exactly(t, controllerError, writer.write('a'), 'writer should reject with controllerError'), + promise_rejects_exactly(t, controllerError, writer.close(), 'close should reject with same error'), + promise_rejects_exactly(t, controllerError, reader.read(), 'reader should reject with same error') + ]); +}, 'when controller.error is followed by a rejection, the error reason should come from controller.error'); + +test(() => { + assert_throws_js(URIError, () => new TransformStream({ + start() { throw new URIError('start thrown error'); }, + transform() {} + }), 'constructor should throw'); +}, 'TransformStream constructor should throw when start does'); + +test(() => { + const strategy = { + size() { throw new URIError('size thrown error'); } + }; + + assert_throws_js(URIError, () => new TransformStream({ + start(c) { + c.enqueue('a'); + }, + transform() {} + }, undefined, strategy), 'constructor should throw the same error strategy.size throws'); +}, 'when strategy.size throws inside start(), the constructor should throw the same error'); + +test(() => { + const controllerError = new URIError('controller.error'); + + let controller; + const strategy = { + size() { + controller.error(controllerError); + throw new Error('redundant error'); + } + }; + + assert_throws_js(URIError, () => new TransformStream({ + start(c) { + controller = c; + c.enqueue('a'); + }, + transform() {} + }, undefined, strategy), 'the first error should be thrown'); +}, 'when strategy.size calls controller.error() then throws, the constructor should throw the first error'); + +promise_test(t => { + const ts = new TransformStream(); + const writer = ts.writable.getWriter(); + const 
closedPromise = writer.closed; + return Promise.all([ + ts.readable.cancel(thrownError), + promise_rejects_exactly(t, thrownError, closedPromise, 'closed should throw a TypeError') + ]); +}, 'cancelling the readable side should error the writable'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + const writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + const writePromise = writer.write('a'); + const closePromise = writer.close(); + controller.error(thrownError); + return Promise.all([ + promise_rejects_exactly(t, thrownError, reader.closed, 'reader.closed should reject'), + promise_rejects_exactly(t, thrownError, writePromise, 'writePromise should reject'), + promise_rejects_exactly(t, thrownError, closePromise, 'closePromise should reject')]); +}, 'it should be possible to error the readable between close requested and complete'); + +promise_test(t => { + const ts = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk); + controller.terminate(); + throw thrownError; + } + }, undefined, { highWaterMark: 1 }); + const writePromise = ts.writable.getWriter().write('a'); + const closedPromise = ts.readable.getReader().closed; + return Promise.all([ + promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject'), + promise_rejects_exactly(t, thrownError, closedPromise, 'reader.closed should reject') + ]); +}, 'an exception from transform() should error the stream if terminate has been requested but not completed'); + +promise_test(t => { + const ts = new TransformStream(); + const writer = ts.writable.getWriter(); + // The microtask following transformer.start() hasn't completed yet, so the abort is queued and not notified to the + // TransformStream yet. 
+ const abortPromise = writer.abort(thrownError); + const cancelPromise = ts.readable.cancel(new Error('cancel reason')); + return Promise.all([ + abortPromise, + cancelPromise, + promise_rejects_exactly(t, thrownError, writer.closed, 'writer.closed should reject with thrownError')]); +}, 'abort should set the close reason for the writable when it happens before cancel during start, but cancel should ' + + 'still succeed'); + +promise_test(t => { + let resolveTransform; + const transformPromise = new Promise(resolve => { + resolveTransform = resolve; + }); + const ts = new TransformStream({ + transform() { + return transformPromise; + } + }, undefined, { highWaterMark: 2 }); + const writer = ts.writable.getWriter(); + return delay(0).then(() => { + const writePromise = writer.write(); + const abortPromise = writer.abort(thrownError); + const cancelPromise = ts.readable.cancel(new Error('cancel reason')); + resolveTransform(); + return Promise.all([ + writePromise, + abortPromise, + cancelPromise, + promise_rejects_exactly(t, thrownError, writer.closed, 'writer.closed should reject with thrownError')]); + }); +}, 'abort should set the close reason for the writable when it happens before cancel during underlying sink write, ' + + 'but cancel should still succeed'); + +const ignoredError = new Error('ignoredError'); +ignoredError.name = 'ignoredError'; + +promise_test(t => { + const ts = new TransformStream({ + start(controller) { + controller.error(thrownError); + controller.error(ignoredError); + } + }); + return promise_rejects_exactly(t, thrownError, ts.writable.abort(), 'abort() should reject with thrownError'); +}, 'controller.error() should do nothing the second time it is called'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + const cancelPromise = ts.readable.cancel(thrownError); + controller.error(ignoredError); + return Promise.all([ + cancelPromise, + promise_rejects_exactly(t, 
thrownError, ts.writable.getWriter().closed, 'closed should reject with thrownError') + ]); +}, 'controller.error() should do nothing after readable.cancel()'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + return ts.writable.abort(thrownError).then(() => { + controller.error(ignoredError); + return promise_rejects_exactly(t, thrownError, ts.writable.getWriter().closed, 'closed should reject with thrownError'); + }); +}, 'controller.error() should do nothing after writable.abort() has completed'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + }, + transform() { + throw thrownError; + } + }, undefined, { highWaterMark: Infinity }); + const writer = ts.writable.getWriter(); + return promise_rejects_exactly(t, thrownError, writer.write(), 'write() should reject').then(() => { + controller.error(); + return promise_rejects_exactly(t, thrownError, writer.closed, 'closed should reject with thrownError'); + }); +}, 'controller.error() should do nothing after a transformer method has thrown an exception'); + +promise_test(t => { + let controller; + let calls = 0; + const ts = new TransformStream({ + start(c) { + controller = c; + }, + transform() { + ++calls; + } + }, undefined, { highWaterMark: 1 }); + return delay(0).then(() => { + // Create backpressure. + controller.enqueue('a'); + const writer = ts.writable.getWriter(); + // transform() will not be called until backpressure is relieved. + const writePromise = writer.write('b'); + assert_equals(calls, 0, 'transform() should not have been called'); + controller.error(thrownError); + // Now backpressure has been relieved and the write can proceed. 
+ return promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject').then(() => { + assert_equals(calls, 0, 'transform() should not be called'); + }); + }); +}, 'erroring during write with backpressure should result in the write failing'); + +promise_test(t => { + const ts = new TransformStream({}, undefined, { highWaterMark: 0 }); + return delay(0).then(() => { + const writer = ts.writable.getWriter(); + // write should start synchronously + const writePromise = writer.write(0); + // The underlying sink's abort() is not called until the write() completes. + const abortPromise = writer.abort(thrownError); + // Perform a read to relieve backpressure and permit the write() to complete. + const readPromise = ts.readable.getReader().read(); + return Promise.all([ + promise_rejects_exactly(t, thrownError, readPromise, 'read() should reject'), + promise_rejects_exactly(t, thrownError, writePromise, 'write() should reject'), + abortPromise + ]); + }); +}, 'a write() that was waiting for backpressure should reject if the writable is aborted'); + +promise_test(t => { + const ts = new TransformStream(); + ts.writable.abort(thrownError); + const reader = ts.readable.getReader(); + return promise_rejects_exactly(t, thrownError, reader.read(), 'read() should reject with thrownError'); +}, 'the readable should be errored with the reason passed to the writable abort() method'); diff --git a/test/fixtures/wpt/streams/transform-streams/flush.any.js b/test/fixtures/wpt/streams/transform-streams/flush.any.js new file mode 100644 index 00000000000000..dc40532957b14b --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/flush.any.js @@ -0,0 +1,131 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +'use strict'; + +promise_test(() => { + let flushCalled = false; + const ts = new TransformStream({ + transform() { }, + flush() { + flushCalled = true; + } + }); + + return ts.writable.getWriter().close().then(() => { + return 
assert_true(flushCalled, 'closing the writable triggers the transform flush immediately'); + }); +}, 'TransformStream flush is called immediately when the writable is closed, if no writes are queued'); + +promise_test(() => { + let flushCalled = false; + let resolveTransform; + const ts = new TransformStream({ + transform() { + return new Promise(resolve => { + resolveTransform = resolve; + }); + }, + flush() { + flushCalled = true; + return new Promise(() => {}); // never resolves + } + }, undefined, { highWaterMark: 1 }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + assert_false(flushCalled, 'closing the writable does not immediately call flush if writes are not finished'); + + let rsClosed = false; + ts.readable.getReader().closed.then(() => { + rsClosed = true; + }); + + return delay(0).then(() => { + assert_false(flushCalled, 'closing the writable does not asynchronously call flush if writes are not finished'); + resolveTransform(); + return delay(0); + }).then(() => { + assert_true(flushCalled, 'flush is eventually called'); + assert_false(rsClosed, 'if flushPromise does not resolve, the readable does not become closed'); + }); +}, 'TransformStream flush is called after all queued writes finish, once the writable is closed'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform() { + }, + flush() { + c.enqueue('x'); + c.enqueue('y'); + } + }); + + const reader = ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + return reader.read().then(result1 => { + assert_equals(result1.value, 'x', 'the first chunk read is the first one enqueued in flush'); + assert_equals(result1.done, false, 'the first chunk read is the first one enqueued in flush'); + + return reader.read().then(result2 => { + assert_equals(result2.value, 'y', 'the second chunk read is the second one enqueued in flush'); + 
assert_equals(result2.done, false, 'the second chunk read is the second one enqueued in flush'); + }); + }); +}, 'TransformStream flush gets a chance to enqueue more into the readable'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform() { + }, + flush() { + c.enqueue('x'); + c.enqueue('y'); + return delay(0); + } + }); + + const reader = ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + return Promise.all([ + reader.read().then(result1 => { + assert_equals(result1.value, 'x', 'the first chunk read is the first one enqueued in flush'); + assert_equals(result1.done, false, 'the first chunk read is the first one enqueued in flush'); + + return reader.read().then(result2 => { + assert_equals(result2.value, 'y', 'the second chunk read is the second one enqueued in flush'); + assert_equals(result2.done, false, 'the second chunk read is the second one enqueued in flush'); + }); + }), + reader.closed.then(() => { + assert_true(true, 'readable reader becomes closed'); + }) + ]); +}, 'TransformStream flush gets a chance to enqueue more into the readable, and can then async close'); + +const error1 = new Error('error1'); +error1.name = 'error1'; + +promise_test(t => { + const ts = new TransformStream({ + flush(controller) { + controller.error(error1); + } + }); + return promise_rejects_exactly(t, error1, ts.writable.getWriter().close(), 'close() should reject'); +}, 'error() during flush should cause writer.close() to reject'); diff --git a/test/fixtures/wpt/streams/transform-streams/general.any.js b/test/fixtures/wpt/streams/transform-streams/general.any.js new file mode 100644 index 00000000000000..d4f2a1d5a29cf6 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/general.any.js @@ -0,0 +1,437 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/rs-utils.js +'use 
strict'; + +test(() => { + new TransformStream({ transform() { } }); +}, 'TransformStream can be constructed with a transform function'); + +test(() => { + new TransformStream(); + new TransformStream({}); +}, 'TransformStream can be constructed with no transform function'); + +test(() => { + const ts = new TransformStream({ transform() { } }); + + const writer = ts.writable.getWriter(); + assert_equals(writer.desiredSize, 1, 'writer.desiredSize should be 1'); +}, 'TransformStream writable starts in the writable state'); + +promise_test(() => { + const ts = new TransformStream(); + + const writer = ts.writable.getWriter(); + writer.write('a'); + assert_equals(writer.desiredSize, 0, 'writer.desiredSize should be 0 after write()'); + + return ts.readable.getReader().read().then(result => { + assert_equals(result.value, 'a', + 'result from reading the readable is the same as was written to writable'); + assert_false(result.done, 'stream should not be done'); + + return delay(0).then(() => assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 again')); + }); +}, 'Identity TransformStream: can read from readable what is put into writable'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform(chunk) { + c.enqueue(chunk.toUpperCase()); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + + return ts.readable.getReader().read().then(result => { + assert_equals(result.value, 'A', + 'result from reading the readable is the transformation of what was written to writable'); + assert_false(result.done, 'stream should not be done'); + }); +}, 'Uppercaser sync TransformStream: can read from readable transformed version of what is put into writable'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform(chunk) { + c.enqueue(chunk.toUpperCase()); + c.enqueue(chunk.toUpperCase()); + } + }); + + const writer = 
ts.writable.getWriter(); + writer.write('a'); + + const reader = ts.readable.getReader(); + + return reader.read().then(result1 => { + assert_equals(result1.value, 'A', + 'the first chunk read is the transformation of the single chunk written'); + assert_false(result1.done, 'stream should not be done'); + + return reader.read().then(result2 => { + assert_equals(result2.value, 'A', + 'the second chunk read is also the transformation of the single chunk written'); + assert_false(result2.done, 'stream should not be done'); + }); + }); +}, 'Uppercaser-doubler sync TransformStream: can read both chunks put into the readable'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform(chunk) { + return delay(0).then(() => c.enqueue(chunk.toUpperCase())); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + + return ts.readable.getReader().read().then(result => { + assert_equals(result.value, 'A', + 'result from reading the readable is the transformation of what was written to writable'); + assert_false(result.done, 'stream should not be done'); + }); +}, 'Uppercaser async TransformStream: can read from readable transformed version of what is put into writable'); + +promise_test(() => { + let doSecondEnqueue; + let returnFromTransform; + const ts = new TransformStream({ + transform(chunk, controller) { + delay(0).then(() => controller.enqueue(chunk.toUpperCase())); + doSecondEnqueue = () => controller.enqueue(chunk.toUpperCase()); + return new Promise(resolve => { + returnFromTransform = resolve; + }); + } + }); + + const reader = ts.readable.getReader(); + + const writer = ts.writable.getWriter(); + writer.write('a'); + + return reader.read().then(result1 => { + assert_equals(result1.value, 'A', + 'the first chunk read is the transformation of the single chunk written'); + assert_false(result1.done, 'stream should not be done'); + doSecondEnqueue(); + + return 
reader.read().then(result2 => { + assert_equals(result2.value, 'A', + 'the second chunk read is also the transformation of the single chunk written'); + assert_false(result2.done, 'stream should not be done'); + returnFromTransform(); + }); + }); +}, 'Uppercaser-doubler async TransformStream: can read both chunks put into the readable'); + +promise_test(() => { + const ts = new TransformStream({ transform() { } }); + + const writer = ts.writable.getWriter(); + writer.close(); + + return Promise.all([writer.closed, ts.readable.getReader().closed]); +}, 'TransformStream: by default, closing the writable closes the readable (when there are no queued writes)'); + +promise_test(() => { + let transformResolve; + const transformPromise = new Promise(resolve => { + transformResolve = resolve; + }); + const ts = new TransformStream({ + transform() { + return transformPromise; + } + }, undefined, { highWaterMark: 1 }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + let rsClosed = false; + ts.readable.getReader().closed.then(() => { + rsClosed = true; + }); + + return delay(0).then(() => { + assert_equals(rsClosed, false, 'readable is not closed after a tick'); + transformResolve(); + + return writer.closed.then(() => { + // TODO: Is this expectation correct? 
+ assert_equals(rsClosed, true, 'readable is closed at that point'); + }); + }); +}, 'TransformStream: by default, closing the writable waits for transforms to finish before closing both'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform() { + c.enqueue('x'); + c.enqueue('y'); + return delay(0); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + const readableChunks = readableStreamToArray(ts.readable); + + return writer.closed.then(() => { + return readableChunks.then(chunks => { + assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable'); + }); + }); +}, 'TransformStream: by default, closing the writable closes the readable after sync enqueues and async done'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + start(controller) { + c = controller; + }, + transform() { + return delay(0) + .then(() => c.enqueue('x')) + .then(() => c.enqueue('y')) + .then(() => delay(0)); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + const readableChunks = readableStreamToArray(ts.readable); + + return writer.closed.then(() => { + return readableChunks.then(chunks => { + assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable'); + }); + }); +}, 'TransformStream: by default, closing the writable closes the readable after async enqueues and async done'); + +promise_test(() => { + let c; + const ts = new TransformStream({ + suffix: '-suffix', + + start(controller) { + c = controller; + c.enqueue('start' + this.suffix); + }, + + transform(chunk) { + c.enqueue(chunk + this.suffix); + }, + + flush() { + c.enqueue('flushed' + this.suffix); + } + }); + + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + const readableChunks = readableStreamToArray(ts.readable); + + return writer.closed.then(() => 
{ + return readableChunks.then(chunks => { + assert_array_equals(chunks, ['start-suffix', 'a-suffix', 'flushed-suffix'], 'all enqueued chunks have suffixes'); + }); + }); +}, 'Transform stream should call transformer methods as methods'); + +promise_test(() => { + function functionWithOverloads() {} + functionWithOverloads.apply = () => assert_unreached('apply() should not be called'); + functionWithOverloads.call = () => assert_unreached('call() should not be called'); + const ts = new TransformStream({ + start: functionWithOverloads, + transform: functionWithOverloads, + flush: functionWithOverloads + }); + const writer = ts.writable.getWriter(); + writer.write('a'); + writer.close(); + + return readableStreamToArray(ts.readable); +}, 'methods should not not have .apply() or .call() called'); + +promise_test(t => { + let startCalled = false; + let startDone = false; + let transformDone = false; + let flushDone = false; + const ts = new TransformStream({ + start() { + startCalled = true; + return flushAsyncEvents().then(() => { + startDone = true; + }); + }, + transform() { + return t.step(() => { + assert_true(startDone, 'transform() should not be called until the promise returned from start() has resolved'); + return flushAsyncEvents().then(() => { + transformDone = true; + }); + }); + }, + flush() { + return t.step(() => { + assert_true(transformDone, + 'flush() should not be called until the promise returned from transform() has resolved'); + return flushAsyncEvents().then(() => { + flushDone = true; + }); + }); + } + }, undefined, { highWaterMark: 1 }); + + assert_true(startCalled, 'start() should be called synchronously'); + + const writer = ts.writable.getWriter(); + const writePromise = writer.write('a'); + return writer.close().then(() => { + assert_true(flushDone, 'promise returned from flush() should have resolved'); + return writePromise; + }); +}, 'TransformStream start, transform, and flush should be strictly ordered'); + +promise_test(() => { + let 
transformCalled = false; + const ts = new TransformStream({ + transform() { + transformCalled = true; + } + }, undefined, { highWaterMark: Infinity }); + // transform() is only called synchronously when there is no backpressure and all microtasks have run. + return delay(0).then(() => { + const writePromise = ts.writable.getWriter().write(); + assert_true(transformCalled, 'transform() should have been called'); + return writePromise; + }); +}, 'it should be possible to call transform() synchronously'); + +promise_test(() => { + const ts = new TransformStream({}, undefined, { highWaterMark: 0 }); + + const writer = ts.writable.getWriter(); + writer.close(); + + return Promise.all([writer.closed, ts.readable.getReader().closed]); +}, 'closing the writable should close the readable when there are no queued chunks, even with backpressure'); + +test(() => { + new TransformStream({ + start(controller) { + controller.terminate(); + assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue should throw'); + } + }); +}, 'enqueue() should throw after controller.terminate()'); + +promise_test(() => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + const cancelPromise = ts.readable.cancel(); + assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue should throw'); + return cancelPromise; +}, 'enqueue() should throw after readable.cancel()'); + +test(() => { + new TransformStream({ + start(controller) { + controller.terminate(); + controller.terminate(); + } + }); +}, 'controller.terminate() should do nothing the second time it is called'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }); + const cancelReason = { name: 'cancelReason' }; + const cancelPromise = ts.readable.cancel(cancelReason); + controller.terminate(); + return Promise.all([ + cancelPromise, + promise_rejects_exactly(t, cancelReason, ts.writable.getWriter().closed, 'closed should 
reject with cancelReason') + ]); +}, 'terminate() should do nothing after readable.cancel()'); + +promise_test(() => { + let calls = 0; + new TransformStream({ + start() { + ++calls; + } + }); + return flushAsyncEvents().then(() => { + assert_equals(calls, 1, 'start() should have been called exactly once'); + }); +}, 'start() should not be called twice'); + +test(() => { + assert_throws_js(RangeError, () => new TransformStream({ readableType: 'bytes' }), 'constructor should throw'); +}, 'specifying a defined readableType should throw'); + +test(() => { + assert_throws_js(RangeError, () => new TransformStream({ writableType: 'bytes' }), 'constructor should throw'); +}, 'specifying a defined writableType should throw'); + +test(() => { + class Subclass extends TransformStream { + extraFunction() { + return true; + } + } + assert_equals( + Object.getPrototypeOf(Subclass.prototype), TransformStream.prototype, + 'Subclass.prototype\'s prototype should be TransformStream.prototype'); + assert_equals(Object.getPrototypeOf(Subclass), TransformStream, + 'Subclass\'s prototype should be TransformStream'); + const sub = new Subclass(); + assert_true(sub instanceof TransformStream, + 'Subclass object should be an instance of TransformStream'); + assert_true(sub instanceof Subclass, + 'Subclass object should be an instance of Subclass'); + const readableGetter = Object.getOwnPropertyDescriptor( + TransformStream.prototype, 'readable').get; + assert_equals(readableGetter.call(sub), sub.readable, + 'Subclass object should pass brand check'); + assert_true(sub.extraFunction(), + 'extraFunction() should be present on Subclass object'); +}, 'Subclassing TransformStream should work'); diff --git a/test/fixtures/wpt/streams/transform-streams/lipfuzz.any.js b/test/fixtures/wpt/streams/transform-streams/lipfuzz.any.js new file mode 100644 index 00000000000000..c8c3803c6dfb4b --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/lipfuzz.any.js @@ -0,0 +1,163 @@ +// META: 
global=window,worker,jsshell +'use strict'; + +class LipFuzzTransformer { + constructor(substitutions) { + this.substitutions = substitutions; + this.partialChunk = ''; + this.lastIndex = undefined; + } + + transform(chunk, controller) { + chunk = this.partialChunk + chunk; + this.partialChunk = ''; + // lastIndex is the index of the first character after the last substitution. + this.lastIndex = 0; + chunk = chunk.replace(/\{\{([a-zA-Z0-9_-]+)\}\}/g, this.replaceTag.bind(this)); + // Regular expression for an incomplete template at the end of a string. + const partialAtEndRegexp = /\{(\{([a-zA-Z0-9_-]+(\})?)?)?$/g; + // Avoid looking at any characters that have already been substituted. + partialAtEndRegexp.lastIndex = this.lastIndex; + this.lastIndex = undefined; + const match = partialAtEndRegexp.exec(chunk); + if (match) { + this.partialChunk = chunk.substring(match.index); + chunk = chunk.substring(0, match.index); + } + controller.enqueue(chunk); + } + + flush(controller) { + if (this.partialChunk.length > 0) { + controller.enqueue(this.partialChunk); + } + } + + replaceTag(match, p1, offset) { + let replacement = this.substitutions[p1]; + if (replacement === undefined) { + replacement = ''; + } + this.lastIndex = offset + replacement.length; + return replacement; + } +} + +const substitutions = { + in1: 'out1', + in2: 'out2', + quine: '{{quine}}', + bogusPartial: '{{incompleteResult}' +}; + +const cases = [ + { + input: [''], + output: [''] + }, + { + input: [], + output: [] + }, + { + input: ['{{in1}}'], + output: ['out1'] + }, + { + input: ['z{{in1}}'], + output: ['zout1'] + }, + { + input: ['{{in1}}q'], + output: ['out1q'] + }, + { + input: ['{{in1}}{{in1}'], + output: ['out1', '{{in1}'] + }, + { + input: ['{{in1}}{{in1}', '}'], + output: ['out1', 'out1'] + }, + { + input: ['{{in1', '}}'], + output: ['', 'out1'] + }, + { + input: ['{{', 'in1}}'], + output: ['', 'out1'] + }, + { + input: ['{', '{in1}}'], + output: ['', 'out1'] + }, + { + input: ['{{', 
'in1}'], + output: ['', '', '{{in1}'] + }, + { + input: ['{'], + output: ['', '{'] + }, + { + input: ['{', ''], + output: ['', '', '{'] + }, + { + input: ['{', '{', 'i', 'n', '1', '}', '}'], + output: ['', '', '', '', '', '', 'out1'] + }, + { + input: ['{{in1}}{{in2}}{{in1}}'], + output: ['out1out2out1'] + }, + { + input: ['{{wrong}}'], + output: [''] + }, + { + input: ['{{wron', 'g}}'], + output: ['', ''] + }, + { + input: ['{{quine}}'], + output: ['{{quine}}'] + }, + { + input: ['{{bogusPartial}}'], + output: ['{{incompleteResult}'] + }, + { + input: ['{{bogusPartial}}}'], + output: ['{{incompleteResult}}'] + } +]; + +for (const testCase of cases) { + const inputChunks = testCase.input; + const outputChunks = testCase.output; + promise_test(() => { + const lft = new TransformStream(new LipFuzzTransformer(substitutions)); + const writer = lft.writable.getWriter(); + const promises = []; + for (const inputChunk of inputChunks) { + promises.push(writer.write(inputChunk)); + } + promises.push(writer.close()); + const reader = lft.readable.getReader(); + let readerChain = Promise.resolve(); + for (const outputChunk of outputChunks) { + readerChain = readerChain.then(() => { + return reader.read().then(({ value, done }) => { + assert_false(done, `done should be false when reading ${outputChunk}`); + assert_equals(value, outputChunk, `value should match outputChunk`); + }); + }); + } + readerChain = readerChain.then(() => { + return reader.read().then(({ done }) => assert_true(done, `done should be true`)); + }); + promises.push(readerChain); + return Promise.all(promises); + }, `testing "${inputChunks}" (length ${inputChunks.length})`); +} diff --git a/test/fixtures/wpt/streams/transform-streams/patched-global.any.js b/test/fixtures/wpt/streams/transform-streams/patched-global.any.js new file mode 100644 index 00000000000000..5142d236fb65e8 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/patched-global.any.js @@ -0,0 +1,53 @@ +// META: 
global=window,worker,jsshell +'use strict'; + +// Tests which patch the global environment are kept separate to avoid +// interfering with other tests. + +test(t => { + // eslint-disable-next-line no-extend-native, accessor-pairs + Object.defineProperty(Object.prototype, 'highWaterMark', { + set() { throw new Error('highWaterMark setter called'); }, + configurable: true + }); + + // eslint-disable-next-line no-extend-native, accessor-pairs + Object.defineProperty(Object.prototype, 'size', { + set() { throw new Error('size setter called'); }, + configurable: true + }); + + t.add_cleanup(() => { + delete Object.prototype.highWaterMark; + delete Object.prototype.size; + }); + + assert_not_equals(new TransformStream(), null, 'constructor should work'); +}, 'TransformStream constructor should not call setters for highWaterMark or size'); + +test(t => { + const oldReadableStream = ReadableStream; + const oldWritableStream = WritableStream; + const getReader = ReadableStream.prototype.getReader; + const getWriter = WritableStream.prototype.getWriter; + + // Replace ReadableStream and WritableStream with broken versions. + ReadableStream = function () { + throw new Error('Called the global ReadableStream constructor'); + }; + WritableStream = function () { + throw new Error('Called the global WritableStream constructor'); + }; + t.add_cleanup(() => { + ReadableStream = oldReadableStream; + WritableStream = oldWritableStream; + }); + + const ts = new TransformStream(); + + // Just to be sure, ensure the readable and writable pass brand checks. 
+ assert_not_equals(getReader.call(ts.readable), undefined, + 'getReader should work when called on ts.readable'); + assert_not_equals(getWriter.call(ts.writable), undefined, + 'getWriter should work when called on ts.writable'); +}, 'TransformStream should use the original value of ReadableStream and WritableStream'); diff --git a/test/fixtures/wpt/streams/transform-streams/properties.any.js b/test/fixtures/wpt/streams/transform-streams/properties.any.js new file mode 100644 index 00000000000000..f2ac482e0de223 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/properties.any.js @@ -0,0 +1,49 @@ +// META: global=window,worker,jsshell +'use strict'; + +const transformerMethods = { + start: { + length: 1, + trigger: () => Promise.resolve() + }, + transform: { + length: 2, + trigger: ts => ts.writable.getWriter().write() + }, + flush: { + length: 1, + trigger: ts => ts.writable.getWriter().close() + } +}; + +for (const method in transformerMethods) { + const { length, trigger } = transformerMethods[method]; + + // Some semantic tests of how transformer methods are called can be found in general.js, as well as in the test files + // specific to each method. 
+ promise_test(() => { + let argCount; + const ts = new TransformStream({ + [method](...args) { + argCount = args.length; + } + }, undefined, { highWaterMark: Infinity }); + return Promise.resolve(trigger(ts)).then(() => { + assert_equals(argCount, length, `${method} should be called with ${length} arguments`); + }); + }, `transformer method ${method} should be called with the right number of arguments`); + + promise_test(() => { + let methodWasCalled = false; + function Transformer() {} + Transformer.prototype = { + [method]() { + methodWasCalled = true; + } + }; + const ts = new TransformStream(new Transformer(), undefined, { highWaterMark: Infinity }); + return Promise.resolve(trigger(ts)).then(() => { + assert_true(methodWasCalled, `${method} should be called`); + }); + }, `transformer method ${method} should be called even when it's located on the prototype chain`); +} diff --git a/test/fixtures/wpt/streams/transform-streams/reentrant-strategies.any.js b/test/fixtures/wpt/streams/transform-streams/reentrant-strategies.any.js new file mode 100644 index 00000000000000..31e53949f3c26e --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/reentrant-strategies.any.js @@ -0,0 +1,319 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/rs-utils.js +// META: script=../resources/test-utils.js +'use strict'; + +// The size() function of readableStrategy can re-entrantly call back into the TransformStream implementation. This +// makes it risky to cache state across the call to ReadableStreamDefaultControllerEnqueue. These tests attempt to catch +// such errors. They are separated from the other strategy tests because no real user code should ever do anything like +// this. +// +// There is no such issue with writableStrategy size() because it is never called from within TransformStream +// algorithms. 
+ +const error1 = new Error('error1'); +error1.name = 'error1'; + +promise_test(() => { + let controller; + let calls = 0; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + ++calls; + if (calls < 2) { + controller.enqueue('b'); + } + return 1; + }, + highWaterMark: Infinity + }); + const writer = ts.writable.getWriter(); + return Promise.all([writer.write('a'), writer.close()]) + .then(() => readableStreamToArray(ts.readable)) + .then(array => assert_array_equals(array, ['b', 'a'], 'array should contain two chunks')); +}, 'enqueue() inside size() should work'); + +promise_test(() => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + // The readable queue is empty. + controller.terminate(); + // The readable state has gone from "readable" to "closed". + return 1; + // This chunk will be enqueued, but will be impossible to read because the state is already "closed". + }, + highWaterMark: Infinity + }); + const writer = ts.writable.getWriter(); + return writer.write('a') + .then(() => readableStreamToArray(ts.readable)) + .then(array => assert_array_equals(array, [], 'array should contain no chunks')); + // The chunk 'a' is still in readable's queue. readable is closed so 'a' cannot be read. writable's queue is empty and + // it is still writable. 
+}, 'terminate() inside size() should work'); + +promise_test(t => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + controller.error(error1); + return 1; + }, + highWaterMark: Infinity + }); + const writer = ts.writable.getWriter(); + return writer.write('a') + .then(() => promise_rejects_exactly(t, error1, ts.readable.getReader().read(), 'read() should reject')); +}, 'error() inside size() should work'); + +promise_test(() => { + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + assert_equals(controller.desiredSize, 1, 'desiredSize should be 1'); + return 1; + }, + highWaterMark: 1 + }); + const writer = ts.writable.getWriter(); + return Promise.all([writer.write('a'), writer.close()]) + .then(() => readableStreamToArray(ts.readable)) + .then(array => assert_array_equals(array, ['a'], 'array should contain one chunk')); +}, 'desiredSize inside size() should work'); + +promise_test(t => { + let cancelPromise; + const ts = new TransformStream({}, undefined, { + size() { + cancelPromise = ts.readable.cancel(error1); + return 1; + }, + highWaterMark: Infinity + }); + const writer = ts.writable.getWriter(); + return writer.write('a') + .then(() => { + promise_rejects_exactly(t, error1, writer.closed, 'writer.closed should reject'); + return cancelPromise; + }); +}, 'readable cancel() inside size() should work'); + +promise_test(() => { + let controller; + let pipeToPromise; + const ws = recordingWritableStream(); + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + if (!pipeToPromise) { + pipeToPromise = ts.readable.pipeTo(ws); + } + return 1; + }, + highWaterMark: 1 + }); + // Allow promise returned by start() to resolve so that enqueue() will happen synchronously. 
+ return delay(0).then(() => { + controller.enqueue('a'); + assert_not_equals(pipeToPromise, undefined); + + // Some pipeTo() implementations need an additional chunk enqueued in order for the first one to be processed. See + // https://github.com/whatwg/streams/issues/794 for background. + controller.enqueue('a'); + + // Give pipeTo() a chance to process the queued chunks. + return delay(0); + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'a'], 'ws should contain two chunks'); + controller.terminate(); + return pipeToPromise; + }).then(() => { + assert_array_equals(ws.events, ['write', 'a', 'write', 'a', 'close'], 'target should have been closed'); + }); +}, 'pipeTo() inside size() should work'); + +promise_test(() => { + let controller; + let readPromise; + let calls = 0; + let reader; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + // This is triggered by controller.enqueue(). The queue is empty and there are no pending reads. pull() is called + // synchronously, allowing transform() to proceed asynchronously. This results in a second call to enqueue(), + // which resolves this pending read() without calling size() again. + readPromise = reader.read(); + ++calls; + return 1; + }, + highWaterMark: 0 + }); + reader = ts.readable.getReader(); + const writer = ts.writable.getWriter(); + let writeResolved = false; + const writePromise = writer.write('b').then(() => { + writeResolved = true; + }); + return flushAsyncEvents().then(() => { + assert_false(writeResolved); + controller.enqueue('a'); + assert_equals(calls, 1, 'size() should have been called once'); + return delay(0); + }).then(() => { + assert_true(writeResolved); + assert_equals(calls, 1, 'size() should only be called once'); + return readPromise; + }).then(({ value, done }) => { + assert_false(done, 'done should be false'); + // See https://github.com/whatwg/streams/issues/794 for why this chunk is not 'a'. 
+ assert_equals(value, 'b', 'chunk should have been read'); + assert_equals(calls, 1, 'calls should still be 1'); + return writePromise; + }); +}, 'read() inside of size() should work'); + +promise_test(() => { + let writer; + let writePromise1; + let calls = 0; + const ts = new TransformStream({}, undefined, { + size() { + ++calls; + if (calls < 2) { + writePromise1 = writer.write('a'); + } + return 1; + }, + highWaterMark: Infinity + }); + writer = ts.writable.getWriter(); + // Give pull() a chance to be called. + return delay(0).then(() => { + // This write results in a synchronous call to transform(), enqueue(), and size(). + const writePromise2 = writer.write('b'); + assert_equals(calls, 1, 'size() should have been called once'); + return Promise.all([writePromise1, writePromise2, writer.close()]); + }).then(() => { + assert_equals(calls, 2, 'size() should have been called twice'); + return readableStreamToArray(ts.readable); + }).then(array => { + assert_array_equals(array, ['b', 'a'], 'both chunks should have been enqueued'); + assert_equals(calls, 2, 'calls should still be 2'); + }); +}, 'writer.write() inside size() should work'); + +promise_test(() => { + let controller; + let writer; + let writePromise; + let calls = 0; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + ++calls; + if (calls < 2) { + writePromise = writer.write('a'); + } + return 1; + }, + highWaterMark: Infinity + }); + writer = ts.writable.getWriter(); + // Give pull() a chance to be called. + return delay(0).then(() => { + // This enqueue results in synchronous calls to size(), write(), transform() and enqueue(). 
+ controller.enqueue('b'); + assert_equals(calls, 2, 'size() should have been called twice'); + return Promise.all([writePromise, writer.close()]); + }).then(() => { + return readableStreamToArray(ts.readable); + }).then(array => { + // Because one call to enqueue() is nested inside the other, they finish in the opposite order that they were + // called, so the chunks end up reverse order. + assert_array_equals(array, ['a', 'b'], 'both chunks should have been enqueued'); + assert_equals(calls, 2, 'calls should still be 2'); + }); +}, 'synchronous writer.write() inside size() should work'); + +promise_test(() => { + let writer; + let closePromise; + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + closePromise = writer.close(); + return 1; + }, + highWaterMark: 1 + }); + writer = ts.writable.getWriter(); + const reader = ts.readable.getReader(); + // Wait for the promise returned by start() to be resolved so that the call to close() will result in a synchronous + // call to TransformStreamDefaultSink. + return delay(0).then(() => { + controller.enqueue('a'); + return reader.read(); + }).then(({ value, done }) => { + assert_false(done, 'done should be false'); + assert_equals(value, 'a', 'value should be correct'); + return reader.read(); + }).then(({ done }) => { + assert_true(done, 'done should be true'); + return closePromise; + }); +}, 'writer.close() inside size() should work'); + +promise_test(t => { + let abortPromise; + let controller; + const ts = new TransformStream({ + start(c) { + controller = c; + } + }, undefined, { + size() { + abortPromise = ts.writable.abort(error1); + return 1; + }, + highWaterMark: 1 + }); + const reader = ts.readable.getReader(); + // Wait for the promise returned by start() to be resolved so that the call to abort() will result in a synchronous + // call to TransformStreamDefaultSink. 
+ return delay(0).then(() => { + controller.enqueue('a'); + return Promise.all([promise_rejects_exactly(t, error1, reader.read(), 'read() should reject'), abortPromise]); + }); +}, 'writer.abort() inside size() should work'); diff --git a/test/fixtures/wpt/streams/transform-streams/strategies.any.js b/test/fixtures/wpt/streams/transform-streams/strategies.any.js new file mode 100644 index 00000000000000..d465d31ab09736 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/strategies.any.js @@ -0,0 +1,150 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/test-utils.js +'use strict'; + +// Here we just test that the strategies are correctly passed to the readable and writable sides. We assume that +// ReadableStream and WritableStream will correctly apply the strategies when they are being used by a TransformStream +// and so it isn't necessary to repeat their tests here. + +test(() => { + const ts = new TransformStream({}, { highWaterMark: 17 }); + assert_equals(ts.writable.getWriter().desiredSize, 17, 'desiredSize should be 17'); +}, 'writableStrategy highWaterMark should work'); + +promise_test(() => { + const ts = recordingTransformStream({}, undefined, { highWaterMark: 9 }); + const writer = ts.writable.getWriter(); + for (let i = 0; i < 10; ++i) { + writer.write(i); + } + return delay(0).then(() => { + assert_array_equals(ts.events, [ + 'transform', 0, 'transform', 1, 'transform', 2, 'transform', 3, 'transform', 4, + 'transform', 5, 'transform', 6, 'transform', 7, 'transform', 8], + 'transform() should have been called 9 times'); + }); +}, 'readableStrategy highWaterMark should work'); + +promise_test(t => { + let writableSizeCalled = false; + let readableSizeCalled = false; + let transformCalled = false; + const ts = new TransformStream( + { + transform(chunk, controller) { + t.step(() => { + transformCalled = true; + assert_true(writableSizeCalled, 'writableStrategy.size() 
should have been called'); + assert_false(readableSizeCalled, 'readableStrategy.size() should not have been called'); + controller.enqueue(chunk); + assert_true(readableSizeCalled, 'readableStrategy.size() should have been called'); + }); + } + }, + { + size() { + writableSizeCalled = true; + return 1; + } + }, + { + size() { + readableSizeCalled = true; + return 1; + }, + highWaterMark: Infinity + }); + return ts.writable.getWriter().write().then(() => { + assert_true(transformCalled, 'transform() should be called'); + }); +}, 'writable should have the correct size() function'); + +test(() => { + const ts = new TransformStream(); + const writer = ts.writable.getWriter(); + assert_equals(writer.desiredSize, 1, 'default writable HWM is 1'); + writer.write(undefined); + assert_equals(writer.desiredSize, 0, 'default chunk size is 1'); +}, 'default writable strategy should be equivalent to { highWaterMark: 1 }'); + +promise_test(t => { + const ts = new TransformStream({ + transform(chunk, controller) { + return t.step(() => { + assert_equals(controller.desiredSize, 0, 'desiredSize should be 0'); + controller.enqueue(undefined); + // The first chunk enqueued is consumed by the pending read(). 
+ assert_equals(controller.desiredSize, 0, 'desiredSize should still be 0'); + controller.enqueue(undefined); + assert_equals(controller.desiredSize, -1, 'desiredSize should be -1'); + }); + } + }); + const writePromise = ts.writable.getWriter().write(); + return ts.readable.getReader().read().then(() => writePromise); +}, 'default readable strategy should be equivalent to { highWaterMark: 0 }'); + +test(() => { + assert_throws_js(RangeError, () => new TransformStream(undefined, { highWaterMark: -1 }), + 'should throw RangeError for negative writableHighWaterMark'); + assert_throws_js(RangeError, () => new TransformStream(undefined, undefined, { highWaterMark: -1 }), + 'should throw RangeError for negative readableHighWaterMark'); + assert_throws_js(RangeError, () => new TransformStream(undefined, { highWaterMark: NaN }), + 'should throw RangeError for NaN writableHighWaterMark'); + assert_throws_js(RangeError, () => new TransformStream(undefined, undefined, { highWaterMark: NaN }), + 'should throw RangeError for NaN readableHighWaterMark'); +}, 'a RangeError should be thrown for an invalid highWaterMark'); + +const objectThatConvertsTo42 = { + toString() { + return '42'; + } +}; + +test(() => { + const ts = new TransformStream(undefined, { highWaterMark: objectThatConvertsTo42 }); + const writer = ts.writable.getWriter(); + assert_equals(writer.desiredSize, 42, 'writable HWM is 42'); +}, 'writableStrategy highWaterMark should be converted to a number'); + +test(() => { + const ts = new TransformStream({ + start(controller) { + assert_equals(controller.desiredSize, 42, 'desiredSize should be 42'); + } + }, undefined, { highWaterMark: objectThatConvertsTo42 }); +}, 'readableStrategy highWaterMark should be converted to a number'); + +promise_test(t => { + const ts = new TransformStream(undefined, undefined, { + size() { return NaN; }, + highWaterMark: 1 + }); + const writer = ts.writable.getWriter(); + return promise_rejects_js(t, RangeError, writer.write(), 'write 
should reject'); +}, 'a bad readableStrategy size function should cause writer.write() to reject on an identity transform'); + +promise_test(t => { + const ts = new TransformStream({ + transform(chunk, controller) { + // This assert has the important side-effect of catching the error, so transform() does not throw. + assert_throws_js(RangeError, () => controller.enqueue(chunk), 'enqueue should throw'); + } + }, undefined, { + size() { + return -1; + }, + highWaterMark: 1 + }); + + const writer = ts.writable.getWriter(); + return writer.write().then(() => { + return Promise.all([ + promise_rejects_js(t, RangeError, writer.ready, 'ready should reject'), + promise_rejects_js(t, RangeError, writer.closed, 'closed should reject'), + promise_rejects_js(t, RangeError, ts.readable.getReader().closed, 'readable closed should reject') + ]); + }); +}, 'a bad readableStrategy size function should error the stream on enqueue even when transformer.transform() ' + + 'catches the exception'); diff --git a/test/fixtures/wpt/streams/transform-streams/terminate.any.js b/test/fixtures/wpt/streams/transform-streams/terminate.any.js new file mode 100644 index 00000000000000..8cb10679348b50 --- /dev/null +++ b/test/fixtures/wpt/streams/transform-streams/terminate.any.js @@ -0,0 +1,100 @@ +// META: global=window,worker,jsshell +// META: script=../resources/recording-streams.js +// META: script=../resources/test-utils.js +'use strict'; + +promise_test(t => { + const ts = recordingTransformStream({}, undefined, { highWaterMark: 0 }); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(0); + } + }); + let pipeToRejected = false; + const pipeToPromise = promise_rejects_js(t, TypeError, rs.pipeTo(ts.writable), 'pipeTo should reject').then(() => { + pipeToRejected = true; + }); + return delay(0).then(() => { + assert_array_equals(ts.events, [], 'transform() should have seen no chunks'); + assert_false(pipeToRejected, 'pipeTo() should not have rejected yet'); + 
ts.controller.terminate(); + return pipeToPromise; + }).then(() => { + assert_array_equals(ts.events, [], 'transform() should still have seen no chunks'); + assert_true(pipeToRejected, 'pipeToRejected must be true'); + }); +}, 'controller.terminate() should error pipeTo()'); + +promise_test(t => { + const ts = recordingTransformStream({}, undefined, { highWaterMark: 1 }); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(0); + controller.enqueue(1); + } + }); + const pipeToPromise = rs.pipeTo(ts.writable); + return delay(0).then(() => { + assert_array_equals(ts.events, ['transform', 0], 'transform() should have seen one chunk'); + ts.controller.terminate(); + return promise_rejects_js(t, TypeError, pipeToPromise, 'pipeTo() should reject'); + }).then(() => { + assert_array_equals(ts.events, ['transform', 0], 'transform() should still have seen only one chunk'); + }); +}, 'controller.terminate() should prevent remaining chunks from being processed'); + +test(() => { + new TransformStream({ + start(controller) { + controller.enqueue(0); + controller.terminate(); + assert_throws_js(TypeError, () => controller.enqueue(1), 'enqueue should throw'); + } + }); +}, 'controller.enqueue() should throw after controller.terminate()'); + +const error1 = new Error('error1'); +error1.name = 'error1'; + +promise_test(t => { + const ts = new TransformStream({ + start(controller) { + controller.enqueue(0); + controller.terminate(); + controller.error(error1); + } + }); + return Promise.all([ + promise_rejects_js(t, TypeError, ts.writable.abort(), 'abort() should reject with a TypeError'), + promise_rejects_exactly(t, error1, ts.readable.cancel(), 'cancel() should reject with error1'), + promise_rejects_exactly(t, error1, ts.readable.getReader().closed, 'closed should reject with error1') + ]); +}, 'controller.error() after controller.terminate() with queued chunk should error the readable'); + +promise_test(t => { + const ts = new TransformStream({ + 
start(controller) { + controller.terminate(); + controller.error(error1); + } + }); + return Promise.all([ + promise_rejects_js(t, TypeError, ts.writable.abort(), 'abort() should reject with a TypeError'), + ts.readable.cancel(), + ts.readable.getReader().closed + ]); +}, 'controller.error() after controller.terminate() without queued chunk should do nothing'); + +promise_test(() => { + const ts = new TransformStream({ + flush(controller) { + controller.terminate(); + } + }); + const writer = ts.writable.getWriter(); + return Promise.all([ + writer.close(), + writer.closed, + ts.readable.getReader().closed + ]); +}, 'controller.terminate() inside flush() should not prevent writer.close() from succeeding'); diff --git a/test/fixtures/wpt/streams/writable-streams/aborting.any.js b/test/fixtures/wpt/streams/writable-streams/aborting.any.js new file mode 100644 index 00000000000000..ab154a705ed0e9 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/aborting.any.js @@ -0,0 +1,1486 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +promise_test(t => { + const ws = new WritableStream({ + write: t.unreached_func('write() should not be called') + }); + + const writer = ws.getWriter(); + const writePromise = writer.write('a'); + + const readyPromise = writer.ready; + + writer.abort(error1); + + assert_equals(writer.ready, readyPromise, 'the ready promise property should not change'); + + return Promise.all([ + promise_rejects_exactly(t, error1, readyPromise, 'the ready promise should reject with error1'), + promise_rejects_exactly(t, error1, writePromise, 'the write() promise should reject with error1') + ]); +}, 'Aborting a WritableStream before it starts should cause the writer\'s unsettled ready promise to reject'); + +promise_test(t 
=> { + const ws = new WritableStream(); + + const writer = ws.getWriter(); + writer.write('a'); + + const readyPromise = writer.ready; + + return readyPromise.then(() => { + writer.abort(error1); + + assert_not_equals(writer.ready, readyPromise, 'the ready promise property should change'); + return promise_rejects_exactly(t, error1, writer.ready, 'the ready promise should reject with error1'); + }); +}, 'Aborting a WritableStream should cause the writer\'s fulfilled ready promise to reset to a rejected one'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, writer.abort(), 'abort() should reject with a TypeError'); +}, 'abort() on a released writer rejects'); + +promise_test(t => { + const ws = recordingWritableStream(); + + return delay(0) + .then(() => { + const writer = ws.getWriter(); + + const abortPromise = writer.abort(error1); + + return Promise.all([ + promise_rejects_exactly(t, error1, writer.write(1), 'write(1) must reject with error1'), + promise_rejects_exactly(t, error1, writer.write(2), 'write(2) must reject with error1'), + abortPromise + ]); + }) + .then(() => { + assert_array_equals(ws.events, ['abort', error1]); + }); +}, 'Aborting a WritableStream immediately prevents future writes'); + +promise_test(t => { + const ws = recordingWritableStream(); + const results = []; + + return delay(0) + .then(() => { + const writer = ws.getWriter(); + + results.push( + writer.write(1), + promise_rejects_exactly(t, error1, writer.write(2), 'write(2) must reject with error1'), + promise_rejects_exactly(t, error1, writer.write(3), 'write(3) must reject with error1') + ); + + const abortPromise = writer.abort(error1); + + results.push( + promise_rejects_exactly(t, error1, writer.write(4), 'write(4) must reject with error1'), + promise_rejects_exactly(t, error1, writer.write(5), 'write(5) must reject with error1') + ); + + return abortPromise; + 
}).then(() => { + assert_array_equals(ws.events, ['write', 1, 'abort', error1]); + + return Promise.all(results); + }); +}, 'Aborting a WritableStream prevents further writes after any that are in progress'); + +promise_test(() => { + const ws = new WritableStream({ + abort() { + return 'Hello'; + } + }); + const writer = ws.getWriter(); + + return writer.abort('a').then(value => { + assert_equals(value, undefined, 'fulfillment value must be undefined'); + }); +}, 'Fulfillment value of writer.abort() call must be undefined even if the underlying sink returns a non-undefined ' + + 'value'); + +promise_test(t => { + const ws = new WritableStream({ + abort() { + throw error1; + } + }); + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.abort(undefined), + 'rejection reason of abortPromise must be the error thrown by abort'); +}, 'WritableStream if sink\'s abort throws, the promise returned by writer.abort() rejects'); + +promise_test(t => { + const ws = new WritableStream({ + abort() { + throw error1; + } + }); + const writer = ws.getWriter(); + + const abortPromise1 = writer.abort(undefined); + const abortPromise2 = writer.abort(undefined); + + assert_equals(abortPromise1, abortPromise2, 'the promises must be the same'); + + return promise_rejects_exactly(t, error1, abortPromise1, 'promise must have matching rejection'); +}, 'WritableStream if sink\'s abort throws, the promise returned by multiple writer.abort()s is the same and rejects'); + +promise_test(t => { + const ws = new WritableStream({ + abort() { + throw error1; + } + }); + + return promise_rejects_exactly(t, error1, ws.abort(undefined), + 'rejection reason of abortPromise must be the error thrown by abort'); +}, 'WritableStream if sink\'s abort throws, the promise returned by ws.abort() rejects'); + +promise_test(t => { + let resolveWritePromise; + const ws = new WritableStream({ + write() { + return new Promise(resolve => { + resolveWritePromise = resolve; + }); + }, + 
abort() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + writer.write().catch(() => {}); + return flushAsyncEvents().then(() => { + const abortPromise = writer.abort(undefined); + + resolveWritePromise(); + return promise_rejects_exactly(t, error1, abortPromise, + 'rejection reason of abortPromise must be the error thrown by abort'); + }); +}, 'WritableStream if sink\'s abort throws, for an abort performed during a write, the promise returned by ' + + 'ws.abort() rejects'); + +promise_test(() => { + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + + return writer.abort(error1).then(() => { + assert_array_equals(ws.events, ['abort', error1]); + }); +}, 'Aborting a WritableStream passes through the given reason'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + + const abortPromise = writer.abort(error1); + + const events = []; + writer.ready.catch(() => { + events.push('ready'); + }); + writer.closed.catch(() => { + events.push('closed'); + }); + + return Promise.all([ + abortPromise, + promise_rejects_exactly(t, error1, writer.write(), 'writing should reject with error1'), + promise_rejects_exactly(t, error1, writer.close(), 'closing should reject with error1'), + promise_rejects_exactly(t, error1, writer.ready, 'ready should reject with error1'), + promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1') + ]).then(() => { + assert_array_equals(['ready', 'closed'], events, 'ready should reject before closed'); + }); +}, 'Aborting a WritableStream puts it in an errored state with the error passed to abort()'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + + const writePromise = promise_rejects_exactly(t, error1, writer.write('a'), + 'writing should reject with error1'); + + writer.abort(error1); + + return writePromise; +}, 'Aborting a WritableStream causes any outstanding write() promises to be rejected 
with the reason supplied'); + +promise_test(t => { + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + + const closePromise = writer.close(); + const abortPromise = writer.abort(error1); + + return Promise.all([ + promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1'), + promise_rejects_exactly(t, error1, closePromise, 'close() should reject with error1'), + abortPromise + ]).then(() => { + assert_array_equals(ws.events, ['abort', error1]); + }); +}, 'Closing but then immediately aborting a WritableStream causes the stream to error'); + +promise_test(() => { + let resolveClose; + const ws = new WritableStream({ + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + const writer = ws.getWriter(); + + const closePromise = writer.close(); + + return delay(0).then(() => { + const abortPromise = writer.abort(error1); + resolveClose(); + return Promise.all([ + writer.closed, + abortPromise, + closePromise + ]); + }); +}, 'Closing a WritableStream and aborting it while it closes causes the stream to ignore the abort attempt'); + +promise_test(() => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + + writer.close(); + + return delay(0).then(() => writer.abort()); +}, 'Aborting a WritableStream after it is closed is a no-op'); + +promise_test(t => { + // Testing that per https://github.com/whatwg/streams/issues/620#issuecomment-263483953 the fallback to close was + // removed. 
+ + // Cannot use recordingWritableStream since it always has an abort + let closeCalled = false; + const ws = new WritableStream({ + close() { + closeCalled = true; + } + }); + + const writer = ws.getWriter(); + + writer.abort(error1); + + return promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1').then(() => { + assert_false(closeCalled, 'close must not have been called'); + }); +}, 'WritableStream should NOT call underlying sink\'s close if no abort is supplied (historical)'); + +promise_test(() => { + let thenCalled = false; + const ws = new WritableStream({ + abort() { + return { + then(onFulfilled) { + thenCalled = true; + onFulfilled(); + } + }; + } + }); + const writer = ws.getWriter(); + return writer.abort().then(() => assert_true(thenCalled, 'then() should be called')); +}, 'returning a thenable from abort() should work'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return flushAsyncEvents(); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + writer.abort(error1); + let closedRejected = false; + return Promise.all([ + writePromise.then(() => assert_false(closedRejected, '.closed should not resolve before write()')), + promise_rejects_exactly(t, error1, writer.closed, '.closed should reject').then(() => { + closedRejected = true; + }) + ]); + }); +}, '.closed should not resolve before fulfilled write()'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return Promise.reject(error1); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + const abortPromise = writer.abort(error2); + let closedRejected = false; + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise, 'write() should reject') + .then(() => assert_false(closedRejected, '.closed should not resolve before write()')), + promise_rejects_exactly(t, error2, 
writer.closed, '.closed should reject') + .then(() => { + closedRejected = true; + }), + abortPromise + ]); + }); +}, '.closed should not resolve before rejected write(); write() error should not overwrite abort() error'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return flushAsyncEvents(); + } + }, new CountQueuingStrategy({ highWaterMark: 4 })); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const settlementOrder = []; + return Promise.all([ + writer.write('1').then(() => settlementOrder.push(1)), + promise_rejects_exactly(t, error1, writer.write('2'), 'first queued write should be rejected') + .then(() => settlementOrder.push(2)), + promise_rejects_exactly(t, error1, writer.write('3'), 'second queued write should be rejected') + .then(() => settlementOrder.push(3)), + writer.abort(error1) + ]).then(() => assert_array_equals([1, 2, 3], settlementOrder, 'writes should be satisfied in order')); + }); +}, 'writes should be satisfied in order when aborting'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return Promise.reject(error1); + } + }, new CountQueuingStrategy({ highWaterMark: 4 })); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const settlementOrder = []; + return Promise.all([ + promise_rejects_exactly(t, error1, writer.write('1'), 'in-flight write should be rejected') + .then(() => settlementOrder.push(1)), + promise_rejects_exactly(t, error2, writer.write('2'), 'first queued write should be rejected') + .then(() => settlementOrder.push(2)), + promise_rejects_exactly(t, error2, writer.write('3'), 'second queued write should be rejected') + .then(() => settlementOrder.push(3)), + writer.abort(error2) + ]).then(() => assert_array_equals([1, 2, 3], settlementOrder, 'writes should be satisfied in order')); + }); +}, 'writes should be satisfied in order after rejected write when aborting'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + 
return Promise.reject(error1); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + return Promise.all([ + promise_rejects_exactly(t, error1, writer.write('a'), 'writer.write() should reject with error from underlying write()'), + promise_rejects_exactly(t, error2, writer.close(), + 'writer.close() should reject with error from underlying write()'), + writer.abort(error2) + ]); + }); +}, 'close() should reject with abort reason why abort() is first error'); + +promise_test(() => { + let resolveWrite; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.write('a'); + const abortPromise = writer.abort('b'); + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a'], 'abort should not be called while write is in-flight'); + resolveWrite(); + return abortPromise.then(() => { + assert_array_equals(ws.events, ['write', 'a', 'abort', 'b'], 'abort should be called after the write finishes'); + }); + }); + }); +}, 'underlying abort() should not be called until underlying write() completes'); + +promise_test(() => { + let resolveClose; + const ws = recordingWritableStream({ + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.close(); + const abortPromise = writer.abort(); + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['close'], 'abort should not be called while close is in-flight'); + resolveClose(); + return abortPromise.then(() => { + assert_array_equals(ws.events, ['close'], 'abort should not be called'); + }); + }); + }); +}, 'underlying abort() should not be called if underlying close() has started'); + +promise_test(t => { + let rejectClose; + let abortCalled = false; + const ws = new WritableStream({ + close() { + return 
new Promise((resolve, reject) => { + rejectClose = reject; + }); + }, + abort() { + abortCalled = true; + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const abortPromise = writer.abort(); + return flushAsyncEvents().then(() => { + assert_false(abortCalled, 'underlying abort should not be called while close is in-flight'); + rejectClose(error1); + return promise_rejects_exactly(t, error1, abortPromise, 'abort should reject with the same reason').then(() => { + return promise_rejects_exactly(t, error1, closePromise, 'close should reject with the same reason'); + }).then(() => { + assert_false(abortCalled, 'underlying abort should not be called after close completes'); + }); + }); + }); +}, 'if underlying close() has started and then rejects, the abort() and close() promises should reject with the ' + + 'underlying close rejection reason'); + +promise_test(t => { + let resolveWrite; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.write('a'); + const closePromise = writer.close(); + const abortPromise = writer.abort(error1); + + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, ['write', 'a'], 'abort should not be called while write is in-flight'); + resolveWrite(); + return abortPromise.then(() => { + assert_array_equals(ws.events, ['write', 'a', 'abort', error1], 'abort should be called after write completes'); + return promise_rejects_exactly(t, error1, closePromise, 'promise returned by close() should be rejected'); + }); + }); + }); +}, 'an abort() that happens during a write() should trigger the underlying abort() even with a close() queued'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + return 
writer.ready.then(() => { + writer.write('a'); + writer.abort(error1); + writer.releaseLock(); + const writer2 = ws.getWriter(); + return promise_rejects_exactly(t, error1, writer2.ready, + 'ready of the second writer should be rejected with error1'); + }); +}, 'if a writer is created for a stream with a pending abort, its ready should be rejected with the abort error'); + +promise_test(() => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const abortPromise = writer.abort(); + const events = []; + return Promise.all([ + closePromise.then(() => { events.push('close'); }), + abortPromise.then(() => { events.push('abort'); }) + ]).then(() => { + assert_array_equals(events, ['close', 'abort']); + }); + }); +}, 'writer close() promise should resolve before abort() promise'); + +promise_test(t => { + const ws = new WritableStream({ + write(chunk, controller) { + controller.error(error1); + return new Promise(() => {}); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.write('a'); + return promise_rejects_exactly(t, error1, writer.ready, 'writer.ready should reject'); + }); +}, 'writer.ready should reject on controller error without waiting for underlying write'); + +promise_test(t => { + let rejectWrite; + const ws = new WritableStream({ + write() { + return new Promise((resolve, reject) => { + rejectWrite = reject; + }); + } + }); + + let writePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.catch(() => { + events.push('closed'); + }); + + // Wait for ws to start + return flushAsyncEvents().then(() => { + writePromise = writer.write('a'); + writePromise.catch(() => { + events.push('writePromise'); + }); + + abortPromise = writer.abort(error1); + abortPromise.then(() => { + events.push('abortPromise'); + }); + + const writePromise2 = writer.write('a'); + + return Promise.all([ + 
promise_rejects_exactly(t, error1, writePromise2, 'writePromise2 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, [], 'writePromise, abortPromise and writer.closed must not be rejected yet'); + + rejectWrite(error2); + + return Promise.all([ + promise_rejects_exactly(t, error2, writePromise, + 'writePromise must reject with the error returned from the sink\'s write method'), + abortPromise, + promise_rejects_exactly(t, error1, writer.closed, + 'writer.closed must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'], + 'writePromise, abortPromise and writer.closed must settle'); + + const writePromise3 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise3, + 'writePromise3 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with the error indicating abort') + ]); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + }); +}, 'writer.abort() while there is an in-flight write, and then finish the write with rejection'); + +promise_test(t => { + let resolveWrite; + let controller; + const ws = new WritableStream({ + write(chunk, c) { + controller = c; + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + + let writePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.catch(() => { + events.push('closed'); + }); + + // Wait for ws to start + return 
flushAsyncEvents().then(() => { + writePromise = writer.write('a'); + writePromise.then(() => { + events.push('writePromise'); + }); + + abortPromise = writer.abort(error1); + abortPromise.then(() => { + events.push('abortPromise'); + }); + + const writePromise2 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise2, 'writePromise2 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, [], 'writePromise, abortPromise and writer.closed must not be fulfilled/rejected yet'); + + // This error is too late to change anything. abort() has already changed the stream state to 'erroring'. + controller.error(error2); + + const writePromise3 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise3, + 'writePromise3 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with the error indicating abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals( + events, [], + 'writePromise, abortPromise and writer.closed must not be fulfilled/rejected yet even after ' + + 'controller.error() call'); + + resolveWrite(); + + return Promise.all([ + writePromise, + abortPromise, + promise_rejects_exactly(t, error1, writer.closed, + 'writer.closed must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'], + 'writePromise, abortPromise and writer.closed must settle'); + + const writePromise4 = writer.write('a'); + + return Promise.all([ + writePromise, + promise_rejects_exactly(t, error1, writePromise4, + 'writePromise4 must reject with the error from abort'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with 
the error indicating abort') + ]); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + }); +}, 'writer.abort(), controller.error() while there is an in-flight write, and then finish the write'); + +promise_test(t => { + let resolveClose; + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + + let closePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.then(() => { + events.push('closed'); + }); + + // Wait for ws to start + return flushAsyncEvents().then(() => { + closePromise = writer.close(); + closePromise.then(() => { + events.push('closePromise'); + }); + + abortPromise = writer.abort(error1); + abortPromise.then(() => { + events.push('abortPromise'); + }); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.close(), + 'writer.close() must reject with an error indicating already closing'), + promise_rejects_exactly(t, error1, writer.ready, 'writer.ready must reject with the error from abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, [], 'closePromise, abortPromise and writer.closed must not be fulfilled/rejected yet'); + + controller.error(error2); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.close(), + 'writer.close() must reject with an error indicating already closing'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with the error indicating abort'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals( + events, [], + 'closePromise, abortPromise and writer.closed must not be fulfilled/rejected yet 
even after ' + + 'controller.error() call'); + + resolveClose(); + + return Promise.all([ + closePromise, + abortPromise, + writer.closed, + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'], + 'closedPromise, abortPromise and writer.closed must fulfill'); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.close(), + 'writer.close() must reject with an error indicating already closing'), + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must be still rejected with the error indicating abort') + ]); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.close(), + 'writer.close() must reject with an error indicating release'), + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + }); +}, 'writer.abort(), controller.error() while there is an in-flight close, and then finish the close'); + +promise_test(t => { + let resolveWrite; + let controller; + const ws = recordingWritableStream({ + write(chunk, c) { + controller = c; + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + + let writePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.catch(() => { + events.push('closed'); + }); + + // Wait for ws to start + return flushAsyncEvents().then(() => { + writePromise = writer.write('a'); + writePromise.then(() => { + events.push('writePromise'); + }); + + controller.error(error2); + + const writePromise2 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error2, writePromise2, + 'writePromise2 must reject with the error passed to the controller\'s error method'), + promise_rejects_exactly(t, error2, writer.ready, + 'writer.ready must 
reject with the error passed to the controller\'s error method'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, [], 'writePromise and writer.closed must not be fulfilled/rejected yet'); + + abortPromise = writer.abort(error1); + abortPromise.catch(() => { + events.push('abortPromise'); + }); + + const writePromise3 = writer.write('a'); + + return Promise.all([ + promise_rejects_exactly(t, error2, writePromise3, + 'writePromise3 must reject with the error passed to the controller\'s error method'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals( + events, [], + 'writePromise and writer.closed must not be fulfilled/rejected yet even after writer.abort()'); + + resolveWrite(); + + return Promise.all([ + promise_rejects_exactly(t, error2, abortPromise, + 'abort() must reject with the error passed to the controller\'s error method'), + promise_rejects_exactly(t, error2, writer.closed, + 'writer.closed must reject with the error passed to the controller\'s error method'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['writePromise', 'abortPromise', 'closed'], + 'writePromise, abortPromise and writer.closed must fulfill/reject'); + assert_array_equals(ws.events, ['write', 'a'], 'sink abort() should not be called'); + + const writePromise4 = writer.write('a'); + + return Promise.all([ + writePromise, + promise_rejects_exactly(t, error2, writePromise4, + 'writePromise4 must reject with the error passed to the controller\'s error method'), + promise_rejects_exactly(t, error2, writer.ready, + 'writer.ready must be still rejected with the error passed to the controller\'s error method') + ]); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + 
}); +}, 'controller.error(), writer.abort() while there is an in-flight write, and then finish the write'); + +promise_test(t => { + let resolveClose; + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + + let closePromise; + let abortPromise; + + const events = []; + + const writer = ws.getWriter(); + + writer.closed.then(() => { + events.push('closed'); + }); + + // Wait for ws to start + return flushAsyncEvents().then(() => { + closePromise = writer.close(); + closePromise.then(() => { + events.push('closePromise'); + }); + + controller.error(error2); + + return flushAsyncEvents(); + }).then(() => { + assert_array_equals(events, [], 'closePromise must not be fulfilled/rejected yet'); + + abortPromise = writer.abort(error1); + abortPromise.then(() => { + events.push('abortPromise'); + }); + + return Promise.all([ + promise_rejects_exactly(t, error2, writer.ready, + 'writer.ready must reject with the error passed to the controller\'s error method'), + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals( + events, [], + 'closePromise and writer.closed must not be fulfilled/rejected yet even after writer.abort()'); + + resolveClose(); + + return Promise.all([ + closePromise, + promise_rejects_exactly(t, error2, writer.ready, + 'writer.ready must be still rejected with the error passed to the controller\'s error method'), + writer.closed, + flushAsyncEvents() + ]); + }).then(() => { + assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'], + 'abortPromise, closePromise and writer.closed must fulfill/reject'); + }).then(() => { + writer.releaseLock(); + + return Promise.all([ + promise_rejects_js(t, TypeError, writer.ready, + 'writer.ready must be rejected with an error indicating release'), + promise_rejects_js(t, TypeError, writer.closed, + 'writer.closed must be rejected with an error indicating release') + ]); + 
}); +}, 'controller.error(), writer.abort() while there is an in-flight close, and then finish the close'); + +promise_test(t => { + let resolveWrite; + const ws = new WritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + const closed = writer.closed; + const abortPromise = writer.abort(); + writer.releaseLock(); + resolveWrite(); + return Promise.all([ + writePromise, + abortPromise, + promise_rejects_js(t, TypeError, closed, 'closed should reject')]); + }); +}, 'releaseLock() while aborting should reject the original closed promise'); + +// TODO(ricea): Consider removing this test if it is no longer useful. +promise_test(t => { + let resolveWrite; + let resolveAbort; + let resolveAbortStarted; + const abortStarted = new Promise(resolve => { + resolveAbortStarted = resolve; + }); + const ws = new WritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + }, + abort() { + resolveAbortStarted(); + return new Promise(resolve => { + resolveAbort = resolve; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + const closed = writer.closed; + const abortPromise = writer.abort(); + resolveWrite(); + return abortStarted.then(() => { + writer.releaseLock(); + assert_equals(writer.closed, closed, 'closed promise should not have changed'); + resolveAbort(); + return Promise.all([ + writePromise, + abortPromise, + promise_rejects_js(t, TypeError, closed, 'closed should reject')]); + }); + }); +}, 'releaseLock() during delayed async abort() should reject the writer.closed promise'); + +promise_test(() => { + let resolveStart; + const ws = recordingWritableStream({ + start() { + return new Promise(resolve => { + resolveStart = resolve; + }); + } + }); + const abortPromise = ws.abort('done'); + return 
flushAsyncEvents().then(() => { + assert_array_equals(ws.events, [], 'abort() should not be called during start()'); + resolveStart(); + return abortPromise.then(() => { + assert_array_equals(ws.events, ['abort', 'done'], 'abort() should be called after start() is done'); + }); + }); +}, 'sink abort() should not be called until sink start() is done'); + +promise_test(() => { + let resolveStart; + let controller; + const ws = recordingWritableStream({ + start(c) { + controller = c; + return new Promise(resolve => { + resolveStart = resolve; + }); + } + }); + const abortPromise = ws.abort('done'); + controller.error(error1); + resolveStart(); + return abortPromise.then(() => + assert_array_equals(ws.events, ['abort', 'done'], + 'abort() should still be called if start() errors the controller')); +}, 'if start attempts to error the controller after abort() has been called, then it should lose'); + +promise_test(() => { + const ws = recordingWritableStream({ + start() { + return Promise.reject(error1); + } + }); + return ws.abort('done').then(() => + assert_array_equals(ws.events, ['abort', 'done'], 'abort() should still be called if start() rejects')); +}, 'stream abort() promise should still resolve if sink start() rejects'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + const writerReady1 = writer.ready; + writer.abort(error1); + const writerReady2 = writer.ready; + assert_not_equals(writerReady1, writerReady2, 'abort() should replace the ready promise with a rejected one'); + return Promise.all([writerReady1, + promise_rejects_exactly(t, error1, writerReady2, 'writerReady2 should reject')]); +}, 'writer abort() during sink start() should replace the writer.ready promise synchronously'); + +promise_test(t => { + const events = []; + const ws = recordingWritableStream(); + const writer = ws.getWriter(); + const writePromise1 = writer.write(1); + const abortPromise = writer.abort(error1); + const writePromise2 = 
writer.write(2); + const closePromise = writer.close(); + writePromise1.catch(() => events.push('write1')); + abortPromise.then(() => events.push('abort')); + writePromise2.catch(() => events.push('write2')); + closePromise.catch(() => events.push('close')); + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise1, 'first write() should reject'), + abortPromise, + promise_rejects_exactly(t, error1, writePromise2, 'second write() should reject'), + promise_rejects_exactly(t, error1, closePromise, 'close() should reject') + ]) + .then(() => { + assert_array_equals(events, ['write2', 'write1', 'abort', 'close'], + 'promises should resolve in the standard order'); + assert_array_equals(ws.events, ['abort', error1], 'underlying sink write() should not be called'); + }); +}, 'promises returned from other writer methods should be rejected when writer abort() happens during sink start()'); + +promise_test(t => { + let writeReject; + let controller; + const ws = new WritableStream({ + write(chunk, c) { + controller = c; + return new Promise((resolve, reject) => { + writeReject = reject; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('a'); + const abortPromise = writer.abort(); + controller.error(error1); + writeReject(error2); + return Promise.all([ + promise_rejects_exactly(t, error2, writePromise, 'write() should reject with error2'), + abortPromise + ]); + }); +}, 'abort() should succeed despite rejection from write'); + +promise_test(t => { + let closeReject; + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + return new Promise((resolve, reject) => { + closeReject = reject; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const abortPromise = writer.abort(); + controller.error(error1); + closeReject(error2); + return Promise.all([ + 
promise_rejects_exactly(t, error2, closePromise, 'close() should reject with error2'), + promise_rejects_exactly(t, error2, abortPromise, 'abort() should reject with error2') + ]); + }); +}, 'abort() should be rejected with the rejection returned from close()'); + +promise_test(t => { + let rejectWrite; + const ws = recordingWritableStream({ + write() { + return new Promise((resolve, reject) => { + rejectWrite = reject; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('1'); + const abortPromise = writer.abort(error2); + rejectWrite(error1); + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise, 'write should reject'), + abortPromise, + promise_rejects_exactly(t, error2, writer.closed, 'closed should reject with error2') + ]); + }).then(() => { + assert_array_equals(ws.events, ['write', '1', 'abort', error2], 'abort sink method should be called'); + }); +}, 'a rejecting sink.write() should not prevent sink.abort() from being called'); + +promise_test(() => { + const ws = recordingWritableStream({ + start() { + return Promise.reject(error1); + } + }); + return ws.abort(error2) + .then(() => { + assert_array_equals(ws.events, ['abort', error2]); + }); +}, 'when start errors after stream abort(), underlying sink abort() should be called anyway'); + +promise_test(() => { + const ws = new WritableStream(); + const abortPromise1 = ws.abort(); + const abortPromise2 = ws.abort(); + assert_equals(abortPromise1, abortPromise2, 'the promises must be the same'); + + return abortPromise1.then( + v => assert_equals(v, undefined, 'abort() should fulfill with undefined')); +}, 'when calling abort() twice on the same stream, both should give the same promise that fulfills with undefined'); + +promise_test(() => { + const ws = new WritableStream(); + const abortPromise1 = ws.abort(); + + return abortPromise1.then(v1 => { + assert_equals(v1, undefined, 'first abort() should fulfill with 
undefined'); + + const abortPromise2 = ws.abort(); + assert_not_equals(abortPromise2, abortPromise1, 'because we waited, the second promise should be a new promise'); + + return abortPromise2.then(v2 => { + assert_equals(v2, undefined, 'second abort() should fulfill with undefined'); + }); + }); +}, 'when calling abort() twice on the same stream, but sequentially so so there\'s no pending abort the second time, ' + + 'both should fulfill with undefined'); + +promise_test(t => { + const ws = new WritableStream({ + start(c) { + c.error(error1); + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.closed, 'writer.closed should reject').then(() => { + return writer.abort().then( + v => assert_equals(v, undefined, 'abort() should fulfill with undefined')); + }); +}, 'calling abort() on an errored stream should fulfill with undefined'); + +promise_test(t => { + let controller; + let resolveWrite; + const ws = recordingWritableStream({ + start(c) { + controller = c; + }, + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('chunk'); + controller.error(error1); + const abortPromise = writer.abort(error2); + resolveWrite(); + return Promise.all([ + writePromise, + promise_rejects_exactly(t, error1, abortPromise, 'abort() should reject') + ]).then(() => { + assert_array_equals(ws.events, ['write', 'chunk'], 'sink abort() should not be called'); + }); + }); +}, 'sink abort() should not be called if stream was erroring due to controller.error() before abort() was called'); + +promise_test(t => { + let resolveWrite; + let size = 1; + const ws = recordingWritableStream({ + write() { + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }, { + size() { + return size; + }, + highWaterMark: 1 + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const 
writePromise1 = writer.write('chunk1'); + size = NaN; + const writePromise2 = writer.write('chunk2'); + const abortPromise = writer.abort(error2); + resolveWrite(); + return Promise.all([ + writePromise1, + promise_rejects_js(t, RangeError, writePromise2, 'second write() should reject'), + promise_rejects_js(t, RangeError, abortPromise, 'abort() should reject') + ]).then(() => { + assert_array_equals(ws.events, ['write', 'chunk1'], 'sink abort() should not be called'); + }); + }); +}, 'sink abort() should not be called if stream was erroring due to bad strategy before abort() was called'); + +promise_test(t => { + const ws = new WritableStream(); + return ws.abort().then(() => { + const writer = ws.getWriter(); + return writer.closed.then(t.unreached_func('closed promise should not fulfill'), + e => assert_equals(e, undefined, 'e should be undefined')); + }); +}, 'abort with no arguments should set the stored error to undefined'); + +promise_test(t => { + const ws = new WritableStream(); + return ws.abort(undefined).then(() => { + const writer = ws.getWriter(); + return writer.closed.then(t.unreached_func('closed promise should not fulfill'), + e => assert_equals(e, undefined, 'e should be undefined')); + }); +}, 'abort with an undefined argument should set the stored error to undefined'); + +promise_test(t => { + const ws = new WritableStream(); + return ws.abort('string argument').then(() => { + const writer = ws.getWriter(); + return writer.closed.then(t.unreached_func('closed promise should not fulfill'), + e => assert_equals(e, 'string argument', 'e should be \'string argument\'')); + }); +}, 'abort with a string argument should set the stored error to that argument'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + return promise_rejects_js(t, TypeError, ws.abort(), 'abort should reject') + .then(() => writer.ready); +}, 'abort on a locked stream should reject'); + +test(t => { + let ctrl; + const ws = new 
WritableStream({start(c) { ctrl = c; }}); + const e = Error('hello'); + + assert_true(ctrl.signal instanceof AbortSignal); + assert_false(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); + ws.abort(e); + assert_true(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, e); +}, 'WritableStreamDefaultController.signal'); + +promise_test(async t => { + let ctrl; + let resolve; + const called = new Promise(r => resolve = r); + + const ws = new WritableStream({ + start(c) { ctrl = c; }, + write() { resolve(); return new Promise(() => {}); } + }); + const writer = ws.getWriter(); + + writer.write(99); + await called; + + assert_false(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); + writer.abort(); + assert_true(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); +}, 'the abort signal is signalled synchronously - write'); + +promise_test(async t => { + let ctrl; + let resolve; + const called = new Promise(r => resolve = r); + + const ws = new WritableStream({ + start(c) { ctrl = c; }, + close() { resolve(); return new Promise(() => {}); } + }); + const writer = ws.getWriter(); + + writer.close(99); + await called; + + assert_false(ctrl.signal.aborted); + writer.abort(); + assert_true(ctrl.signal.aborted); +}, 'the abort signal is signalled synchronously - close'); + +promise_test(async t => { + let ctrl; + const ws = new WritableStream({start(c) { ctrl = c; }}); + const writer = ws.getWriter(); + + const e = TypeError(); + ctrl.error(e); + await promise_rejects_exactly(t, e, writer.closed); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on error'); + +promise_test(async t => { + let ctrl; + const e = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + async write() { throw e; } + }); + const writer = ws.getWriter(); + + await promise_rejects_exactly(t, e, writer.write('hello'), 'write result'); + await promise_rejects_exactly(t, e, writer.closed, 'closed'); + 
assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on write failure'); + +promise_test(async t => { + let ctrl; + const e = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + async close() { throw e; } + }); + const writer = ws.getWriter(); + + await promise_rejects_exactly(t, e, writer.close(), 'close result'); + await promise_rejects_exactly(t, e, writer.closed, 'closed'); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on close failure'); + +promise_test(async t => { + let ctrl; + const e1 = SyntaxError(); + const e2 = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + }); + + const writer = ws.getWriter(); + ctrl.signal.addEventListener('abort', () => writer.abort(e2)); + writer.abort(e1); + assert_true(ctrl.signal.aborted); + + await promise_rejects_exactly(t, e2, writer.closed, 'closed'); +}, 'recursive abort() call'); diff --git a/test/fixtures/wpt/streams/writable-streams/bad-strategies.any.js b/test/fixtures/wpt/streams/writable-streams/bad-strategies.any.js new file mode 100644 index 00000000000000..b180bae57c0585 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/bad-strategies.any.js @@ -0,0 +1,95 @@ +// META: global=window,worker,jsshell +'use strict'; + +const error1 = new Error('a unique string'); +error1.name = 'error1'; + +test(() => { + assert_throws_exactly(error1, () => { + new WritableStream({}, { + get size() { + throw error1; + }, + highWaterMark: 5 + }); + }, 'construction should re-throw the error'); +}, 'Writable stream: throwing strategy.size getter'); + +test(() => { + assert_throws_js(TypeError, () => { + new WritableStream({}, { size: 'a string' }); + }); +}, 'reject any non-function value for strategy.size'); + +test(() => { + assert_throws_exactly(error1, () => { + new WritableStream({}, { + size() { + return 1; + }, + get highWaterMark() { + throw error1; + } + }); + }, 'construction should re-throw the error'); +}, 
'Writable stream: throwing strategy.highWaterMark getter'); + +test(() => { + + for (const highWaterMark of [-1, -Infinity, NaN, 'foo', {}]) { + assert_throws_js(RangeError, () => { + new WritableStream({}, { + size() { + return 1; + }, + highWaterMark + }); + }, `construction should throw a RangeError for ${highWaterMark}`); + } +}, 'Writable stream: invalid strategy.highWaterMark'); + +promise_test(t => { + const ws = new WritableStream({}, { + size() { + throw error1; + }, + highWaterMark: 5 + }); + + const writer = ws.getWriter(); + + const p1 = promise_rejects_exactly(t, error1, writer.write('a'), 'write should reject with the thrown error'); + + const p2 = promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the thrown error'); + + return Promise.all([p1, p2]); +}, 'Writable stream: throwing strategy.size method'); + +promise_test(() => { + const sizes = [NaN, -Infinity, Infinity, -1]; + return Promise.all(sizes.map(size => { + const ws = new WritableStream({}, { + size() { + return size; + }, + highWaterMark: 5 + }); + + const writer = ws.getWriter(); + + return writer.write('a').then(() => assert_unreached('write must reject'), writeE => { + assert_equals(writeE.name, 'RangeError', `write must reject with a RangeError for ${size}`); + + return writer.closed.then(() => assert_unreached('write must reject'), closedE => { + assert_equals(closedE, writeE, `closed should reject with the same error as write`); + }); + }); + })); +}, 'Writable stream: invalid strategy.size return value'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream(undefined, { + size: 'not a function', + highWaterMark: NaN + }), 'WritableStream constructor should throw a TypeError'); +}, 'Writable stream: invalid size beats invalid highWaterMark'); diff --git a/test/fixtures/wpt/streams/writable-streams/bad-underlying-sinks.any.js b/test/fixtures/wpt/streams/writable-streams/bad-underlying-sinks.any.js new file mode 100644 index 
00000000000000..0bfc036246a870 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/bad-underlying-sinks.any.js @@ -0,0 +1,204 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +test(() => { + assert_throws_exactly(error1, () => { + new WritableStream({ + get start() { + throw error1; + } + }); + }, 'constructor should throw same error as throwing start getter'); + + assert_throws_exactly(error1, () => { + new WritableStream({ + start() { + throw error1; + } + }); + }, 'constructor should throw same error as throwing start method'); + + assert_throws_js(TypeError, () => { + new WritableStream({ + start: 'not a function or undefined' + }); + }, 'constructor should throw TypeError when passed a non-function start property'); + + assert_throws_js(TypeError, () => { + new WritableStream({ + start: { apply() {} } + }); + }, 'constructor should throw TypeError when passed a non-function start property with an .apply method'); +}, 'start: errors in start cause WritableStream constructor to throw'); + +promise_test(t => { + + const ws = recordingWritableStream({ + close() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.close(), 'close() promise must reject with the thrown error') + .then(() => promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the thrown error')) + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'closed promise must reject with the thrown error')) + .then(() => { + assert_array_equals(ws.events, ['close']); + }); + +}, 'close: throwing method should cause writer close() and ready to reject'); + +promise_test(t => { + + const ws = recordingWritableStream({ + close() { + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + return 
promise_rejects_exactly(t, error1, writer.close(), 'close() promise must reject with the same error') + .then(() => promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error')) + .then(() => assert_array_equals(ws.events, ['close'])); + +}, 'close: returning a rejected promise should cause writer close() and ready to reject'); + +test(() => { + assert_throws_exactly(error1, () => new WritableStream({ + get close() { + throw error1; + } + }), 'constructor should throw'); +}, 'close: throwing getter should cause constructor to throw'); + +test(() => { + assert_throws_exactly(error1, () => new WritableStream({ + get write() { + throw error1; + } + }), 'constructor should throw'); +}, 'write: throwing getter should cause write() and closed to reject'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('a'), 'write should reject with the thrown error') + .then(() => promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the thrown error')); +}, 'write: throwing method should cause write() and closed to reject'); + +promise_test(t => { + + let rejectSinkWritePromise; + const ws = recordingWritableStream({ + write() { + return new Promise((r, reject) => { + rejectSinkWritePromise = reject; + }); + } + }); + + return flushAsyncEvents().then(() => { + const writer = ws.getWriter(); + const writePromise = writer.write('a'); + rejectSinkWritePromise(error1); + + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise, 'writer write must reject with the same error'), + promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error') + ]); + }) + .then(() => { + assert_array_equals(ws.events, ['write', 'a']); + }); + +}, 'write: returning a promise that becomes rejected after the writer write() should cause writer write() and ready ' + 
+ 'to reject'); + +promise_test(t => { + + const ws = recordingWritableStream({ + write() { + if (ws.events.length === 2) { + return delay(0); + } + + return Promise.reject(error1); + } + }); + + const writer = ws.getWriter(); + + // Do not wait for this; we want to test the ready promise when the stream is "full" (desiredSize = 0), but if we wait + // then the stream will transition back to "empty" (desiredSize = 1) + writer.write('a'); + const readyPromise = writer.ready; + + return promise_rejects_exactly(t, error1, writer.write('b'), 'second write must reject with the same error').then(() => { + assert_equals(writer.ready, readyPromise, + 'the ready promise must not change, since the queue was full after the first write, so the pending one simply ' + + 'transitioned'); + return promise_rejects_exactly(t, error1, writer.ready, 'ready promise must reject with the same error'); + }) + .then(() => assert_array_equals(ws.events, ['write', 'a', 'write', 'b'])); + +}, 'write: returning a rejected promise (second write) should cause writer write() and ready to reject'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream({ + start: 'test' + }), 'constructor should throw'); +}, 'start: non-function start method'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream({ + write: 'test' + }), 'constructor should throw'); +}, 'write: non-function write method'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream({ + close: 'test' + }), 'constructor should throw'); +}, 'close: non-function close method'); + +test(() => { + assert_throws_js(TypeError, () => new WritableStream({ + abort: { apply() {} } + }), 'constructor should throw'); +}, 'abort: non-function abort method with .apply'); + +test(() => { + assert_throws_exactly(error1, () => new WritableStream({ + get abort() { + throw error1; + } + }), 'constructor should throw'); +}, 'abort: throwing getter should cause abort() and closed to reject'); + +promise_test(t 
=> { + const abortReason = new Error('different string'); + const ws = new WritableStream({ + abort() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.abort(abortReason), 'abort should reject with the thrown error') + .then(() => promise_rejects_exactly(t, abortReason, writer.closed, 'closed should reject with abortReason')); +}, 'abort: throwing method should cause abort() and closed to reject'); diff --git a/test/fixtures/wpt/streams/writable-streams/byte-length-queuing-strategy.any.js b/test/fixtures/wpt/streams/writable-streams/byte-length-queuing-strategy.any.js new file mode 100644 index 00000000000000..9a61dd7cc69787 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/byte-length-queuing-strategy.any.js @@ -0,0 +1,28 @@ +// META: global=window,worker,jsshell +'use strict'; + +promise_test(t => { + let isDone = false; + const ws = new WritableStream( + { + write() { + return new Promise(resolve => { + t.step_timeout(() => { + isDone = true; + resolve(); + }, 200); + }); + }, + + close() { + assert_true(isDone, 'close is only called once the promise has been resolved'); + } + }, + new ByteLengthQueuingStrategy({ highWaterMark: 1024 * 16 }) + ); + + const writer = ws.getWriter(); + writer.write({ byteLength: 1024 }); + + return writer.close(); +}, 'Closing a writable stream with in-flight writes below the high water mark delays the close call properly'); diff --git a/test/fixtures/wpt/streams/writable-streams/close.any.js b/test/fixtures/wpt/streams/writable-streams/close.any.js new file mode 100644 index 00000000000000..cf997ed84cdcac --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/close.any.js @@ -0,0 +1,470 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); 
+error2.name = 'error2'; + +promise_test(() => { + const ws = new WritableStream({ + close() { + return 'Hello'; + } + }); + + const writer = ws.getWriter(); + + const closePromise = writer.close(); + return closePromise.then(value => assert_equals(value, undefined, 'fulfillment value must be undefined')); +}, 'fulfillment value of writer.close() call must be undefined even if the underlying sink returns a non-undefined ' + + 'value'); + +promise_test(() => { + let controller; + let resolveClose; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + return new Promise(resolve => { + resolveClose = resolve; + }); + } + }); + + const writer = ws.getWriter(); + + const closePromise = writer.close(); + return flushAsyncEvents().then(() => { + controller.error(error1); + return flushAsyncEvents(); + }).then(() => { + resolveClose(); + return Promise.all([ + closePromise, + writer.closed, + flushAsyncEvents().then(() => writer.closed)]); + }); +}, 'when sink calls error asynchronously while sink close is in-flight, the stream should not become errored'); + +promise_test(() => { + let controller; + const passedError = new Error('error me'); + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + controller.error(passedError); + } + }); + + const writer = ws.getWriter(); + + return writer.close().then(() => writer.closed); +}, 'when sink calls error synchronously while closing, the stream should not become errored'); + +promise_test(t => { + const ws = new WritableStream({ + close() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return Promise.all([ + writer.write('y'), + promise_rejects_exactly(t, error1, writer.close(), 'close() must reject with the error'), + promise_rejects_exactly(t, error1, writer.closed, 'closed must reject with the error') + ]); +}, 'when the sink throws during close, and the close is requested while a write is still in-flight, the stream should ' + + 'become errored 
during the close'); + +promise_test(() => { + const ws = new WritableStream({ + write(chunk, controller) { + controller.error(error1); + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + writer.write('a'); + + return delay(0).then(() => { + writer.releaseLock(); + }); +}, 'releaseLock on a stream with a pending write in which the stream has been errored'); + +promise_test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + }, + close() { + controller.error(error1); + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + writer.close(); + + return delay(0).then(() => { + writer.releaseLock(); + }); +}, 'releaseLock on a stream with a pending close in which controller.error() was called'); + +promise_test(() => { + const ws = recordingWritableStream(); + + const writer = ws.getWriter(); + + return writer.ready.then(() => { + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + + writer.close(); + assert_equals(writer.desiredSize, 1, 'desiredSize should be still 1'); + + return writer.ready.then(v => { + assert_equals(v, undefined, 'ready promise should be fulfilled with undefined'); + assert_array_equals(ws.events, ['close'], 'write and abort should not be called'); + }); + }); +}, 'when close is called on a WritableStream in writable state, ready should return a fulfilled promise'); + +promise_test(() => { + const ws = recordingWritableStream({ + write() { + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + + return writer.ready.then(() => { + writer.write('a'); + + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0'); + + let calledClose = false; + return Promise.all([ + writer.ready.then(v => { + assert_equals(v, undefined, 'ready promise should be fulfilled with undefined'); + assert_true(calledClose, 'ready should not be fulfilled before writer.close() is called'); + assert_array_equals(ws.events, ['write', 'a'], 'sink 
abort() should not be called'); + }), + flushAsyncEvents().then(() => { + writer.close(); + calledClose = true; + }) + ]); + }); +}, 'when close is called on a WritableStream in waiting state, ready promise should be fulfilled'); + +promise_test(() => { + let asyncCloseFinished = false; + const ws = recordingWritableStream({ + close() { + return flushAsyncEvents().then(() => { + asyncCloseFinished = true; + }); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + writer.write('a'); + + writer.close(); + + return writer.ready.then(v => { + assert_false(asyncCloseFinished, 'ready promise should be fulfilled before async close completes'); + assert_equals(v, undefined, 'ready promise should be fulfilled with undefined'); + assert_array_equals(ws.events, ['write', 'a', 'close'], 'sink abort() should not be called'); + }); + }); +}, 'when close is called on a WritableStream in waiting state, ready should be fulfilled immediately even if close ' + + 'takes a long time'); + +promise_test(t => { + const rejection = { name: 'letter' }; + const ws = new WritableStream({ + close() { + return { + then(onFulfilled, onRejected) { onRejected(rejection); } + }; + } + }); + return promise_rejects_exactly(t, rejection, ws.getWriter().close(), 'close() should return a rejection'); +}, 'returning a thenable from close() should work'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = writer.close(); + const closedPromise = writer.closed; + writer.releaseLock(); + return Promise.all([ + closePromise, + promise_rejects_js(t, TypeError, closedPromise, '.closed promise should be rejected') + ]); + }); +}, 'releaseLock() should not change the result of sync close()'); + +promise_test(t => { + const ws = new WritableStream({ + close() { + return flushAsyncEvents(); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const closePromise = 
writer.close(); + const closedPromise = writer.closed; + writer.releaseLock(); + return Promise.all([ + closePromise, + promise_rejects_js(t, TypeError, closedPromise, '.closed promise should be rejected') + ]); + }); +}, 'releaseLock() should not change the result of async close()'); + +promise_test(() => { + let resolveClose; + const ws = new WritableStream({ + close() { + const promise = new Promise(resolve => { + resolveClose = resolve; + }); + return promise; + } + }); + const writer = ws.getWriter(); + const closePromise = writer.close(); + writer.releaseLock(); + return delay(0).then(() => { + resolveClose(); + return closePromise.then(() => { + assert_equals(ws.getWriter().desiredSize, 0, 'desiredSize should be 0'); + }); + }); +}, 'close() should set state to CLOSED even if writer has detached'); + +promise_test(() => { + let resolveClose; + const ws = new WritableStream({ + close() { + const promise = new Promise(resolve => { + resolveClose = resolve; + }); + return promise; + } + }); + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + return delay(0).then(() => { + const abortingWriter = ws.getWriter(); + const abortPromise = abortingWriter.abort(); + abortingWriter.releaseLock(); + resolveClose(); + return abortPromise; + }); +}, 'the promise returned by async abort during close should resolve'); + +// Though the order in which the promises are fulfilled or rejected is arbitrary, we're checking it for +// interoperability. We can change the order as long as we file bugs on all implementers to update to the latest tests +// to keep them interoperable. 
+ +promise_test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + + const closePromise = writer.close(); + + const events = []; + return Promise.all([ + closePromise.then(() => { + events.push('closePromise'); + }), + writer.closed.then(() => { + events.push('closed'); + }) + ]).then(() => { + assert_array_equals(events, ['closePromise', 'closed'], + 'promises must fulfill/reject in the expected order'); + }); +}, 'promises must fulfill/reject in the expected order on closure'); + +promise_test(() => { + const ws = new WritableStream({}); + + // Wait until the WritableStream starts so that the close() call gets processed. Otherwise, abort() will be + // processed without waiting for completion of the close(). + return delay(0).then(() => { + const writer = ws.getWriter(); + + const closePromise = writer.close(); + const abortPromise = writer.abort(error1); + + const events = []; + return Promise.all([ + closePromise.then(() => { + events.push('closePromise'); + }), + abortPromise.then(() => { + events.push('abortPromise'); + }), + writer.closed.then(() => { + events.push('closed'); + }) + ]).then(() => { + assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'], + 'promises must fulfill/reject in the expected order'); + }); + }); +}, 'promises must fulfill/reject in the expected order on aborted closure'); + +promise_test(t => { + const ws = new WritableStream({ + close() { + return Promise.reject(error1); + } + }); + + // Wait until the WritableStream starts so that the close() call gets processed. 
+ return delay(0).then(() => { + const writer = ws.getWriter(); + + const closePromise = writer.close(); + const abortPromise = writer.abort(error2); + + const events = []; + closePromise.catch(() => events.push('closePromise')); + abortPromise.catch(() => events.push('abortPromise')); + writer.closed.catch(() => events.push('closed')); + return Promise.all([ + promise_rejects_exactly(t, error1, closePromise, + 'closePromise must reject with the error returned from the sink\'s close method'), + promise_rejects_exactly(t, error1, abortPromise, + 'abortPromise must reject with the error returned from the sink\'s close method'), + promise_rejects_exactly(t, error2, writer.closed, + 'writer.closed must reject with error2') + ]).then(() => { + assert_array_equals(events, ['closePromise', 'abortPromise', 'closed'], + 'promises must fulfill/reject in the expected order'); + }); + }); +}, 'promises must fulfill/reject in the expected order on aborted and errored closure'); + +promise_test(t => { + let resolveWrite; + let controller; + const ws = new WritableStream({ + write(chunk, c) { + controller = c; + return new Promise(resolve => { + resolveWrite = resolve; + }); + } + }); + const writer = ws.getWriter(); + return writer.ready.then(() => { + const writePromise = writer.write('c'); + controller.error(error1); + const closePromise = writer.close(); + let closeRejected = false; + closePromise.catch(() => { + closeRejected = true; + }); + return flushAsyncEvents().then(() => { + assert_false(closeRejected); + resolveWrite(); + return Promise.all([ + writePromise, + promise_rejects_exactly(t, error1, closePromise, 'close() should reject') + ]).then(() => { + assert_true(closeRejected); + }); + }); + }); +}, 'close() should not reject until no sink methods are in flight'); + +promise_test(() => { + const ws = new WritableStream(); + const writer1 = ws.getWriter(); + return writer1.close().then(() => { + writer1.releaseLock(); + const writer2 = ws.getWriter(); + const ready 
= writer2.ready; + assert_equals(ready.constructor, Promise); + return ready; + }); +}, 'ready promise should be initialised as fulfilled for a writer on a closed stream'); + +promise_test(() => { + const ws = new WritableStream(); + ws.close(); + const writer = ws.getWriter(); + return writer.closed; +}, 'close() on a writable stream should work'); + +promise_test(t => { + const ws = new WritableStream(); + ws.getWriter(); + return promise_rejects_js(t, TypeError, ws.close(), 'close should reject'); +}, 'close() on a locked stream should reject'); + +promise_test(t => { + const ws = new WritableStream({ + start(controller) { + controller.error(error1); + } + }); + return promise_rejects_exactly(t, error1, ws.close(), 'close should reject with error1'); +}, 'close() on an erroring stream should reject'); + +promise_test(t => { + const ws = new WritableStream({ + start(controller) { + controller.error(error1); + } + }); + const writer = ws.getWriter(); + return promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with the error').then(() => { + writer.releaseLock(); + return promise_rejects_js(t, TypeError, ws.close(), 'close should reject'); + }); +}, 'close() on an errored stream should reject'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + return writer.close().then(() => { + return promise_rejects_js(t, TypeError, ws.close(), 'close should reject'); + }); +}, 'close() on an closed stream should reject'); + +promise_test(t => { + const ws = new WritableStream({ + close() { + return new Promise(() => {}); + } + }); + + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + return promise_rejects_js(t, TypeError, ws.close(), 'close should reject'); +}, 'close() on a stream with a pending close should reject'); diff --git a/test/fixtures/wpt/streams/writable-streams/constructor.any.js b/test/fixtures/wpt/streams/writable-streams/constructor.any.js new file mode 100644 index 
00000000000000..75eed2a993fe5e --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/constructor.any.js @@ -0,0 +1,155 @@ +// META: global=window,worker,jsshell +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +promise_test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + + // Now error the stream after its construction. + controller.error(error1); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, null, 'desiredSize should be null'); + return writer.closed.catch(r => { + assert_equals(r, error1, 'ws should be errored by the passed error'); + }); +}, 'controller argument should be passed to start method'); + +promise_test(t => { + const ws = new WritableStream({ + write(chunk, controller) { + controller.error(error1); + } + }); + + const writer = ws.getWriter(); + + return Promise.all([ + writer.write('a'), + promise_rejects_exactly(t, error1, writer.closed, 'controller.error() in write() should error the stream') + ]); +}, 'controller argument should be passed to write method'); + +// Older versions of the standard had the controller argument passed to close(). It wasn't useful, and so has been +// removed. This test remains to identify implementations that haven't been updated. 
+promise_test(t => { + const ws = new WritableStream({ + close(...args) { + t.step(() => { + assert_array_equals(args, [], 'no arguments should be passed to close'); + }); + } + }); + + return ws.getWriter().close(); +}, 'controller argument should not be passed to close method'); + +promise_test(() => { + const ws = new WritableStream({}, { + highWaterMark: 1000, + size() { return 1; } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1000, 'desiredSize should be 1000'); + return writer.ready.then(v => { + assert_equals(v, undefined, 'ready promise should fulfill with undefined'); + }); +}, 'highWaterMark should be reflected to desiredSize'); + +promise_test(() => { + const ws = new WritableStream({}, { + highWaterMark: Infinity, + size() { return 0; } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, Infinity, 'desiredSize should be Infinity'); + + return writer.ready; +}, 'WritableStream should be writable and ready should fulfill immediately if the strategy does not apply ' + + 'backpressure'); + +test(() => { + new WritableStream(); +}, 'WritableStream should be constructible with no arguments'); + +test(() => { + const underlyingSink = { get start() { throw error1; } }; + const queuingStrategy = { highWaterMark: 0, get size() { throw error2; } }; + + // underlyingSink is converted in prose in the method body, whereas queuingStrategy is done at the IDL layer. + // So the queuingStrategy exception should be encountered first. 
+ assert_throws_exactly(error2, () => new WritableStream(underlyingSink, queuingStrategy)); +}, 'underlyingSink argument should be converted after queuingStrategy argument'); + +test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + + assert_equals(typeof writer.write, 'function', 'writer should have a write method'); + assert_equals(typeof writer.abort, 'function', 'writer should have an abort method'); + assert_equals(typeof writer.close, 'function', 'writer should have a close method'); + + assert_equals(writer.desiredSize, 1, 'desiredSize should start at 1'); + + assert_not_equals(typeof writer.ready, 'undefined', 'writer should have a ready property'); + assert_equals(typeof writer.ready.then, 'function', 'ready property should be thenable'); + assert_not_equals(typeof writer.closed, 'undefined', 'writer should have a closed property'); + assert_equals(typeof writer.closed.then, 'function', 'closed property should be thenable'); +}, 'WritableStream instances should have standard methods and properties'); + +test(() => { + let WritableStreamDefaultController; + new WritableStream({ + start(c) { + WritableStreamDefaultController = c.constructor; + } + }); + + assert_throws_js(TypeError, () => new WritableStreamDefaultController({}), + 'constructor should throw a TypeError exception'); +}, 'WritableStreamDefaultController constructor should throw'); + +test(() => { + let WritableStreamDefaultController; + const stream = new WritableStream({ + start(c) { + WritableStreamDefaultController = c.constructor; + } + }); + + assert_throws_js(TypeError, () => new WritableStreamDefaultController(stream), + 'constructor should throw a TypeError exception'); +}, 'WritableStreamDefaultController constructor should throw when passed an initialised WritableStream'); + +test(() => { + const stream = new WritableStream(); + const writer = stream.getWriter(); + const WritableStreamDefaultWriter = writer.constructor; + writer.releaseLock(); + 
assert_throws_js(TypeError, () => new WritableStreamDefaultWriter({}), + 'constructor should throw a TypeError exception'); +}, 'WritableStreamDefaultWriter should throw unless passed a WritableStream'); + +test(() => { + const stream = new WritableStream(); + const writer = stream.getWriter(); + const WritableStreamDefaultWriter = writer.constructor; + assert_throws_js(TypeError, () => new WritableStreamDefaultWriter(stream), + 'constructor should throw a TypeError exception'); +}, 'WritableStreamDefaultWriter constructor should throw when stream argument is locked'); diff --git a/test/fixtures/wpt/streams/writable-streams/count-queuing-strategy.any.js b/test/fixtures/wpt/streams/writable-streams/count-queuing-strategy.any.js new file mode 100644 index 00000000000000..30edb3eb315c62 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/count-queuing-strategy.any.js @@ -0,0 +1,124 @@ +// META: global=window,worker,jsshell +'use strict'; + +test(() => { + new WritableStream({}, new CountQueuingStrategy({ highWaterMark: 4 })); +}, 'Can construct a writable stream with a valid CountQueuingStrategy'); + +promise_test(() => { + const dones = Object.create(null); + + const ws = new WritableStream( + { + write(chunk) { + return new Promise(resolve => { + dones[chunk] = resolve; + }); + } + }, + new CountQueuingStrategy({ highWaterMark: 0 }) + ); + + const writer = ws.getWriter(); + let writePromiseB; + let writePromiseC; + + return Promise.resolve().then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be initially 0'); + + const writePromiseA = writer.write('a'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 1st write()'); + + writePromiseB = writer.write('b'); + assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after 2nd write()'); + + dones.a(); + return writePromiseA; + }).then(() => { + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after completing 1st write()'); + + dones.b(); + 
return writePromiseB; + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 2nd write()'); + + writePromiseC = writer.write('c'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 3rd write()'); + + dones.c(); + return writePromiseC; + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 3rd write()'); + }); +}, 'Correctly governs the value of a WritableStream\'s state property (HWM = 0)'); + +promise_test(() => { + const dones = Object.create(null); + + const ws = new WritableStream( + { + write(chunk) { + return new Promise(resolve => { + dones[chunk] = resolve; + }); + } + }, + new CountQueuingStrategy({ highWaterMark: 4 }) + ); + + const writer = ws.getWriter(); + let writePromiseB; + let writePromiseC; + let writePromiseD; + + return Promise.resolve().then(() => { + assert_equals(writer.desiredSize, 4, 'desiredSize should be initially 4'); + + const writePromiseA = writer.write('a'); + assert_equals(writer.desiredSize, 3, 'desiredSize should be 3 after 1st write()'); + + writePromiseB = writer.write('b'); + assert_equals(writer.desiredSize, 2, 'desiredSize should be 2 after 2nd write()'); + + writePromiseC = writer.write('c'); + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 after 3rd write()'); + + writePromiseD = writer.write('d'); + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after 4th write()'); + + writer.write('e'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 5th write()'); + + writer.write('f'); + assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after 6th write()'); + + writer.write('g'); + assert_equals(writer.desiredSize, -3, 'desiredSize should be -3 after 7th write()'); + + dones.a(); + return writePromiseA; + }).then(() => { + assert_equals(writer.desiredSize, -2, 'desiredSize should be -2 after completing 1st write()'); + + dones.b(); + return writePromiseB; + 
}).then(() => { + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after completing 2nd write()'); + + dones.c(); + return writePromiseC; + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 3rd write()'); + + writer.write('h'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 8th write()'); + + dones.d(); + return writePromiseD; + }).then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after completing 4th write()'); + + writer.write('i'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1 after 9th write()'); + }); +}, 'Correctly governs the value of a WritableStream\'s state property (HWM = 4)'); diff --git a/test/fixtures/wpt/streams/writable-streams/error.any.js b/test/fixtures/wpt/streams/writable-streams/error.any.js new file mode 100644 index 00000000000000..be986fccc6eac6 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/error.any.js @@ -0,0 +1,64 @@ +// META: global=window,worker,jsshell +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +promise_test(t => { + const ws = new WritableStream({ + start(controller) { + controller.error(error1); + } + }); + return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'stream should be errored'); +}, 'controller.error() should error the stream'); + +test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + ws.abort(); + controller.error(error1); +}, 'controller.error() on erroring stream should not throw'); + +promise_test(t => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + controller.error(error1); + controller.error(error2); + return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'first controller.error() should win'); +}, 'surplus calls to controller.error() should be a 
no-op'); + +promise_test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + return ws.abort().then(() => { + controller.error(error1); + }); +}, 'controller.error() on errored stream should not throw'); + +promise_test(() => { + let controller; + const ws = new WritableStream({ + start(c) { + controller = c; + } + }); + return ws.getWriter().close().then(() => { + controller.error(error1); + }); +}, 'controller.error() on closed stream should not throw'); diff --git a/test/fixtures/wpt/streams/writable-streams/floating-point-total-queue-size.any.js b/test/fixtures/wpt/streams/writable-streams/floating-point-total-queue-size.any.js new file mode 100644 index 00000000000000..8e77ba0bb31185 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/floating-point-total-queue-size.any.js @@ -0,0 +1,87 @@ +// META: global=window,worker,jsshell +'use strict'; + +// Due to the limitations of floating-point precision, the calculation of desiredSize sometimes gives different answers +// than adding up the items in the queue would. It is important that implementations give the same result in these edge +// cases so that developers do not come to depend on non-standard behaviour. See +// https://github.com/whatwg/streams/issues/582 and linked issues for further discussion. 
+ +promise_test(() => { + const writer = setupTestStream(); + + const writePromises = [ + writer.write(2), + writer.write(Number.MAX_SAFE_INTEGER) + ]; + + assert_equals(writer.desiredSize, 0 - 2 - Number.MAX_SAFE_INTEGER, + 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)'); + + return Promise.all(writePromises).then(() => { + assert_equals(writer.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative'); + }); +}, 'Floating point arithmetic must manifest near NUMBER.MAX_SAFE_INTEGER (total ends up positive)'); + +promise_test(() => { + const writer = setupTestStream(); + + const writePromises = [ + writer.write(1e-16), + writer.write(1) + ]; + + assert_equals(writer.desiredSize, 0 - 1e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)'); + + return Promise.all(writePromises).then(() => { + assert_equals(writer.desiredSize, 0, '[[queueTotalSize]] must clamp to 0 if it becomes negative'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up positive, but clamped)'); + +promise_test(() => { + const writer = setupTestStream(); + + const writePromises = [ + writer.write(1e-16), + writer.write(1), + writer.write(2e-16) + ]; + + assert_equals(writer.desiredSize, 0 - 1e-16 - 1 - 2e-16, + 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing three chunks)'); + + return Promise.all(writePromises).then(() => { + assert_equals(writer.desiredSize, 0 - 1e-16 - 1 - 2e-16 + 1e-16 + 1 + 2e-16, + 'desiredSize must be calculated using floating-point arithmetic (after the three chunks have finished writing)'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up positive, and not clamped)'); + +promise_test(() => { + const writer = setupTestStream(); + + const writePromises = [ + writer.write(2e-16), + writer.write(1) + ]; + + 
assert_equals(writer.desiredSize, 0 - 2e-16 - 1, + 'desiredSize must be calculated using double-precision floating-point arithmetic (after writing two chunks)'); + + return Promise.all(writePromises).then(() => { + assert_equals(writer.desiredSize, 0 - 2e-16 - 1 + 2e-16 + 1, + 'desiredSize must be calculated using floating-point arithmetic (after the two chunks have finished writing)'); + }); +}, 'Floating point arithmetic must manifest near 0 (total ends up zero)'); + +function setupTestStream() { + const strategy = { + size(x) { + return x; + }, + highWaterMark: 0 + }; + + const ws = new WritableStream({}, strategy); + + return ws.getWriter(); +} diff --git a/test/fixtures/wpt/streams/writable-streams/general.any.js b/test/fixtures/wpt/streams/writable-streams/general.any.js new file mode 100644 index 00000000000000..fdd10b29aa0ebc --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/general.any.js @@ -0,0 +1,277 @@ +// META: global=window,worker,jsshell +'use strict'; + +test(() => { + const ws = new WritableStream({}); + const writer = ws.getWriter(); + writer.releaseLock(); + + assert_throws_js(TypeError, () => writer.desiredSize, 'desiredSize should throw a TypeError'); +}, 'desiredSize on a released writer'); + +test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); +}, 'desiredSize initial value'); + +promise_test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + + writer.close(); + + return writer.closed.then(() => { + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0'); + }); +}, 'desiredSize on a writer for a closed stream'); + +test(() => { + const ws = new WritableStream({ + start(c) { + c.error(); + } + }); + + const writer = ws.getWriter(); + assert_equals(writer.desiredSize, null, 'desiredSize should be null'); +}, 'desiredSize on a writer for an errored stream'); + +test(() => { + const ws = new 
WritableStream({}); + + const writer = ws.getWriter(); + writer.close(); + writer.releaseLock(); + + ws.getWriter(); +}, 'ws.getWriter() on a closing WritableStream'); + +promise_test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + return writer.close().then(() => { + writer.releaseLock(); + + ws.getWriter(); + }); +}, 'ws.getWriter() on a closed WritableStream'); + +test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + writer.abort(); + writer.releaseLock(); + + ws.getWriter(); +}, 'ws.getWriter() on an aborted WritableStream'); + +promise_test(() => { + const ws = new WritableStream({ + start(c) { + c.error(); + } + }); + + const writer = ws.getWriter(); + return writer.closed.then( + v => assert_unreached('writer.closed fulfilled unexpectedly with: ' + v), + () => { + writer.releaseLock(); + + ws.getWriter(); + } + ); +}, 'ws.getWriter() on an errored WritableStream'); + +promise_test(() => { + const ws = new WritableStream({}); + + const writer = ws.getWriter(); + writer.releaseLock(); + + return writer.closed.then( + v => assert_unreached('writer.closed fulfilled unexpectedly with: ' + v), + closedRejection => { + assert_equals(closedRejection.name, 'TypeError', 'closed promise should reject with a TypeError'); + return writer.ready.then( + v => assert_unreached('writer.ready fulfilled unexpectedly with: ' + v), + readyRejection => assert_equals(readyRejection, closedRejection, + 'ready promise should reject with the same error') + ); + } + ); +}, 'closed and ready on a released writer'); + +promise_test(t => { + let thisObject = null; + // Calls to Sink methods after the first are implicitly ignored. Only the first value that is passed to the resolver + // is used. 
+ class Sink { + start() { + // Called twice + t.step(() => { + assert_equals(this, thisObject, 'start should be called as a method'); + }); + } + + write() { + t.step(() => { + assert_equals(this, thisObject, 'write should be called as a method'); + }); + } + + close() { + t.step(() => { + assert_equals(this, thisObject, 'close should be called as a method'); + }); + } + + abort() { + t.step(() => { + assert_equals(this, thisObject, 'abort should be called as a method'); + }); + } + } + + const theSink = new Sink(); + thisObject = theSink; + const ws = new WritableStream(theSink); + + const writer = ws.getWriter(); + + writer.write('a'); + const closePromise = writer.close(); + + const ws2 = new WritableStream(theSink); + const writer2 = ws2.getWriter(); + const abortPromise = writer2.abort(); + + return Promise.all([ + closePromise, + abortPromise + ]); +}, 'WritableStream should call underlying sink methods as methods'); + +promise_test(t => { + function functionWithOverloads() {} + functionWithOverloads.apply = t.unreached_func('apply() should not be called'); + functionWithOverloads.call = t.unreached_func('call() should not be called'); + const underlyingSink = { + start: functionWithOverloads, + write: functionWithOverloads, + close: functionWithOverloads, + abort: functionWithOverloads + }; + // Test start(), write(), close(). + const ws1 = new WritableStream(underlyingSink); + const writer1 = ws1.getWriter(); + writer1.write('a'); + writer1.close(); + + // Test abort(). + const abortError = new Error(); + abortError.name = 'abort error'; + + const ws2 = new WritableStream(underlyingSink); + const writer2 = ws2.getWriter(); + writer2.abort(abortError); + + // Test abort() with a close underlying sink method present. (Historical; see + // https://github.com/whatwg/streams/issues/620#issuecomment-263483953 for what used to be + // tested here. But more coverage can't hurt.) 
+ const ws3 = new WritableStream({ + start: functionWithOverloads, + write: functionWithOverloads, + close: functionWithOverloads + }); + const writer3 = ws3.getWriter(); + writer3.abort(abortError); + + return writer1.closed + .then(() => promise_rejects_exactly(t, abortError, writer2.closed, 'writer2.closed should be rejected')) + .then(() => promise_rejects_exactly(t, abortError, writer3.closed, 'writer3.closed should be rejected')); +}, 'methods should not not have .apply() or .call() called'); + +promise_test(() => { + const strategy = { + size() { + if (this !== undefined) { + throw new Error('size called as a method'); + } + return 1; + } + }; + + const ws = new WritableStream({}, strategy); + const writer = ws.getWriter(); + return writer.write('a'); +}, 'WritableStream\'s strategy.size should not be called as a method'); + +promise_test(() => { + const ws = new WritableStream(); + const writer1 = ws.getWriter(); + assert_equals(undefined, writer1.releaseLock(), 'releaseLock() should return undefined'); + const writer2 = ws.getWriter(); + assert_equals(undefined, writer1.releaseLock(), 'no-op releaseLock() should return undefined'); + // Calling releaseLock() on writer1 should not interfere with writer2. If it did, then the ready promise would be + // rejected. + return writer2.ready; +}, 'redundant releaseLock() is no-op'); + +promise_test(() => { + const events = []; + const ws = new WritableStream(); + const writer = ws.getWriter(); + return writer.ready.then(() => { + // Force the ready promise back to a pending state. 
+ const writerPromise = writer.write('dummy'); + const readyPromise = writer.ready.catch(() => events.push('ready')); + const closedPromise = writer.closed.catch(() => events.push('closed')); + writer.releaseLock(); + return Promise.all([readyPromise, closedPromise]).then(() => { + assert_array_equals(events, ['ready', 'closed'], 'ready promise should fire before closed promise'); + // Stop the writer promise hanging around after the test has finished. + return Promise.all([ + writerPromise, + ws.abort() + ]); + }); + }); +}, 'ready promise should fire before closed on releaseLock'); + +test(() => { + class Subclass extends WritableStream { + extraFunction() { + return true; + } + } + assert_equals( + Object.getPrototypeOf(Subclass.prototype), WritableStream.prototype, + 'Subclass.prototype\'s prototype should be WritableStream.prototype'); + assert_equals(Object.getPrototypeOf(Subclass), WritableStream, + 'Subclass\'s prototype should be WritableStream'); + const sub = new Subclass(); + assert_true(sub instanceof WritableStream, + 'Subclass object should be an instance of WritableStream'); + assert_true(sub instanceof Subclass, + 'Subclass object should be an instance of Subclass'); + const lockedGetter = Object.getOwnPropertyDescriptor( + WritableStream.prototype, 'locked').get; + assert_equals(lockedGetter.call(sub), sub.locked, + 'Subclass object should pass brand check'); + assert_true(sub.extraFunction(), + 'extraFunction() should be present on Subclass object'); +}, 'Subclassing WritableStream should work'); + +test(() => { + const ws = new WritableStream(); + assert_false(ws.locked, 'stream should not be locked'); + ws.getWriter(); + assert_true(ws.locked, 'stream should be locked'); +}, 'the locked getter should return true if the stream has a writer'); diff --git a/test/fixtures/wpt/streams/writable-streams/properties.any.js b/test/fixtures/wpt/streams/writable-streams/properties.any.js new file mode 100644 index 00000000000000..0f7f876d8b6fc4 --- 
/dev/null +++ b/test/fixtures/wpt/streams/writable-streams/properties.any.js @@ -0,0 +1,53 @@ +// META: global=window,worker,jsshell +'use strict'; + +const sinkMethods = { + start: { + length: 1, + trigger: () => Promise.resolve() + }, + write: { + length: 2, + trigger: writer => writer.write() + }, + close: { + length: 0, + trigger: writer => writer.close() + }, + abort: { + length: 1, + trigger: writer => writer.abort() + } +}; + +for (const method in sinkMethods) { + const { length, trigger } = sinkMethods[method]; + + // Some semantic tests of how sink methods are called can be found in general.js, as well as in the test files + // specific to each method. + promise_test(() => { + let argCount; + const ws = new WritableStream({ + [method](...args) { + argCount = args.length; + } + }); + return Promise.resolve(trigger(ws.getWriter())).then(() => { + assert_equals(argCount, length, `${method} should be called with ${length} arguments`); + }); + }, `sink method ${method} should be called with the right number of arguments`); + + promise_test(() => { + let methodWasCalled = false; + function Sink() {} + Sink.prototype = { + [method]() { + methodWasCalled = true; + } + }; + const ws = new WritableStream(new Sink()); + return Promise.resolve(trigger(ws.getWriter())).then(() => { + assert_true(methodWasCalled, `${method} should be called`); + }); + }, `sink method ${method} should be called even when it's located on the prototype chain`); +} diff --git a/test/fixtures/wpt/streams/writable-streams/reentrant-strategy.any.js b/test/fixtures/wpt/streams/writable-streams/reentrant-strategy.any.js new file mode 100644 index 00000000000000..afde413b4252d1 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/reentrant-strategy.any.js @@ -0,0 +1,174 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +// These tests exercise the pathological case of calling 
WritableStream* methods from within the strategy.size() +// callback. This is not something any real code should ever do. Failures here indicate subtle deviations from the +// standard that may affect real, non-pathological code. + +const error1 = { name: 'error1' }; + +promise_test(() => { + let writer; + const strategy = { + size(chunk) { + if (chunk > 0) { + writer.write(chunk - 1); + } + return chunk; + } + }; + + const ws = recordingWritableStream({}, strategy); + writer = ws.getWriter(); + return writer.write(2) + .then(() => { + assert_array_equals(ws.events, ['write', 0, 'write', 1, 'write', 2], 'writes should appear in order'); + }); +}, 'writes should be written in the standard order'); + +promise_test(() => { + let writer; + const events = []; + const strategy = { + size(chunk) { + events.push('size', chunk); + if (chunk > 0) { + writer.write(chunk - 1) + .then(() => events.push('writer.write done', chunk - 1)); + } + return chunk; + } + }; + const ws = new WritableStream({ + write(chunk) { + events.push('sink.write', chunk); + } + }, strategy); + writer = ws.getWriter(); + return writer.write(2) + .then(() => events.push('writer.write done', 2)) + .then(() => flushAsyncEvents()) + .then(() => { + assert_array_equals(events, ['size', 2, 'size', 1, 'size', 0, + 'sink.write', 0, 'sink.write', 1, 'writer.write done', 0, + 'sink.write', 2, 'writer.write done', 1, + 'writer.write done', 2], + 'events should happen in standard order'); + }); +}, 'writer.write() promises should resolve in the standard order'); + +promise_test(t => { + let controller; + const strategy = { + size() { + controller.error(error1); + return 1; + } + }; + const ws = recordingWritableStream({ + start(c) { + controller = c; + } + }, strategy); + const resolved = []; + const writer = ws.getWriter(); + const readyPromise1 = writer.ready.then(() => resolved.push('ready1')); + const writePromise = promise_rejects_exactly(t, error1, writer.write(), + 'write() should reject with the error') + 
.then(() => resolved.push('write')); + const readyPromise2 = promise_rejects_exactly(t, error1, writer.ready, 'ready should reject with error1') + .then(() => resolved.push('ready2')); + const closedPromise = promise_rejects_exactly(t, error1, writer.closed, 'closed should reject with error1') + .then(() => resolved.push('closed')); + return Promise.all([readyPromise1, writePromise, readyPromise2, closedPromise]) + .then(() => { + assert_array_equals(resolved, ['ready1', 'write', 'ready2', 'closed'], + 'promises should resolve in standard order'); + assert_array_equals(ws.events, [], 'underlying sink write should not be called'); + }); +}, 'controller.error() should work when called from within strategy.size()'); + +promise_test(t => { + let writer; + const strategy = { + size() { + writer.close(); + return 1; + } + }; + + const ws = recordingWritableStream({}, strategy); + writer = ws.getWriter(); + return promise_rejects_js(t, TypeError, writer.write('a'), 'write() promise should reject') + .then(() => { + assert_array_equals(ws.events, ['close'], 'sink.write() should not be called'); + }); +}, 'close() should work when called from within strategy.size()'); + +promise_test(t => { + let writer; + const strategy = { + size() { + writer.abort(error1); + return 1; + } + }; + + const ws = recordingWritableStream({}, strategy); + writer = ws.getWriter(); + return promise_rejects_exactly(t, error1, writer.write('a'), 'write() promise should reject') + .then(() => { + assert_array_equals(ws.events, ['abort', error1], 'sink.write() should not be called'); + }); +}, 'abort() should work when called from within strategy.size()'); + +promise_test(t => { + let writer; + const strategy = { + size() { + writer.releaseLock(); + return 1; + } + }; + + const ws = recordingWritableStream({}, strategy); + writer = ws.getWriter(); + const writePromise = promise_rejects_js(t, TypeError, writer.write('a'), 'write() promise should reject'); + const readyPromise = promise_rejects_js(t, 
TypeError, writer.ready, 'ready promise should reject'); + const closedPromise = promise_rejects_js(t, TypeError, writer.closed, 'closed promise should reject'); + return Promise.all([writePromise, readyPromise, closedPromise]) + .then(() => { + assert_array_equals(ws.events, [], 'sink.write() should not be called'); + }); +}, 'releaseLock() should abort the write() when called within strategy.size()'); + +promise_test(t => { + let writer1; + let ws; + let writePromise2; + let closePromise; + let closedPromise2; + const strategy = { + size(chunk) { + if (chunk > 0) { + writer1.releaseLock(); + const writer2 = ws.getWriter(); + writePromise2 = writer2.write(0); + closePromise = writer2.close(); + closedPromise2 = writer2.closed; + } + return 1; + } + }; + ws = recordingWritableStream({}, strategy); + writer1 = ws.getWriter(); + const writePromise1 = promise_rejects_js(t, TypeError, writer1.write(1), 'write() promise should reject'); + const readyPromise = promise_rejects_js(t, TypeError, writer1.ready, 'ready promise should reject'); + const closedPromise1 = promise_rejects_js(t, TypeError, writer1.closed, 'closed promise should reject'); + return Promise.all([writePromise1, readyPromise, closedPromise1, writePromise2, closePromise, closedPromise2]) + .then(() => { + assert_array_equals(ws.events, ['write', 0, 'close'], 'sink.write() should only be called once'); + }); +}, 'original reader should error when new reader is created within strategy.size()'); diff --git a/test/fixtures/wpt/streams/writable-streams/start.any.js b/test/fixtures/wpt/streams/writable-streams/start.any.js new file mode 100644 index 00000000000000..02b5f2a387625a --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/start.any.js @@ -0,0 +1,163 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = { name: 'error1' }; + +promise_test(() => { + let resolveStartPromise; + 
const ws = recordingWritableStream({ + start() { + return new Promise(resolve => { + resolveStartPromise = resolve; + }); + } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + writer.write('a'); + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after writer.write()'); + + // Wait and verify that write isn't called. + return flushAsyncEvents() + .then(() => { + assert_array_equals(ws.events, [], 'write should not be called until start promise resolves'); + resolveStartPromise(); + return writer.ready; + }) + .then(() => assert_array_equals(ws.events, ['write', 'a'], + 'write should not be called until start promise resolves')); +}, 'underlying sink\'s write should not be called until start finishes'); + +promise_test(() => { + let resolveStartPromise; + const ws = recordingWritableStream({ + start() { + return new Promise(resolve => { + resolveStartPromise = resolve; + }); + } + }); + + const writer = ws.getWriter(); + + writer.close(); + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + + // Wait and verify that write isn't called. + return flushAsyncEvents().then(() => { + assert_array_equals(ws.events, [], 'close should not be called until start promise resolves'); + resolveStartPromise(); + return writer.closed; + }); +}, 'underlying sink\'s close should not be called until start finishes'); + +test(() => { + const passedError = new Error('horrible things'); + + let writeCalled = false; + let closeCalled = false; + assert_throws_exactly(passedError, () => { + // recordingWritableStream cannot be used here because the exception in the + // constructor prevents assigning the object to a variable. 
+ new WritableStream({ + start() { + throw passedError; + }, + write() { + writeCalled = true; + }, + close() { + closeCalled = true; + } + }); + }, 'constructor should throw passedError'); + assert_false(writeCalled, 'write should not be called'); + assert_false(closeCalled, 'close should not be called'); +}, 'underlying sink\'s write or close should not be called if start throws'); + +promise_test(() => { + const ws = recordingWritableStream({ + start() { + return Promise.reject(); + } + }); + + // Wait and verify that write or close aren't called. + return flushAsyncEvents() + .then(() => assert_array_equals(ws.events, [], 'write and close should not be called')); +}, 'underlying sink\'s write or close should not be invoked if the promise returned by start is rejected'); + +promise_test(t => { + const ws = new WritableStream({ + start() { + return { + then(onFulfilled, onRejected) { onRejected(error1); } + }; + } + }); + return promise_rejects_exactly(t, error1, ws.getWriter().closed, 'closed promise should be rejected'); +}, 'returning a thenable from start() should work'); + +promise_test(t => { + const ws = recordingWritableStream({ + start(controller) { + controller.error(error1); + } + }); + return promise_rejects_exactly(t, error1, ws.getWriter().write('a'), 'write() should reject with the error') + .then(() => { + assert_array_equals(ws.events, [], 'sink write() should not have been called'); + }); +}, 'controller.error() during start should cause writes to fail'); + +promise_test(t => { + let controller; + let resolveStart; + const ws = recordingWritableStream({ + start(c) { + controller = c; + return new Promise(resolve => { + resolveStart = resolve; + }); + } + }); + const writer = ws.getWriter(); + const writePromise = writer.write('a'); + const closePromise = writer.close(); + controller.error(error1); + resolveStart(); + return Promise.all([ + promise_rejects_exactly(t, error1, writePromise, 'write() should fail'), + promise_rejects_exactly(t, 
error1, closePromise, 'close() should fail') + ]).then(() => { + assert_array_equals(ws.events, [], 'sink write() and close() should not have been called'); + }); +}, 'controller.error() during async start should cause existing writes to fail'); + +promise_test(t => { + const events = []; + const promises = []; + function catchAndRecord(promise, name) { + promises.push(promise.then(t.unreached_func(`promise ${name} should not resolve`), + () => { + events.push(name); + })); + } + const ws = new WritableStream({ + start() { + return Promise.reject(); + } + }, { highWaterMark: 0 }); + const writer = ws.getWriter(); + catchAndRecord(writer.ready, 'ready'); + catchAndRecord(writer.closed, 'closed'); + catchAndRecord(writer.write(), 'write'); + return Promise.all(promises) + .then(() => { + assert_array_equals(events, ['ready', 'write', 'closed'], 'promises should reject in standard order'); + }); +}, 'when start() rejects, writer promises should reject in standard order'); diff --git a/test/fixtures/wpt/streams/writable-streams/write.any.js b/test/fixtures/wpt/streams/writable-streams/write.any.js new file mode 100644 index 00000000000000..e3defa834820d4 --- /dev/null +++ b/test/fixtures/wpt/streams/writable-streams/write.any.js @@ -0,0 +1,284 @@ +// META: global=window,worker,jsshell +// META: script=../resources/test-utils.js +// META: script=../resources/recording-streams.js +'use strict'; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +const error2 = new Error('error2'); +error2.name = 'error2'; + +function writeArrayToStream(array, writableStreamWriter) { + array.forEach(chunk => writableStreamWriter.write(chunk)); + return writableStreamWriter.close(); +} + +promise_test(() => { + let storage; + const ws = new WritableStream({ + start() { + storage = []; + }, + + write(chunk) { + return delay(0).then(() => storage.push(chunk)); + }, + + close() { + return delay(0); + } + }); + + const writer = ws.getWriter(); + + const input = [1, 2, 3, 4, 5]; 
+ return writeArrayToStream(input, writer) + .then(() => assert_array_equals(storage, input, 'correct data should be relayed to underlying sink')); +}, 'WritableStream should complete asynchronous writes before close resolves'); + +promise_test(() => { + const ws = recordingWritableStream(); + + const writer = ws.getWriter(); + + const input = [1, 2, 3, 4, 5]; + return writeArrayToStream(input, writer) + .then(() => assert_array_equals(ws.events, ['write', 1, 'write', 2, 'write', 3, 'write', 4, 'write', 5, 'close'], + 'correct data should be relayed to underlying sink')); +}, 'WritableStream should complete synchronous writes before close resolves'); + +promise_test(() => { + const ws = new WritableStream({ + write() { + return 'Hello'; + } + }); + + const writer = ws.getWriter(); + + const writePromise = writer.write('a'); + return writePromise + .then(value => assert_equals(value, undefined, 'fulfillment value must be undefined')); +}, 'fulfillment value of ws.write() call should be undefined even if the underlying sink returns a non-undefined ' + + 'value'); + +promise_test(() => { + let resolveSinkWritePromise; + const ws = new WritableStream({ + write() { + return new Promise(resolve => { + resolveSinkWritePromise = resolve; + }); + } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + + return writer.ready.then(() => { + const writePromise = writer.write('a'); + let writePromiseResolved = false; + assert_not_equals(resolveSinkWritePromise, undefined, 'resolveSinkWritePromise should not be undefined'); + + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0 after writer.write()'); + + return Promise.all([ + writePromise.then(value => { + writePromiseResolved = true; + assert_equals(resolveSinkWritePromise, undefined, 'sinkWritePromise should be fulfilled before writePromise'); + + assert_equals(value, undefined, 'writePromise should be fulfilled with undefined'); + }), + 
writer.ready.then(value => { + assert_equals(resolveSinkWritePromise, undefined, 'sinkWritePromise should be fulfilled before writer.ready'); + assert_true(writePromiseResolved, 'writePromise should be fulfilled before writer.ready'); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 again'); + + assert_equals(value, undefined, 'writePromise should be fulfilled with undefined'); + }), + flushAsyncEvents().then(() => { + resolveSinkWritePromise(); + resolveSinkWritePromise = undefined; + }) + ]); + }); +}, 'WritableStream should transition to waiting until write is acknowledged'); + +promise_test(t => { + let sinkWritePromiseRejectors = []; + const ws = new WritableStream({ + write() { + const sinkWritePromise = new Promise((r, reject) => sinkWritePromiseRejectors.push(reject)); + return sinkWritePromise; + } + }); + + const writer = ws.getWriter(); + + assert_equals(writer.desiredSize, 1, 'desiredSize should be 1'); + + return writer.ready.then(() => { + const writePromise = writer.write('a'); + assert_equals(sinkWritePromiseRejectors.length, 1, 'there should be 1 rejector'); + assert_equals(writer.desiredSize, 0, 'desiredSize should be 0'); + + const writePromise2 = writer.write('b'); + assert_equals(sinkWritePromiseRejectors.length, 1, 'there should be still 1 rejector'); + assert_equals(writer.desiredSize, -1, 'desiredSize should be -1'); + + const closedPromise = writer.close(); + + assert_equals(writer.desiredSize, -1, 'desiredSize should still be -1'); + + return Promise.all([ + promise_rejects_exactly(t, error1, closedPromise, + 'closedPromise should reject with the error returned from the sink\'s write method') + .then(() => assert_equals(sinkWritePromiseRejectors.length, 0, + 'sinkWritePromise should reject before closedPromise')), + promise_rejects_exactly(t, error1, writePromise, + 'writePromise should reject with the error returned from the sink\'s write method') + .then(() => assert_equals(sinkWritePromiseRejectors.length, 0, + 
'sinkWritePromise should reject before writePromise')), + promise_rejects_exactly(t, error1, writePromise2, + 'writePromise2 should reject with the error returned from the sink\'s write method') + .then(() => assert_equals(sinkWritePromiseRejectors.length, 0, + 'sinkWritePromise should reject before writePromise2')), + flushAsyncEvents().then(() => { + sinkWritePromiseRejectors[0](error1); + sinkWritePromiseRejectors = []; + }) + ]); + }); +}, 'when write returns a rejected promise, queued writes and close should be cleared'); + +promise_test(t => { + const ws = new WritableStream({ + write() { + throw error1; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error1, writer.write('a'), + 'write() should reject with the error returned from the sink\'s write method') + .then(() => promise_rejects_js(t, TypeError, writer.close(), 'close() should be rejected')); +}, 'when sink\'s write throws an error, the stream should become errored and the promise should reject'); + +promise_test(t => { + const ws = new WritableStream({ + write(chunk, controller) { + controller.error(error1); + throw error2; + } + }); + + const writer = ws.getWriter(); + + return promise_rejects_exactly(t, error2, writer.write('a'), + 'write() should reject with the error returned from the sink\'s write method ') + .then(() => { + return Promise.all([ + promise_rejects_exactly(t, error1, writer.ready, + 'writer.ready must reject with the error passed to the controller'), + promise_rejects_exactly(t, error1, writer.closed, + 'writer.closed must reject with the error passed to the controller') + ]); + }); +}, 'writer.write(), ready and closed reject with the error passed to controller.error() made before sink.write' + + ' rejection'); + +promise_test(() => { + const numberOfWrites = 1000; + + let resolveFirstWritePromise; + let writeCount = 0; + const ws = new WritableStream({ + write() { + ++writeCount; + if (!resolveFirstWritePromise) { + return new Promise(resolve 
=> { + resolveFirstWritePromise = resolve; + }); + } + return Promise.resolve(); + } + }); + + const writer = ws.getWriter(); + return writer.ready.then(() => { + for (let i = 1; i < numberOfWrites; ++i) { + writer.write('a'); + } + const writePromise = writer.write('a'); + + assert_equals(writeCount, 1, 'should have called sink\'s write once'); + + resolveFirstWritePromise(); + + return writePromise + .then(() => + assert_equals(writeCount, numberOfWrites, `should have called sink's write ${numberOfWrites} times`)); + }); +}, 'a large queue of writes should be processed completely'); + +promise_test(() => { + const stream = recordingWritableStream(); + const w = stream.getWriter(); + const WritableStreamDefaultWriter = w.constructor; + w.releaseLock(); + const writer = new WritableStreamDefaultWriter(stream); + return writer.ready.then(() => { + writer.write('a'); + assert_array_equals(stream.events, ['write', 'a'], 'write() should be passed to sink'); + }); +}, 'WritableStreamDefaultWriter should work when manually constructed'); + +promise_test(() => { + let thenCalled = false; + const ws = new WritableStream({ + write() { + return { + then(onFulfilled) { + thenCalled = true; + onFulfilled(); + } + }; + } + }); + return ws.getWriter().write('a').then(() => assert_true(thenCalled, 'thenCalled should be true')); +}, 'returning a thenable from write() should work'); + +promise_test(() => { + const stream = new WritableStream(); + const writer = stream.getWriter(); + const WritableStreamDefaultWriter = writer.constructor; + assert_throws_js(TypeError, () => new WritableStreamDefaultWriter(stream), + 'should not be able to construct on locked stream'); + // If stream.[[writer]] no longer points to |writer| then the closed Promise + // won't work properly. 
+ return Promise.all([writer.close(), writer.closed]); +}, 'failing DefaultWriter constructor should not release an existing writer'); + +promise_test(t => { + const ws = new WritableStream({ + start() { + return Promise.reject(error1); + } + }, { highWaterMark: 0 }); + const writer = ws.getWriter(); + return Promise.all([ + promise_rejects_exactly(t, error1, writer.ready, 'ready should be rejected'), + promise_rejects_exactly(t, error1, writer.write(), 'write() should be rejected') + ]); +}, 'write() on a stream with HWM 0 should not cause the ready Promise to resolve'); + +promise_test(t => { + const ws = new WritableStream(); + const writer = ws.getWriter(); + writer.releaseLock(); + return promise_rejects_js(t, TypeError, writer.write(), 'write should reject'); +}, 'writing to a released writer should reject the returned promise'); diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index b610d0ab47ec94..6567782a1d47c9 100644 --- a/test/fixtures/wpt/versions.json +++ b/test/fixtures/wpt/versions.json @@ -36,13 +36,17 @@ "path": "html/webappapis/timers" }, "interfaces": { - "commit": "79fa4cf76e0d39e3fc1b7ead85e067b0a064b892", + "commit": "fcb671ed8b068b25cee87429d803833777f35c2c", "path": "interfaces" }, "resources": { "commit": "972ca5b6693bffebebc5805e1b9da68a6876e1f6", "path": "resources" }, + "streams": { + "commit": "8f60d9443949c323522a2009518d54d5d6ab5541", + "path": "streams" + }, "url": { "commit": "1fcb39223d3009fbb46c1b254755d6cc75e290f1", "path": "url" diff --git a/test/message/esm_loader_not_found.out b/test/message/esm_loader_not_found.out index d2329d7c77ad86..61b1623cdf176f 100644 --- a/test/message/esm_loader_not_found.out +++ b/test/message/esm_loader_not_found.out @@ -1,8 +1,8 @@ (node:*) ExperimentalWarning: --experimental-loader is an experimental feature. 
This feature could change at any time (Use `* --trace-warnings ...` to show where the warning was created) -node:internal/process/esm_loader:* - internalBinding('errors').triggerUncaughtException( - ^ +node:internal/errors:* + ErrorCaptureStackTrace(err); + ^ Error [ERR_MODULE_NOT_FOUND]: Cannot find package 'i-dont-exist' imported from * at new NodeError (node:internal/errors:*:*) at packageResolve (node:internal/modules/esm/resolve:*:*) diff --git a/test/parallel/test-blocklist.js b/test/parallel/test-blocklist.js index c72d9e30b5f2da..51f19e07bc649c 100644 --- a/test/parallel/test-blocklist.js +++ b/test/parallel/test-blocklist.js @@ -209,6 +209,27 @@ const util = require('util'); assert(!blockList.check('8592:757c:efaf:2fff:ffff:ffff:ffff:ffff', 'ipv6')); } +{ + // Regression test for https://github.com/nodejs/node/issues/39074 + const blockList = new BlockList(); + + blockList.addRange('10.0.0.2', '10.0.0.10'); + + // IPv4 checks against IPv4 range. + assert(blockList.check('10.0.0.2')); + assert(blockList.check('10.0.0.10')); + assert(!blockList.check('192.168.0.3')); + assert(!blockList.check('2.2.2.2')); + assert(!blockList.check('255.255.255.255')); + + // IPv6 checks against IPv4 range. 
+ assert(blockList.check('::ffff:0a00:0002', 'ipv6')); + assert(blockList.check('::ffff:0a00:000a', 'ipv6')); + assert(!blockList.check('::ffff:c0a8:0003', 'ipv6')); + assert(!blockList.check('::ffff:0202:0202', 'ipv6')); + assert(!blockList.check('::ffff:ffff:ffff', 'ipv6')); +} + { const blockList = new BlockList(); assert.throws(() => blockList.addRange('1.1.1.2', '1.1.1.1'), /ERR_INVALID_ARG_VALUE/); diff --git a/test/parallel/test-bootstrap-modules.js b/test/parallel/test-bootstrap-modules.js index f18f85728d36e3..0ca00f31adce8c 100644 --- a/test/parallel/test-bootstrap-modules.js +++ b/test/parallel/test-bootstrap-modules.js @@ -23,6 +23,7 @@ const expectedModules = new Set([ 'Internal Binding module_wrap', 'Internal Binding native_module', 'Internal Binding options', + 'Internal Binding performance', 'Internal Binding process_methods', 'Internal Binding report', 'Internal Binding serdes', @@ -60,6 +61,7 @@ const expectedModules = new Set([ 'NativeModule internal/fs/rimraf', 'NativeModule internal/fs/watchers', 'NativeModule internal/heap_utils', + 'NativeModule internal/histogram', 'NativeModule internal/idna', 'NativeModule internal/linkedlist', 'NativeModule internal/modules/run_main', @@ -77,6 +79,15 @@ const expectedModules = new Set([ 'NativeModule internal/modules/esm/translators', 'NativeModule internal/process/esm_loader', 'NativeModule internal/options', + 'NativeModule internal/perf/event_loop_delay', + 'NativeModule internal/perf/event_loop_utilization', + 'NativeModule internal/perf/nodetiming', + 'NativeModule internal/perf/observe', + 'NativeModule internal/perf/performance', + 'NativeModule internal/perf/performance_entry', + 'NativeModule internal/perf/timerify', + 'NativeModule internal/perf/usertiming', + 'NativeModule internal/perf/utils', 'NativeModule internal/priority_queue', 'NativeModule internal/process/execution', 'NativeModule internal/process/per_thread', @@ -116,6 +127,7 @@ const expectedModules = new Set([ 'NativeModule 
internal/blob', 'NativeModule async_hooks', 'NativeModule path', + 'NativeModule perf_hooks', 'NativeModule querystring', 'NativeModule stream', 'NativeModule stream/promises', diff --git a/test/parallel/test-crypto-ecdh-convert-key.js b/test/parallel/test-crypto-ecdh-convert-key.js index f4d5a651ed6b88..c0046099df9ec0 100644 --- a/test/parallel/test-crypto-ecdh-convert-key.js +++ b/test/parallel/test-crypto-ecdh-convert-key.js @@ -117,7 +117,7 @@ if (getCurves().includes('secp256k1')) { // rather than Node's generic error message. const badKey = 'f'.repeat(128); assert.throws( - () => ECDH.convertKey(badKey, 'secp256k1', 'hex', 'hex', 'compressed'), + () => ECDH.convertKey(badKey, 'secp521r1', 'hex', 'hex', 'compressed'), /Failed to convert Buffer to EC_POINT/); // Next statement should not throw an exception. diff --git a/test/parallel/test-crypto-subtle-zero-length.js b/test/parallel/test-crypto-subtle-zero-length.js new file mode 100644 index 00000000000000..ffca84cf56129e --- /dev/null +++ b/test/parallel/test-crypto-subtle-zero-length.js @@ -0,0 +1,39 @@ +'use strict'; + +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const crypto = require('crypto').webcrypto; + +(async () => { + const k = await crypto.subtle.importKey( + 'raw', + new Uint8Array(32), + { name: 'AES-GCM' }, + false, + [ 'encrypt', 'decrypt' ]); + assert(k instanceof crypto.CryptoKey); + + const e = await crypto.subtle.encrypt({ + name: 'AES-GCM', + iv: new Uint8Array(12), + }, k, new Uint8Array(0)); + assert(e instanceof ArrayBuffer); + assert.deepStrictEqual( + Buffer.from(e), + Buffer.from([ + 0x53, 0x0f, 0x8a, 0xfb, 0xc7, 0x45, 0x36, 0xb9, + 0xa9, 0x63, 0xb4, 0xf1, 0xc4, 0xcb, 0x73, 0x8b ])); + + const v = await crypto.subtle.decrypt({ + name: 'AES-GCM', + iv: new Uint8Array(12), + }, k, e); + assert(v instanceof ArrayBuffer); + assert.strictEqual(v.byteLength, 0); +})().then(common.mustCall()).catch((e) 
=> { + assert.ifError(e); +}); diff --git a/test/sequential/test-debugger-address.js b/test/parallel/test-debugger-address.js similarity index 96% rename from test/sequential/test-debugger-address.js rename to test/parallel/test-debugger-address.js index ff31747016c2d4..95dd1c6e3f8283 100644 --- a/test/sequential/test-debugger-address.js +++ b/test/parallel/test-debugger-address.js @@ -37,7 +37,7 @@ function launchTarget(...args) { } { - const script = fixtures.path('inspector-cli/alive.js'); + const script = fixtures.path('debugger/alive.js'); let cli = null; let target = null; diff --git a/test/parallel/test-debugger-unavailable-port.js b/test/parallel/test-debugger-unavailable-port.js new file mode 100644 index 00000000000000..e2920312ffc21c --- /dev/null +++ b/test/parallel/test-debugger-unavailable-port.js @@ -0,0 +1,36 @@ +'use strict'; +const common = require('../common'); + +common.skipIfInspectorDisabled(); + +const fixtures = require('../common/fixtures'); +const startCLI = require('../common/debugger'); + +const assert = require('assert'); +const { createServer } = require('net'); + +// Launch w/ unavailable port. 
+(async () => { + const blocker = createServer((socket) => socket.end()); + const port = await new Promise((resolve, reject) => { + blocker.on('error', reject); + blocker.listen(0, '127.0.0.1', () => resolve(blocker.address().port)); + }); + + try { + const script = fixtures.path('debugger', 'three-lines.js'); + const cli = startCLI([`--port=${port}`, script]); + const code = await cli.quit(); + + assert.doesNotMatch( + cli.output, + /report this bug/, + 'Omits message about reporting this as a bug'); + assert.ok( + cli.output.includes(`waiting for 127.0.0.1:${port} to be free`), + 'Tells the user that the port wasn\'t available'); + assert.strictEqual(code, 1); + } finally { + blocker.close(); + } +})().then(common.mustCall()); diff --git a/test/parallel/test-fs-mkdtemp-prefix-check.js b/test/parallel/test-fs-mkdtemp-prefix-check.js index 1d9d88232a067e..33a06914a46e10 100644 --- a/test/parallel/test-fs-mkdtemp-prefix-check.js +++ b/test/parallel/test-fs-mkdtemp-prefix-check.js @@ -3,7 +3,7 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); -const prefixValues = [undefined, null, 0, true, false, 1, '']; +const prefixValues = [undefined, null, 0, true, false, 1]; function fail(value) { assert.throws( diff --git a/test/parallel/test-https-agent-unref-socket.js b/test/parallel/test-https-agent-unref-socket.js index b2863a74d817d4..49169b523e1042 100644 --- a/test/parallel/test-https-agent-unref-socket.js +++ b/test/parallel/test-https-agent-unref-socket.js @@ -6,8 +6,24 @@ if (!common.hasCrypto) const https = require('https'); -const request = https.get('https://example.com'); +if (process.argv[2] === 'localhost') { + const request = https.get('https://localhost:' + process.argv[3]); -request.on('socket', (socket) => { - socket.unref(); -}); + request.on('socket', (socket) => { + socket.unref(); + }); +} else { + const assert = require('assert'); + const net = require('net'); + const server = net.createServer(); + 
server.listen(0); + server.on('listening', () => { + const port = server.address().port; + const { fork } = require('child_process'); + const child = fork(__filename, ['localhost', port], {}); + child.on('close', (exit_code) => { + server.close(); + assert.strictEqual(exit_code, 0); + }); + }); +} diff --git a/test/parallel/test-https-insecure-parse-per-stream.js b/test/parallel/test-https-insecure-parse-per-stream.js new file mode 100644 index 00000000000000..645fbcf2637654 --- /dev/null +++ b/test/parallel/test-https-insecure-parse-per-stream.js @@ -0,0 +1,131 @@ +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) { + common.skip('missing crypto'); +} + +const fixtures = require('../common/fixtures'); +const assert = require('assert'); +const https = require('https'); +const MakeDuplexPair = require('../common/duplexpair'); +const tls = require('tls'); +const { finished } = require('stream'); + +const certFixture = { + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem'), + ca: fixtures.readKey('ca1-cert.pem'), +}; + + +// Test that setting the `insecureHTTPParse` option works on a per-stream-basis. + +// Test 1: The server sends an invalid header. +{ + const { clientSide, serverSide } = MakeDuplexPair(); + + const req = https.request({ + rejectUnauthorized: false, + createConnection: common.mustCall(() => clientSide), + insecureHTTPParser: true + }, common.mustCall((res) => { + assert.strictEqual(res.headers.hello, 'foo\x08foo'); + res.resume(); // We don’t actually care about contents. + res.on('end', common.mustCall()); + })); + req.end(); + + serverSide.resume(); // Dump the request + serverSide.end('HTTP/1.1 200 OK\r\n' + + 'Hello: foo\x08foo\r\n' + + 'Content-Length: 0\r\n' + + '\r\n\r\n'); +} + +// Test 2: The same as Test 1 except without the option, to make sure it fails. 
+{ + const { clientSide, serverSide } = MakeDuplexPair(); + + const req = https.request({ + rejectUnauthorized: false, + createConnection: common.mustCall(() => clientSide) + }, common.mustNotCall()); + req.end(); + req.on('error', common.mustCall()); + + serverSide.resume(); // Dump the request + serverSide.end('HTTP/1.1 200 OK\r\n' + + 'Hello: foo\x08foo\r\n' + + 'Content-Length: 0\r\n' + + '\r\n\r\n'); +} + +// Test 3: The client sends an invalid header. +{ + const testData = 'Hello, World!\n'; + const server = https.createServer( + { insecureHTTPParser: true, + ...certFixture }, + common.mustCall((req, res) => { + res.statusCode = 200; + res.setHeader('Content-Type', 'text/plain'); + res.end(testData); + })); + + server.on('clientError', common.mustNotCall()); + + server.listen(0, common.mustCall(() => { + const client = tls.connect({ + port: server.address().port, + rejectUnauthorized: false + }); + client.write( + 'GET / HTTP/1.1\r\n' + + 'Hello: foo\x08foo\r\n' + + '\r\n\r\n'); + client.end(); + + client.on('data', () => {}); + finished(client, common.mustCall(() => { + server.close(); + })); + })); +} + +// Test 4: The same as Test 3 except without the option, to make sure it fails. +{ + const server = https.createServer( + { ...certFixture }, + common.mustNotCall()); + + server.on('clientError', common.mustCall()); + + server.listen(0, common.mustCall(() => { + const client = tls.connect({ + port: server.address().port, + rejectUnauthorized: false + }); + client.write( + 'GET / HTTP/1.1\r\n' + + 'Hello: foo\x08foo\r\n' + + '\r\n\r\n'); + client.end(); + + client.on('data', () => {}); + finished(client, common.mustCall(() => { + server.close(); + })); + })); +} + +// Test 5: Invalid argument type +{ + assert.throws( + () => https.request({ insecureHTTPParser: 0 }, common.mustNotCall()), + common.expectsError({ + code: 'ERR_INVALID_ARG_TYPE', + message: 'The "options.insecureHTTPParser" property must be of' + + ' type boolean. 
Received type number (0)' + }) + ); +} diff --git a/test/parallel/test-https-max-header-size-per-stream.js b/test/parallel/test-https-max-header-size-per-stream.js new file mode 100644 index 00000000000000..f7117e16fb43f6 --- /dev/null +++ b/test/parallel/test-https-max-header-size-per-stream.js @@ -0,0 +1,119 @@ +'use strict'; +const common = require('../common'); + +if (!common.hasCrypto) { + common.skip('missing crypto'); +} + +const fixtures = require('../common/fixtures'); +const assert = require('assert'); +const https = require('https'); +const http = require('http'); +const tls = require('tls'); +const MakeDuplexPair = require('../common/duplexpair'); +const { finished } = require('stream'); + +const certFixture = { + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem'), + ca: fixtures.readKey('ca1-cert.pem'), +}; + + +// Test that setting the `maxHeaderSize` option works on a per-stream-basis. + +// Test 1: The server sends larger headers than what would otherwise be allowed. +{ + const { clientSide, serverSide } = MakeDuplexPair(); + + const req = https.request({ + createConnection: common.mustCall(() => clientSide), + maxHeaderSize: http.maxHeaderSize * 4 + }, common.mustCall((res) => { + assert.strictEqual(res.headers.hello, 'A'.repeat(http.maxHeaderSize * 3)); + res.resume(); // We don’t actually care about contents. + res.on('end', common.mustCall()); + })); + req.end(); + + serverSide.resume(); // Dump the request + serverSide.end('HTTP/1.1 200 OK\r\n' + + 'Hello: ' + 'A'.repeat(http.maxHeaderSize * 3) + '\r\n' + + 'Content-Length: 0\r\n' + + '\r\n\r\n'); +} + +// Test 2: The same as Test 1 except without the option, to make sure it fails. 
+{ + const { clientSide, serverSide } = MakeDuplexPair(); + + const req = https.request({ + createConnection: common.mustCall(() => clientSide) + }, common.mustNotCall()); + req.end(); + req.on('error', common.mustCall()); + + serverSide.resume(); // Dump the request + serverSide.end('HTTP/1.1 200 OK\r\n' + + 'Hello: ' + 'A'.repeat(http.maxHeaderSize * 3) + '\r\n' + + 'Content-Length: 0\r\n' + + '\r\n\r\n'); +} + +// Test 3: The client sends larger headers than what would otherwise be allowed. +{ + const testData = 'Hello, World!\n'; + const server = https.createServer( + { maxHeaderSize: http.maxHeaderSize * 4, + ...certFixture }, + common.mustCall((req, res) => { + res.statusCode = 200; + res.setHeader('Content-Type', 'text/plain'); + res.end(testData); + })); + + server.on('clientError', common.mustNotCall()); + + server.listen(0, common.mustCall(() => { + const client = tls.connect({ + port: server.address().port, + rejectUnauthorized: false + }); + client.write( + 'GET / HTTP/1.1\r\n' + + 'Hello: ' + 'A'.repeat(http.maxHeaderSize * 3) + '\r\n' + + '\r\n\r\n'); + client.end(); + + client.on('data', () => {}); + finished(client, common.mustCall(() => { + server.close(); + })); + })); +} + +// Test 4: The same as Test 3 except without the option, to make sure it fails. +{ + const server = https.createServer({ ...certFixture }, common.mustNotCall()); + + // clientError may be emitted multiple times when header is larger than + // maxHeaderSize. 
+ server.on('clientError', common.mustCallAtLeast(() => {}, 1)); + + server.listen(0, common.mustCall(() => { + const client = tls.connect({ + port: server.address().port, + rejectUnauthorized: false + }); + client.write( + 'GET / HTTP/1.1\r\n' + + 'Hello: ' + 'A'.repeat(http.maxHeaderSize * 3) + '\r\n' + + '\r\n\r\n'); + client.end(); + + client.on('data', () => {}); + finished(client, common.mustCall(() => { + server.close(); + })); + })); +} diff --git a/test/parallel/test-readline-tab-complete.js b/test/parallel/test-readline-tab-complete.js index c283d446f9af28..be993911c6fe16 100644 --- a/test/parallel/test-readline-tab-complete.js +++ b/test/parallel/test-readline-tab-complete.js @@ -100,3 +100,39 @@ common.skipIfDumbTerminal(); }); rli.close(); } + +{ + let output = ''; + class FakeInput extends EventEmitter { + columns = 80 + + write = common.mustCall((data) => { + output += data; + }, 9) + + resume() {} + pause() {} + end() {} + } + + const fi = new FakeInput(); + const rli = new readline.Interface({ + input: fi, + output: fi, + terminal: true, + completer: common.mustCall((input, cb) => { + cb(null, [[input[0].toUpperCase() + input.slice(1)], input]); + }), + }); + + rli.on('line', common.mustNotCall()); + fi.emit('data', 'input'); + queueMicrotask(() => { + fi.emit('data', '\t'); + queueMicrotask(() => { + assert.match(output, /> Input/); + output = ''; + rli.close(); + }); + }); +} diff --git a/test/parallel/test-repl-preprocess-top-level-await.js b/test/parallel/test-repl-preprocess-top-level-await.js index ed1fe90e43e459..3ec4da7e8fb72f 100644 --- a/test/parallel/test-repl-preprocess-top-level-await.js +++ b/test/parallel/test-repl-preprocess-top-level-await.js @@ -29,38 +29,93 @@ const testCases = [ [ 'await 0; return 0;', null ], [ 'var a = await 1', - '(async () => { void (a = await 1) })()' ], + 'var a; (async () => { void (a = await 1) })()' ], [ 'let a = await 1', - '(async () => { void (a = await 1) })()' ], + 'let a; (async () => { void (a = 
await 1) })()' ], [ 'const a = await 1', - '(async () => { void (a = await 1) })()' ], + 'let a; (async () => { void (a = await 1) })()' ], [ 'for (var i = 0; i < 1; ++i) { await i }', - '(async () => { for (void (i = 0); i < 1; ++i) { await i } })()' ], + 'var i; (async () => { for (void (i = 0); i < 1; ++i) { await i } })()' ], [ 'for (let i = 0; i < 1; ++i) { await i }', '(async () => { for (let i = 0; i < 1; ++i) { await i } })()' ], [ 'var {a} = {a:1}, [b] = [1], {c:{d}} = {c:{d: await 1}}', - '(async () => { void ( ({a} = {a:1}), ([b] = [1]), ' + + 'var a, b, d; (async () => { void ( ({a} = {a:1}), ([b] = [1]), ' + '({c:{d}} = {c:{d: await 1}})) })()' ], + [ 'let [a, b, c] = await ([1, 2, 3])', + 'let a, b, c; (async () => { void ([a, b, c] = await ([1, 2, 3])) })()'], + [ 'let {a,b,c} = await ({a: 1, b: 2, c: 3})', + 'let a, b, c; (async () => { void ({a,b,c} = ' + + 'await ({a: 1, b: 2, c: 3})) })()'], + [ 'let {a: [b]} = {a: [await 1]}, [{d}] = [{d: 3}]', + 'let b, d; (async () => { void ( ({a: [b]} = {a: [await 1]}),' + + ' ([{d}] = [{d: 3}])) })()'], /* eslint-disable no-template-curly-in-string */ [ 'console.log(`${(await { a: 1 }).a}`)', '(async () => { return (console.log(`${(await { a: 1 }).a}`)) })()' ], /* eslint-enable no-template-curly-in-string */ [ 'await 0; function foo() {}', - '(async () => { await 0; foo=function foo() {} })()' ], + 'var foo; (async () => { await 0; foo=function foo() {} })()' ], [ 'await 0; class Foo {}', - '(async () => { await 0; Foo=class Foo {} })()' ], + 'let Foo; (async () => { await 0; Foo=class Foo {} })()' ], [ 'if (await true) { function foo() {} }', - '(async () => { if (await true) { foo=function foo() {} } })()' ], + 'var foo; (async () => { if (await true) { foo=function foo() {} } })()' ], [ 'if (await true) { class Foo{} }', '(async () => { if (await true) { class Foo{} } })()' ], [ 'if (await true) { var a = 1; }', - '(async () => { if (await true) { void (a = 1); } })()' ], + 'var a; (async () => { if 
(await true) { void (a = 1); } })()' ], [ 'if (await true) { let a = 1; }', '(async () => { if (await true) { let a = 1; } })()' ], [ 'var a = await 1; let b = 2; const c = 3;', - '(async () => { void (a = await 1); void (b = 2); void (c = 3); })()' ], + 'var a; let b; let c; (async () => { void (a = await 1); void (b = 2);' + + ' void (c = 3); })()' ], [ 'let o = await 1, p', - '(async () => { void ( (o = await 1), (p=undefined)) })()' ], + 'let o, p; (async () => { void ( (o = await 1), (p=undefined)) })()' ], + [ 'await (async () => { let p = await 1; return p; })()', + '(async () => { return (await (async () => ' + + '{ let p = await 1; return p; })()) })()' ], + [ '{ let p = await 1; }', + '(async () => { { let p = await 1; } })()' ], + [ 'var p = await 1', + 'var p; (async () => { void (p = await 1) })()' ], + [ 'await (async () => { var p = await 1; return p; })()', + '(async () => { return (await (async () => ' + + '{ var p = await 1; return p; })()) })()' ], + [ '{ var p = await 1; }', + 'var p; (async () => { { void (p = await 1); } })()' ], + [ 'for await (var i of asyncIterable) { i; }', + 'var i; (async () => { for await (i of asyncIterable) { i; } })()'], + [ 'for await (var [i] of asyncIterable) { i; }', + 'var i; (async () => { for await ([i] of asyncIterable) { i; } })()'], + [ 'for await (var {i} of asyncIterable) { i; }', + 'var i; (async () => { for await ({i} of asyncIterable) { i; } })()'], + [ 'for await (var [{i}, [j]] of asyncIterable) { i; }', + 'var i, j; (async () => { for await ([{i}, [j]] of asyncIterable)' + + ' { i; } })()'], + [ 'for await (let i of asyncIterable) { i; }', + '(async () => { for await (let i of asyncIterable) { i; } })()'], + [ 'for await (const i of asyncIterable) { i; }', + '(async () => { for await (const i of asyncIterable) { i; } })()'], + [ 'for (var i of [1,2,3]) { await 1; }', + 'var i; (async () => { for (i of [1,2,3]) { await 1; } })()'], + [ 'for (var [i] of [[1], [2]]) { await 1; }', + 'var i; (async () 
=> { for ([i] of [[1], [2]]) { await 1; } })()'], + [ 'for (var {i} of [{i: 1}, {i: 2}]) { await 1; }', + 'var i; (async () => { for ({i} of [{i: 1}, {i: 2}]) { await 1; } })()'], + [ 'for (var [{i}, [j]] of [[{i: 1}, [2]]]) { await 1; }', + 'var i, j; (async () => { for ([{i}, [j]] of [[{i: 1}, [2]]])' + + ' { await 1; } })()'], + [ 'for (let i of [1,2,3]) { await 1; }', + '(async () => { for (let i of [1,2,3]) { await 1; } })()'], + [ 'for (const i of [1,2,3]) { await 1; }', + '(async () => { for (const i of [1,2,3]) { await 1; } })()'], + [ 'for (var i in {x:1}) { await 1 }', + 'var i; (async () => { for (i in {x:1}) { await 1 } })()'], + [ 'for (var [a,b] in {xy:1}) { await 1 }', + 'var a, b; (async () => { for ([a,b] in {xy:1}) { await 1 } })()'], + [ 'for (let i in {x:1}) { await 1 }', + '(async () => { for (let i in {x:1}) { await 1 } })()'], + [ 'for (const i in {x:1}) { await 1 }', + '(async () => { for (const i in {x:1}) { await 1 } })()'], ]; for (const [input, expected] of testCases) { diff --git a/test/parallel/test-repl-top-level-await.js b/test/parallel/test-repl-top-level-await.js index 39227d4f8bad29..1388ce9334c883 100644 --- a/test/parallel/test-repl-top-level-await.js +++ b/test/parallel/test-repl-top-level-await.js @@ -142,6 +142,46 @@ async function ordinaryTests() { 'undefined', ], ], + ['await Promise..resolve()', + [ + 'await Promise..resolve()\r', + 'Uncaught SyntaxError: ', + 'await Promise..resolve()', + ' ^', + '', + 'Unexpected token \'.\'', + ], + ], + ['for (const x of [1,2,3]) {\nawait x\n}', [ + 'for (const x of [1,2,3]) {\r', + '... await x\r', + '... }\r', + 'undefined', + ]], + ['for (const x of [1,2,3]) {\nawait x;\n}', [ + 'for (const x of [1,2,3]) {\r', + '... await x;\r', + '... }\r', + 'undefined', + ]], + ['for await (const x of [1,2,3]) {\nconsole.log(x)\n}', [ + 'for await (const x of [1,2,3]) {\r', + '... console.log(x)\r', + '... 
}\r', + '1', + '2', + '3', + 'undefined', + ]], + ['for await (const x of [1,2,3]) {\nconsole.log(x);\n}', [ + 'for await (const x of [1,2,3]) {\r', + '... console.log(x);\r', + '... }\r', + '1', + '2', + '3', + 'undefined', + ]], ]; for (const [input, expected = [`${input}\r`], options = {}] of testCases) { @@ -165,15 +205,17 @@ async function ordinaryTests() { async function ctrlCTest() { console.log('Testing Ctrl+C'); - assert.deepStrictEqual(await runAndWait([ + const output = await runAndWait([ 'await new Promise(() => {})', { ctrl: true, name: 'c' }, - ]), [ + ]); + assert.deepStrictEqual(output.slice(0, 3), [ 'await new Promise(() => {})\r', 'Uncaught:', - '[Error [ERR_SCRIPT_EXECUTION_INTERRUPTED]: ' + - 'Script execution was interrupted by `SIGINT`] {', - " code: 'ERR_SCRIPT_EXECUTION_INTERRUPTED'", + 'Error [ERR_SCRIPT_EXECUTION_INTERRUPTED]: ' + + 'Script execution was interrupted by `SIGINT`', + ]); + assert.deepStrictEqual(output.slice(-2), [ '}', PROMPT, ]); diff --git a/test/parallel/test-source-map-enable.js b/test/parallel/test-source-map-enable.js index e0c222d1a333bf..eab718813fc379 100644 --- a/test/parallel/test-source-map-enable.js +++ b/test/parallel/test-source-map-enable.js @@ -324,6 +324,25 @@ function nextdir() { assert.ok(sourceMap); } +// Does not throw TypeError when exception occurs as result of missing named +// export. +{ + const coverageDirectory = nextdir(); + const output = spawnSync(process.execPath, [ + '--enable-source-maps', + require.resolve('../fixtures/source-map/esm-export-missing.mjs'), + ], { env: { ...process.env, NODE_V8_COVERAGE: coverageDirectory } }); + const sourceMap = getSourceMapFromCache( + 'esm-export-missing.mjs', + coverageDirectory + ); + // Module loader error displayed. + assert.match(output.stderr.toString(), + /does not provide an export named 'Something'/); + // Source map should have been serialized. 
+ assert.ok(sourceMap); +} + function getSourceMapFromCache(fixtureFile, coverageDirectory) { const jsonFiles = fs.readdirSync(coverageDirectory); for (const jsonFile of jsonFiles) { diff --git a/test/parallel/test-stream-construct-async-error.js b/test/parallel/test-stream-construct-async-error.js index 34e450c853a850..8101ec93fc0d74 100644 --- a/test/parallel/test-stream-construct-async-error.js +++ b/test/parallel/test-stream-construct-async-error.js @@ -98,6 +98,9 @@ const assert = require('assert'); const foo = new Foo(); foo.write('test', common.mustCall()); + foo.on('error', common.expectsError({ + code: 'ERR_MULTIPLE_CALLBACK' + })); } { diff --git a/test/parallel/test-stream-pipeline.js b/test/parallel/test-stream-pipeline.js index aaf726ea5a0350..e2e5fe2e0d561a 100644 --- a/test/parallel/test-stream-pipeline.js +++ b/test/parallel/test-stream-pipeline.js @@ -1387,36 +1387,3 @@ const net = require('net'); assert.strictEqual(res, content); })); } - -{ - const writableLike = new Stream(); - writableLike.writableNeedDrain = true; - - pipeline( - async function *() {}, - writableLike, - common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - }) - ); - - writableLike.emit('close'); -} - -{ - const writableLike = new Stream(); - writableLike.write = () => false; - - pipeline( - async function *() { - yield null; - yield null; - }, - writableLike, - common.mustCall((err) => { - assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE'); - }) - ); - - writableLike.emit('close'); -} diff --git a/test/parallel/test-stream-writable-final-destroy.js b/test/parallel/test-stream-writable-final-destroy.js new file mode 100644 index 00000000000000..8d3bf72c89126f --- /dev/null +++ b/test/parallel/test-stream-writable-final-destroy.js @@ -0,0 +1,21 @@ +'use strict'; +const common = require('../common'); + +const { Writable } = require('stream'); + +{ + const w = new Writable({ + write(chunk, encoding, callback) { + callback(null); + }, + 
final(callback) { + queueMicrotask(callback); + } + }); + w.end(); + w.destroy(); + + w.on('prefinish', common.mustNotCall()); + w.on('finish', common.mustNotCall()); + w.on('close', common.mustCall()); +} diff --git a/test/parallel/test-tls-parse-cert-string.js b/test/parallel/test-tls-parse-cert-string.js index f5412cad4074c4..c1f32524d578b5 100644 --- a/test/parallel/test-tls-parse-cert-string.js +++ b/test/parallel/test-tls-parse-cert-string.js @@ -11,7 +11,7 @@ const { } = require('../common/hijackstdio'); const assert = require('assert'); // Flags: --expose-internals -const internalTLS = require('internal/tls'); +const { parseCertString } = require('internal/tls/parse-cert-string'); const tls = require('tls'); const noOutput = common.mustNotCall(); @@ -20,7 +20,7 @@ hijackStderr(noOutput); { const singles = 'C=US\nST=CA\nL=SF\nO=Node.js Foundation\nOU=Node.js\n' + 'CN=ca1\nemailAddress=ry@clouds.org'; - const singlesOut = internalTLS.parseCertString(singles); + const singlesOut = parseCertString(singles); assert.deepStrictEqual(singlesOut, { __proto__: null, C: 'US', @@ -36,7 +36,7 @@ hijackStderr(noOutput); { const doubles = 'OU=Domain Control Validated\nOU=PositiveSSL Wildcard\n' + 'CN=*.nodejs.org'; - const doublesOut = internalTLS.parseCertString(doubles); + const doublesOut = parseCertString(doubles); assert.deepStrictEqual(doublesOut, { __proto__: null, OU: [ 'Domain Control Validated', 'PositiveSSL Wildcard' ], @@ -46,7 +46,7 @@ hijackStderr(noOutput); { const invalid = 'fhqwhgads'; - const invalidOut = internalTLS.parseCertString(invalid); + const invalidOut = parseCertString(invalid); assert.deepStrictEqual(invalidOut, { __proto__: null }); } @@ -55,7 +55,7 @@ hijackStderr(noOutput); const expected = Object.create(null); expected.__proto__ = 'mostly harmless'; expected.hasOwnProperty = 'not a function'; - assert.deepStrictEqual(internalTLS.parseCertString(input), expected); + assert.deepStrictEqual(parseCertString(input), expected); } 
restoreStderr(); diff --git a/test/parallel/test-whatwg-readablebytestream.js b/test/parallel/test-whatwg-readablebytestream.js new file mode 100644 index 00000000000000..eb4355505053ef --- /dev/null +++ b/test/parallel/test-whatwg-readablebytestream.js @@ -0,0 +1,238 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + ReadableStream, + ReadableByteStreamController, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, +} = require('stream/web'); + +const { + kState, +} = require('internal/webstreams/util'); + +const { + open, +} = require('fs/promises'); + +const { + readFileSync, +} = require('fs'); + +const { + Buffer, +} = require('buffer'); + +const { + inspect, +} = require('util'); + +{ + const r = new ReadableStream({ + type: 'bytes', + }); + + assert(r[kState].controller instanceof ReadableByteStreamController); + + assert.strictEqual(typeof r.locked, 'boolean'); + assert.strictEqual(typeof r.cancel, 'function'); + assert.strictEqual(typeof r.getReader, 'function'); + assert.strictEqual(typeof r.pipeThrough, 'function'); + assert.strictEqual(typeof r.pipeTo, 'function'); + assert.strictEqual(typeof r.tee, 'function'); + + ['', null, 'asdf'].forEach((mode) => { + assert.throws(() => r.getReader({ mode }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + }); + + [1, 'asdf'].forEach((options) => { + assert.throws(() => r.getReader(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + assert(!r.locked); + const defaultReader = r.getReader(); + assert(r.locked); + assert(defaultReader instanceof ReadableStreamDefaultReader); + defaultReader.releaseLock(); + const byobReader = r.getReader({ mode: 'byob' }); + assert(byobReader instanceof ReadableStreamBYOBReader); +} + +class Source { + constructor() { + this.controllerClosed = false; + } + + async start(controller) { + this.file = await open(__filename); + this.controller = 
controller; + } + + async pull(controller) { + const byobRequest = controller.byobRequest; + assert.match(inspect(byobRequest), /ReadableStreamBYOBRequest/); + + const view = byobRequest.view; + const { + bytesRead, + } = await this.file.read({ + buffer: view, + offset: view.byteOffset, + length: view.byteLength + }); + + if (bytesRead === 0) { + await this.file.close(); + this.controller.close(); + } + + assert.throws(() => byobRequest.respondWithNewView({}), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + byobRequest.respond(bytesRead); + + assert.throws(() => byobRequest.respond(bytesRead), { + code: 'ERR_INVALID_STATE', + }); + assert.throws(() => byobRequest.respondWithNewView(view), { + code: 'ERR_INVALID_STATE', + }); + } + + get type() { return 'bytes'; } + + get autoAllocateChunkSize() { return 1024; } +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + const reader = stream.getReader({ mode: 'byob' }); + + const chunks = []; + let result; + do { + result = await reader.read(Buffer.alloc(100)); + if (result.value !== undefined) + chunks.push(Buffer.from(result.value)); + } while (!result.done); + + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(check, data); + })); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + const chunks = []; + for await (const chunk of stream) + chunks.push(chunk); + + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(check, data); + })); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function 
read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream) + break; + } + + read(stream).then(common.mustCall()); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream) + throw error; + } + + assert.rejects(read(stream), error); +} + +{ + assert.throws(() => { + Reflect.get(ReadableStreamBYOBRequest.prototype, 'view', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => ReadableStreamBYOBRequest.prototype.respond.call({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + ReadableStreamBYOBRequest.prototype.respondWithNewView.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); +} + +{ + const readable = new ReadableStream({ type: 'bytes' }); + const reader = readable.getReader({ mode: 'byob' }); + reader.releaseLock(); + reader.releaseLock(); + assert.rejects(reader.read(new Uint8Array(10)), { + code: 'ERR_INVALID_STATE', + }); + assert.rejects(reader.cancel(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let controller; + new ReadableStream({ + type: 'bytes', + start(c) { controller = c; } + }); + assert.throws(() => controller.enqueue(1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + controller.close(); + assert.throws(() => controller.enqueue(new Uint8Array(10)), { + code: 'ERR_INVALID_STATE', + }); + assert.throws(() => controller.close(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + +} diff --git a/test/parallel/test-whatwg-readablestream.js b/test/parallel/test-whatwg-readablestream.js new file mode 100644 index 00000000000000..1c18efeec41963 --- /dev/null +++ b/test/parallel/test-whatwg-readablestream.js @@ -0,0 +1,1522 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { + 
isPromise, +} = require('util/types'); +const { + setImmediate: delay +} = require('timers/promises'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamDefaultController, + ReadableByteStreamController, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + WritableStream, +} = require('stream/web'); + +const { + readableStreamPipeTo, + readableStreamTee, + readableByteStreamControllerConvertPullIntoDescriptor, + readableStreamDefaultControllerEnqueue, + readableByteStreamControllerEnqueue, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableByteStreamControllerClose, + readableByteStreamControllerRespond, +} = require('internal/webstreams/readablestream'); + +const { + kState +} = require('internal/webstreams/util'); + +const { + createReadStream, + readFileSync, +} = require('fs'); +const { + Buffer, +} = require('buffer'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +{ + const r = new ReadableStream(); + assert.strictEqual(typeof r.locked, 'boolean'); + assert.strictEqual(typeof r.cancel, 'function'); + assert.strictEqual(typeof r.getReader, 'function'); + assert.strictEqual(typeof r.pipeThrough, 'function'); + assert.strictEqual(typeof r.pipeTo, 'function'); + assert.strictEqual(typeof r.tee, 'function'); + + ['', null, 'asdf'].forEach((mode) => { + assert.throws(() => r.getReader({ mode }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + }); + + [1, 'asdf'].forEach((options) => { + assert.throws(() => r.getReader(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + assert(!r.locked); + r.getReader(); + assert(r.locked); +} + +{ + const source = { + start: common.mustCall((controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + pull: common.mustCall((controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + cancel: 
common.mustNotCall(), + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + pull: common.mustCall(async (controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + cancel: common.mustNotCall(), + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall((controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustNotCall(), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustNotCall(), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source, { highWaterMark: 10 }); +} + +{ + // These are silly but they should all work per spec + new ReadableStream(1); + new ReadableStream('hello'); + new ReadableStream(false); + new ReadableStream([]); + new ReadableStream(1, 1); + new ReadableStream(1, 'hello'); + new ReadableStream(1, false); + new ReadableStream(1, []); +} + +['a', {}, false].forEach((size) => { + assert.throws(() => { + new ReadableStream({}, { size }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +['a', {}].forEach((highWaterMark) => { + assert.throws(() => { + new ReadableStream({}, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert.throws(() => { + new ReadableStream({ type: 'bytes' }, { highWaterMark }); + }, { + code: 
'ERR_INVALID_ARG_VALUE', + }); +}); + +[-1, NaN].forEach((highWaterMark) => { + assert.throws(() => { + new ReadableStream({}, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert.throws(() => { + new ReadableStream({ type: 'bytes' }, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +{ + new ReadableStream({}, new ByteLengthQueuingStrategy({ highWaterMark: 1 })); + new ReadableStream({}, new CountQueuingStrategy({ highWaterMark: 1 })); +} + +{ + const strategy = new ByteLengthQueuingStrategy({ highWaterMark: 1 }); + assert.strictEqual(strategy.highWaterMark, 1); + assert.strictEqual(strategy.size(new ArrayBuffer(10)), 10); + + const { size } = strategy; + assert.strictEqual(size(new ArrayBuffer(10)), 10); +} + +{ + const strategy = new CountQueuingStrategy({ highWaterMark: 1 }); + assert.strictEqual(strategy.highWaterMark, 1); + assert.strictEqual(strategy.size(new ArrayBuffer(10)), 1); + + const { size } = strategy; + assert.strictEqual(size(new ArrayBuffer(10)), 1); +} + +{ + const r = new ReadableStream({ + async start() { + throw new Error('boom'); + } + }); + + setImmediate(() => { + assert.strictEqual(r[kState].state, 'errored'); + assert.match(r[kState].storedError?.message, /boom/); + }); +} + +{ + const data = Buffer.from('hello'); + const r = new ReadableStream({ + start(controller) { + controller.enqueue(data); + controller.close(); + }, + }); + + (async function read() { + const reader = r.getReader(); + let res = await reader.read(); + if (res.done) return; + const buf = Buffer.from(res.value); + assert.strictEqual(buf.toString(), data.toString()); + res = await reader.read(); + assert(res.done); + })().then(common.mustCall()); +} + +{ + const r = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + (async function read() { + const reader = r.getReader(); + const res = await reader.read(); + assert(res.done); + })().then(common.mustCall()); +} + +assert.throws(() => { + new 
ReadableStream({ + get start() { throw new Error('boom1'); } + }, { + get size() { throw new Error('boom2'); } + }); +}, /boom2/); + +{ + const stream = new ReadableStream(); + const reader = stream.getReader(); + + assert(stream.locked); + assert.strictEqual(reader[kState].stream, stream); + assert.strictEqual(stream[kState].reader, reader); + + assert.throws(() => stream.getReader(), { + code: 'ERR_INVALID_STATE', + }); + + assert(reader instanceof ReadableStreamDefaultReader); + + assert(isPromise(reader.closed)); + assert.strictEqual(typeof reader.cancel, 'function'); + assert.strictEqual(typeof reader.read, 'function'); + assert.strictEqual(typeof reader.releaseLock, 'function'); + + const read1 = reader.read(); + const read2 = reader.read(); + + // The stream is empty so the read will never settle. + read1.then( + common.mustNotCall(), + common.mustNotCall() + ); + + // The stream is empty so the read will never settle. + read2.then( + common.mustNotCall(), + common.mustNotCall() + ); + + assert.notStrictEqual(read1, read2); + + assert.strictEqual(reader[kState].readRequests.length, 2); + + delay().then(common.mustCall()); + + assert.throws(() => reader.releaseLock(), { + code: 'ERR_INVALID_STATE', + }); + assert(stream.locked); +} + +{ + const stream = new ReadableStream(); + const reader = stream.getReader(); + const closedBefore = reader.closed; + assert(stream.locked); + reader.releaseLock(); + assert(!stream.locked); + const closedAfter = reader.closed; + + assert.strictEqual(closedBefore, closedAfter); + + assert.rejects(reader.read(), { + code: 'ERR_INVALID_STATE', + }); + + assert.rejects(closedBefore, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(Buffer.from('hello')); + } + }); + + const reader = stream.getReader(); + + assert.rejects(stream.cancel(), { + code: 'ERR_INVALID_STATE', + }); + + reader.cancel(); + + reader.read().then(common.mustCall(({ value, done }) => { + 
assert.strictEqual(value, undefined); + assert(done); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + assert(!stream.locked); + + const cancel1 = stream.cancel(); + const cancel2 = stream.cancel(); + + assert.notStrictEqual(cancel1, cancel2); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + stream.getReader().releaseLock(); + stream.getReader().releaseLock(); + stream.getReader(); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + stream.getReader(); + + assert.throws(() => stream.getReader(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + reader.closed.then(common.mustCall()); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + assert.notStrictEqual(closedBefore, closedAfter); + + closedBefore.then(common.mustCall()); + assert.rejects(closedAfter, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + }, + }); + + const reader = stream.getReader(); + c.close(); + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + assert.notStrictEqual(closedBefore, closedAfter); + + 
closedBefore.then(common.mustCall()); + assert.rejects(closedAfter, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + const closed = reader.closed; + + assert.notStrictEqual(cancel1, cancel2); + assert.notStrictEqual(cancel1, closed); + assert.notStrictEqual(cancel2, closed); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + }, + }); + + const reader = stream.getReader(); + c.close(); + + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + const closed = reader.closed; + + assert.notStrictEqual(cancel1, cancel2); + assert.notStrictEqual(cancel1, closed); + assert.notStrictEqual(cancel2, closed); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + const stream = new ReadableStream(); + const cancel1 = stream.cancel(); + const cancel2 = stream.cancel(); + assert.notStrictEqual(cancel1, cancel2); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); + + stream.getReader().read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + stream.getReader().releaseLock(); + const reader = stream.getReader(); + assert.rejects(reader.closed, error); + assert.rejects(reader.read(), error); + assert.rejects(reader.read(), error); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + 
controller.error(error); + } + }); + const reader = stream.getReader(); + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + assert.notStrictEqual(cancel1, cancel2); + assert.rejects(cancel1, error); + assert.rejects(cancel2, error); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + async start(controller) { + throw error; + } + }); + stream.getReader().releaseLock(); + const reader = stream.getReader(); + assert.rejects(reader.closed, error); + assert.rejects(reader.read(), error); + assert.rejects(reader.read(), error); +} + +{ + const buf1 = Buffer.from('hello'); + const buf2 = Buffer.from('there'); + let doClose; + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf1); + controller.enqueue(buf2); + doClose = controller.close.bind(controller); + } + }); + const reader = stream.getReader(); + doClose(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf1); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf2); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + })); + })); +} + +{ + const buf1 = Buffer.from('hello'); + const buf2 = Buffer.from('there'); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf1); + controller.enqueue(buf2); + } + }); + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf1); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf2); + assert(!done); + reader.read().then(common.mustNotCall()); + delay().then(common.mustCall()); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + 
const { 0: s1, 1: s2 } = stream.tee(); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + async function read(stream) { + const reader = stream.getReader(); + assert.deepStrictEqual( + await reader.read(), { value: 'a', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: 'b', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: undefined, done: true }); + } + + Promise.all([ + read(s1), + read(s2), + ]).then(common.mustCall()); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + }, + pull() { throw error; } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(stream.locked); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + const reader1 = s1.getReader(); + const reader2 = s2.getReader(); + + const closed1 = reader1.closed; + const closed2 = reader2.closed; + + assert.notStrictEqual(closed1, closed2); + + assert.rejects(closed1, error); + assert.rejects(closed2, error); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + s2.cancel(); + + async function read(stream, canceled = false) { + const reader = stream.getReader(); + if (!canceled) { + assert.deepStrictEqual( + await reader.read(), { value: 'a', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: 'b', done: false }); + } + assert.deepStrictEqual( + await reader.read(), { value: undefined, done: true }); + } + + Promise.all([ + read(s1), + read(s2, true), + ]).then(common.mustCall()); +} + +{ + const error1 = new Error('boom1'); + const error2 = new Error('boom2'); + + const stream = new ReadableStream({ + cancel(reason) { + 
assert.deepStrictEqual(reason, [error1, error2]); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + s1.cancel(error1); + s2.cancel(error2); +} + +{ + const error1 = new Error('boom1'); + const error2 = new Error('boom2'); + + const stream = new ReadableStream({ + cancel(reason) { + assert.deepStrictEqual(reason, [error1, error2]); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + s2.cancel(error2); + s1.cancel(error1); +} + +{ + const error = new Error('boom1'); + + const stream = new ReadableStream({ + cancel() { + throw error; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert.rejects(s1.cancel(), error); + assert.rejects(s2.cancel(), error); +} + +{ + const error = new Error('boom1'); + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + c.error(error); + + assert.rejects(s1.cancel(), error); + assert.rejects(s2.cancel(), error); +} + +{ + const error = new Error('boom1'); + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + const reader1 = s1.getReader(); + const reader2 = s2.getReader(); + + assert.rejects(reader1.closed, error); + assert.rejects(reader2.closed, error); + + assert.rejects(reader1.read(), error); + assert.rejects(reader2.read(), error); + + setImmediate(() => c.error(error)); +} + +{ + let pullCount = 0; + const stream = new ReadableStream({ + pull(controller) { + if (pullCount) + controller.enqueue(pullCount); + pullCount++; + }, + }); + + const reader = stream.getReader(); + + queueMicrotask(common.mustCall(() => { + assert.strictEqual(pullCount, 1); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 1); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 2); + assert(!done); + })); + + })); + })); +} + +{ + const stream = new ReadableStream({ + 
start(controller) { + controller.enqueue('a'); + }, + pull: common.mustCall(), + }); + + stream.getReader().read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + }, + pull: common.mustCall(), + }); + + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'b'); + assert(!done); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + }, + pull: common.mustNotCall(), + }); + + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'b'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + + })); + })); +} + +{ + let res; + let promise; + let calls = 0; + const stream = new ReadableStream({ + pull(controller) { + controller.enqueue(++calls); + promise = new Promise((resolve) => res = resolve); + return promise; + } + }); + + const reader = stream.getReader(); + + (async () => { + await reader.read(); + assert.strictEqual(calls, 1); + await delay(); + assert.strictEqual(calls, 1); + res(); + await delay(); + assert.strictEqual(calls, 2); + })().then(common.mustCall()); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + }, + pull: common.mustCall(4), + }, { + highWaterMark: Infinity, + size() { return 1; } + }); + + const reader = 
stream.getReader(); + (async () => { + await delay(); + await reader.read(); + await reader.read(); + await reader.read(); + })().then(common.mustCall()); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + controller.close(); + }, + pull: common.mustNotCall(), + }, { + highWaterMark: Infinity, + size() { return 1; } + }); + + const reader = stream.getReader(); + (async () => { + await delay(); + await reader.read(); + await reader.read(); + await reader.read(); + })().then(common.mustCall()); +} + +{ + let calls = 0; + let res; + const ready = new Promise((resolve) => res = resolve); + + new ReadableStream({ + pull(controller) { + controller.enqueue(++calls); + if (calls === 4) + res(); + } + }, { + size() { return 1; }, + highWaterMark: 4 + }); + + ready.then(common.mustCall(() => { + assert.strictEqual(calls, 4); + })); +} + +{ + const stream = new ReadableStream({ + pull: common.mustCall((controller) => controller.close()) + }); + + const reader = stream.getReader(); + + reader.closed.then(common.mustCall()); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + pull: common.mustCall((controller) => controller.error(error)) + }); + + const reader = stream.getReader(); + + assert.rejects(reader.closed, error); +} + +{ + const error = new Error('boom'); + const error2 = new Error('boom2'); + const stream = new ReadableStream({ + pull: common.mustCall((controller) => { + controller.error(error); + throw error2; + }) + }); + + const reader = stream.getReader(); + + assert.rejects(reader.closed, error); +} + +{ + let startCalled = false; + new ReadableStream({ + start: common.mustCall((controller) => { + controller.enqueue('a'); + controller.close(); + assert.throws(() => controller.enqueue('b'), { + code: 'ERR_INVALID_STATE' + }); + startCalled = true; + }) + }); + assert(startCalled); +} + +{ + let startCalled = false; + new 
ReadableStream({ + start: common.mustCall((controller) => { + controller.close(); + assert.throws(() => controller.enqueue('b'), { + code: 'ERR_INVALID_STATE' + }); + startCalled = true; + }) + }); + assert(startCalled); +} + +{ + class Source { + startCalled = false; + pullCalled = false; + cancelCalled = false; + + start(controller) { + assert.strictEqual(this, source); + this.startCalled = true; + controller.enqueue('a'); + } + + pull() { + assert.strictEqual(this, source); + this.pullCalled = true; + } + + cancel() { + assert.strictEqual(this, source); + this.cancelCalled = true; + } + } + + const source = new Source(); + + const stream = new ReadableStream(source); + const reader = stream.getReader(); + + (async () => { + await reader.read(); + reader.releaseLock(); + stream.cancel(); + assert(source.startCalled); + assert(source.pullCalled); + assert(source.cancelCalled); + })().then(common.mustCall()); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 10); + controller.close(); + assert.strictEqual(controller.desiredSize, 0); + startCalled = true; + } + }, { + highWaterMark: 10 + }); + assert(startCalled); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 10); + controller.error(); + assert.strictEqual(controller.desiredSize, null); + startCalled = true; + } + }, { + highWaterMark: 10 + }); + assert(startCalled); +} + +{ + class Foo extends ReadableStream {} + const foo = new Foo(); + foo.getReader(); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 1); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, 0); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -1); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -2); + controller.enqueue('a'); + 
assert.strictEqual(controller.desiredSize, -3); + startCalled = true; + } + }); + assert(startCalled); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const reader = stream.getReader(); + + (async () => { + assert.strictEqual(c.desiredSize, 1); + c.enqueue(1); + assert.strictEqual(c.desiredSize, 0); + await reader.read(); + assert.strictEqual(c.desiredSize, 1); + c.enqueue(1); + c.enqueue(1); + assert.strictEqual(c.desiredSize, -1); + await reader.read(); + assert.strictEqual(c.desiredSize, 0); + await reader.read(); + assert.strictEqual(c.desiredSize, 1); + })().then(common.mustCall()); +} + +{ + let c; + new ReadableStream({ + start(controller) { + c = controller; + } + }); + assert(c instanceof ReadableStreamDefaultController); + assert.strictEqual(typeof c.desiredSize, 'number'); + assert.strictEqual(typeof c.enqueue, 'function'); + assert.strictEqual(typeof c.close, 'function'); + assert.strictEqual(typeof c.error, 'function'); +} + +class Source { + constructor() { + this.cancelCalled = false; + } + + start(controller) { + this.stream = createReadStream(__filename); + this.stream.on('data', (chunk) => { + controller.enqueue(chunk); + }); + this.stream.once('end', () => { + if (!this.cancelCalled) + controller.close(); + }); + this.stream.once('error', (error) => { + controller.error(error); + }); + } + + cancel() { + this.cancelCalled = true; + } +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + const reader = stream.getReader(); + const chunks = []; + let read = await reader.read(); + while (!read.done) { + chunks.push(Buffer.from(read.value)); + read = await reader.read(); + } + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(data, check); + })); +} + +{ + const source = new Source(); + const stream = new 
ReadableStream(source); + + async function read(stream) { + const chunks = []; + for await (const chunk of stream) + chunks.push(chunk); + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(data, check); + + assert.strictEqual(stream[kState].state, 'closed'); + assert(!stream.locked); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + [1, false, ''].forEach((options) => { + assert.throws(() => stream.values(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: true })) + return; + } + + read(stream).then(common.mustCall((data) => { + assert.strictEqual(stream[kState].state, 'readable'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: false })) + return; + } + + read(stream).then(common.mustCall((data) => { + assert.strictEqual(stream[kState].state, 'closed'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: true })) + throw error; + } + + assert.rejects(read(stream), error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'readable'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: false })) + throw error; + } + + assert.rejects(read(stream), error).then(common.mustCall(() => 
{ + assert.strictEqual(stream[kState].state, 'closed'); + })); +} + +{ + assert.throws(() => Reflect.get(ReadableStream.prototype, 'locked', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStream.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype.getReader.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype.tee.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype[kTransfer].call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamDefaultReader.prototype.read.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamDefaultReader.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => { + return Reflect.get(ReadableStreamDefaultReader.prototype, 'closed'); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableStreamDefaultReader.prototype.releaseLock.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamBYOBReader.prototype.read.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableStreamBYOBReader.prototype.releaseLock.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => { + return Reflect.get(ReadableStreamBYOBReader.prototype, 'closed'); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamBYOBReader.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(ReadableByteStreamController.prototype, 'byobRequest', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + Reflect.get(ReadableByteStreamController.prototype, 'desiredSize', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.close.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + 
ReadableByteStreamController.prototype.enqueue.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.error.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => new ReadableStreamBYOBRequest(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); + + assert.throws(() => new ReadableStreamDefaultController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); + + assert.throws(() => new ReadableByteStreamController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); +} + +{ + let controller; + const readable = new ReadableStream({ + start(c) { controller = c; } + }); + + assert.strictEqual( + inspect(readable), + 'ReadableStream { locked: false, state: \'readable\' }'); + assert.strictEqual( + inspect(readable, { depth: null }), + 'ReadableStream { locked: false, state: \'readable\' }'); + assert.strictEqual( + inspect(readable, { depth: 0 }), + 'ReadableStream [Object]'); + + assert.strictEqual( + inspect(controller), + 'ReadableStreamDefaultController {}'); + assert.strictEqual( + inspect(controller, { depth: null }), + 'ReadableStreamDefaultController {}'); + assert.strictEqual( + inspect(controller, { depth: 0 }), + 'ReadableStreamDefaultController {}'); + + const reader = readable.getReader(); + + assert.match( + inspect(reader), + /ReadableStreamDefaultReader/); + assert.match( + inspect(reader, { depth: null }), + /ReadableStreamDefaultReader/); + assert.match( + inspect(reader, { depth: 0 }), + /ReadableStreamDefaultReader/); + + assert.rejects(readableStreamPipeTo(1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.rejects(readableStreamPipeTo(new ReadableStream(), 1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.rejects( + readableStreamPipeTo( + new ReadableStream(), + new WritableStream(), + false, + false, + false, + {}), + { + code: 'ERR_INVALID_ARG_TYPE', + }); +} + +{ + const readable = new ReadableStream(); + const reader = readable.getReader(); + reader.releaseLock(); + 
reader.releaseLock(); + assert.rejects(reader.read(), { + code: 'ERR_INVALID_STATE', + }); + assert.rejects(reader.cancel(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + // Test tee() cloneForBranch2 argument + const readable = new ReadableStream({ + start(controller) { + controller.enqueue('hello'); + } + }); + const [r1, r2] = readableStreamTee(readable, true); + r1.getReader().read().then( + common.mustCall(({ value }) => assert.strictEqual(value, 'hello'))); + r2.getReader().read().then( + common.mustCall(({ value }) => assert.strictEqual(value, 'hello'))); +} + +{ + assert.throws(() => { + readableByteStreamControllerConvertPullIntoDescriptor({ + bytesFilled: 10, + byteLength: 5 + }); + }, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let controller; + const readable = new ReadableStream({ + start(c) { controller = c; } + }); + + controller[kState].pendingPullIntos = [{}]; + assert.throws(() => readableByteStreamControllerRespond(controller, 0), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + readable.cancel().then(common.mustCall()); + + assert.throws(() => readableByteStreamControllerRespond(controller, 1), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert(!readableStreamDefaultControllerCanCloseOrEnqueue(controller)); + readableStreamDefaultControllerEnqueue(controller); + readableByteStreamControllerClose(controller); + readableByteStreamControllerEnqueue(controller); +} diff --git a/test/parallel/test-whatwg-transformstream.js b/test/parallel/test-whatwg-transformstream.js new file mode 100644 index 00000000000000..0cbc76cc4ce8c0 --- /dev/null +++ b/test/parallel/test-whatwg-transformstream.js @@ -0,0 +1,188 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + ReadableStream, + TransformStream, + TransformStreamDefaultController, +} = require('stream/web'); + +const { + createReadStream, + readFileSync, +} = require('fs'); + +const { + kTransfer, +} = 
require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +assert.throws(() => new TransformStream({ readableType: 1 }), { + code: 'ERR_INVALID_ARG_VALUE', +}); +assert.throws(() => new TransformStream({ writableType: 1 }), { + code: 'ERR_INVALID_ARG_VALUE', +}); + + +{ + const stream = new TransformStream(); + + async function test(stream) { + const writer = stream.writable.getWriter(); + const reader = stream.readable.getReader(); + + const { 1: result } = await Promise.all([ + writer.write('hello'), + reader.read(), + ]); + + assert.strictEqual(result.value, 'hello'); + } + + test(stream).then(common.mustCall()); +} + +class Transform { + start(controller) { + this.started = true; + } + + async transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } + + async flush() { + this.flushed = true; + } +} + +{ + const transform = new Transform(); + const stream = new TransformStream(transform); + assert(transform.started); + + async function test(stream) { + const writer = stream.writable.getWriter(); + const reader = stream.readable.getReader(); + + const { 1: result } = await Promise.all([ + writer.write('hello'), + reader.read(), + ]); + + assert.strictEqual(result.value, 'HELLO'); + + await writer.close(); + } + + test(stream).then(common.mustCall(() => { + assert(transform.flushed); + })); +} + +class Source { + constructor() { + this.cancelCalled = false; + } + + start(controller) { + this.stream = createReadStream(__filename); + this.stream.on('data', (chunk) => { + controller.enqueue(chunk.toString()); + }); + this.stream.once('end', () => { + if (!this.cancelCalled) + controller.close(); + }); + this.stream.once('error', (error) => { + controller.error(error); + }); + } + + cancel() { + this.cancelCalled = true; + } +} + +{ + const instream = new ReadableStream(new Source()); + const tstream = new TransformStream(new Transform()); + const r = instream.pipeThrough(tstream); + + async function read(stream) { 
+ let res = ''; + for await (const chunk of stream) + res += chunk; + return res; + } + + read(r).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.strictEqual(check.toString().toUpperCase(), data); + })); +} + +{ + assert.throws(() => Reflect.get(TransformStream.prototype, 'readable', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => Reflect.get(TransformStream.prototype, 'writable', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => TransformStream.prototype[kTransfer]({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(TransformStreamDefaultController.prototype, 'desiredSize', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.enqueue({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.error({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.terminate({}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => new TransformStreamDefaultController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); +} + +{ + let controller; + const transform = new TransformStream({ + start(c) { + controller = c; + } + }); + + assert.match(inspect(transform), /TransformStream/); + assert.match(inspect(transform, { depth: null }), /TransformStream/); + assert.match(inspect(transform, { depth: 0 }), /TransformStream \[/); + + assert.match(inspect(controller), /TransformStreamDefaultController/); + assert.match( + inspect(controller, { depth: null }), + /TransformStreamDefaultController/); + assert.match( + inspect(controller, { depth: 0 }), + /TransformStreamDefaultController \[/); +} diff --git a/test/parallel/test-whatwg-webstreams-coverage.js b/test/parallel/test-whatwg-webstreams-coverage.js new file mode 100644 index 00000000000000..f0036723b05977 --- /dev/null +++ 
b/test/parallel/test-whatwg-webstreams-coverage.js @@ -0,0 +1,70 @@ +// Flags: --no-warnings --expose-internals +'use strict'; + +require('../common'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +} = require('stream/web'); + +const { + inspect, +} = require('util'); + +const { + isPromisePending, +} = require('internal/webstreams/util'); + +const assert = require('assert'); + +assert(!isPromisePending({})); +assert(!isPromisePending(Promise.resolve())); +assert(isPromisePending(new Promise(() => {}))); + +// Brand checking works +assert.throws(() => { + Reflect.get(ByteLengthQueuingStrategy.prototype, 'highWaterMark', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(ByteLengthQueuingStrategy.prototype, 'size', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(CountQueuingStrategy.prototype, 'highWaterMark', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(CountQueuingStrategy.prototype, 'size', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +// Custom Inspect Works + +{ + const strategy = new CountQueuingStrategy({ highWaterMark: 1 }); + + assert.strictEqual( + inspect(strategy, { depth: null }), + 'CountQueuingStrategy { highWaterMark: 1 }'); + + assert.strictEqual( + inspect(strategy), + 'CountQueuingStrategy { highWaterMark: 1 }'); + + assert.strictEqual( + inspect(strategy, { depth: 0 }), + 'CountQueuingStrategy [Object]'); + + assert.strictEqual( + inspect(new ByteLengthQueuingStrategy({ highWaterMark: 1 })), + 'ByteLengthQueuingStrategy { highWaterMark: 1 }'); +} diff --git a/test/parallel/test-whatwg-webstreams-transfer.js b/test/parallel/test-whatwg-webstreams-transfer.js new file mode 100644 index 00000000000000..2b7333d9c6fbf7 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-transfer.js @@ -0,0 +1,503 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); + +const { + ReadableStream, + 
WritableStream, + TransformStream, +} = require('stream/web'); + +const { + Worker +} = require('worker_threads'); + +const { + isReadableStream, +} = require('internal/webstreams/readablestream'); + +const { + isWritableStream, +} = require('internal/webstreams/writablestream'); + +const { + isTransformStream, +} = require('internal/webstreams/transformstream'); + +const { + makeTransferable, + kClone, + kTransfer, + kDeserialize, +} = require('internal/worker/js_transferable'); + +const assert = require('assert'); + +const theData = 'hello'; + +{ + const { port1, port2 } = new MessageChannel(); + port1.onmessageerror = common.mustNotCall(); + port2.onmessageerror = common.mustNotCall(); + + // This test takes the ReadableStream and transfers it to the + // port1 first, then again to port2, which reads the data. + // Internally, this sets up a pipelined data flow that is + // important to understand in case this test fails.. + // + // Specifically: + // + // 1. We start with ReadableStream R1, + // 2. Calling port2.postMessage causes a new internal WritableStream W1 + // and a new ReadableStream R2 to be created, both of which are coupled + // to each other via a pair of MessagePorts P1 and P2. + // 3. ReadableStream R2 is passed to the port1.onmessage callback as the + // data property of the MessageEvent, and R1 is configured to pipeTo W1. + // 4. Within port1.onmessage, we transfer ReadableStream R2 to port1, which + // creates a new internal WritableStream W2 and a new ReadableStream R3, + // both of which are coupled to each other via a pair of MessagePorts + // P3 and P4. + // 5. ReadableStream R3 is passed to the port2.onmessage callback as the + // data property of the MessageEvent, and R2 is configured to pipeTo W2. + // 6. Once the reader is attached to R3 in the port2.onmessage callback, + // a message is sent along the path: R3 -> P4 -> P3 -> R2 -> P2 -> P1 -> R1 + // to begin pulling the data. 
The data is then pushed along the pipeline + // R1 -> W1 -> P1 -> P2 -> R2 -> W2 -> P3 -> P4 -> R3 + // 7. The MessagePorts P1, P2, P3, and P4 serve as a control channel for + // passing data and control instructions, potentially across realms, + // to the other ReadableStream and WritableStream instances. + // + // If this test experiences timeouts (hangs without finishing), it's most + // likely because the control instructions are somehow broken and the + // MessagePorts are not being closed properly or it could be caused by + // failing the close R1's controller which signals the end of the data + // flow. + + const readable = new ReadableStream({ + start: common.mustCall((controller) => { + controller.enqueue(theData); + controller.close(); + }), + }); + + port2.onmessage = common.mustCall(({ data }) => { + assert(isReadableStream(data)); + + const reader = data.getReader(); + reader.read().then(common.mustCall((chunk) => { + assert.deepStrictEqual(chunk, { done: false, value: theData }); + })); + + port2.close(); + }); + + port1.onmessage = common.mustCall(({ data }) => { + assert(isReadableStream(data)); + assert(!data.locked); + port1.postMessage(data, [data]); + assert(data.locked); + }); + + assert.throws(() => port2.postMessage(readable), { + code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST', + }); + + port2.postMessage(readable, [readable]); + assert(readable.locked); +} + +{ + const { port1, port2 } = new MessageChannel(); + port1.onmessageerror = common.mustNotCall(); + port2.onmessageerror = common.mustNotCall(); + + // Like the ReadableStream test above, this sets up a pipeline + // through which the data flows... + // + // We start with WritableStream W1, which is transfered to port1. + // Doing so creates an internal ReadableStream R1 and WritableStream W2, + // which are coupled together with MessagePorts P1 and P2. + // The port1.onmessage callback receives WritableStream W2 and + // immediately transfers that to port2. 
Doing so creates an internal + // ReadableStream R2 and WritableStream W3, which are coupled together + // with MessagePorts P3 and P4. WritableStream W3 is handed off to + // port2.onmessage. + // + // When the writer on port2.onmessage writes the chunk of data, it + // gets passed along the pipeline: + // W3 -> P4 -> P3 -> R2 -> W2 -> P2 -> P1 -> R1 -> W1 + + const writable = new WritableStream({ + write: common.mustCall((chunk) => { + assert.strictEqual(chunk, theData); + }), + }); + + port2.onmessage = common.mustCall(({ data }) => { + assert(isWritableStream(data)); + assert(!data.locked); + const writer = data.getWriter(); + writer.write(theData).then(common.mustCall()); + writer.close(); + port2.close(); + }); + + port1.onmessage = common.mustCall(({ data }) => { + assert(isWritableStream(data)); + assert(!data.locked); + port1.postMessage(data, [data]); + assert(data.locked); + }); + + assert.throws(() => port2.postMessage(writable), { + code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST', + }); + + port2.postMessage(writable, [writable]); + assert(writable.locked); +} + +{ + const { port1, port2 } = new MessageChannel(); + port1.onmessageerror = common.mustNotCall(); + port2.onmessageerror = common.mustNotCall(); + + // The data flow here is actually quite complicated, and is a combination + // of the WritableStream and ReadableStream examples above. + // + // We start with TransformStream T1, which creates ReadableStream R1, + // and WritableStream W1. + // + // When T1 is transfered to port1.onmessage, R1 and W1 are individually + // transfered. + // + // When R1 is transfered, it creates internal WritableStream W2, and + // new ReadableStream R2, coupled together via MessagePorts P1 and P2. + // + // When W1 is transfered, it creates internal ReadableStream R3 and + // new WritableStream W3, coupled together via MessagePorts P3 and P4. + // + // A new TransformStream T2 is created that owns ReadableStream R2 and + // WritableStream W3. 
The port1.onmessage callback immediately transfers + // that to port2.onmessage. + // + // When T2 is transfered, R2 and W3 are individually transfered. + // + // When R2 is transfered, it creates internal WritableStream W4, and + // ReadableStream R4, coupled together via MessagePorts P5 and P6. + // + // When W3 is transfered, it creates internal ReadableStream R5, and + // WritableStream W5, coupled together via MessagePorts P7 and P8. + // + // A new TransformStream T3 is created that owns ReadableStream R4 and + // WritableStream W5. + // + // port1.onmessage then writes a chunk of data. That chunk of data + // flows through the pipeline to T1: + // + // W5 -> P8 -> P7 -> R5 -> W3 -> P4 -> P3 -> R3 -> W1 -> T1 + // + // T1 performs the transformation, then pushes the chunk back out + // along the pipeline: + // + // T1 -> R1 -> W2 -> P1 -> P2 -> R2 -> W4 -> P5 -> P6 -> R4 + + const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } + }); + + port2.onmessage = common.mustCall(({ data }) => { + assert(isTransformStream(data)); + const writer = data.writable.getWriter(); + const reader = data.readable.getReader(); + Promise.all([ + writer.write(theData), + writer.close(), + reader.read().then(common.mustCall((result) => { + assert(!result.done); + assert.strictEqual(result.value, theData.toUpperCase()); + })), + reader.read().then(common.mustCall((result) => { + assert(result.done); + })), + ]).then(common.mustCall()); + port2.close(); + }); + + port1.onmessage = common.mustCall(({ data }) => { + assert(isTransformStream(data)); + assert(!data.readable.locked); + assert(!data.writable.locked); + port1.postMessage(data, [data]); + assert(data.readable.locked); + assert(data.writable.locked); + }); + + assert.throws(() => port2.postMessage(transform), { + code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST', + }); + + port2.postMessage(transform, [transform]); + assert(transform.readable.locked); + 
assert(transform.writable.locked); +} + +{ + const { port1, port2 } = new MessageChannel(); + let controller; + + const readable = new ReadableStream({ + start(c) { controller = c; }, + + cancel: common.mustCall((error) => { + assert.strictEqual(error.code, 25); // DataCloneError + }), + }); + + port1.onmessage = ({ data }) => { + const reader = data.getReader(); + assert.rejects(reader.read(), { + code: 25, // DataCloneError + }); + port1.close(); + }; + + port2.postMessage(readable, [readable]); + + const notActuallyTransferable = makeTransferable({ + [kClone]() { + return { + data: {}, + deserializeInfo: 'nothing that will work', + }; + }, + [kDeserialize]: common.mustNotCall(), + }); + + controller.enqueue(notActuallyTransferable); +} + +{ + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => { + assert.strictEqual(error.code, 25); + assert.strictEqual(error.name, 'DataCloneError'); + }); + }) + }; + + const writable = new WritableStream(source); + + const notActuallyTransferable = makeTransferable({ + [kClone]() { + return { + data: {}, + deserializeInfo: 'nothing that will work', + }; + }, + [kDeserialize]: common.mustNotCall(), + }); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + assert.rejects(writer.closed, { + code: 25, + name: 'DataCloneError', + }); + + writer.write(notActuallyTransferable).then(common.mustCall()); + + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const error = new Error('boom'); + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((reason) => { + process.nextTick(() => { + assert.deepStrictEqual(reason, error); + + // Reason is a clone of the original error. 
+ assert.notStrictEqual(reason, error); + }); + }), + }; + + const writable = new WritableStream(source); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + assert.rejects(writer.closed, error); + + writer.abort(error).then(common.mustCall()); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => assert.strictEqual(error.code, 25)); + }) + }; + + const writable = new WritableStream(source); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + const m = new WebAssembly.Memory({ initial: 1 }); + + assert.rejects(writer.abort(m), { + code: 25 + }); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + // Verify that the communication works across worker threads... + + const worker = new Worker(` + const { + isReadableStream, + } = require('internal/webstreams/readablestream'); + + const { + parentPort, + } = require('worker_threads'); + + const assert = require('assert'); + + const tracker = new assert.CallTracker(); + process.on('exit', () => { + tracker.verify(); + }); + + parentPort.onmessage = tracker.calls(({ data }) => { + assert(isReadableStream(data)); + const reader = data.getReader(); + reader.read().then(tracker.calls((result) => { + assert(!result.done); + assert(result.value instanceof Uint8Array); + })); + parentPort.close(); + }); + parentPort.onmessageerror = () => assert.fail('should not be called'); + `, { eval: true }); + + worker.on('error', common.mustNotCall()); + + const readable = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array(10)); + controller.close(); + } + }); + + worker.postMessage(readable, [readable]); +} + +{ + const source = { + cancel: common.mustCall(), + }; + + const readable = new ReadableStream(source); + + const { port1, port2 } = new 
MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + data.cancel().then(common.mustCall()); + port1.close(); + }); + + port2.postMessage(readable, [readable]); +} + +{ + const source = { + cancel: common.mustCall((error) => { + process.nextTick(() => assert(error.code, 25)); + }), + }; + + const readable = new ReadableStream(source); + + const { port1, port2 } = new MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + const m = new WebAssembly.Memory({ initial: 1 }); + + const reader = data.getReader(); + + const cancel = reader.cancel(m); + + reader.closed.then(common.mustCall()); + + assert.rejects(cancel, { + code: 25 + }); + + port1.close(); + }); + + port2.postMessage(readable, [readable]); +} + +{ + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => { + assert.strictEqual(error.code, 25); + }); + }), + }; + + const writable = new WritableStream(source); + + const { port1, port2 } = new MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + const m = new WebAssembly.Memory({ initial: 1 }); + const writer = data.getWriter(); + const write = writer.write(m); + assert.rejects(write, { code: 25 }); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const readable = new ReadableStream(); + readable.getReader(); + assert.throws(() => readable[kTransfer](), { + code: 25 + }); + + const writable = new WritableStream(); + writable.getWriter(); + assert.throws(() => writable[kTransfer](), { + code: 25 + }); +} diff --git a/test/parallel/test-whatwg-writablestream.js b/test/parallel/test-whatwg-writablestream.js new file mode 100644 index 00000000000000..91e3c098462949 --- /dev/null +++ b/test/parallel/test-whatwg-writablestream.js @@ -0,0 +1,260 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + WritableStream, + WritableStreamDefaultController, + 
WritableStreamDefaultWriter, + CountQueuingStrategy, +} = require('stream/web'); + +const { + kState, +} = require('internal/webstreams/util'); + +const { + isPromise, +} = require('util/types'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +class Sink { + constructor() { + this.chunks = []; + } + + start() { + this.started = true; + } + + write(chunk) { + this.chunks.push(chunk); + } + + close() { + this.closed = true; + } + + abort() { + this.aborted = true; + } +} + +{ + const stream = new WritableStream(); + + assert(stream[kState].controller instanceof WritableStreamDefaultController); + assert(!stream.locked); + + assert.strictEqual(typeof stream.abort, 'function'); + assert.strictEqual(typeof stream.close, 'function'); + assert.strictEqual(typeof stream.getWriter, 'function'); +} + +[1, false, ''].forEach((type) => { + assert.throws(() => new WritableStream({ type }), { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +['a', {}].forEach((highWaterMark) => { + assert.throws(() => new WritableStream({}, { highWaterMark }), { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +['a', false, {}].forEach((size) => { + assert.throws(() => new WritableStream({}, { size }), { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +{ + new WritableStream({}, 1); + new WritableStream({}, 'a'); + new WritableStream({}, null); +} + +{ + const sink = new Sink(); + const stream = new WritableStream( + sink, + new CountQueuingStrategy({ highWaterMark: 1 })); + + assert(!stream.locked); + const writer = stream.getWriter(); + assert(stream.locked); + assert(writer instanceof WritableStreamDefaultWriter); + + assert(isPromise(writer.closed)); + assert(isPromise(writer.ready)); + assert(typeof writer.desiredSize, 'number'); + assert(typeof writer.abort, 'function'); + assert(typeof writer.close, 'function'); + assert(typeof writer.releaseLock, 'function'); + assert(typeof writer.write, 'function'); + + 
writer.releaseLock(); + assert(!stream.locked); + + const writer2 = stream.getWriter(); + + assert(sink.started); + + writer2.closed.then(common.mustCall()); + writer2.ready.then(common.mustCall()); + + writer2.close().then(common.mustCall(() => { + assert.strict(sink.closed); + })); +} + +{ + const sink = new Sink(); + + const stream = new WritableStream( + sink, + new CountQueuingStrategy({ highWaterMark: 1 })); + + const error = new Error('boom'); + + const writer = stream.getWriter(); + + assert.rejects(writer.closed, error); + + writer.abort(error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'errored'); + assert(sink.aborted); + })); +} + +{ + const sink = new Sink(); + + const stream = new WritableStream( + sink, { highWaterMark: 1 } + ); + + async function write(stream) { + const writer = stream.getWriter(); + const p = writer.write('hello'); + assert.strictEqual(writer.desiredSize, 0); + await p; + assert.strictEqual(writer.desiredSize, 1); + } + + write(stream).then(common.mustCall(() => { + assert.deepStrictEqual(['hello'], sink.chunks); + })); +} + +{ + assert.throws(() => Reflect.get(WritableStream.prototype, 'locked', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => WritableStream.prototype.abort({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => WritableStream.prototype.close({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStream.prototype.getWriter.call(), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStream.prototype[kTransfer].call(), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects( + Reflect.get(WritableStreamDefaultWriter.prototype, 'closed'), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects( + Reflect.get(WritableStreamDefaultWriter.prototype, 'ready'), { + code: 'ERR_INVALID_THIS', + }); + assert.throws( + () => Reflect.get(WritableStreamDefaultWriter.prototype, 'desiredSize'), { + code: 'ERR_INVALID_THIS', + }); + 
assert.rejects(WritableStreamDefaultWriter.prototype.abort({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(WritableStreamDefaultWriter.prototype.close({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(WritableStreamDefaultWriter.prototype.write({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStreamDefaultWriter.prototype.releaseLock({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(WritableStreamDefaultController.prototype, 'abortReason', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(WritableStreamDefaultController.prototype, 'signal', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + WritableStreamDefaultController.prototype.error({}); + }, { + code: 'ERR_INVALID_THIS', + }); +} + +{ + let controller; + const writable = new WritableStream({ + start(c) { controller = c; } + }); + assert.strictEqual( + inspect(writable), + 'WritableStream { locked: false, state: \'writable\' }'); + assert.strictEqual( + inspect(writable, { depth: null }), + 'WritableStream { locked: false, state: \'writable\' }'); + assert.strictEqual( + inspect(writable, { depth: 0 }), + 'WritableStream [Object]'); + + const writer = writable.getWriter(); + assert.match( + inspect(writer), + /WritableStreamDefaultWriter/); + assert.match( + inspect(writer, { depth: null }), + /WritableStreamDefaultWriter/); + assert.match( + inspect(writer, { depth: 0 }), + /WritableStreamDefaultWriter \[/); + + assert.match( + inspect(controller), + /WritableStreamDefaultController/); + assert.match( + inspect(controller, { depth: null }), + /WritableStreamDefaultController/); + assert.match( + inspect(controller, { depth: 0 }), + /WritableStreamDefaultController \[/); + + writer.abort(new Error('boom')); + + assert.strictEqual(writer.desiredSize, null); + setImmediate(() => assert.strictEqual(writer.desiredSize, null)); +} diff --git a/test/pummel/test-crypto-dh-hash-modp18.js 
b/test/pummel/test-crypto-dh-hash-modp18.js index 06121d1f6dc956..e2a7f43c45074d 100644 --- a/test/pummel/test-crypto-dh-hash-modp18.js +++ b/test/pummel/test-crypto-dh-hash-modp18.js @@ -26,9 +26,8 @@ if (!common.hasCrypto) { common.skip('node compiled without OpenSSL.'); } -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } const assert = require('assert'); diff --git a/test/pummel/test-crypto-dh-hash.js b/test/pummel/test-crypto-dh-hash.js index 5b7002ce7fe16b..a5932eb764792f 100644 --- a/test/pummel/test-crypto-dh-hash.js +++ b/test/pummel/test-crypto-dh-hash.js @@ -26,9 +26,8 @@ if (!common.hasCrypto) { common.skip('node compiled without OpenSSL.'); } -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } const assert = require('assert'); diff --git a/test/pummel/test-crypto-dh-keys.js b/test/pummel/test-crypto-dh-keys.js index 33c2ed86073045..99be0e517fd640 100644 --- a/test/pummel/test-crypto-dh-keys.js +++ b/test/pummel/test-crypto-dh-keys.js @@ -26,9 +26,8 @@ if (!common.hasCrypto) { common.skip('node compiled without OpenSSL.'); } -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } const assert = require('assert'); diff --git a/test/pummel/test-dh-regr.js b/test/pummel/test-dh-regr.js index fff20c3ed08816..c55b7ff54d9ca9 100644 --- a/test/pummel/test-dh-regr.js +++ b/test/pummel/test-dh-regr.js @@ -26,9 +26,8 @@ if (!common.hasCrypto) { common.skip('missing 
crypto'); } -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } const assert = require('assert'); diff --git a/test/pummel/test-fs-watch-system-limit.js b/test/pummel/test-fs-watch-system-limit.js index 6662986a1a8ee0..20995c514fa569 100644 --- a/test/pummel/test-fs-watch-system-limit.js +++ b/test/pummel/test-fs-watch-system-limit.js @@ -9,13 +9,8 @@ if (!common.isLinux) { common.skip('The fs watch limit is OS-dependent'); } -if (!common.enoughTestCpu) { - common.skip('This test is resource-intensive'); -} - -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } try { diff --git a/test/pummel/test-hash-seed.js b/test/pummel/test-hash-seed.js index d63754cb56ed02..42b626b079e873 100644 --- a/test/pummel/test-hash-seed.js +++ b/test/pummel/test-hash-seed.js @@ -3,9 +3,9 @@ // Check that spawn child doesn't create duplicated entries const common = require('../common'); -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); +} const kRepetitions = 2; const assert = require('assert'); diff --git a/test/pummel/test-heapsnapshot-near-heap-limit-bounded.js b/test/pummel/test-heapsnapshot-near-heap-limit-bounded.js index 0ad6a898d126eb..faf5c4755aac75 100644 --- a/test/pummel/test-heapsnapshot-near-heap-limit-bounded.js +++ b/test/pummel/test-heapsnapshot-near-heap-limit-bounded.js @@ -2,9 +2,8 @@ const common = require('../common'); -if ((process.config.variables.arm_version === 
'6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } const tmpdir = require('../common/tmpdir'); diff --git a/test/pummel/test-heapsnapshot-near-heap-limit.js b/test/pummel/test-heapsnapshot-near-heap-limit.js index 6651f2ae9f52d9..420ba04205945b 100644 --- a/test/pummel/test-heapsnapshot-near-heap-limit.js +++ b/test/pummel/test-heapsnapshot-near-heap-limit.js @@ -2,9 +2,8 @@ const common = require('../common'); -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } const tmpdir = require('../common/tmpdir'); diff --git a/test/pummel/test-net-bytes-per-incoming-chunk-overhead.js b/test/pummel/test-net-bytes-per-incoming-chunk-overhead.js index fed903c2639d99..f556e9881f728c 100644 --- a/test/pummel/test-net-bytes-per-incoming-chunk-overhead.js +++ b/test/pummel/test-net-bytes-per-incoming-chunk-overhead.js @@ -7,9 +7,8 @@ if (process.config.variables.asan) { common.skip('ASAN messes with memory measurements'); } -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } const assert = require('assert'); diff --git a/test/pummel/test-next-tick-infinite-calls.js b/test/pummel/test-next-tick-infinite-calls.js index 7ae3b2261358af..bf837f5ebc92c0 100644 --- a/test/pummel/test-next-tick-infinite-calls.js +++ b/test/pummel/test-next-tick-infinite-calls.js @@ -22,9 +22,8 @@ 'use strict'; const common = require('../common'); -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) 
{ - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } let complete = 0; diff --git a/test/pummel/test-policy-integrity.js b/test/pummel/test-policy-integrity.js index 9383f881fbe514..1626a4a4158f90 100644 --- a/test/pummel/test-policy-integrity.js +++ b/test/pummel/test-policy-integrity.js @@ -6,9 +6,8 @@ if (!common.hasCrypto) { common.skip('missing crypto'); } -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } common.requireNoPackageJSONAbove(); diff --git a/test/pummel/test-webcrypto-derivebits-pbkdf2.js b/test/pummel/test-webcrypto-derivebits-pbkdf2.js index 745071f3458aba..512662025c66f7 100644 --- a/test/pummel/test-webcrypto-derivebits-pbkdf2.js +++ b/test/pummel/test-webcrypto-derivebits-pbkdf2.js @@ -6,9 +6,8 @@ if (!common.hasCrypto) { common.skip('missing crypto'); } -if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) { - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); } const assert = require('assert'); diff --git a/test/sequential/test-child-process-pass-fd.js b/test/sequential/test-child-process-pass-fd.js index 98832c57a3df67..ad4e5d693ee2e5 100644 --- a/test/sequential/test-child-process-pass-fd.js +++ b/test/sequential/test-child-process-pass-fd.js @@ -9,9 +9,9 @@ const common = require('../common'); // This test is basically `test-cluster-net-send` but creating lots of workers // so the issue reproduces on OS X consistently. 
-if ((process.config.variables.arm_version === '6') || - (process.config.variables.arm_version === '7')) - common.skip('Too slow for armv6 and armv7 bots'); +if (process.config.variables.arm_version === '7') { + common.skip('Too slow for armv7 bots'); +} const assert = require('assert'); const { fork } = require('child_process'); diff --git a/test/sequential/test-debugger-auto-resume.js b/test/sequential/test-debugger-auto-resume.js index 9a210176a51705..8a25f5fc804e1a 100644 --- a/test/sequential/test-debugger-auto-resume.js +++ b/test/sequential/test-debugger-auto-resume.js @@ -14,7 +14,7 @@ addLibraryPath(process.env); // Auto-resume on start if the environment variable is defined. { - const scriptFullPath = fixtures.path('inspector-cli', 'break.js'); + const scriptFullPath = fixtures.path('debugger', 'break.js'); const script = path.relative(process.cwd(), scriptFullPath); const env = { ...process.env }; diff --git a/test/sequential/test-debugger-backtrace.js b/test/sequential/test-debugger-backtrace.js index baf2cfe8b58673..f362e98068f15e 100644 --- a/test/sequential/test-debugger-backtrace.js +++ b/test/sequential/test-debugger-backtrace.js @@ -11,7 +11,7 @@ const path = require('path'); // Display and navigate backtrace. { - const scriptFullPath = fixtures.path('inspector-cli', 'backtrace.js'); + const scriptFullPath = fixtures.path('debugger', 'backtrace.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-break.js b/test/sequential/test-debugger-break.js index ec6f354a7e7169..fdfe9bd3c40064 100644 --- a/test/sequential/test-debugger-break.js +++ b/test/sequential/test-debugger-break.js @@ -11,7 +11,7 @@ const path = require('path'); // Stepping through breakpoints. 
{ - const scriptFullPath = fixtures.path('inspector-cli', 'break.js'); + const scriptFullPath = fixtures.path('debugger', 'break.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-clear-breakpoints.js b/test/sequential/test-debugger-clear-breakpoints.js index 0b9f3b113d5b2f..91349e105a1160 100644 --- a/test/sequential/test-debugger-clear-breakpoints.js +++ b/test/sequential/test-debugger-clear-breakpoints.js @@ -11,7 +11,7 @@ const path = require('path'); // clearBreakpoint { - const scriptFullPath = fixtures.path('inspector-cli', 'break.js'); + const scriptFullPath = fixtures.path('debugger', 'break.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-custom-port.js b/test/sequential/test-debugger-custom-port.js index 85d7e4154a16fe..e6cee10ffa53b5 100644 --- a/test/sequential/test-debugger-custom-port.js +++ b/test/sequential/test-debugger-custom-port.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Custom port. { - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI([`--port=${common.PORT}`, script]); diff --git a/test/sequential/test-debugger-exceptions.js b/test/sequential/test-debugger-exceptions.js index dc579d0197303a..9b1163316268c7 100644 --- a/test/sequential/test-debugger-exceptions.js +++ b/test/sequential/test-debugger-exceptions.js @@ -11,7 +11,7 @@ const path = require('path'); // Break on (uncaught) exceptions. 
{ - const scriptFullPath = fixtures.path('inspector-cli', 'exceptions.js'); + const scriptFullPath = fixtures.path('debugger', 'exceptions.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-exec-scope.js b/test/sequential/test-debugger-exec-scope.js index 23e376815319c8..9e5d2ac7ebaeeb 100644 --- a/test/sequential/test-debugger-exec-scope.js +++ b/test/sequential/test-debugger-exec-scope.js @@ -10,7 +10,7 @@ const assert = require('assert'); // exec .scope { - const cli = startCLI([fixtures.path('inspector-cli/backtrace.js')]); + const cli = startCLI([fixtures.path('debugger/backtrace.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-exec.js b/test/sequential/test-debugger-exec.js index e1de786ab21302..68a9b37d09d6aa 100644 --- a/test/sequential/test-debugger-exec.js +++ b/test/sequential/test-debugger-exec.js @@ -10,7 +10,7 @@ const assert = require('assert'); { - const cli = startCLI([fixtures.path('inspector-cli/alive.js')]); + const cli = startCLI([fixtures.path('debugger/alive.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-heap-profiler.js b/test/sequential/test-debugger-heap-profiler.js index 8602b8f8d11268..0f0fdc22fbb3b4 100644 --- a/test/sequential/test-debugger-heap-profiler.js +++ b/test/sequential/test-debugger-heap-profiler.js @@ -20,7 +20,7 @@ const filename = 'node.heapsnapshot'; // Heap profiler take snapshot. 
{ - const cli = startCLI([fixtures.path('inspector-cli/empty.js')]); + const cli = startCLI([fixtures.path('debugger/empty.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-help.js b/test/sequential/test-debugger-help.js index 78a48b6f9ab159..e24f873212b589 100644 --- a/test/sequential/test-debugger-help.js +++ b/test/sequential/test-debugger-help.js @@ -9,7 +9,7 @@ const startCLI = require('../common/debugger'); const assert = require('assert'); { - const cli = startCLI([fixtures.path('inspector-cli/empty.js')]); + const cli = startCLI([fixtures.path('debugger/empty.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-invalid-args.js b/test/sequential/test-debugger-invalid-args.js index 327b076d78cfb8..36f8e588b04a6e 100644 --- a/test/sequential/test-debugger-invalid-args.js +++ b/test/sequential/test-debugger-invalid-args.js @@ -3,11 +3,9 @@ const common = require('../common'); common.skipIfInspectorDisabled(); -const fixtures = require('../common/fixtures'); const startCLI = require('../common/debugger'); const assert = require('assert'); -const { createServer } = require('net'); // Launch CLI w/o args. { @@ -21,7 +19,7 @@ const { createServer } = require('net'); // Launch w/ invalid host:port. { - const cli = startCLI(['localhost:914']); + const cli = startCLI([`localhost:${common.PORT}`]); cli.quit() .then((code) => { assert.match( @@ -31,29 +29,3 @@ const { createServer } = require('net'); assert.strictEqual(code, 1); }); } - -// Launch w/ unavailable port. 
-(async () => { - const blocker = createServer((socket) => socket.end()); - const port = await new Promise((resolve, reject) => { - blocker.on('error', reject); - blocker.listen(0, '127.0.0.1', () => resolve(blocker.address().port)); - }); - - try { - const script = fixtures.path('inspector-cli', 'three-lines.js'); - const cli = startCLI([`--port=${port}`, script]); - const code = await cli.quit(); - - assert.doesNotMatch( - cli.output, - /report this bug/, - 'Omits message about reporting this as a bug'); - assert.ok( - cli.output.includes(`waiting for 127.0.0.1:${port} to be free`), - 'Tells the user that the port wasn\'t available'); - assert.strictEqual(code, 1); - } finally { - blocker.close(); - } -})().then(common.mustCall()); diff --git a/test/sequential/test-debugger-launch.js b/test/sequential/test-debugger-launch.js index e501a6b6123c47..3bfe541ecca05c 100644 --- a/test/sequential/test-debugger-launch.js +++ b/test/sequential/test-debugger-launch.js @@ -9,7 +9,7 @@ const startCLI = require('../common/debugger'); const assert = require('assert'); { - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI([script]); cli.waitForInitialBreak() diff --git a/test/sequential/test-debugger-low-level.js b/test/sequential/test-debugger-low-level.js index 2613e4a4bbc2a3..f6d97f2dfe153d 100644 --- a/test/sequential/test-debugger-low-level.js +++ b/test/sequential/test-debugger-low-level.js @@ -9,8 +9,8 @@ const assert = require('assert'); // Debugger agent direct access. 
{ - const cli = startCLI([fixtures.path('inspector-cli/three-lines.js')]); - const scriptPattern = /^\* (\d+): \S+inspector-cli(?:\/|\\)three-lines\.js/m; + const cli = startCLI([fixtures.path('debugger/three-lines.js')]); + const scriptPattern = /^\* (\d+): \S+debugger(?:\/|\\)three-lines\.js/m; function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-pid.js b/test/sequential/test-debugger-pid.js index 97de9f40369d2d..402c1f86dd4ed9 100644 --- a/test/sequential/test-debugger-pid.js +++ b/test/sequential/test-debugger-pid.js @@ -16,7 +16,7 @@ function launchTarget(...args) { } { - const script = fixtures.path('inspector-cli', 'alive.js'); + const script = fixtures.path('debugger', 'alive.js'); let cli = null; let target = null; diff --git a/test/sequential/test-debugger-preserve-breaks.js b/test/sequential/test-debugger-preserve-breaks.js index 6863aaa45ae1b6..fbc463af96a1e6 100644 --- a/test/sequential/test-debugger-preserve-breaks.js +++ b/test/sequential/test-debugger-preserve-breaks.js @@ -11,7 +11,7 @@ const path = require('path'); // Run after quit. { - const scriptFullPath = fixtures.path('inspector-cli', 'three-lines.js'); + const scriptFullPath = fixtures.path('debugger', 'three-lines.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); @@ -53,19 +53,9 @@ const path = require('path'); }) .then(() => cli.command('breakpoints')) .then(() => { - // TODO: There is a known issue on AIX and some other operating systems - // where the breakpoints aren't properly resolved yet when we reach this - // point. Eventually that should be figured out but for now we don't - // want to fail builds because of it. 
- // What it should be: - // - // const msg = `SCRIPT: ${script}, OUTPUT: ${cli.output}`; - // assert.ok(cli.output.includes(`#0 ${script}:2`), msg); - // assert.ok(cli.output.includes(`#1 ${script}:3`), msg); - // - // What we're doing for now instead: - assert.match(cli.output, /#0 [^\n]+three-lines\.js\$?:2/); - assert.match(cli.output, /#1 [^\n]+three-lines\.js\$?:3/); + const msg = `SCRIPT: ${script}, OUTPUT: ${cli.output}`; + assert.ok(cli.output.includes(`#0 ${script}:2`), msg); + assert.ok(cli.output.includes(`#1 ${script}:3`), msg); }) .then(() => cli.quit()) .then(null, onFatal); diff --git a/test/sequential/test-debugger-profile.js b/test/sequential/test-debugger-profile.js index 39b57448ffe628..992c6f71c00775 100644 --- a/test/sequential/test-debugger-profile.js +++ b/test/sequential/test-debugger-profile.js @@ -14,7 +14,7 @@ function delay(ms) { // Profiles. { - const cli = startCLI([fixtures.path('inspector-cli/empty.js')]); + const cli = startCLI([fixtures.path('debugger/empty.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-random-port-with-inspect-port.js b/test/sequential/test-debugger-random-port-with-inspect-port.js index 83c2b68b014b48..5617e130f02585 100644 --- a/test/sequential/test-debugger-random-port-with-inspect-port.js +++ b/test/sequential/test-debugger-random-port-with-inspect-port.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Random port with --inspect-port=0. { - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI(['--inspect-port=0', script]); diff --git a/test/sequential/test-debugger-random-port.js b/test/sequential/test-debugger-random-port.js index 22de14661b8f64..da8656cf1c7115 100644 --- a/test/sequential/test-debugger-random-port.js +++ b/test/sequential/test-debugger-random-port.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Random port. 
{ - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI(['--port=0', script]); diff --git a/test/sequential/test-debugger-restart-message.js b/test/sequential/test-debugger-restart-message.js new file mode 100644 index 00000000000000..bcd06b4e230131 --- /dev/null +++ b/test/sequential/test-debugger-restart-message.js @@ -0,0 +1,39 @@ +'use strict'; + +const common = require('../common'); + +common.skipIfInspectorDisabled(); + +const assert = require('assert'); + +const RESTARTS = 10; + +const fixtures = require('../common/fixtures'); +const startCLI = require('../common/debugger'); + +// Using `restart` should result in only one "Connect/For help" message. +{ + const script = fixtures.path('debugger', 'three-lines.js'); + const cli = startCLI([script]); + + function onFatal(error) { + cli.quit(); + throw error; + } + + const listeningRegExp = /Debugger listening on/g; + + cli.waitForInitialBreak() + .then(() => cli.waitForPrompt()) + .then(() => { + assert.strictEqual(cli.output.match(listeningRegExp).length, 1); + }) + .then(async () => { + for (let i = 0; i < RESTARTS; i++) { + await cli.stepCommand('restart'); + assert.strictEqual(cli.output.match(listeningRegExp).length, 1); + } + }) + .then(() => cli.quit()) + .then(null, onFatal); +} diff --git a/test/sequential/test-debugger-run-after-quit-restart.js b/test/sequential/test-debugger-run-after-quit-restart.js index 7f31d467198f23..a9da07dcdff8bd 100644 --- a/test/sequential/test-debugger-run-after-quit-restart.js +++ b/test/sequential/test-debugger-run-after-quit-restart.js @@ -11,7 +11,7 @@ const path = require('path'); // Run after quit/restart. 
{ - const scriptFullPath = fixtures.path('inspector-cli', 'three-lines.js'); + const scriptFullPath = fixtures.path('debugger', 'three-lines.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-sb-before-load.js b/test/sequential/test-debugger-sb-before-load.js index c99fb664a70768..586687800e8e90 100644 --- a/test/sequential/test-debugger-sb-before-load.js +++ b/test/sequential/test-debugger-sb-before-load.js @@ -11,10 +11,10 @@ const path = require('path'); // Using sb before loading file. { - const scriptFullPath = fixtures.path('inspector-cli', 'cjs', 'index.js'); + const scriptFullPath = fixtures.path('debugger', 'cjs', 'index.js'); const script = path.relative(process.cwd(), scriptFullPath); - const otherScriptFullPath = fixtures.path('inspector-cli', 'cjs', 'other.js'); + const otherScriptFullPath = fixtures.path('debugger', 'cjs', 'other.js'); const otherScript = path.relative(process.cwd(), otherScriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-scripts.js b/test/sequential/test-debugger-scripts.js index 893420d2aa4c2d..c6d4e67920921d 100644 --- a/test/sequential/test-debugger-scripts.js +++ b/test/sequential/test-debugger-scripts.js @@ -10,7 +10,7 @@ const assert = require('assert'); // List scripts. 
{ - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI([script]); function onFatal(error) { @@ -24,7 +24,7 @@ const assert = require('assert'); .then(() => { assert.match( cli.output, - /^\* \d+: \S+inspector-cli(?:\/|\\)three-lines\.js/m, + /^\* \d+: \S+debugger(?:\/|\\)three-lines\.js/m, 'lists the user script'); assert.doesNotMatch( cli.output, @@ -35,7 +35,7 @@ const assert = require('assert'); .then(() => { assert.match( cli.output, - /\* \d+: \S+inspector-cli(?:\/|\\)three-lines\.js/, + /\* \d+: \S+debugger(?:\/|\\)three-lines\.js/, 'lists the user script'); assert.match( cli.output, diff --git a/test/sequential/test-debugger-use-strict.js b/test/sequential/test-debugger-use-strict.js index c5b46b4f00e839..ae82a9fc82352b 100644 --- a/test/sequential/test-debugger-use-strict.js +++ b/test/sequential/test-debugger-use-strict.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Test for files that start with strict directive. { - const script = fixtures.path('inspector-cli', 'use-strict.js'); + const script = fixtures.path('debugger', 'use-strict.js'); const cli = startCLI([script]); function onFatal(error) { diff --git a/test/sequential/test-debugger-watchers.js b/test/sequential/test-debugger-watchers.js index e239e7ac74c216..e856132b74e28a 100644 --- a/test/sequential/test-debugger-watchers.js +++ b/test/sequential/test-debugger-watchers.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Stepping through breakpoints. 
{ - const cli = startCLI([fixtures.path('inspector-cli/break.js')]); + const cli = startCLI([fixtures.path('debugger/break.js')]); function onFatal(error) { cli.quit(); diff --git a/test/tick-processor/test-tick-processor-builtin.js b/test/tick-processor/test-tick-processor-builtin.js index 1f38abe08c32e2..f01f0986682dc9 100644 --- a/test/tick-processor/test-tick-processor-builtin.js +++ b/test/tick-processor/test-tick-processor-builtin.js @@ -2,9 +2,6 @@ const common = require('../common'); const { isCPPSymbolsNotMapped } = require('./util'); -if (!common.enoughTestCpu) - common.skip('test is CPU-intensive'); - if (isCPPSymbolsNotMapped) { common.skip('C++ symbols are not mapped for this os.'); } diff --git a/test/tick-processor/test-tick-processor-cpp-core.js b/test/tick-processor/test-tick-processor-cpp-core.js index e76d99ab09db7b..89943cca5c8329 100644 --- a/test/tick-processor/test-tick-processor-cpp-core.js +++ b/test/tick-processor/test-tick-processor-cpp-core.js @@ -2,9 +2,6 @@ const common = require('../common'); const { isCPPSymbolsNotMapped } = require('./util'); -if (!common.enoughTestCpu) - common.skip('test is CPU-intensive'); - if (isCPPSymbolsNotMapped) { common.skip('C++ symbols are not mapped for this os.'); } diff --git a/test/tick-processor/test-tick-processor-polyfill-brokenfile.js b/test/tick-processor/test-tick-processor-polyfill-brokenfile.js index 98cbf5bb414789..926c96e4d7e6cd 100644 --- a/test/tick-processor/test-tick-processor-polyfill-brokenfile.js +++ b/test/tick-processor/test-tick-processor-polyfill-brokenfile.js @@ -4,9 +4,6 @@ const { isCPPSymbolsNotMapped } = require('./util'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -if (!common.enoughTestCpu) - common.skip('test is CPU-intensive'); - if (isCPPSymbolsNotMapped) { common.skip('C++ symbols are not mapped for this OS.'); } diff --git a/test/tick-processor/test-tick-processor-preprocess-flag.js b/test/tick-processor/test-tick-processor-preprocess-flag.js index 
8b1ec9920f47ed..b003f7c6b8b01f 100644 --- a/test/tick-processor/test-tick-processor-preprocess-flag.js +++ b/test/tick-processor/test-tick-processor-preprocess-flag.js @@ -2,9 +2,6 @@ const common = require('../common'); const { isCPPSymbolsNotMapped } = require('./util'); -if (!common.enoughTestCpu) - common.skip('test is CPU-intensive'); - if (isCPPSymbolsNotMapped) { common.skip('C++ symbols are not mapped for this os.'); } diff --git a/test/tick-processor/test-tick-processor-unknown.js b/test/tick-processor/test-tick-processor-unknown.js index f894ae919acf66..42208f3bb36d36 100644 --- a/test/tick-processor/test-tick-processor-unknown.js +++ b/test/tick-processor/test-tick-processor-unknown.js @@ -9,9 +9,6 @@ const common = require('../common'); if (common.isAIX) common.skip('AIX address range too big for scripts.'); -if (!common.enoughTestCpu) - common.skip('test is CPU-intensive'); - const base = require('./tick-processor-base.js'); // Unknown checked for to prevent flakiness, if pattern is not found, diff --git a/test/wasi/test-wasi-initialize-validation.js b/test/wasi/test-wasi-initialize-validation.js index 79b0bd8485a483..40dfd864d1874e 100644 --- a/test/wasi/test-wasi-initialize-validation.js +++ b/test/wasi/test-wasi-initialize-validation.js @@ -78,7 +78,8 @@ const bufferSource = fixtures.readSync('simple.wasm'); () => { wasi.initialize(instance); }, { code: 'ERR_INVALID_ARG_TYPE', - message: /"instance\.exports\._start" property must be undefined/ + message: 'The "instance.exports._start" property must be' + + ' undefined. 
Received function _start', } ); } diff --git a/test/wasi/test-wasi-start-validation.js b/test/wasi/test-wasi-start-validation.js index 5c6a1ede5d4fd7..2059ff081e88dd 100644 --- a/test/wasi/test-wasi-start-validation.js +++ b/test/wasi/test-wasi-start-validation.js @@ -78,7 +78,8 @@ const bufferSource = fixtures.readSync('simple.wasm'); () => { wasi.start(instance); }, { code: 'ERR_INVALID_ARG_TYPE', - message: /"instance\.exports\._initialize" property must be undefined/ + message: 'The "instance.exports._initialize" property must be' + + ' undefined. Received function _initialize', } ); } diff --git a/test/wpt/status/streams.json b/test/wpt/status/streams.json new file mode 100644 index 00000000000000..c1b80d69dd8cd3 --- /dev/null +++ b/test/wpt/status/streams.json @@ -0,0 +1,11 @@ +{ + "queuing-strategies-size-function-per-global.window.js": { + "skip": "Browser-specific test" + }, + "transferable/deserialize-error.window.js": { + "skip": "Browser-specific test" + }, + "readable-byte-streams/bad-buffers-and-views.any.js": { + "fail": "TODO: implement detached ArrayBuffer support" + } +} diff --git a/test/wpt/test-streams.js b/test/wpt/test-streams.js new file mode 100644 index 00000000000000..987676d8c49125 --- /dev/null +++ b/test/wpt/test-streams.js @@ -0,0 +1,139 @@ +'use strict'; + +require('../common'); +const { WPTRunner } = require('../common/wpt'); + +const runner = new WPTRunner('streams'); + +// Set Node.js flags required for the tests. +runner.setFlags(['--expose-internals']); + +// Set a script that will be executed in the worker before running the tests. 
+runner.setInitScript(` + let { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransformStream, + TransformStreamDefaultController, + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + ByteLengthQueuingStrategy, + CountQueuingStrategy, + } = require('stream/web'); + + const { internalBinding } = require('internal/test/binding'); + const { DOMException } = internalBinding('messaging'); + global.DOMException = DOMException; + + Object.defineProperties(global, { + ReadableStream: { + value: ReadableStream, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamDefaultReader: { + value: ReadableStreamDefaultReader, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamBYOBReader: { + value: ReadableStreamBYOBReader, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamBYOBRequest: { + value: ReadableStreamBYOBRequest, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableByteStreamController: { + value: ReadableByteStreamController, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamDefaultController: { + value: ReadableStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + TransformStream: { + value: TransformStream, + configurable: true, + writable: true, + enumerable: false, + }, + TransformStreamDefaultController: { + value: TransformStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStream: { + value: WritableStream, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStreamDefaultWriter: { + value: WritableStreamDefaultWriter, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStreamDefaultController: { + value: 
WritableStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + ByteLengthQueuingStrategy: { + value: ByteLengthQueuingStrategy, + configurable: true, + writable: true, + enumerable: false, + }, + CountQueuingStrategy: { + value: CountQueuingStrategy, + configurable: true, + writable: true, + enumerable: false, + }, + }); + + // Simulate global postMessage for enqueue-with-detached-buffer.window.js + function postMessage(value, origin, transferList) { + const mc = new MessageChannel(); + mc.port1.postMessage(value, transferList); + mc.port2.close(); + } + + // TODO(@jasnell): This is a bit of a hack to get the idl harness test + // working. Later we should investigate a better approach. + // See: https://github.com/nodejs/node/pull/39062#discussion_r659383373 + Object.defineProperties(global, { + DedicatedWorkerGlobalScope: { + get() { + // Pretend that we're a DedicatedWorker, but *only* for the + // IDL harness. For everything else, keep the JavaScript shell + // environment. 
+ if (new Error().stack.includes('idlharness.js')) + return global.constructor; + else + return function() {}; + } + } + }); +`); + +runner.runJsTests(); diff --git a/tools/clang-format/package-lock.json b/tools/clang-format/package-lock.json index af57b9891ff830..61f17967f0e4a5 100644 --- a/tools/clang-format/package-lock.json +++ b/tools/clang-format/package-lock.json @@ -92,9 +92,9 @@ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "resolve": { "version": "1.8.1", diff --git a/tools/doc/html.mjs b/tools/doc/html.mjs index d1662a08c44924..c2c3a7a1836e88 100644 --- a/tools/doc/html.mjs +++ b/tools/doc/html.mjs @@ -489,8 +489,6 @@ function altDocs(filename, docCreated, versions) { ` : ''; } -// eslint-disable-next-line max-len -const githubLogo = ''; function editOnGitHub(filename) { - return `
  • ${githubLogo}Edit on GitHub
  • `; + return `
  • Edit on GitHub
  • `; } diff --git a/tools/doc/package-lock.json b/tools/doc/package-lock.json index 9d9ce759c0896e..598a8fccd30e78 100644 --- a/tools/doc/package-lock.json +++ b/tools/doc/package-lock.json @@ -11,7 +11,7 @@ "node-doc-generator": "generate.js" }, "devDependencies": { - "highlight.js": "10.7.3", + "highlight.js": "11.0.1", "js-yaml": "4.1.0", "rehype-raw": "5.1.0", "rehype-stringify": "8.0.0", @@ -25,7 +25,7 @@ "unist-util-visit": "3.1.0" }, "engines": { - "node": ">=12.10.0" + "node": ">=14.8.0" } }, "node_modules/@types/hast": { @@ -356,12 +356,12 @@ } }, "node_modules/highlight.js": { - "version": "10.7.3", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", - "integrity": "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.0.1.tgz", + "integrity": "sha512-EqYpWyTF2s8nMfttfBA2yLKPNoZCO33pLS4MnbXQ4hECf1TKujCt1Kq7QAdrio7roL4+CqsfjqwYj4tYgq0pJQ==", "dev": true, "engines": { - "node": "*" + "node": ">=12.0.0" } }, "node_modules/html-void-elements": { @@ -1513,9 +1513,9 @@ } }, "highlight.js": { - "version": "10.7.3", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", - "integrity": "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.0.1.tgz", + "integrity": "sha512-EqYpWyTF2s8nMfttfBA2yLKPNoZCO33pLS4MnbXQ4hECf1TKujCt1Kq7QAdrio7roL4+CqsfjqwYj4tYgq0pJQ==", "dev": true }, "html-void-elements": { diff --git a/tools/doc/package.json b/tools/doc/package.json index 223220bcc1c7e5..4a5624952f85ed 100644 --- a/tools/doc/package.json +++ b/tools/doc/package.json @@ -7,7 +7,7 @@ "node": ">=14.8.0" }, "devDependencies": { - "highlight.js": "10.7.3", + "highlight.js": "11.0.1", "js-yaml": "4.1.0", "rehype-raw": "5.1.0", 
"rehype-stringify": "8.0.0", diff --git a/tools/doc/type-parser.mjs b/tools/doc/type-parser.mjs index c2586a43254ecb..fff8f5afbd06cb 100644 --- a/tools/doc/type-parser.mjs +++ b/tools/doc/type-parser.mjs @@ -226,6 +226,33 @@ const customTypesMap = { 'X509Certificate': 'crypto.html#crypto_class_x509certificate', 'zlib options': 'zlib.html#zlib_class_options', + + 'ReadableStream': + 'webstreams.md#webstreamsapi_class_readablestream', + 'ReadableStreamDefaultReader': + 'webstreams.md#webstreamsapi_class_readablestreamdefaultreader', + 'ReadableStreamBYOBReader': + 'webstreams.md#webstreamsapi_class_readablestreambyobreader', + 'ReadableStreamDefaultController': + 'webstreams.md#webstreamsapi_class_readablestreamdefaultcontroller', + 'ReadableByteStreamController': + 'webstreams.md#webstreamsapi_class_readablebytestreamcontroller', + 'ReadableStreamBYOBRequest': + 'webstreams.md#webstreamsapi_class_readablestreambyobrequest', + 'WritableStream': + 'webstreams.md#webstreamsapi_class_writablestream', + 'WritableStreamDefaultWriter': + 'webstreams.md#webstreamsapi_class_writablestreamdefaultwriter', + 'WritableStreamDefaultController': + 'webstreams.md#webstreamsapi_class_writablestreamdefaultcontroller', + 'TransformStream': + 'webstreams.md#webstreamsapi_class_transformstream', + 'TransformStreamDefaultController': + 'webstreams.md#webstreamsapi_class_transformstreamdefaultcontroller', + 'ByteLengthQueuingStrategy': + 'webstreams.md#webstreamsapi_class_bytelengthqueuingstrategy', + 'CountQueuingStrategy': + 'webstreams.md#webstreamsapi_class_countqueuingstrategy', }; const arrayPart = /(?:\[])+$/; @@ -261,7 +288,7 @@ export function toLink(typeInput) { } else { throw new Error( `Unrecognized type: '${typeTextFull}'.\n` + - "Please, edit the type or update the 'tools/doc/type-parser.js'." 
+ `Please, edit the type or update '${import.meta.url}'.` ); } } else { diff --git a/tools/find-inactive-collaborators.mjs b/tools/find-inactive-collaborators.mjs new file mode 100755 index 00000000000000..578adf05fb78c7 --- /dev/null +++ b/tools/find-inactive-collaborators.mjs @@ -0,0 +1,95 @@ +#!/usr/bin/env node + +// Identify inactive collaborators. "Inactive" is not quite right, as the things +// this checks for are not the entirety of collaborator activities. Still, it is +// a pretty good proxy. Feel free to suggest or implement further metrics. + +import cp from 'node:child_process'; +import fs from 'node:fs'; +import readline from 'node:readline'; + +const SINCE = process.argv[2] || '6 months ago'; + +async function runGitCommand(cmd, mapFn) { + const childProcess = cp.spawn('/bin/sh', ['-c', cmd], { + cwd: new URL('..', import.meta.url), + encoding: 'utf8', + stdio: ['inherit', 'pipe', 'inherit'], + }); + const lines = readline.createInterface({ + input: childProcess.stdout, + }); + const errorHandler = new Promise( + (_, reject) => childProcess.on('error', reject) + ); + const returnedSet = new Set(); + await Promise.race([errorHandler, Promise.resolve()]); + for await (const line of lines) { + await Promise.race([errorHandler, Promise.resolve()]); + const val = mapFn(line); + if (val) { + returnedSet.add(val); + } + } + return Promise.race([errorHandler, Promise.resolve(returnedSet)]); +} + +// Get all commit authors during the time period. +const authors = await runGitCommand( + `git shortlog -n -s --since="${SINCE}"`, + (line) => line.trim().split('\t', 2)[1] +); + +// Get all commit landers during the time period. +const landers = await runGitCommand( + `git shortlog -n -s -c --since="${SINCE}"`, + (line) => line.trim().split('\t', 2)[1] +); + +// Get all approving reviewers of landed commits during the time period. 
+const approvingReviewers = await runGitCommand( + `git log --since="${SINCE}" | egrep "^ Reviewed-By: "`, + (line) => /^ Reviewed-By: ([^<]+)/.exec(line)[1].trim() +); + +async function retrieveCollaboratorsFromReadme() { + const readmeText = readline.createInterface({ + input: fs.createReadStream(new URL('../README.md', import.meta.url)), + crlfDelay: Infinity, + }); + const returnedArray = []; + let processingCollaborators = false; + for await (const line of readmeText) { + const isCollaborator = processingCollaborators && line.length; + if (line === '### Collaborators') { + processingCollaborators = true; + } + if (line === '### Collaborator emeriti') { + processingCollaborators = false; + break; + } + if (line.startsWith('**') && isCollaborator) { + returnedArray.push(line.split('**', 2)[1].trim()); + } + } + return returnedArray; +} + +// Get list of current collaborators from README.md. +const collaborators = await retrieveCollaboratorsFromReadme(); + +console.log(`${authors.size.toLocaleString()} authors have made commits since ${SINCE}.`); +console.log(`${landers.size.toLocaleString()} landers have landed commits since ${SINCE}.`); +console.log(`${approvingReviewers.size.toLocaleString()} reviewers have approved landed commits since ${SINCE}.`); +console.log(`${collaborators.length.toLocaleString()} collaborators currently in the project.`); + +const inactive = collaborators.filter((collaborator) => + !authors.has(collaborator) && + !landers.has(collaborator) && + !approvingReviewers.has(collaborator) +); + +if (inactive.length) { + console.log('\nInactive collaborators:'); + console.log(inactive.join('\n')); +} diff --git a/tools/gyp/CHANGELOG.md b/tools/gyp/CHANGELOG.md index 6d66b3acd22d5a..d84ee08238d37f 100644 --- a/tools/gyp/CHANGELOG.md +++ b/tools/gyp/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +### [0.9.3](https://www.github.com/nodejs/gyp-next/compare/v0.9.2...v0.9.3) (2021-07-07) + + +### Bug Fixes + +* build failure with ninja and Python 3 on 
Windows ([#113](https://www.github.com/nodejs/gyp-next/issues/113)) ([c172d10](https://www.github.com/nodejs/gyp-next/commit/c172d105deff5db4244e583942215918fa80dd3c)) + +### [0.9.2](https://www.github.com/nodejs/gyp-next/compare/v0.9.1...v0.9.2) (2021-05-21) + + +### Bug Fixes + +* add support of utf8 encoding ([#105](https://www.github.com/nodejs/gyp-next/issues/105)) ([4d0f93c](https://www.github.com/nodejs/gyp-next/commit/4d0f93c249286d1f0c0f665f5fe7346119f98cf1)) + ### [0.9.1](https://www.github.com/nodejs/gyp-next/compare/v0.9.0...v0.9.1) (2021-05-14) diff --git a/tools/gyp/pylib/gyp/MSVSUtil.py b/tools/gyp/pylib/gyp/MSVSUtil.py index cb55305eaeed24..36bb782bd319a2 100644 --- a/tools/gyp/pylib/gyp/MSVSUtil.py +++ b/tools/gyp/pylib/gyp/MSVSUtil.py @@ -55,7 +55,7 @@ def _SuffixName(name, suffix): Target name with suffix added (foo_suffix#target) """ parts = name.rsplit("#", 1) - parts[0] = "{}_{}".format(parts[0], suffix) + parts[0] = f"{parts[0]}_{suffix}" return "#".join(parts) diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py index ba310ce247f078..9213fcc5e82bb7 100644 --- a/tools/gyp/pylib/gyp/common.py +++ b/tools/gyp/pylib/gyp/common.py @@ -562,7 +562,7 @@ def pop(self, last=True): # pylint: disable=W0221 def __repr__(self): if not self: return f"{self.__class__.__name__}()" - return "{}({!r})".format(self.__class__.__name__, list(self)) + return f"{self.__class__.__name__}({list(self)!r})" def __eq__(self, other): if isinstance(other, OrderedSet): diff --git a/tools/gyp/pylib/gyp/easy_xml.py b/tools/gyp/pylib/gyp/easy_xml.py index e475b5530c1434..0c99e29ecf8ce0 100644 --- a/tools/gyp/pylib/gyp/easy_xml.py +++ b/tools/gyp/pylib/gyp/easy_xml.py @@ -2,6 +2,7 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+import sys import re import os import locale @@ -84,7 +85,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0): rest = specification[1:] if rest and isinstance(rest[0], dict): for at, val in sorted(rest[0].items()): - xml_parts.append(' {}="{}"'.format(at, _XmlEscape(val, attr=True))) + xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"') rest = rest[1:] if rest: xml_parts.append(">") @@ -106,7 +107,8 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0): xml_parts.append("/>%s" % new_line) -def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, win32=False): +def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, + win32=(sys.platform == "win32")): """ Writes the XML content to disk, touching the file only if it has changed. Args: @@ -121,7 +123,10 @@ def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, win32=False default_encoding = locale.getdefaultlocale()[1] if default_encoding and default_encoding.upper() != encoding.upper(): - xml_string = xml_string.encode(encoding) + if win32 and sys.version_info < (3, 7): + xml_string = xml_string.decode("cp1251").encode(encoding) + else: + xml_string = xml_string.encode(encoding) # Get the old content try: diff --git a/tools/gyp/pylib/gyp/generator/android.py b/tools/gyp/pylib/gyp/generator/android.py index 040d8088a24a03..cdf1a4832cf1ad 100644 --- a/tools/gyp/pylib/gyp/generator/android.py +++ b/tools/gyp/pylib/gyp/generator/android.py @@ -349,7 +349,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs): for output in outputs[1:]: # Make each output depend on the main output, with an empty command # to force make to notice that the mtime has changed. 
- self.WriteLn("{}: {} ;".format(self.LocalPathify(output), main_output)) + self.WriteLn(f"{self.LocalPathify(output)}: {main_output} ;") extra_outputs += outputs self.WriteLn() @@ -616,7 +616,7 @@ def WriteSources(self, spec, configs, extra_sources): if IsCPPExtension(ext) and ext != local_cpp_extension: local_file = root + local_cpp_extension if local_file != source: - self.WriteLn("{}: {}".format(local_file, self.LocalPathify(source))) + self.WriteLn(f"{local_file}: {self.LocalPathify(source)}") self.WriteLn("\tmkdir -p $(@D); cp $< $@") origin_src_dirs.append(os.path.dirname(source)) final_generated_sources.append(local_file) @@ -908,7 +908,7 @@ def WriteTarget( if isinstance(v, list): self.WriteList(v, k) else: - self.WriteLn("{} := {}".format(k, make.QuoteIfNecessary(v))) + self.WriteLn(f"{k} := {make.QuoteIfNecessary(v)}") self.WriteLn("") # Add to the set of targets which represent the gyp 'all' target. We use the diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index eb9102dd15a810..c595f20fe2df12 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -2133,7 +2133,7 @@ def WriteSortedXcodeEnv(self, target, env): # export foo := a\ b # it does not -- the backslash is written to the env as literal character. # So don't escape spaces in |env[k]|. 
- self.WriteLn("{}: export {} := {}".format(QuoteSpaces(target), k, v)) + self.WriteLn(f"{QuoteSpaces(target)}: export {k} := {v}") def Objectify(self, path): """Convert a path to its output directory form.""" diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py index 5435eb1e1f3317..31d5396fe56978 100644 --- a/tools/gyp/pylib/gyp/generator/msvs.py +++ b/tools/gyp/pylib/gyp/generator/msvs.py @@ -314,7 +314,7 @@ def _ConfigBaseName(config_name, platform_name): def _ConfigFullName(config_name, config_data): platform_name = _ConfigPlatform(config_data) - return "{}|{}".format(_ConfigBaseName(config_name, platform_name), platform_name) + return f"{_ConfigBaseName(config_name, platform_name)}|{platform_name}" def _ConfigWindowsTargetPlatformVersion(config_data, version): @@ -335,7 +335,7 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version): # Find a matching entry in sdk_dir\include. expected_sdk_dir = r"%s\include" % sdk_dir names = sorted( - [ + ( x for x in ( os.listdir(expected_sdk_dir) @@ -343,7 +343,7 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version): else [] ) if x.startswith(version) - ], + ), reverse=True, ) if names: diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py index ca032aef20ff6a..b66e674a7b3871 100644 --- a/tools/gyp/pylib/gyp/generator/ninja.py +++ b/tools/gyp/pylib/gyp/generator/ninja.py @@ -638,7 +638,7 @@ def GenerateDescription(self, verb, message, fallback): if self.toolset != "target": verb += "(%s)" % self.toolset if message: - return "{} {}".format(verb, self.ExpandSpecial(message)) + return f"{verb} {self.ExpandSpecial(message)}" else: return f"{verb} {self.name}: {fallback}" @@ -2389,7 +2389,6 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name ) if flavor == "win": master_ninja.variable("ld_host", ld_host) - master_ninja.variable("ldxx_host", ldxx_host) else: master_ninja.variable( "ld_host", 
CommandWithWrapper("LINK", wrappers, ld_host) diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py index ca7ce44eab87df..354958bfb2ab55 100644 --- a/tools/gyp/pylib/gyp/input.py +++ b/tools/gyp/pylib/gyp/input.py @@ -225,7 +225,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check return data[build_file_path] if os.path.exists(build_file_path): - build_file_contents = open(build_file_path).read() + build_file_contents = open(build_file_path, encoding='utf-8').read() else: raise GypError(f"{build_file_path} not found (cwd: {os.getcwd()})") diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/tools/gyp/pylib/gyp/msvs_emulation.py index f744e38df1639b..6fcabd049d843c 100644 --- a/tools/gyp/pylib/gyp/msvs_emulation.py +++ b/tools/gyp/pylib/gyp/msvs_emulation.py @@ -333,7 +333,7 @@ def _TargetConfig(self, config): # first level is globally for the configuration (this is what we consider # "the" config at the gyp level, which will be something like 'Debug' or # 'Release'), VS2015 and later only use this level - if self.vs_version.short_name >= 2015: + if int(self.vs_version.short_name) >= 2015: return config # and a second target-specific configuration, which is an # override for the global one. |config| is remapped here to take into @@ -537,7 +537,7 @@ def GetCflags(self, config): ) ] ) - if self.vs_version.project_version >= 12.0: + if float(self.vs_version.project_version) >= 12.0: # New flag introduced in VS2013 (project version 12.0) Forces writes to # the program database (PDB) to be serialized through MSPDBSRV.EXE. # https://msdn.microsoft.com/en-us/library/dn502518.aspx diff --git a/tools/gyp/pylib/gyp/win_tool.py b/tools/gyp/pylib/gyp/win_tool.py index 4dbcda50a4c0c6..638eee40029411 100755 --- a/tools/gyp/pylib/gyp/win_tool.py +++ b/tools/gyp/pylib/gyp/win_tool.py @@ -221,8 +221,9 @@ def ExecLinkWithManifests( # and sometimes doesn't unfortunately. 
with open(our_manifest) as our_f: with open(assert_manifest) as assert_f: - our_data = our_f.read().translate(None, string.whitespace) - assert_data = assert_f.read().translate(None, string.whitespace) + translator = str.maketrans('', '', string.whitespace) + our_data = our_f.read().translate(translator) + assert_data = assert_f.read().translate(translator) if our_data != assert_data: os.unlink(out) diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py index 5863ef45df2379..076eea37211179 100644 --- a/tools/gyp/pylib/gyp/xcodeproj_file.py +++ b/tools/gyp/pylib/gyp/xcodeproj_file.py @@ -299,8 +299,8 @@ def __repr__(self): try: name = self.Name() except NotImplementedError: - return "<{} at 0x{:x}>".format(self.__class__.__name__, id(self)) - return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self)) + return f"<{self.__class__.__name__} at 0x{id(self):x}>" + return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>" def Copy(self): """Make a copy of this object. @@ -2251,7 +2251,7 @@ class PBXContainerItemProxy(XCObject): def __repr__(self): props = self._properties name = "{}.gyp:{}".format(props["containerPortal"].Name(), props["remoteInfo"]) - return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self)) + return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>" def Name(self): # Admittedly not the best name, but it's what Xcode uses. @@ -2288,7 +2288,7 @@ class PBXTargetDependency(XCObject): def __repr__(self): name = self._properties.get("name") or self._properties["target"].Name() - return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self)) + return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>" def Name(self): # Admittedly not the best name, but it's what Xcode uses. 
diff --git a/tools/gyp/setup.py b/tools/gyp/setup.py index f4a9481937547a..1ff298a12326b0 100644 --- a/tools/gyp/setup.py +++ b/tools/gyp/setup.py @@ -15,7 +15,7 @@ setup( name="gyp-next", - version="0.9.1", + version="0.9.3", description="A fork of the GYP build system for use in the Node.js projects", long_description=long_description, long_description_content_type="text/markdown", diff --git a/tools/gyp/test_gyp.py b/tools/gyp/test_gyp.py index 757d2fc0b0a16f..9ba264170f43ab 100755 --- a/tools/gyp/test_gyp.py +++ b/tools/gyp/test_gyp.py @@ -140,10 +140,7 @@ def main(argv=None): if not args.quiet: runner.print_results() - if runner.failures: - return 1 - else: - return 0 + return 1 if runner.failures else 0 def print_configuration_info(): @@ -152,8 +149,8 @@ def print_configuration_info(): sys.path.append(os.path.abspath("test/lib")) import TestMac - print(" Mac {} {}".format(platform.mac_ver()[0], platform.mac_ver()[2])) - print(" Xcode %s" % TestMac.Xcode.Version()) + print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}") + print(f" Xcode {TestMac.Xcode.Version()}") elif sys.platform == "win32": sys.path.append(os.path.abspath("pylib")) import gyp.MSVSVersion @@ -162,8 +159,8 @@ def print_configuration_info(): print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description()) elif sys.platform in ("linux", "linux2"): print(" Linux %s" % " ".join(platform.linux_distribution())) - print(" Python %s" % platform.python_version()) - print(" PYTHONPATH=%s" % os.environ["PYTHONPATH"]) + print(f" Python {platform.python_version()}") + print(f" PYTHONPATH={os.environ['PYTHONPATH']}") print() @@ -222,13 +219,9 @@ def run_test(self, test, fmt, i): res_msg = f" {res} {took:.3f}s" self.print_(res_msg) - if ( - stdout - and not stdout.endswith("PASSED\n") - and not (stdout.endswith("NO RESULT\n")) - ): + if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")): print() - print("\n".join(" %s" % line for line in stdout.splitlines())) + 
print("\n".join(f" {line}" for line in stdout.splitlines())) elif not self.isatty: print() diff --git a/tools/install.py b/tools/install.py index 045d406d84be30..24cf51e73199e6 100755 --- a/tools/install.py +++ b/tools/install.py @@ -160,6 +160,9 @@ def headers(action): def wanted_v8_headers(files_arg, dest): v8_headers = [ 'deps/v8/include/cppgc/common.h', + 'deps/v8/include/libplatform/libplatform.h', + 'deps/v8/include/libplatform/libplatform-export.h', + 'deps/v8/include/libplatform/v8-tracing.h', 'deps/v8/include/v8.h', 'deps/v8/include/v8-internal.h', 'deps/v8/include/v8-platform.h', diff --git a/tools/js2c.py b/tools/js2c.py index d40f28ce2bff2b..d93be2123e0f8c 100755 --- a/tools/js2c.py +++ b/tools/js2c.py @@ -36,6 +36,7 @@ import re import functools import codecs +import utils def ReadFile(filename): if is_verbose: @@ -204,12 +205,23 @@ def main(): fromfile_prefix_chars='@' ) parser.add_argument('--target', help='output file') + parser.add_argument( + '--directory', + default=None, + help='input file directory') parser.add_argument('--verbose', action='store_true', help='output file') parser.add_argument('sources', nargs='*', help='input files') options = parser.parse_args() global is_verbose is_verbose = options.verbose - source_files = functools.reduce(SourceFileByExt, options.sources, {}) + sources = options.sources + if options.directory is not None: + js_files = utils.SearchFiles(options.directory, 'js') + mjs_files = utils.SearchFiles(options.directory, 'mjs') + sources = js_files + mjs_files + options.sources + + source_files = functools.reduce(SourceFileByExt, sources, {}) + # Should have exactly 3 types: `.js`, `.mjs` and `.gypi` assert len(source_files) == 3 # Currently config.gypi is the only `.gypi` file allowed diff --git a/tools/license-builder.sh b/tools/license-builder.sh index 04b631a09b7d54..56f2840c046646 100755 --- a/tools/license-builder.sh +++ b/tools/license-builder.sh @@ -30,7 +30,6 @@ fi # Dependencies bundled in distributions 
addlicense "Acorn" "deps/acorn" "$(cat "${rootdir}"/deps/acorn/acorn/LICENSE)" -addlicense "Acorn plugins" "deps/acorn-plugins" "$(cat "${rootdir}"/deps/acorn-plugins/acorn-class-fields/LICENSE)" addlicense "c-ares" "deps/cares" "$(tail -n +3 "${rootdir}"/deps/cares/LICENSE.md)" addlicense "cjs-module-lexer" "deps/cjs-module-lexer" "$(cat "${rootdir}"/deps/cjs-module-lexer/LICENSE)" if [ -f "${rootdir}/deps/icu/LICENSE" ]; then diff --git a/tools/lint-md.js b/tools/lint-md.js index 411e4553db1228..23a3e76a86f86e 100644 --- a/tools/lint-md.js +++ b/tools/lint-md.js @@ -12,6 +12,7 @@ var tty = require('tty'); var fs$1 = require('fs'); var events = require('events'); var assert = require('assert'); +var require$$0$4 = require('url'); function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } @@ -24,6 +25,7 @@ var tty__default = /*#__PURE__*/_interopDefaultLegacy(tty); var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs$1); var events__default = /*#__PURE__*/_interopDefaultLegacy(events); var assert__default = /*#__PURE__*/_interopDefaultLegacy(assert); +var require$$0__default$1 = /*#__PURE__*/_interopDefaultLegacy(require$$0$4); var vfileStatistics = statistics; @@ -45048,6 +45050,51 @@ function noTrailingSpaces(ast, file) { } } +const { pathToFileURL } = require$$0__default$1['default']; + + + +function* getLinksRecursively(node) { + if (node.url) { + yield node; + } + for (const child of node.children || []) { + yield* getLinksRecursively(child); + } +} + +function validateLinks(tree, vfile) { + const currentFileURL = pathToFileURL(path__default['default'].join(vfile.cwd, vfile.path)); + let previousDefinitionLabel; + for (const node of getLinksRecursively(tree)) { + if (node.url[0] !== "#") { + const targetURL = new URL(node.url, currentFileURL); + if (targetURL.protocol === "file:" && !fs__default['default'].existsSync(targetURL)) { + vfile.message("Broken link", node); + } else if (targetURL.pathname 
=== currentFileURL.pathname) { + const expected = node.url.includes("#") + ? node.url.slice(node.url.indexOf("#")) + : "#"; + vfile.message( + `Self-reference must start with hash (expected "${expected}", got "${node.url}")`, + node + ); + } + } + if (node.type === "definition") { + if (previousDefinitionLabel && previousDefinitionLabel > node.label) { + vfile.message( + `Unordered reference ("${node.label}" should be before "${previousDefinitionLabel}")`, + node + ); + } + previousDefinitionLabel = node.label; + } + } +} + +var remarkLintNodejsLinks = unifiedLintRule("remark-lint:nodejs-links", validateLinks); + function isNothing$1(subject) { return (typeof subject === 'undefined') || (subject === null); } @@ -49632,7 +49679,7 @@ function validateMeta(node, file, meta) { case kWrongKeyOrder: file.message( - "YAML dictionary keys should be respect this order: " + + "YAML dictionary keys should be in this order: " + allowedKeys.join(", "), node ); @@ -50532,6 +50579,7 @@ var plugins$2 = [ remarkLintNoTableIndentation, remarkLintNoTabs, remarkLintNoTrailingSpaces, + remarkLintNodejsLinks, remarkLintNodejsYamlComments, [ remarkLintProhibitedStrings, diff --git a/tools/lint-pr-commit-message.sh b/tools/lint-pr-commit-message.sh deleted file mode 100644 index 443e64469d3b24..00000000000000 --- a/tools/lint-pr-commit-message.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/sh - -# Shell script to lint the message of the first commit in a pull request. -# -# Depends on curl, git, node, npm and npx being in $PATH. 
-# -# The pull request is either: -# 1) supplied as an argument to this shell script -# 2) derived from the HEAD commit via the GitHub API - -GH_API_URL="https://api.github.com" -PR_ID=$1; -if [ -z "${PR_ID}" ]; then - # Attempt to work out the PR number based on current HEAD - if HEAD_COMMIT="$( git rev-parse HEAD )"; then - if SEARCH_RESULTS="$( curl -s "${GH_API_URL}/search/issues?q=sha:${HEAD_COMMIT}+type:pr+repo:nodejs/node" )"; then - if FOUND_PR="$( node -p 'JSON.parse(process.argv[1]).items[0].number' "${SEARCH_RESULTS}" 2> /dev/null )"; then - PR_ID=${FOUND_PR} - fi - fi - fi -fi -if [ -z "${PR_ID}" ]; then - echo "Unable to determine the pull request number to check. Please specify, " - echo " e.g. $0 " - exit 1 -fi - -PATCH=$( curl -sL "https://github.com/nodejs/node/pull/${PR_ID}.patch" | grep '^From ' ) -if FIRST_COMMIT="$( echo "$PATCH" | awk '/^From [0-9a-f]{40} / { if (count++ == 0) print $2 }' )"; then - MESSAGE=$( git show --quiet --format='format:%B' "$FIRST_COMMIT" ) - echo " -*** Linting the first commit message for pull request ${PR_ID} -*** according to the guidelines at https://goo.gl/p2fr5Q. -*** Commit message for $(echo "$FIRST_COMMIT" | cut -c 1-10) is: -${MESSAGE} -" - npx -q core-validate-commit --no-validate-metadata "${FIRST_COMMIT}" -fi diff --git a/tools/make-v8.sh b/tools/make-v8.sh index 79ab02af275aa9..c63cb9ccf77301 100755 --- a/tools/make-v8.sh +++ b/tools/make-v8.sh @@ -7,7 +7,7 @@ V8_BUILD_OPTIONS=$2 cd deps/v8 || exit find . -type d -name .git -print0 | xargs -0 rm -rf -tools/node/fetch_deps.py . +../../tools/v8/fetch_deps.py . 
ARCH="`arch`" if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then diff --git a/tools/node-lint-md-cli-rollup/package-lock.json b/tools/node-lint-md-cli-rollup/package-lock.json index 70cf0ee00459dd..6f13ecea4c2cc9 100644 --- a/tools/node-lint-md-cli-rollup/package-lock.json +++ b/tools/node-lint-md-cli-rollup/package-lock.json @@ -2111,9 +2111,9 @@ } }, "node_modules/remark-preset-lint-node": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-2.3.0.tgz", - "integrity": "sha512-k2whtqbZ0/RRH2ffumvsNEkpKrbrcYLA2K8bZ5bnPeM/jahmJukas0uk9cYXUmsfDLzV6kuJHaO0EAzaYmVvMg==", + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-2.4.1.tgz", + "integrity": "sha512-lNRqQulYuu8ADd4lSNhjOnZck/iDzLVY8oWA9U4BptfHQcDVwFm3bkNDfsVuEA7iJAFJzF7QuuHIcxzPV5VeFg==", "dependencies": { "js-yaml": "^4.0.0", "remark-lint": "^8.0.0", @@ -2147,7 +2147,9 @@ "remark-lint-table-pipes": "^3.0.0", "remark-lint-unordered-list-marker-style": "^2.0.0", "remark-preset-lint-recommended": "^5.0.0", - "semver": "^7.3.2" + "semver": "^7.3.2", + "unified-lint-rule": "^1.0.6", + "unist-util-visit": "^2.0.3" } }, "node_modules/remark-preset-lint-node/node_modules/argparse": { @@ -4226,9 +4228,9 @@ } }, "remark-preset-lint-node": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-2.3.0.tgz", - "integrity": "sha512-k2whtqbZ0/RRH2ffumvsNEkpKrbrcYLA2K8bZ5bnPeM/jahmJukas0uk9cYXUmsfDLzV6kuJHaO0EAzaYmVvMg==", + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-2.4.1.tgz", + "integrity": "sha512-lNRqQulYuu8ADd4lSNhjOnZck/iDzLVY8oWA9U4BptfHQcDVwFm3bkNDfsVuEA7iJAFJzF7QuuHIcxzPV5VeFg==", "requires": { "js-yaml": "^4.0.0", "remark-lint": "^8.0.0", @@ -4262,7 +4264,9 @@ "remark-lint-table-pipes": "^3.0.0", "remark-lint-unordered-list-marker-style": "^2.0.0", 
"remark-preset-lint-recommended": "^5.0.0", - "semver": "^7.3.2" + "semver": "^7.3.2", + "unified-lint-rule": "^1.0.6", + "unist-util-visit": "^2.0.3" }, "dependencies": { "argparse": { diff --git a/tools/node_modules/@babel/core/node_modules/@babel/compat-data/package.json b/tools/node_modules/@babel/core/node_modules/@babel/compat-data/package.json index 06660dcf68c3e4..cf21e99cbcba41 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/compat-data/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/compat-data/package.json @@ -1,6 +1,6 @@ { "name": "@babel/compat-data", - "version": "7.14.5", + "version": "7.14.7", "author": "The Babel Team (https://babel.dev/team)", "license": "MIT", "description": "", @@ -30,7 +30,7 @@ ], "devDependencies": { "@mdn/browser-compat-data": "^3.3.4", - "core-js-compat": "^3.14.0", + "core-js-compat": "^3.15.0", "electron-to-chromium": "^1.3.749" }, "engines": { diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-member-expression-to-functions/lib/index.js b/tools/node_modules/@babel/core/node_modules/@babel/helper-member-expression-to-functions/lib/index.js index 99507086d19725..a281bf3c70a664 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/helper-member-expression-to-functions/lib/index.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-member-expression-to-functions/lib/index.js @@ -411,7 +411,11 @@ const handle = { return; } - member.replaceWith(this.get(member)); + if (parentPath.isTaggedTemplateExpression()) { + member.replaceWith(this.boundGet(member)); + } else { + member.replaceWith(this.get(member)); + } } }; diff --git a/tools/node_modules/@babel/core/node_modules/@babel/helper-member-expression-to-functions/package.json b/tools/node_modules/@babel/core/node_modules/@babel/helper-member-expression-to-functions/package.json index da12274e34a3ba..e27a704c53f876 100644 --- 
a/tools/node_modules/@babel/core/node_modules/@babel/helper-member-expression-to-functions/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/helper-member-expression-to-functions/package.json @@ -1,6 +1,6 @@ { "name": "@babel/helper-member-expression-to-functions", - "version": "7.14.5", + "version": "7.14.7", "description": "Helper function to replace certain member expressions with function calls", "repository": { "type": "git", @@ -18,7 +18,7 @@ "@babel/types": "^7.14.5" }, "devDependencies": { - "@babel/traverse": "7.14.5" + "@babel/traverse": "7.14.7" }, "engines": { "node": ">=6.9.0" diff --git a/tools/node_modules/@babel/core/node_modules/@babel/parser/lib/index.js b/tools/node_modules/@babel/core/node_modules/@babel/parser/lib/index.js index fa9b6fa7aded22..94154ff717d190 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/parser/lib/index.js +++ b/tools/node_modules/@babel/core/node_modules/@babel/parser/lib/index.js @@ -1173,24 +1173,17 @@ class TokContext { } const types = { brace: new TokContext("{"), - templateQuasi: new TokContext("${"), template: new TokContext("`", true) }; types$1.braceR.updateContext = context => { - if (context.length > 1) { - context.pop(); - } + context.pop(); }; -types$1.braceL.updateContext = types$1.braceHashL.updateContext = context => { +types$1.braceL.updateContext = types$1.braceHashL.updateContext = types$1.dollarBraceL.updateContext = context => { context.push(types.brace); }; -types$1.dollarBraceL.updateContext = context => { - context.push(types.templateQuasi); -}; - types$1.backQuote.updateContext = context => { if (context[context.length - 1] === types.template) { context.pop(); @@ -3004,14 +2997,17 @@ var flow = (superClass => class extends superClass { return super.parseExportDefaultExpression(); } - parseConditional(expr, startPos, startLoc, refNeedsArrowPos) { + parseConditional(expr, startPos, startLoc, refExpressionErrors) { if (!this.match(types$1.question)) return expr; - if 
(refNeedsArrowPos) { + if (this.state.maybeInArrowParameters) { const result = this.tryParse(() => super.parseConditional(expr, startPos, startLoc)); if (!result.node) { - refNeedsArrowPos.start = result.error.pos || this.state.start; + if (result.error) { + super.setOptionalParametersError(refExpressionErrors, result.error); + } + return expr; } @@ -3066,7 +3062,7 @@ var flow = (superClass => class extends superClass { this.expect(types$1.colon); node.test = expr; node.consequent = consequent; - node.alternate = this.forwardNoArrowParamsConversionAt(node, () => this.parseMaybeAssign(undefined, undefined, undefined)); + node.alternate = this.forwardNoArrowParamsConversionAt(node, () => this.parseMaybeAssign(undefined, undefined)); return this.finishNode(node, "ConditionalExpression"); } @@ -3710,7 +3706,7 @@ var flow = (superClass => class extends superClass { return this.match(types$1.colon) || super.shouldParseAsyncArrow(); } - parseMaybeAssign(refExpressionErrors, afterLeftParse, refNeedsArrowPos) { + parseMaybeAssign(refExpressionErrors, afterLeftParse) { var _jsx; let state = null; @@ -3718,15 +3714,16 @@ var flow = (superClass => class extends superClass { if (this.hasPlugin("jsx") && (this.match(types$1.jsxTagStart) || this.isRelational("<"))) { state = this.state.clone(); - jsx = this.tryParse(() => super.parseMaybeAssign(refExpressionErrors, afterLeftParse, refNeedsArrowPos), state); + jsx = this.tryParse(() => super.parseMaybeAssign(refExpressionErrors, afterLeftParse), state); if (!jsx.error) return jsx.node; const { context } = this.state; + const curContext = context[context.length - 1]; - if (context[context.length - 1] === types.j_oTag) { + if (curContext === types.j_oTag) { context.length -= 2; - } else if (context[context.length - 1] === types.j_expr) { + } else if (curContext === types.j_expr) { context.length -= 1; } } @@ -3741,7 +3738,7 @@ var flow = (superClass => class extends superClass { typeParameters = 
this.flowParseTypeParameterDeclaration(); const arrowExpression = this.forwardNoArrowParamsConversionAt(typeParameters, () => { - const result = super.parseMaybeAssign(refExpressionErrors, afterLeftParse, refNeedsArrowPos); + const result = super.parseMaybeAssign(refExpressionErrors, afterLeftParse); this.resetStartLocationFromNode(result, typeParameters); return result; }); @@ -3784,7 +3781,7 @@ var flow = (superClass => class extends superClass { throw this.raise(typeParameters.start, FlowErrors.UnexpectedTokenAfterTypeParameter); } - return super.parseMaybeAssign(refExpressionErrors, afterLeftParse, refNeedsArrowPos); + return super.parseMaybeAssign(refExpressionErrors, afterLeftParse); } parseArrow(node) { @@ -4834,8 +4831,7 @@ types$1.jsxTagStart = new TokenType("jsxTagStart", { types$1.jsxTagEnd = new TokenType("jsxTagEnd"); types$1.jsxTagStart.updateContext = context => { - context.push(types.j_expr); - context.push(types.j_oTag); + context.push(types.j_expr, types.j_oTag); }; function isFragment(object) { @@ -5290,19 +5286,8 @@ var jsx = (superClass => class extends superClass { type } = this.state; - if (type === types$1.braceL) { - const curContext = context[context.length - 1]; - - if (curContext === types.j_oTag) { - context.push(types.brace); - } else if (curContext === types.j_expr) { - context.push(types.templateQuasi); - } - - this.state.exprAllowed = true; - } else if (type === types$1.slash && prevType === types$1.jsxTagStart) { - context.length -= 2; - context.push(types.j_cTag); + if (type === types$1.slash && prevType === types$1.jsxTagStart) { + context.splice(-2, 2, types.j_cTag); this.state.exprAllowed = false; } else if (type === types$1.jsxTagEnd) { const out = context.pop(); @@ -7399,15 +7384,18 @@ var typescript = (superClass => class extends superClass { return super.shouldParseExportDeclaration(); } - parseConditional(expr, startPos, startLoc, refNeedsArrowPos) { - if (!refNeedsArrowPos || !this.match(types$1.question)) { - return 
super.parseConditional(expr, startPos, startLoc, refNeedsArrowPos); + parseConditional(expr, startPos, startLoc, refExpressionErrors) { + if (!this.state.maybeInArrowParameters || !this.match(types$1.question)) { + return super.parseConditional(expr, startPos, startLoc, refExpressionErrors); } const result = this.tryParse(() => super.parseConditional(expr, startPos, startLoc)); if (!result.node) { - refNeedsArrowPos.start = result.error.pos || this.state.start; + if (result.error) { + super.setOptionalParametersError(refExpressionErrors, result.error); + } + return expr; } @@ -9986,9 +9974,13 @@ class UtilParser extends Tokenizer { if (!refExpressionErrors) return false; const { shorthandAssign, - doubleProto + doubleProto, + optionalParameters } = refExpressionErrors; - if (!andThrow) return shorthandAssign >= 0 || doubleProto >= 0; + + if (!andThrow) { + return shorthandAssign >= 0 || doubleProto >= 0 || optionalParameters >= 0; + } if (shorthandAssign >= 0) { this.unexpected(shorthandAssign); @@ -9997,6 +9989,10 @@ class UtilParser extends Tokenizer { if (doubleProto >= 0) { this.raise(doubleProto, ErrorMessages.DuplicateProto); } + + if (optionalParameters >= 0) { + this.unexpected(optionalParameters); + } } isLiteralPropertyName() { @@ -10070,6 +10066,7 @@ class ExpressionErrors { constructor() { this.shorthandAssign = -1; this.doubleProto = -1; + this.optionalParameters = -1; } } @@ -10590,15 +10587,21 @@ class ExpressionParser extends LValParser { return expr; } - parseMaybeAssignDisallowIn(refExpressionErrors, afterLeftParse, refNeedsArrowPos) { - return this.disallowInAnd(() => this.parseMaybeAssign(refExpressionErrors, afterLeftParse, refNeedsArrowPos)); + parseMaybeAssignDisallowIn(refExpressionErrors, afterLeftParse) { + return this.disallowInAnd(() => this.parseMaybeAssign(refExpressionErrors, afterLeftParse)); } - parseMaybeAssignAllowIn(refExpressionErrors, afterLeftParse, refNeedsArrowPos) { - return this.allowInAnd(() => 
this.parseMaybeAssign(refExpressionErrors, afterLeftParse, refNeedsArrowPos)); + parseMaybeAssignAllowIn(refExpressionErrors, afterLeftParse) { + return this.allowInAnd(() => this.parseMaybeAssign(refExpressionErrors, afterLeftParse)); } - parseMaybeAssign(refExpressionErrors, afterLeftParse, refNeedsArrowPos) { + setOptionalParametersError(refExpressionErrors, resultError) { + var _resultError$pos; + + refExpressionErrors.optionalParameters = (_resultError$pos = resultError == null ? void 0 : resultError.pos) != null ? _resultError$pos : this.state.start; + } + + parseMaybeAssign(refExpressionErrors, afterLeftParse) { const startPos = this.state.start; const startLoc = this.state.startLoc; @@ -10627,7 +10630,7 @@ class ExpressionParser extends LValParser { this.state.potentialArrowAt = this.state.start; } - let left = this.parseMaybeConditional(refExpressionErrors, refNeedsArrowPos); + let left = this.parseMaybeConditional(refExpressionErrors); if (afterLeftParse) { left = afterLeftParse.call(this, left, startPos, startLoc); @@ -10660,7 +10663,7 @@ class ExpressionParser extends LValParser { return left; } - parseMaybeConditional(refExpressionErrors, refNeedsArrowPos) { + parseMaybeConditional(refExpressionErrors) { const startPos = this.state.start; const startLoc = this.state.startLoc; const potentialArrowAt = this.state.potentialArrowAt; @@ -10670,10 +10673,10 @@ class ExpressionParser extends LValParser { return expr; } - return this.parseConditional(expr, startPos, startLoc, refNeedsArrowPos); + return this.parseConditional(expr, startPos, startLoc, refExpressionErrors); } - parseConditional(expr, startPos, startLoc, refNeedsArrowPos) { + parseConditional(expr, startPos, startLoc, refExpressionErrors) { if (this.eat(types$1.question)) { const node = this.startNodeAt(startPos, startLoc); node.test = expr; @@ -11079,9 +11082,7 @@ class ExpressionParser extends LValParser { } } - elts.push(this.parseExprListItem(false, refExpressionErrors, { - start: 0 - }, 
allowPlaceholder)); + elts.push(this.parseExprListItem(false, refExpressionErrors, allowPlaceholder)); } this.state.inFSharpPipelineDirectBody = oldInFSharpPipelineDirectBody; @@ -11499,9 +11500,6 @@ class ExpressionParser extends LValParser { const innerStartLoc = this.state.startLoc; const exprList = []; const refExpressionErrors = new ExpressionErrors(); - const refNeedsArrowPos = { - start: 0 - }; let first = true; let spreadStart; let optionalCommaStart; @@ -11510,7 +11508,7 @@ class ExpressionParser extends LValParser { if (first) { first = false; } else { - this.expect(types$1.comma, refNeedsArrowPos.start || null); + this.expect(types$1.comma, refExpressionErrors.optionalParameters === -1 ? null : refExpressionErrors.optionalParameters); if (this.match(types$1.parenR)) { optionalCommaStart = this.state.start; @@ -11526,7 +11524,7 @@ class ExpressionParser extends LValParser { this.checkCommaAfterRest(41); break; } else { - exprList.push(this.parseMaybeAssignAllowIn(refExpressionErrors, this.parseParenItem, refNeedsArrowPos)); + exprList.push(this.parseMaybeAssignAllowIn(refExpressionErrors, this.parseParenItem)); } } @@ -11553,7 +11551,6 @@ class ExpressionParser extends LValParser { if (optionalCommaStart) this.unexpected(optionalCommaStart); if (spreadStart) this.unexpected(spreadStart); this.checkExpressionErrors(refExpressionErrors, true); - if (refNeedsArrowPos.start) this.unexpected(refNeedsArrowPos.start); this.toReferencedListDeep(exprList, true); if (exprList.length > 1) { @@ -12052,7 +12049,7 @@ class ExpressionParser extends LValParser { return elts; } - parseExprListItem(allowEmpty, refExpressionErrors, refNeedsArrowPos, allowPlaceholder) { + parseExprListItem(allowEmpty, refExpressionErrors, allowPlaceholder) { let elt; if (this.match(types$1.comma)) { @@ -12064,7 +12061,7 @@ class ExpressionParser extends LValParser { } else if (this.match(types$1.ellipsis)) { const spreadNodeStartPos = this.state.start; const spreadNodeStartLoc = 
this.state.startLoc; - elt = this.parseParenItem(this.parseSpread(refExpressionErrors, refNeedsArrowPos), spreadNodeStartPos, spreadNodeStartLoc); + elt = this.parseParenItem(this.parseSpread(refExpressionErrors), spreadNodeStartPos, spreadNodeStartLoc); } else if (this.match(types$1.question)) { this.expectPlugin("partialApplication"); @@ -12076,7 +12073,7 @@ class ExpressionParser extends LValParser { this.next(); elt = this.finishNode(node, "ArgumentPlaceholder"); } else { - elt = this.parseMaybeAssignAllowIn(refExpressionErrors, this.parseParenItem, refNeedsArrowPos); + elt = this.parseMaybeAssignAllowIn(refExpressionErrors, this.parseParenItem); } return elt; @@ -12105,14 +12102,6 @@ class ExpressionParser extends LValParser { name = this.state.value; } else if (type.keyword) { name = type.keyword; - - if (type === types$1._class || type === types$1._function) { - const curContext = this.curContext(); - - if (curContext === types.functionStatement || curContext === types.functionExpression) { - this.state.context.pop(); - } - } } else { throw this.unexpected(); } diff --git a/tools/node_modules/@babel/core/node_modules/@babel/parser/package.json b/tools/node_modules/@babel/core/node_modules/@babel/parser/package.json index 5a342e78974aba..d38f0daa0af069 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/parser/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/parser/package.json @@ -1,6 +1,6 @@ { "name": "@babel/parser", - "version": "7.14.6", + "version": "7.14.7", "description": "A JavaScript parser", "author": "The Babel Team (https://babel.dev/team)", "homepage": "https://babel.dev/docs/en/next/babel-parser", diff --git a/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/context.js b/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/context.js index b175cdd36111fb..c3a0424aa98c53 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/context.js +++ 
b/tools/node_modules/@babel/core/node_modules/@babel/traverse/lib/context.js @@ -9,8 +9,6 @@ var _path = require("./path"); var t = require("@babel/types"); -const testing = process.env.NODE_ENV === "test"; - class TraversalContext { constructor(scope, opts, state, parentPath) { this.queue = null; @@ -46,10 +44,6 @@ class TraversalContext { } maybeQueue(path, notPriority) { - if (this.trap) { - throw new Error("Infinite cycle detected"); - } - if (this.queue) { if (notPriority) { this.queue.push(path); @@ -96,11 +90,6 @@ class TraversalContext { } if (path.key === null) continue; - - if (testing && queue.length >= 10000) { - this.trap = true; - } - const { node } = path; diff --git a/tools/node_modules/@babel/core/node_modules/@babel/traverse/package.json b/tools/node_modules/@babel/core/node_modules/@babel/traverse/package.json index 03995f89336e28..495c81b22ff26a 100644 --- a/tools/node_modules/@babel/core/node_modules/@babel/traverse/package.json +++ b/tools/node_modules/@babel/core/node_modules/@babel/traverse/package.json @@ -1,6 +1,6 @@ { "name": "@babel/traverse", - "version": "7.14.5", + "version": "7.14.7", "description": "The Babel Traverse module maintains the overall tree state, and is responsible for replacing, removing, and adding nodes", "author": "The Babel Team (https://babel.dev/team)", "homepage": "https://babel.dev/docs/en/next/babel-traverse", @@ -21,7 +21,7 @@ "@babel/helper-function-name": "^7.14.5", "@babel/helper-hoist-variables": "^7.14.5", "@babel/helper-split-export-declaration": "^7.14.5", - "@babel/parser": "^7.14.5", + "@babel/parser": "^7.14.7", "@babel/types": "^7.14.5", "debug": "^4.1.0", "globals": "^11.1.0" diff --git a/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/audio-api.js b/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/audio-api.js index fd2d7793f3ae92..f323341d01caaa 100644 --- a/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/audio-api.js +++ 
b/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/audio-api.js @@ -1 +1 @@ -module.exports={A:{A:{"2":"J D E F A B gB"},B:{"1":"C K L G M N O R S T U V W X Y Z P a H"},C:{"1":"0 1 2 3 4 5 6 7 8 9 i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB YB GB ZB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T iB U V W X Y Z P a H","2":"hB XB I b J D E F A B C K L G M N O c d e f g h jB kB"},D:{"1":"0 1 2 3 4 5 6 7 8 9 r s t u v w x y z AB BB CB DB EB FB YB GB ZB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T U V W X Y Z P a H lB mB nB","2":"I b J D E F A B C K","33":"L G M N O c d e f g h i j k l m n o p q"},E:{"1":"G uB vB","2":"I b oB cB pB","33":"J D E F A B C K L qB rB sB dB VB WB tB"},F:{"1":"0 1 2 3 4 5 6 7 8 9 f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB","2":"F B C wB xB yB zB VB eB 0B WB","33":"G M N O c d e"},G:{"1":"JC","2":"cB 1B fB 2B","33":"E 3B 4B 5B 6B 7B 8B 9B AC BC CC DC EC FC GC HC IC"},H:{"2":"KC"},I:{"1":"H","2":"XB I LC MC NC OC fB PC QC"},J:{"2":"D A"},K:{"1":"Q","2":"A B C VB eB WB"},L:{"1":"H"},M:{"1":"P"},N:{"2":"A B"},O:{"1":"RC"},P:{"1":"I SC TC UC VC WC dB XC YC ZC aC"},Q:{"1":"bC"},R:{"1":"cC"},S:{"1":"dC"}},B:5,C:"Web Audio API"}; +module.exports={A:{A:{"2":"J D E F A B gB"},B:{"1":"C K L G M N O R S T U V W X Y Z P a H"},C:{"1":"0 1 2 3 4 5 6 7 8 9 i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB YB GB ZB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T iB U V W X Y Z P a H","2":"hB XB I b J D E F A B C K L G M N O c d e f g h jB kB"},D:{"1":"0 1 2 3 4 5 6 7 8 9 r s t u v w x y z AB BB CB DB EB FB YB GB ZB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T U V W X Y Z P a H lB mB nB","2":"I b J D E F A B C K","33":"L G M N O c d e f g h i j k l m n o p q"},E:{"1":"G uB vB","2":"I b oB cB pB","33":"J D E F A B C K L qB rB sB dB VB WB tB"},F:{"1":"0 1 2 3 4 5 6 7 8 9 f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB 
FB GB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB","2":"F B C wB xB yB zB VB eB 0B WB","33":"G M N O c d e"},G:{"1":"JC","2":"cB 1B fB 2B","33":"E 3B 4B 5B 6B 7B 8B 9B AC BC CC DC EC FC GC HC IC"},H:{"2":"KC"},I:{"1":"H","2":"XB I LC MC NC OC fB PC QC"},J:{"2":"D A"},K:{"1":"Q","2":"A B C VB eB WB"},L:{"1":"H"},M:{"1":"P"},N:{"2":"A B"},O:{"1":"RC"},P:{"1":"I SC TC UC VC WC dB XC YC ZC aC"},Q:{"1":"bC"},R:{"1":"cC"},S:{"1":"dC"}},B:2,C:"Web Audio API"}; diff --git a/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/css-color-function.js b/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/css-color-function.js index a13a441cb0d692..af2e1b64612d98 100644 --- a/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/css-color-function.js +++ b/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/css-color-function.js @@ -1 +1 @@ -module.exports={A:{A:{"2":"J D E F A B gB"},B:{"2":"C K L G M N O R S T U V W X Y Z P a H"},C:{"2":"0 1 2 3 4 5 6 7 8 9 hB XB I b J D E F A B C K L G M N O c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB YB GB ZB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T iB U V W X Y Z P a H jB kB"},D:{"2":"0 1 2 3 4 5 6 7 8 9 I b J D E F A B C K L G M N O c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB YB GB ZB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T U V W X Y Z P a H lB mB nB"},E:{"1":"B C K L G dB VB WB tB uB vB","2":"I b J D E F A oB cB pB qB rB sB"},F:{"2":"0 1 2 3 4 5 6 7 8 9 F B C G M N O c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB wB xB yB zB VB eB 0B WB"},G:{"1":"9B AC BC CC DC EC FC GC HC IC JC","2":"E cB 1B fB 2B 3B 4B 5B 6B 7B 8B"},H:{"2":"KC"},I:{"2":"XB I H LC MC NC OC fB PC QC"},J:{"2":"D A"},K:{"2":"A B C Q VB eB WB"},L:{"2":"H"},M:{"2":"P"},N:{"2":"A B"},O:{"2":"RC"},P:{"2":"I SC TC UC VC WC dB XC YC ZC 
aC"},Q:{"2":"bC"},R:{"2":"cC"},S:{"2":"dC"}},B:5,C:"CSS color() function"}; +module.exports={A:{A:{"2":"J D E F A B gB"},B:{"2":"C K L G M N O R S T U V W X Y Z P a H"},C:{"2":"0 1 2 3 4 5 6 7 8 9 hB XB I b J D E F A B C K L G M N O c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB YB GB ZB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T iB U V W X Y Z P a H jB kB"},D:{"2":"0 1 2 3 4 5 6 7 8 9 I b J D E F A B C K L G M N O c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB YB GB ZB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T U V W X Y Z P a H lB mB nB"},E:{"1":"G vB","2":"I b J D E F A oB cB pB qB rB sB","132":"B C K L dB VB WB tB uB"},F:{"2":"0 1 2 3 4 5 6 7 8 9 F B C G M N O c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB wB xB yB zB VB eB 0B WB"},G:{"2":"E cB 1B fB 2B 3B 4B 5B 6B 7B 8B","132":"9B AC BC CC DC EC FC GC HC IC JC"},H:{"2":"KC"},I:{"2":"XB I H LC MC NC OC fB PC QC"},J:{"2":"D A"},K:{"2":"A B C Q VB eB WB"},L:{"2":"H"},M:{"2":"P"},N:{"2":"A B"},O:{"2":"RC"},P:{"2":"I SC TC UC VC WC dB XC YC ZC aC"},Q:{"2":"bC"},R:{"2":"cC"},S:{"2":"dC"}},B:5,C:"CSS color() function"}; diff --git a/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/variable-fonts.js b/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/variable-fonts.js index 75a46f7589aa94..042a7d39f23fb3 100644 --- a/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/variable-fonts.js +++ b/tools/node_modules/@babel/core/node_modules/caniuse-lite/data/features/variable-fonts.js @@ -1 +1 @@ -module.exports={A:{A:{"2":"J D E F A B gB"},B:{"1":"N O R S T U V W X Y Z P a H","2":"C K L G M"},C:{"2":"0 1 2 3 4 5 6 7 8 9 hB XB I b J D E F A B C K L G M N O c d e f g h i j k l m n o p q r s t u v w x y z jB kB","4609":"Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB aB bB R S T iB U V W X Y Z P a H","4674":"ZB","5698":"GB","7490":"AB BB CB 
DB EB","7746":"FB YB"},D:{"1":"LB MB NB OB PB QB RB SB TB UB aB bB R S T U V W X Y Z P a H lB mB nB","2":"0 1 2 3 4 5 6 7 8 9 I b J D E F A B C K L G M N O c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB","4097":"KB","4290":"YB GB ZB","6148":"Q HB IB JB"},E:{"1":"G vB","2":"I b J D E F A oB cB pB qB rB sB dB","4609":"B C VB WB","8193":"K L tB uB"},F:{"1":"BB CB DB EB FB GB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB","2":"0 1 2 3 4 5 F B C G M N O c d e f g h i j k l m n o p q r s t u v w x y z wB xB yB zB VB eB 0B WB","4097":"AB","6148":"6 7 8 9"},G:{"1":"EC FC GC HC IC JC","2":"E cB 1B fB 2B 3B 4B 5B 6B 7B 8B 9B","4097":"AC BC CC DC"},H:{"2":"KC"},I:{"1":"H","2":"XB I LC MC NC OC fB PC QC"},J:{"2":"D A"},K:{"1":"Q","2":"A B C VB eB WB"},L:{"1":"H"},M:{"4097":"P"},N:{"2":"A B"},O:{"2":"RC"},P:{"2":"I SC TC UC","4097":"VC WC dB XC YC ZC aC"},Q:{"4097":"bC"},R:{"2":"cC"},S:{"2":"dC"}},B:5,C:"Variable fonts"}; +module.exports={A:{A:{"2":"J D E F A B gB"},B:{"1":"N O R S T U V W X Y Z P a H","2":"C K L G M"},C:{"2":"0 1 2 3 4 5 6 7 8 9 hB XB I b J D E F A B C K L G M N O c d e f g h i j k l m n o p q r s t u v w x y z jB kB","4609":"Q HB IB JB KB LB MB NB OB","4674":"ZB","5698":"GB","7490":"AB BB CB DB EB","7746":"FB YB","8705":"PB QB RB SB TB UB aB bB R S T iB U V W X Y Z P a H"},D:{"1":"LB MB NB OB PB QB RB SB TB UB aB bB R S T U V W X Y Z P a H lB mB nB","2":"0 1 2 3 4 5 6 7 8 9 I b J D E F A B C K L G M N O c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB","4097":"KB","4290":"YB GB ZB","6148":"Q HB IB JB"},E:{"1":"G vB","2":"I b J D E F A oB cB pB qB rB sB dB","4609":"B C VB WB","8193":"K L tB uB"},F:{"1":"BB CB DB EB FB GB Q HB IB JB KB LB MB NB OB PB QB RB SB TB UB","2":"0 1 2 3 4 5 F B C G M N O c d e f g h i j k l m n o p q r s t u v w x y z wB xB yB zB VB eB 0B WB","4097":"AB","6148":"6 7 8 9"},G:{"1":"EC FC GC HC IC JC","2":"E cB 1B fB 2B 3B 4B 5B 6B 7B 8B 9B","4097":"AC BC CC DC"},H:{"2":"KC"},I:{"1":"H","2":"XB I LC MC NC 
OC fB PC QC"},J:{"2":"D A"},K:{"1":"Q","2":"A B C VB eB WB"},L:{"1":"H"},M:{"4097":"P"},N:{"2":"A B"},O:{"2":"RC"},P:{"2":"I SC TC UC","4097":"VC WC dB XC YC ZC aC"},Q:{"4097":"bC"},R:{"2":"cC"},S:{"2":"dC"}},B:5,C:"Variable fonts"}; diff --git a/tools/node_modules/@babel/core/node_modules/caniuse-lite/package.json b/tools/node_modules/@babel/core/node_modules/caniuse-lite/package.json index f7a920c729c3e5..ffeeb92afdba72 100644 --- a/tools/node_modules/@babel/core/node_modules/caniuse-lite/package.json +++ b/tools/node_modules/@babel/core/node_modules/caniuse-lite/package.json @@ -1,6 +1,6 @@ { "name": "caniuse-lite", - "version": "1.0.30001238", + "version": "1.0.30001240", "description": "A smaller version of caniuse-db, with only the essentials!", "main": "dist/unpacker/index.js", "files": [ diff --git a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/chromium-versions.js b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/chromium-versions.js index 7d2c06f6a597ed..382e7c87778f5b 100644 --- a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/chromium-versions.js +++ b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/chromium-versions.js @@ -34,5 +34,6 @@ module.exports = { "89": "12.0", "90": "13.0", "91": "13.0", - "92": "14.0" + "92": "14.0", + "93": "14.0" }; \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/full-chromium-versions.js b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/full-chromium-versions.js index a9dce19d68550c..27519e331f1648 100644 --- a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/full-chromium-versions.js +++ b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/full-chromium-versions.js @@ -1124,7 +1124,8 @@ module.exports = { "11.4.5", "11.4.6", "11.4.7", - "11.4.8" + "11.4.8", + "11.4.9" ], "89.0.4328.0": [ "12.0.0-beta.1", @@ -1221,7 +1222,8 @@ module.exports = { 
"12.0.8", "12.0.9", "12.0.10", - "12.0.11" + "12.0.11", + "12.0.12" ], "90.0.4402.0": [ "13.0.0-beta.2", @@ -1332,6 +1334,10 @@ module.exports = { "13.1.1", "13.1.2" ], + "91.0.4472.106": [ + "13.1.3", + "13.1.4" + ], "92.0.4511.0": [ "14.0.0-beta.1", "14.0.0-beta.2", @@ -1345,6 +1351,25 @@ module.exports = { "15.0.0-nightly.20210601", "15.0.0-nightly.20210602" ], + "93.0.4536.0": [ + "14.0.0-beta.5", + "14.0.0-beta.6", + "14.0.0-beta.7", + "14.0.0-beta.8", + "15.0.0-nightly.20210609", + "15.0.0-nightly.20210610", + "15.0.0-nightly.20210611", + "15.0.0-nightly.20210614", + "15.0.0-nightly.20210615", + "15.0.0-nightly.20210616" + ], + "93.0.4539.0": [ + "14.0.0-beta.9", + "15.0.0-nightly.20210617", + "15.0.0-nightly.20210618", + "15.0.0-nightly.20210621", + "15.0.0-nightly.20210622" + ], "92.0.4475.0": [ "14.0.0-nightly.20210426", "14.0.0-nightly.20210427" @@ -1379,7 +1404,11 @@ module.exports = { "93.0.4535.0": [ "15.0.0-nightly.20210608" ], - "93.0.4536.0": [ - "15.0.0-nightly.20210609" + "93.0.4550.0": [ + "15.0.0-nightly.20210623", + "15.0.0-nightly.20210624" + ], + "93.0.4552.0": [ + "15.0.0-nightly.20210625" ] }; \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/full-versions.js b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/full-versions.js index a29e6e01be0f8d..072e9e6f5bcc94 100644 --- a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/full-versions.js +++ b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/full-versions.js @@ -792,6 +792,7 @@ module.exports = { "11.4.6": "87.0.4280.141", "11.4.7": "87.0.4280.141", "11.4.8": "87.0.4280.141", + "11.4.9": "87.0.4280.141", "12.0.0-beta.1": "89.0.4328.0", "12.0.0-beta.3": "89.0.4328.0", "12.0.0-beta.4": "89.0.4328.0", @@ -853,6 +854,7 @@ module.exports = { "12.0.9": "89.0.4389.128", "12.0.10": "89.0.4389.128", "12.0.11": "89.0.4389.128", + "12.0.12": "89.0.4389.128", "13.0.0-beta.2": "90.0.4402.0", 
"13.0.0-beta.3": "90.0.4402.0", "13.0.0-beta.4": "90.0.4415.0", @@ -932,9 +934,16 @@ module.exports = { "13.1.0": "91.0.4472.77", "13.1.1": "91.0.4472.77", "13.1.2": "91.0.4472.77", + "13.1.3": "91.0.4472.106", + "13.1.4": "91.0.4472.106", "14.0.0-beta.1": "92.0.4511.0", "14.0.0-beta.2": "92.0.4511.0", "14.0.0-beta.3": "92.0.4511.0", + "14.0.0-beta.5": "93.0.4536.0", + "14.0.0-beta.6": "93.0.4536.0", + "14.0.0-beta.7": "93.0.4536.0", + "14.0.0-beta.8": "93.0.4536.0", + "14.0.0-beta.9": "93.0.4539.0", "14.0.0-nightly.20210304": "90.0.4402.0", "14.0.0-nightly.20210305": "90.0.4415.0", "14.0.0-nightly.20210308": "90.0.4415.0", @@ -985,5 +994,17 @@ module.exports = { "15.0.0-nightly.20210603": "93.0.4530.0", "15.0.0-nightly.20210604": "93.0.4530.0", "15.0.0-nightly.20210608": "93.0.4535.0", - "15.0.0-nightly.20210609": "93.0.4536.0" + "15.0.0-nightly.20210609": "93.0.4536.0", + "15.0.0-nightly.20210610": "93.0.4536.0", + "15.0.0-nightly.20210611": "93.0.4536.0", + "15.0.0-nightly.20210614": "93.0.4536.0", + "15.0.0-nightly.20210615": "93.0.4536.0", + "15.0.0-nightly.20210616": "93.0.4536.0", + "15.0.0-nightly.20210617": "93.0.4539.0", + "15.0.0-nightly.20210618": "93.0.4539.0", + "15.0.0-nightly.20210621": "93.0.4539.0", + "15.0.0-nightly.20210622": "93.0.4539.0", + "15.0.0-nightly.20210623": "93.0.4550.0", + "15.0.0-nightly.20210624": "93.0.4550.0", + "15.0.0-nightly.20210625": "93.0.4552.0" }; \ No newline at end of file diff --git a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/package.json b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/package.json index eddb7fe7e0950b..06fd111a772f80 100644 --- a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/package.json +++ b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/package.json @@ -1,6 +1,6 @@ { "name": "electron-to-chromium", - "version": "1.3.752", + "version": "1.3.759", "description": "Provides a list of electron-to-chromium version mappings", 
"main": "index.js", "files": [ @@ -30,7 +30,7 @@ "devDependencies": { "ava": "^3.8.2", "codecov": "^3.8.0", - "electron-releases": "^3.704.0", + "electron-releases": "^3.712.0", "nyc": "^15.1.0", "request": "^2.88.0", "shelljs": "^0.8.4" diff --git a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/versions.js b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/versions.js index 4914b18a2a9ff0..75fe0384fa7d07 100644 --- a/tools/node_modules/@babel/core/node_modules/electron-to-chromium/versions.js +++ b/tools/node_modules/@babel/core/node_modules/electron-to-chromium/versions.js @@ -64,5 +64,5 @@ module.exports = { "12.0": "89", "13.0": "91", "13.1": "91", - "14.0": "92" + "14.0": "93" }; \ No newline at end of file diff --git a/tools/node_modules/@babel/eslint-parser/lib/configuration.cjs b/tools/node_modules/@babel/eslint-parser/lib/configuration.cjs index a84d422dc6571b..6db4d7187acbe2 100644 --- a/tools/node_modules/@babel/eslint-parser/lib/configuration.cjs +++ b/tools/node_modules/@babel/eslint-parser/lib/configuration.cjs @@ -1,3 +1,5 @@ +const _excluded = ["babelOptions", "ecmaVersion", "sourceType", "allowImportExportEverywhere", "requireConfigFile"]; + function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; } exports.normalizeESLintConfig = function (options) { @@ -8,7 +10,7 @@ exports.normalizeESLintConfig = function (options) { allowImportExportEverywhere = false, requireConfigFile = true } = options, - otherOptions = _objectWithoutPropertiesLoose(options, ["babelOptions", "ecmaVersion", "sourceType", "allowImportExportEverywhere", "requireConfigFile"]); + otherOptions = _objectWithoutPropertiesLoose(options, _excluded); return Object.assign({ babelOptions, diff --git 
a/tools/node_modules/@babel/eslint-parser/lib/convert/convertTokens.cjs b/tools/node_modules/@babel/eslint-parser/lib/convert/convertTokens.cjs index dc25b951e7667b..31984ddd882e48 100644 --- a/tools/node_modules/@babel/eslint-parser/lib/convert/convertTokens.cjs +++ b/tools/node_modules/@babel/eslint-parser/lib/convert/convertTokens.cjs @@ -96,7 +96,7 @@ function convertToken(token, source, tl) { if (label === tl.name) { token.type = "Identifier"; - } else if (label === tl.semi || label === tl.comma || label === tl.parenL || label === tl.parenR || label === tl.braceL || label === tl.braceR || label === tl.slash || label === tl.dot || label === tl.bracketL || label === tl.bracketR || label === tl.ellipsis || label === tl.arrow || label === tl.pipeline || label === tl.star || label === tl.incDec || label === tl.colon || label === tl.question || label === tl.template || label === tl.backQuote || label === tl.dollarBraceL || label === tl.at || label === tl.logicalOR || label === tl.logicalAND || label === tl.nullishCoalescing || label === tl.bitwiseOR || label === tl.bitwiseXOR || label === tl.bitwiseAND || label === tl.equality || label === tl.relational || label === tl.bitShift || label === tl.plusMin || label === tl.modulo || label === tl.exponent || label === tl.bang || label === tl.tilde || label === tl.doubleColon || label === tl.hash || label === tl.questionDot || type.isAssign) { + } else if (label === tl.semi || label === tl.comma || label === tl.parenL || label === tl.parenR || label === tl.braceL || label === tl.braceR || label === tl.slash || label === tl.dot || label === tl.bracketL || label === tl.bracketR || label === tl.ellipsis || label === tl.arrow || label === tl.pipeline || label === tl.star || label === tl.incDec || label === tl.colon || label === tl.question || label === tl.template || label === tl.backQuote || label === tl.dollarBraceL || label === tl.at || label === tl.logicalOR || label === tl.logicalAND || label === tl.nullishCoalescing || 
label === tl.bitwiseOR || label === tl.bitwiseXOR || label === tl.bitwiseAND || label === tl.equality || label === tl.relational || label === tl.bitShift || label === tl.plusMin || label === tl.modulo || label === tl.exponent || label === tl.bang || label === tl.tilde || label === tl.doubleColon || label === tl.hash || label === tl.questionDot || label === tl.braceHashL || label === tl.braceBarL || label === tl.braceBarR || label === tl.bracketHashL || label === tl.bracketBarL || label === tl.bracketBarR || type.isAssign) { var _token$value; token.type = "Punctuator"; diff --git a/tools/node_modules/@babel/eslint-parser/lib/index.cjs b/tools/node_modules/@babel/eslint-parser/lib/index.cjs index bc8ac8017ffbb5..6098ac755ac388 100644 --- a/tools/node_modules/@babel/eslint-parser/lib/index.cjs +++ b/tools/node_modules/@babel/eslint-parser/lib/index.cjs @@ -38,7 +38,7 @@ function baseParse(code, options) { } if (!isRunningMinSupportedCoreVersion) { - throw new Error(`@babel/eslint-parser@${"7.14.5"} does not support @babel/core@${getVersion()}. Please upgrade to @babel/core@${minSupportedCoreVersion}.`); + throw new Error(`@babel/eslint-parser@${"7.14.7"} does not support @babel/core@${getVersion()}. 
Please upgrade to @babel/core@${minSupportedCoreVersion}.`); } const { diff --git a/tools/node_modules/@babel/eslint-parser/package.json b/tools/node_modules/@babel/eslint-parser/package.json index 410985bd3d7058..75bb04fc82ee45 100644 --- a/tools/node_modules/@babel/eslint-parser/package.json +++ b/tools/node_modules/@babel/eslint-parser/package.json @@ -1,6 +1,6 @@ { "name": "@babel/eslint-parser", - "version": "7.14.5", + "version": "7.14.7", "description": "ESLint parser that allows for linting of experimental syntax transformed by Babel", "author": "The Babel Team (https://babel.dev/team)", "license": "MIT", @@ -35,7 +35,7 @@ "semver": "^6.3.0" }, "devDependencies": { - "@babel/core": "7.14.5", + "@babel/core": "7.14.6", "dedent": "^0.7.0", "eslint": "^7.27.0" } diff --git a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE old mode 100644 new mode 100755 index 4b1ad51b2f0efc..9e841e7a26e4eb --- a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE +++ b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE @@ -1,21 +1,21 @@ - MIT License - - Copyright (c) Microsoft Corporation. All rights reserved. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. 
- - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md old mode 100644 new mode 100755 index a15402a4524206..78a310d17c9fab --- a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md +++ b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md @@ -2,15 +2,117 @@ > `npm install --save @types/unist` # Summary -This package contains type definitions for non-npm package Unist ( https://github.com/syntax-tree/unist ). +This package contains type definitions for Unist (https://github.com/syntax-tree/unist). # Details -Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist. +## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist/index.d.ts) +````ts +// Type definitions for non-npm package Unist 2.0 +// Project: https://github.com/syntax-tree/unist +// Definitions by: bizen241 +// Jun Lu +// Hernan Rajchert +// Titus Wormer +// Junyoung Choi +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// TypeScript Version: 3.0 + +/** + * Syntactic units in unist syntax trees are called nodes. + */ +export interface Node { + /** + * The variant of a node. + */ + type: string; + + /** + * Information from the ecosystem. + */ + data?: Data | undefined; + + /** + * Location of a node in a source document. + * Must not be present if a node is generated. 
+ */ + position?: Position | undefined; + + [key: string]: unknown; +} + +/** + * Information associated by the ecosystem with the node. + * Space is guaranteed to never be specified by unist or specifications + * implementing unist. + */ +export interface Data { + [key: string]: unknown; +} + +/** + * Location of a node in a source file. + */ +export interface Position { + /** + * Place of the first character of the parsed source region. + */ + start: Point; + + /** + * Place of the first character after the parsed source region. + */ + end: Point; + + /** + * Start column at each index (plus start line) in the source region, + * for elements that span multiple lines. + */ + indent?: number[] | undefined; +} + +/** + * One place in a source file. + */ +export interface Point { + /** + * Line in a source file (1-indexed integer). + */ + line: number; + + /** + * Column in a source file (1-indexed integer). + */ + column: number; + /** + * Character in a source file (0-indexed integer). + */ + offset?: number | undefined; +} + +/** + * Nodes containing other nodes. + */ +export interface Parent extends Node { + /** + * List representing the children of a node. + */ + children: Node[]; +} + +/** + * Nodes containing a value. + */ +export interface Literal extends Node { + value: unknown; +} -Additional Details - * Last updated: Thu, 14 Feb 2019 18:10:46 GMT +```` + +### Additional Details + * Last updated: Fri, 02 Jul 2021 18:04:49 GMT * Dependencies: none * Global values: none # Credits -These definitions were written by bizen241 , Jun Lu , Hernan Rajchert , Titus Wormer , Junyoung Choi . +These definitions were written by [bizen241](https://github.com/bizen241), [Jun Lu](https://github.com/lujun2), [Hernan Rajchert](https://github.com/hrajchert), [Titus Wormer](https://github.com/wooorm), and [Junyoung Choi](https://github.com/rokt33r). 
diff --git a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json old mode 100644 new mode 100755 index 78fa62811fa768..d4d18b7cbe9b59 --- a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json +++ b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json @@ -1,7 +1,8 @@ { "name": "@types/unist", - "version": "2.0.3", - "description": "TypeScript definitions for non-npm package Unist", + "version": "2.0.4", + "description": "TypeScript definitions for Unist", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist", "license": "MIT", "contributors": [ { @@ -31,13 +32,14 @@ } ], "main": "", - "types": "index", + "types": "index.d.ts", "repository": { "type": "git", - "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git" + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/unist" }, "scripts": {}, "dependencies": {}, - "typesPublisherContentHash": "555fe20f164ccded02a3f69d8b45c8c9d2ec6fd53844a7c7858a3001c281bc9b", - "typeScriptVersion": "3.0" + "typesPublisherContentHash": "373f3a8c09fdf9fa50470b9d6b720dbe014e0fe93cb797a34481c4231e8fab59", + "typeScriptVersion": "3.6" } \ No newline at end of file diff --git a/tools/node_modules/eslint/README.md b/tools/node_modules/eslint/README.md index 2202900d1940fc..c87d07e0dcd1ac 100644 --- a/tools/node_modules/eslint/README.md +++ b/tools/node_modules/eslint/README.md @@ -268,6 +268,11 @@ Anix
    YeonJuan + + +
    +Nitin Kumar +
    diff --git a/tools/node_modules/eslint/lib/config/default-config.js b/tools/node_modules/eslint/lib/config/default-config.js new file mode 100644 index 00000000000000..cb6f403380dfe1 --- /dev/null +++ b/tools/node_modules/eslint/lib/config/default-config.js @@ -0,0 +1,52 @@ +/** + * @fileoverview Default configuration + * @author Nicholas C. Zakas + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const Rules = require("../rules"); + +//----------------------------------------------------------------------------- +// Helpers +//----------------------------------------------------------------------------- + + +exports.defaultConfig = [ + { + plugins: { + "@": { + parsers: { + espree: require("espree") + }, + + /* + * Because we try to delay loading rules until absolutely + * necessary, a proxy allows us to hook into the lazy-loading + * aspect of the rules map while still keeping all of the + * relevant configuration inside of the config array. + */ + rules: new Proxy({}, { + get(target, property) { + return Rules.get(property); + }, + + has(target, property) { + return Rules.has(property); + } + }) + } + }, + ignores: [ + "**/node_modules/**", + ".git/**" + ], + languageOptions: { + parser: "@/espree" + } + } +]; diff --git a/tools/node_modules/eslint/lib/config/flat-config-array.js b/tools/node_modules/eslint/lib/config/flat-config-array.js new file mode 100644 index 00000000000000..ecf396a3314c18 --- /dev/null +++ b/tools/node_modules/eslint/lib/config/flat-config-array.js @@ -0,0 +1,125 @@ +/** + * @fileoverview Flat Config Array + * @author Nicholas C. 
Zakas + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const { ConfigArray, ConfigArraySymbol } = require("@humanwhocodes/config-array"); +const { flatConfigSchema } = require("./flat-config-schema"); +const { RuleValidator } = require("./rule-validator"); +const { defaultConfig } = require("./default-config"); +const recommendedConfig = require("../../conf/eslint-recommended"); +const allConfig = require("../../conf/eslint-all"); + +//----------------------------------------------------------------------------- +// Helpers +//----------------------------------------------------------------------------- + +const ruleValidator = new RuleValidator(); + +/** + * Splits a plugin identifier in the form a/b/c into two parts: a/b and c. + * @param {string} identifier The identifier to parse. + * @returns {{objectName: string, pluginName: string}} The parts of the plugin + * name. + */ +function splitPluginIdentifier(identifier) { + const parts = identifier.split("/"); + + return { + objectName: parts.pop(), + pluginName: parts.join("/") + }; +} + +//----------------------------------------------------------------------------- +// Exports +//----------------------------------------------------------------------------- + +/** + * Represents an array containing configuration information for ESLint. + */ +class FlatConfigArray extends ConfigArray { + + /** + * Creates a new instance. + * @param {*[]} configs An array of configuration information. + * @param {{basePath: string, baseConfig: FlatConfig}} options The options + * to use for the config array instance. 
+ */ + constructor(configs, { basePath, baseConfig = defaultConfig }) { + super(configs, { + basePath, + schema: flatConfigSchema + }); + + this.unshift(baseConfig); + } + + /* eslint-disable class-methods-use-this */ + /** + * Replaces a config with another config to allow us to put strings + * in the config array that will be replaced by objects before + * normalization. + * @param {Object} config The config to preprocess. + * @returns {Object} The preprocessed config. + */ + [ConfigArraySymbol.preprocessConfig](config) { + if (config === "eslint:recommended") { + return recommendedConfig; + } + + if (config === "eslint:all") { + return allConfig; + } + + return config; + } + + /** + * Finalizes the config by replacing plugin references with their objects + * and validating rule option schemas. + * @param {Object} config The config to finalize. + * @returns {Object} The finalized config. + * @throws {TypeError} If the config is invalid. + */ + [ConfigArraySymbol.finalizeConfig](config) { + + const { plugins, languageOptions, processor } = config; + + // Check parser value + if (languageOptions && languageOptions.parser && typeof languageOptions.parser === "string") { + const { pluginName, objectName: parserName } = splitPluginIdentifier(languageOptions.parser); + + if (!plugins || !plugins[pluginName] || !plugins[pluginName].parsers || !plugins[pluginName].parsers[parserName]) { + throw new TypeError(`Key "parser": Could not find "${parserName}" in plugin "${pluginName}".`); + } + + languageOptions.parser = plugins[pluginName].parsers[parserName]; + } + + // Check processor value + if (processor && typeof processor === "string") { + const { pluginName, objectName: processorName } = splitPluginIdentifier(processor); + + if (!plugins || !plugins[pluginName] || !plugins[pluginName].processors || !plugins[pluginName].processors[processorName]) { + throw new TypeError(`Key "processor": Could not find "${processorName}" in plugin "${pluginName}".`); + } + + 
config.processor = plugins[pluginName].processors[processorName]; + } + + ruleValidator.validate(config); + + return config; + } + /* eslint-enable class-methods-use-this */ + +} + +exports.FlatConfigArray = FlatConfigArray; diff --git a/tools/node_modules/eslint/lib/config/flat-config-schema.js b/tools/node_modules/eslint/lib/config/flat-config-schema.js new file mode 100644 index 00000000000000..8078547613352c --- /dev/null +++ b/tools/node_modules/eslint/lib/config/flat-config-schema.js @@ -0,0 +1,452 @@ +/** + * @fileoverview Flat config schema + * @author Nicholas C. Zakas + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Type Definitions +//----------------------------------------------------------------------------- + +/** + * @typedef ObjectPropertySchema + * @property {Function|string} merge The function or name of the function to call + * to merge multiple objects with this property. + * @property {Function|string} validate The function or name of the function to call + * to validate the value of this property. + */ + +//----------------------------------------------------------------------------- +// Helpers +//----------------------------------------------------------------------------- + +const ruleSeverities = new Map([ + [0, 0], ["off", 0], + [1, 1], ["warn", 1], + [2, 2], ["error", 2] +]); + +const globalVariablesValues = new Set([ + true, "true", "writable", "writeable", + false, "false", "readonly", "readable", null, + "off" +]); + +/** + * Check if a value is a non-null object. + * @param {any} value The value to check. + * @returns {boolean} `true` if the value is a non-null object. + */ +function isNonNullObject(value) { + return typeof value === "object" && value !== null; +} + +/** + * Check if a value is undefined. + * @param {any} value The value to check. + * @returns {boolean} `true` if the value is undefined. 
+ */ +function isUndefined(value) { + return typeof value === "undefined"; +} + +/** + * Deeply merges two objects. + * @param {Object} first The base object. + * @param {Object} second The overrides object. + * @returns {Object} An object with properties from both first and second. + */ +function deepMerge(first = {}, second = {}) { + + /* + * If the second value is an array, just return it. We don't merge + * arrays because order matters and we can't know the correct order. + */ + if (Array.isArray(second)) { + return second; + } + + /* + * First create a result object where properties from the second object + * overwrite properties from the first. This sets up a baseline to use + * later rather than needing to inspect and change every property + * individually. + */ + const result = { + ...first, + ...second + }; + + for (const key of Object.keys(second)) { + + // avoid hairy edge case + if (key === "__proto__") { + continue; + } + + const firstValue = first[key]; + const secondValue = second[key]; + + if (isNonNullObject(firstValue)) { + result[key] = deepMerge(firstValue, secondValue); + } else if (isUndefined(firstValue)) { + if (isNonNullObject(secondValue)) { + result[key] = deepMerge( + Array.isArray(secondValue) ? [] : {}, + secondValue + ); + } else if (!isUndefined(secondValue)) { + result[key] = secondValue; + } + } + } + + return result; + +} + +/** + * Normalizes the rule options config for a given rule by ensuring that + * it is an array and that the first item is 0, 1, or 2. + * @param {Array|string|number} ruleOptions The rule options config. + * @returns {Array} An array of rule options. + */ +function normalizeRuleOptions(ruleOptions) { + + const finalOptions = Array.isArray(ruleOptions) + ? 
ruleOptions.slice(0) + : [ruleOptions]; + + finalOptions[0] = ruleSeverities.get(finalOptions[0]); + return finalOptions; +} + +//----------------------------------------------------------------------------- +// Assertions +//----------------------------------------------------------------------------- + +/** + * Validates that a value is a valid rule options entry. + * @param {any} value The value to check. + * @returns {void} + * @throws {TypeError} If the value isn't a valid rule options. + */ +function assertIsRuleOptions(value) { + + if (typeof value !== "string" && typeof value !== "number" && !Array.isArray(value)) { + throw new TypeError("Expected a string, number, or array."); + } +} + +/** + * Validates that a value is valid rule severity. + * @param {any} value The value to check. + * @returns {void} + * @throws {TypeError} If the value isn't a valid rule severity. + */ +function assertIsRuleSeverity(value) { + const severity = typeof value === "string" + ? ruleSeverities.get(value.toLowerCase()) + : ruleSeverities.get(value); + + if (typeof severity === "undefined") { + throw new TypeError("Expected severity of \"off\", 0, \"warn\", 1, \"error\", or 2."); + } +} + +/** + * Validates that a given string is the form pluginName/objectName. + * @param {string} value The string to check. + * @returns {void} + * @throws {TypeError} If the string isn't in the correct format. + */ +function assertIsPluginMemberName(value) { + if (!/[@a-z0-9-_$]+(?:\/(?:[a-z0-9-_$]+))+$/iu.test(value)) { + throw new TypeError(`Expected string in the form "pluginName/objectName" but found "${value}".`); + } +} + +/** + * Validates that a value is an object. + * @param {any} value The value to check. + * @returns {void} + * @throws {TypeError} If the value isn't an object. + */ +function assertIsObject(value) { + if (!isNonNullObject(value)) { + throw new TypeError("Expected an object."); + } +} + +/** + * Validates that a value is an object or a string. 
+ * @param {any} value The value to check. + * @returns {void} + * @throws {TypeError} If the value isn't an object or a string. + */ +function assertIsObjectOrString(value) { + if ((!value || typeof value !== "object") && typeof value !== "string") { + throw new TypeError("Expected an object or string."); + } +} + +//----------------------------------------------------------------------------- +// Low-Level Schemas +//----------------------------------------------------------------------------- + + +/** @type {ObjectPropertySchema} */ +const numberSchema = { + merge: "replace", + validate: "number" +}; + +/** @type {ObjectPropertySchema} */ +const booleanSchema = { + merge: "replace", + validate: "boolean" +}; + +/** @type {ObjectPropertySchema} */ +const deepObjectAssignSchema = { + merge(first = {}, second = {}) { + return deepMerge(first, second); + }, + validate: "object" +}; + +//----------------------------------------------------------------------------- +// High-Level Schemas +//----------------------------------------------------------------------------- + +/** @type {ObjectPropertySchema} */ +const globalsSchema = { + merge: "assign", + validate(value) { + + assertIsObject(value); + + for (const key of Object.keys(value)) { + + // avoid hairy edge case + if (key === "__proto__") { + continue; + } + + if (key !== key.trim()) { + throw new TypeError(`Global "${key}" has leading or trailing whitespace.`); + } + + if (!globalVariablesValues.has(value[key])) { + throw new TypeError(`Key "${key}": Expected "readonly", "writable", or "off".`); + } + } + } +}; + +/** @type {ObjectPropertySchema} */ +const parserSchema = { + merge: "replace", + validate(value) { + assertIsObjectOrString(value); + + if (typeof value === "object" && typeof value.parse !== "function" && typeof value.parseForESLint !== "function") { + throw new TypeError("Expected object to have a parse() or parseForESLint() method."); + } + + if (typeof value === "string") { + 
assertIsPluginMemberName(value); + } + } +}; + +/** @type {ObjectPropertySchema} */ +const pluginsSchema = { + merge(first = {}, second = {}) { + const keys = new Set([...Object.keys(first), ...Object.keys(second)]); + const result = {}; + + // manually validate that plugins are not redefined + for (const key of keys) { + + // avoid hairy edge case + if (key === "__proto__") { + continue; + } + + if (key in first && key in second && first[key] !== second[key]) { + throw new TypeError(`Cannot redefine plugin "${key}".`); + } + + result[key] = second[key] || first[key]; + } + + return result; + }, + validate(value) { + + // first check the value to be sure it's an object + if (value === null || typeof value !== "object") { + throw new TypeError("Expected an object."); + } + + // second check the keys to make sure they are objects + for (const key of Object.keys(value)) { + + // avoid hairy edge case + if (key === "__proto__") { + continue; + } + + if (value[key] === null || typeof value[key] !== "object") { + throw new TypeError(`Key "${key}": Expected an object.`); + } + } + } +}; + +/** @type {ObjectPropertySchema} */ +const processorSchema = { + merge: "replace", + validate(value) { + if (typeof value === "string") { + assertIsPluginMemberName(value); + } else if (value && typeof value === "object") { + if (typeof value.preprocess !== "function" || typeof value.postprocess !== "function") { + throw new TypeError("Object must have a preprocess() and a postprocess() method."); + } + } else { + throw new TypeError("Expected an object or a string."); + } + } +}; + +/** @type {ObjectPropertySchema} */ +const rulesSchema = { + merge(first = {}, second = {}) { + + const result = { + ...first, + ...second + }; + + for (const ruleId of Object.keys(result)) { + + // avoid hairy edge case + if (ruleId === "__proto__") { + + /* eslint-disable-next-line no-proto */ + delete result.__proto__; + continue; + } + + result[ruleId] = normalizeRuleOptions(result[ruleId]); + + /* + * 
If either rule config is missing, then the correct + * config is already present and we just need to normalize + * the severity. + */ + if (!(ruleId in first) || !(ruleId in second)) { + continue; + } + + const firstRuleOptions = normalizeRuleOptions(first[ruleId]); + const secondRuleOptions = normalizeRuleOptions(second[ruleId]); + + /* + * If the second rule config only has a severity (length of 1), + * then use that severity and keep the rest of the options from + * the first rule config. + */ + if (secondRuleOptions.length === 1) { + result[ruleId] = [secondRuleOptions[0], ...firstRuleOptions.slice(1)]; + continue; + } + + /* + * In any other situation, then the second rule config takes + * precedence. That means the value at `result[ruleId]` is + * already correct and no further work is necessary. + */ + } + + return result; + }, + + validate(value) { + assertIsObject(value); + + let lastRuleId; + + // Performance: One try-catch has less overhead than one per loop iteration + try { + + /* + * We are not checking the rule schema here because there is no + * guarantee that the rule definition is present at this point. Instead + * we wait and check the rule schema during the finalization step + * of calculating a config. 
+ */ + for (const ruleId of Object.keys(value)) { + + // avoid hairy edge case + if (ruleId === "__proto__") { + continue; + } + + lastRuleId = ruleId; + + const ruleOptions = value[ruleId]; + + assertIsRuleOptions(ruleOptions); + + if (Array.isArray(ruleOptions)) { + assertIsRuleSeverity(ruleOptions[0]); + } else { + assertIsRuleSeverity(ruleOptions); + } + } + } catch (error) { + error.message = `Key "${lastRuleId}": ${error.message}`; + throw error; + } + } +}; + +/** @type {ObjectPropertySchema} */ +const sourceTypeSchema = { + merge: "replace", + validate(value) { + if (typeof value !== "string" || !/^(?:script|module|commonjs)$/u.test(value)) { + throw new TypeError("Expected \"script\", \"module\", or \"commonjs\"."); + } + } +}; + +//----------------------------------------------------------------------------- +// Full schema +//----------------------------------------------------------------------------- + +exports.flatConfigSchema = { + settings: deepObjectAssignSchema, + linterOptions: { + schema: { + noInlineConfig: booleanSchema, + reportUnusedDisableDirectives: booleanSchema + } + }, + languageOptions: { + schema: { + ecmaVersion: numberSchema, + sourceType: sourceTypeSchema, + globals: globalsSchema, + parser: parserSchema, + parserOptions: deepObjectAssignSchema + } + }, + processor: processorSchema, + plugins: pluginsSchema, + rules: rulesSchema +}; diff --git a/tools/node_modules/eslint/lib/config/rule-validator.js b/tools/node_modules/eslint/lib/config/rule-validator.js new file mode 100644 index 00000000000000..f162dd81a05095 --- /dev/null +++ b/tools/node_modules/eslint/lib/config/rule-validator.js @@ -0,0 +1,169 @@ +/** + * @fileoverview Rule Validator + * @author Nicholas C. 
Zakas + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const ajv = require("../shared/ajv")(); + +//----------------------------------------------------------------------------- +// Helpers +//----------------------------------------------------------------------------- + +/** + * Finds a rule with the given ID in the given config. + * @param {string} ruleId The ID of the rule to find. + * @param {Object} config The config to search in. + * @returns {{create: Function, schema: (Array|null)}} THe rule object. + */ +function findRuleDefinition(ruleId, config) { + const ruleIdParts = ruleId.split("/"); + let pluginName, ruleName; + + // built-in rule + if (ruleIdParts.length === 1) { + pluginName = "@"; + ruleName = ruleIdParts[0]; + } else { + ruleName = ruleIdParts.pop(); + pluginName = ruleIdParts.join("/"); + } + + if (!config.plugins || !config.plugins[pluginName]) { + throw new TypeError(`Key "rules": Key "${ruleId}": Could not find plugin "${pluginName}".`); + } + + if (!config.plugins[pluginName].rules || !config.plugins[pluginName].rules[ruleName]) { + throw new TypeError(`Key "rules": Key "${ruleId}": Could not find "${ruleName}" in plugin "${pluginName}".`); + } + + return config.plugins[pluginName].rules[ruleName]; + +} + +/** + * Gets a complete options schema for a rule. + * @param {{create: Function, schema: (Array|null)}} rule A new-style rule object + * @returns {Object} JSON Schema for the rule's options. 
+ */ +function getRuleOptionsSchema(rule) { + + if (!rule) { + return null; + } + + const schema = rule.schema || rule.meta && rule.meta.schema; + + if (Array.isArray(schema)) { + if (schema.length) { + return { + type: "array", + items: schema, + minItems: 0, + maxItems: schema.length + }; + } + return { + type: "array", + minItems: 0, + maxItems: 0 + }; + + } + + // Given a full schema, leave it alone + return schema || null; +} + +//----------------------------------------------------------------------------- +// Exports +//----------------------------------------------------------------------------- + +/** + * Implements validation functionality for the rules portion of a config. + */ +class RuleValidator { + + /** + * Creates a new instance. + */ + constructor() { + + /** + * A collection of compiled validators for rules that have already + * been validated. + * @type {WeakMap} + * @property validators + */ + this.validators = new WeakMap(); + } + + /** + * Validates all of the rule configurations in a config against each + * rule's schema. + * @param {Object} config The full config to validate. This object must + * contain both the rules section and the plugins section. + * @returns {void} + * @throws {Error} If a rule's configuration does not match its schema. + */ + validate(config) { + + if (!config.rules) { + return; + } + + for (const [ruleId, ruleOptions] of Object.entries(config.rules)) { + + // check for edge case + if (ruleId === "__proto__") { + continue; + } + + /* + * If a rule is disabled, we don't do any validation. This allows + * users to safely set any value to 0 or "off" without worrying + * that it will cause a validation error. + * + * Note: ruleOptions is always an array at this point because + * this validation occurs after FlatConfigArray has merged and + * normalized values. 
+ */ + if (ruleOptions[0] === 0) { + continue; + } + + const rule = findRuleDefinition(ruleId, config); + + // Precompile and cache validator the first time + if (!this.validators.has(rule)) { + const schema = getRuleOptionsSchema(rule); + + if (schema) { + this.validators.set(rule, ajv.compile(schema)); + } + } + + const validateRule = this.validators.get(rule); + + if (validateRule) { + + validateRule(ruleOptions.slice(1)); + + if (validateRule.errors) { + throw new Error(`Key "rules": Key "${ruleId}": ${ + validateRule.errors.map( + error => `\tValue ${JSON.stringify(error.data)} ${error.message}.\n` + ).join("") + }`); + } + } + } + } +} + +exports.RuleValidator = RuleValidator; diff --git a/tools/node_modules/eslint/lib/linter/linter.js b/tools/node_modules/eslint/lib/linter/linter.js index e94b507b5dd30a..4e80926a895ec6 100644 --- a/tools/node_modules/eslint/lib/linter/linter.js +++ b/tools/node_modules/eslint/lib/linter/linter.js @@ -37,8 +37,10 @@ const const debug = require("debug")("eslint:linter"); const MAX_AUTOFIX_PASSES = 10; const DEFAULT_PARSER_NAME = "espree"; +const DEFAULT_ECMA_VERSION = 5; const commentParser = new ConfigCommentParser(); const DEFAULT_ERROR_LOC = { start: { line: 1, column: 0 }, end: { line: 1, column: 1 } }; +const parserSymbol = Symbol.for("eslint.RuleTester.parser"); //------------------------------------------------------------------------------ // Typedefs @@ -432,10 +434,16 @@ function getDirectiveComments(filename, ast, ruleMapper, warnInlineConfig) { /** * Normalize ECMAScript version from the initial config - * @param {number} ecmaVersion ECMAScript version from the initial config + * @param {Parser} parser The parser which uses this options. 
+ * @param {number} ecmaVersion ECMAScript version from the initial config * @returns {number} normalized ECMAScript version */ -function normalizeEcmaVersion(ecmaVersion) { +function normalizeEcmaVersion(parser, ecmaVersion) { + if ((parser[parserSymbol] || parser) === espree) { + if (ecmaVersion === "latest") { + return espree.latestEcmaVersion; + } + } /* * Calculate ECMAScript edition number from official year version starting with @@ -521,12 +529,13 @@ function normalizeVerifyOptions(providedOptions, config) { /** * Combines the provided parserOptions with the options from environments - * @param {string} parserName The parser name which uses this options. + * @param {Parser} parser The parser which uses this options. * @param {ParserOptions} providedOptions The provided 'parserOptions' key in a config * @param {Environment[]} enabledEnvironments The environments enabled in configuration and with inline comments * @returns {ParserOptions} Resulting parser options after merge */ -function resolveParserOptions(parserName, providedOptions, enabledEnvironments) { +function resolveParserOptions(parser, providedOptions, enabledEnvironments) { + const parserOptionsFromEnv = enabledEnvironments .filter(env => env.parserOptions) .reduce((parserOptions, env) => merge(parserOptions, env.parserOptions), {}); @@ -542,12 +551,7 @@ function resolveParserOptions(parserName, providedOptions, enabledEnvironments) mergedParserOptions.ecmaFeatures = Object.assign({}, mergedParserOptions.ecmaFeatures, { globalReturn: false }); } - /* - * TODO: @aladdin-add - * 1. for a 3rd-party parser, do not normalize parserOptions - * 2. 
for espree, no need to do this (espree will do it) - */ - mergedParserOptions.ecmaVersion = normalizeEcmaVersion(mergedParserOptions.ecmaVersion); + mergedParserOptions.ecmaVersion = normalizeEcmaVersion(parser, mergedParserOptions.ecmaVersion); return mergedParserOptions; } @@ -606,7 +610,7 @@ function getRuleOptions(ruleConfig) { */ function analyzeScope(ast, parserOptions, visitorKeys) { const ecmaFeatures = parserOptions.ecmaFeatures || {}; - const ecmaVersion = parserOptions.ecmaVersion || 5; + const ecmaVersion = parserOptions.ecmaVersion || DEFAULT_ECMA_VERSION; return eslintScope.analyze(ast, { ignoreEval: true, @@ -1123,7 +1127,7 @@ class Linter { .map(envName => getEnv(slots, envName)) .filter(env => env); - const parserOptions = resolveParserOptions(parserName, config.parserOptions || {}, enabledEnvs); + const parserOptions = resolveParserOptions(parser, config.parserOptions || {}, enabledEnvs); const configuredGlobals = resolveGlobals(config.globals || {}, enabledEnvs); const settings = config.settings || {}; diff --git a/tools/node_modules/eslint/lib/rule-tester/rule-tester.js b/tools/node_modules/eslint/lib/rule-tester/rule-tester.js index cac81bc71d150f..2b5524923bea7c 100644 --- a/tools/node_modules/eslint/lib/rule-tester/rule-tester.js +++ b/tools/node_modules/eslint/lib/rule-tester/rule-tester.js @@ -53,6 +53,7 @@ const const ajv = require("../shared/ajv")({ strictDefaults: true }); const espreePath = require.resolve("espree"); +const parserSymbol = Symbol.for("eslint.RuleTester.parser"); //------------------------------------------------------------------------------ // Typedefs @@ -239,6 +240,7 @@ function defineStartEndAsError(objName, node) { }); } + /** * Define `start`/`end` properties of all nodes of the given AST as throwing error. * @param {ASTNode} ast The root node to errorize `start`/`end` properties. @@ -258,8 +260,10 @@ function defineStartEndAsErrorInTree(ast, visitorKeys) { * @returns {Parser} Wrapped parser object. 
*/ function wrapParser(parser) { + if (typeof parser.parseForESLint === "function") { return { + [parserSymbol]: parser, parseForESLint(...args) { const ret = parser.parseForESLint(...args); @@ -268,7 +272,9 @@ function wrapParser(parser) { } }; } + return { + [parserSymbol]: parser, parse(...args) { const ast = parser.parse(...args); diff --git a/tools/node_modules/eslint/lib/rules/dot-notation.js b/tools/node_modules/eslint/lib/rules/dot-notation.js index 751b4628edc4d5..3aa9f3110f55f1 100644 --- a/tools/node_modules/eslint/lib/rules/dot-notation.js +++ b/tools/node_modules/eslint/lib/rules/dot-notation.js @@ -94,7 +94,7 @@ module.exports = { // Don't perform any fixes if there are comments inside the brackets. if (sourceCode.commentsExistBetween(leftBracket, rightBracket)) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // Replace the brackets by an identifier. @@ -154,12 +154,12 @@ module.exports = { // A statement that starts with `let[` is parsed as a destructuring variable declaration, not a MemberExpression. if (node.object.type === "Identifier" && node.object.name === "let" && !node.optional) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // Don't perform any fixes if there are comments between the dot and the property name. if (sourceCode.commentsExistBetween(dotToken, node.property)) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // Replace the identifier to brackets. 
diff --git a/tools/node_modules/eslint/lib/rules/prefer-arrow-callback.js b/tools/node_modules/eslint/lib/rules/prefer-arrow-callback.js index ee5cfe3c8c7fd9..a01c0340821b20 100644 --- a/tools/node_modules/eslint/lib/rules/prefer-arrow-callback.js +++ b/tools/node_modules/eslint/lib/rules/prefer-arrow-callback.js @@ -295,7 +295,7 @@ module.exports = { * If the callback function has duplicates in its list of parameters (possible in sloppy mode), * don't replace it with an arrow function, because this is a SyntaxError with arrow functions. */ - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // Remove `.bind(this)` if exists. @@ -307,7 +307,7 @@ module.exports = { * E.g. `(foo || function(){}).bind(this)` */ if (memberNode.type !== "MemberExpression") { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } const callNode = memberNode.parent; @@ -320,12 +320,12 @@ module.exports = { * ^^^^^^^^^^^^ */ if (astUtils.isParenthesised(sourceCode, memberNode)) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // If comments exist in the `.bind(this)`, don't remove those. if (sourceCode.commentsExistBetween(firstTokenToRemove, lastTokenToRemove)) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } yield fixer.removeRange([firstTokenToRemove.range[0], lastTokenToRemove.range[1]]); diff --git a/tools/node_modules/eslint/lib/rules/use-isnan.js b/tools/node_modules/eslint/lib/rules/use-isnan.js index 0c7e888c976d8d..ef95b21314a1ce 100644 --- a/tools/node_modules/eslint/lib/rules/use-isnan.js +++ b/tools/node_modules/eslint/lib/rules/use-isnan.js @@ -21,7 +21,10 @@ const astUtils = require("./utils/ast-utils"); * @returns {boolean} `true` if the node is 'NaN' identifier. 
*/ function isNaNIdentifier(node) { - return Boolean(node) && node.type === "Identifier" && node.name === "NaN"; + return Boolean(node) && ( + astUtils.isSpecificId(node, "NaN") || + astUtils.isSpecificMemberAccess(node, "Number", "NaN") + ); } //------------------------------------------------------------------------------ diff --git a/tools/node_modules/eslint/lib/source-code/source-code.js b/tools/node_modules/eslint/lib/source-code/source-code.js index c13ce29b877a4a..cc4524fa74c806 100644 --- a/tools/node_modules/eslint/lib/source-code/source-code.js +++ b/tools/node_modules/eslint/lib/source-code/source-code.js @@ -349,7 +349,7 @@ class SourceCode extends TokenStore { let currentToken = this.getTokenBefore(node, { includeComments: true }); while (currentToken && isCommentToken(currentToken)) { - if (node.parent && (currentToken.start < node.parent.start)) { + if (node.parent && node.parent.type !== "Program" && (currentToken.start < node.parent.start)) { break; } comments.leading.push(currentToken); @@ -361,7 +361,7 @@ class SourceCode extends TokenStore { currentToken = this.getTokenAfter(node, { includeComments: true }); while (currentToken && isCommentToken(currentToken)) { - if (node.parent && (currentToken.end > node.parent.end)) { + if (node.parent && node.parent.type !== "Program" && (currentToken.end > node.parent.end)) { break; } comments.trailing.push(currentToken); diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/LICENSE b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/LICENSE new file mode 100644 index 00000000000000..261eeb9e9f8b2b --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/README.md b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/README.md new file mode 100644 index 00000000000000..9cfe637bb3d2f7 --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/README.md @@ -0,0 +1,258 @@ +# Config Array + +by [Nicholas C. Zakas](https://humanwhocodes.com) + +If you find this useful, please consider supporting my work with a [donation](https://humanwhocodes.com/donate). + +## Description + +A config array is a way of managing configurations that are based on glob pattern matching of filenames. Each config array contains the information needed to determine the correct configuration for any file based on the filename. 
+ +## Background + +In 2019, I submitted an [ESLint RFC](https://github.com/eslint/rfcs/pull/9) proposing a new way of configuring ESLint. The goal was to streamline what had become an increasingly complicated configuration process. Over several iterations, this proposal was eventually born. + +The basic idea is that all configuration, including overrides, can be represented by a single array where each item in the array is a config object. Config objects appearing later in the array override config objects appearing earlier in the array. You can calculate a config for a given file by traversing all config objects in the array to find the ones that match the filename. Matching is done by specifying glob patterns in `files` and `ignores` properties on each config object. Here's an example: + +```js +export default [ + + // match all JSON files + { + name: "JSON Handler", + files: ["**/*.json"], + handler: jsonHandler + }, + + // match only package.json + { + name: "package.json Handler", + files: ["package.json"], + handler: packageJsonHandler + } +]; +``` + +In this example, there are two config objects: the first matches all JSON files in all directories and the second matches just `package.json` in the base path directory (all the globs are evaluated as relative to a base path that can be specified). When you retrieve a configuration for `foo.json`, only the first config object matches so `handler` is equal to `jsonHandler`; when you retrieve a configuration for `package.json`, `handler` is equal to `packageJsonHandler` (because both config objects match, the second one wins). 
+ +## Installation + +You can install the package using npm or Yarn: + +```bash +npm install @humanwhocodes/config-array --save + +# or + +yarn add @humanwhocodes/config-array +``` + +## Usage + +First, import the `ConfigArray` constructor: + +```js +import { ConfigArray } from "@humanwhocodes/config-array"; + +// or using CommonJS + +const { ConfigArray } = require("@humanwhocodes/config-array"); +``` + +When you create a new instance of `ConfigArray`, you must pass in two arguments: an array of configs and an options object. The array of configs is most likely read in from a configuration file, so here's a typical example: + +```js +const configFilename = path.resolve(process.cwd(), "my.config.js"); +const { default: rawConfigs } = await import(configFilename); +const configs = new ConfigArray(rawConfigs, { + + // the path to match filenames from + basePath: process.cwd(), + + // additional items in each config + schema: mySchema +}); +``` + +This example reads in an object or array from `my.config.js` and passes it into the `ConfigArray` constructor as the first argument. The second argument is an object specifying the `basePath` (the directoy in which `my.config.js` is found) and a `schema` to define the additional properties of a config object beyond `files`, `ignores`, and `name`. + +### Specifying a Schema + +The `schema` option is required for you to use additional properties in config objects. The schema is object that follows the format of an [`ObjectSchema`](https://npmjs.com/package/@humanwhocodes/object-schema). The schema specifies both validation and merge rules that the `ConfigArray` instance needs to combine configs when there are multiple matches. 
Here's an example: + +```js +const configFilename = path.resolve(process.cwd(), "my.config.js"); +const { default: rawConfigs } = await import(configFilename); + +const mySchema = { + + // define the handler key in configs + handler: { + required: true, + merge(a, b) { + if (!b) return a; + if (!a) return b; + }, + validate(value) { + if (typeof value !== "function") { + throw new TypeError("Function expected."); + } + } + } +}; + +const configs = new ConfigArray(rawConfigs, { + + // the path to match filenames from + basePath: process.cwd(), + + // additional items in each config + schema: mySchema +}); +``` + +### Config Arrays + +Config arrays can be multidimensional, so it's possible for a config array to contain another config array, such as: + +```js +export default [ + + // JS config + { + files: ["**/*.js"], + handler: jsHandler + }, + + // JSON configs + [ + + // match all JSON files + { + name: "JSON Handler", + files: ["**/*.json"], + handler: jsonHandler + }, + + // match only package.json + { + name: "package.json Handler", + files: ["package.json"], + handler: packageJsonHandler + } + ], + + // filename must match function + { + files: [ filePath => filePath.endsWith(".md") ], + handler: markdownHandler + }, + + // filename must match all patterns in subarray + { + files: [ ["*.test.*", "*.js"] ], + handler: jsTestHandler + }, + + // filename must not match patterns beginning with ! + { + name: "Non-JS files", + files: ["!*.js"], + settings: { + js: false + } + } +]; +``` + +In this example, the array contains both config objects and a config array. When a config array is normalized (see details below), it is flattened so only config objects remain. However, the order of evaluation remains the same. + +If the `files` array contains a function, then that function is called with the absolute path of the file and is expected to return `true` if there is a match and `false` if not. (The `ignores` array can also contain functions.) 
+ +If the `files` array contains an item that is an array of strings and functions, then all patterns must match in order for the config to match. In the preceding examples, both `*.test.*` and `*.js` must match in order for the config object to be used. + +If a pattern in the files array begins with `!` then it excludes that pattern. In the preceding example, any filename that doesn't end with `.js` will automatically getting a `settings.js` property set to `false`. + +### Config Functions + +Config arrays can also include config functions. A config function accepts a single parameter, `context` (defined by you), and must return either a config object or a config array (it cannot return another function). Config functions allow end users to execute code in the creation of appropriate config objects. Here's an example: + +```js +export default [ + + // JS config + { + files: ["**/*.js"], + handler: jsHandler + }, + + // JSON configs + function (context) { + return [ + + // match all JSON files + { + name: context.name + " JSON Handler", + files: ["**/*.json"], + handler: jsonHandler + }, + + // match only package.json + { + name: context.name + " package.json Handler", + files: ["package.json"], + handler: packageJsonHandler + } + ]; + } +]; +``` + +When a config array is normalized, each function is executed and replaced in the config array with the return value. + +**Note:** Config functions cannot be async. This will be added in a future version. + +### Normalizing Config Arrays + +Once a config array has been created and loaded with all of the raw config data, it must be normalized before it can be used. The normalization process goes through and flattens the config array as well as executing all config functions to get their final values. 
+ +To normalize a config array, call the `normalize()` method and pass in a context object: + +```js +await configs.normalize({ + name: "MyApp" +}); +``` + +The `normalize()` method returns a promise, so be sure to use the `await` operator. The config array instance is normalized in-place, so you don't need to create a new variable. + +**Important:** Once a `ConfigArray` is normalized, it cannot be changed further. You can, however, create a new `ConfigArray` and pass in the normalized instance to create an unnormalized copy. + +### Getting Config for a File + +To get the config for a file, use the `getConfig()` method on a normalized config array and pass in the filename to get a config for: + +```js +// pass in absolute filename +const fileConfig = configs.getConfig(path.resolve(process.cwd(), "package.json")); +``` + +The config array always returns an object, even if there are no configs matching the given filename. You can then inspect the returned config object to determine how to proceed. + +A few things to keep in mind: + +* You must pass in the absolute filename to get a config for. +* The returned config object never has `files`, `ignores`, or `name` properties; the only properties on the object will be the other configuration options specified. +* The config array caches configs, so subsequent calls to `getConfig()` with the same filename will return in a fast lookup rather than another calculation. 
+ +## Acknowledgements + +The design of this project was influenced by feedback on the ESLint RFC, and incorporates ideas from: + +* Teddy Katz (@not-an-aardvark) +* Toru Nagashima (@mysticatea) +* Kai Cataldo (@kaicataldo) + +## License + +Apache 2.0 diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/api.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/api.js new file mode 100644 index 00000000000000..a9aacf46b5cd4b --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/api.js @@ -0,0 +1,457 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var path = _interopDefault(require('path')); +var minimatch = _interopDefault(require('minimatch')); +var createDebug = _interopDefault(require('debug')); +var objectSchema = require('@humanwhocodes/object-schema'); + +/** + * @fileoverview ConfigSchema + * @author Nicholas C. Zakas + */ + +//------------------------------------------------------------------------------ +// Helpers +//------------------------------------------------------------------------------ + +/** + * Assets that a given value is an array. + * @param {*} value The value to check. + * @returns {void} + * @throws {TypeError} When the value is not an array. + */ +function assertIsArray(value) { + if (!Array.isArray(value)) { + throw new TypeError('Expected value to be an array.'); + } +} + +/** + * Assets that a given value is an array containing only strings and functions. + * @param {*} value The value to check. + * @returns {void} + * @throws {TypeError} When the value is not an array of strings and functions. 
+ */ +function assertIsArrayOfStringsAndFunctions(value, name) { + assertIsArray(value); + + if (value.some(item => typeof item !== 'string' && typeof item !== 'function')) { + throw new TypeError('Expected array to only contain strings.'); + } +} + +//------------------------------------------------------------------------------ +// Exports +//------------------------------------------------------------------------------ + +/** + * The base schema that every ConfigArray uses. + * @type Object + */ +const baseSchema = Object.freeze({ + name: { + required: false, + merge() { + return undefined; + }, + validate(value) { + if (typeof value !== 'string') { + throw new TypeError('Property must be a string.'); + } + } + }, + files: { + required: false, + merge() { + return undefined; + }, + validate(value) { + + // first check if it's an array + assertIsArray(value); + + // then check each member + value.forEach(item => { + if (Array.isArray(item)) { + assertIsArrayOfStringsAndFunctions(item); + } else if (typeof item !== 'string' && typeof item !== 'function') { + throw new TypeError('Items must be a string, a function, or an array of strings and functions.'); + } + }); + + } + }, + ignores: { + required: false, + merge() { + return undefined; + }, + validate: assertIsArrayOfStringsAndFunctions + } +}); + +/** + * @fileoverview ConfigArray + * @author Nicholas C. Zakas + */ + +//------------------------------------------------------------------------------ +// Helpers +//------------------------------------------------------------------------------ + +const debug = createDebug('@hwc/config-array'); + +const MINIMATCH_OPTIONS = { + matchBase: true +}; + +/** + * Shorthand for checking if a value is a string. + * @param {any} value The value to check. + * @returns {boolean} True if a string, false if not. 
+ */ +function isString(value) { + return typeof value === 'string'; +} + +/** + * Normalizes a `ConfigArray` by flattening it and executing any functions + * that are found inside. + * @param {Array} items The items in a `ConfigArray`. + * @param {Object} context The context object to pass into any function + * found. + * @returns {Array} A flattened array containing only config objects. + * @throws {TypeError} When a config function returns a function. + */ +async function normalize(items, context) { + + // TODO: Allow async config functions + + function *flatTraverse(array) { + for (let item of array) { + if (typeof item === 'function') { + item = item(context); + } + + if (Array.isArray(item)) { + yield * flatTraverse(item); + } else if (typeof item === 'function') { + throw new TypeError('A config function can only return an object or array.'); + } else { + yield item; + } + } + } + + return [...flatTraverse(items)]; +} + +/** + * Determines if a given file path is matched by a config. If the config + * has no `files` field, then it matches; otherwise, if a `files` field + * is present then we match the globs in `files` and exclude any globs in + * `ignores`. + * @param {string} filePath The absolute file path to check. + * @param {Object} config The config object to check. + * @returns {boolean} True if the file path is matched by the config, + * false if not. 
+ */ +function pathMatches(filePath, basePath, config) { + + // a config without a `files` field always matches + if (!config.files) { + return true; + } + + // if files isn't an array, throw an error + if (!Array.isArray(config.files) || config.files.length === 0) { + throw new TypeError('The files key must be a non-empty array.'); + } + + const relativeFilePath = path.relative(basePath, filePath); + + // match both strings and functions + const match = pattern => { + if (isString(pattern)) { + return minimatch(relativeFilePath, pattern, MINIMATCH_OPTIONS); + } + + if (typeof pattern === 'function') { + return pattern(filePath); + } + }; + + // check for all matches to config.files + let matches = config.files.some(pattern => { + if (Array.isArray(pattern)) { + return pattern.every(match); + } + + return match(pattern); + }); + + /* + * If the file path matches the config.files patterns, then check to see + * if there are any files to ignore. + */ + if (matches && config.ignores) { + matches = !config.ignores.some(pattern => { + return minimatch(filePath, pattern, MINIMATCH_OPTIONS); + }); + } + + return matches; +} + +/** + * Ensures that a ConfigArray has been normalized. + * @param {ConfigArray} configArray The ConfigArray to check. + * @returns {void} + * @throws {Error} When the `ConfigArray` is not normalized. 
+ */ +function assertNormalized(configArray) { + // TODO: Throw more verbose error + if (!configArray.isNormalized()) { + throw new Error('ConfigArray must be normalized to perform this operation.'); + } +} + +//------------------------------------------------------------------------------ +// Public Interface +//------------------------------------------------------------------------------ + +const ConfigArraySymbol = { + isNormalized: Symbol('isNormalized'), + configCache: Symbol('configCache'), + schema: Symbol('schema'), + finalizeConfig: Symbol('finalizeConfig'), + preprocessConfig: Symbol('preprocessConfig') +}; + +/** + * Represents an array of config objects and provides method for working with + * those config objects. + */ +class ConfigArray extends Array { + + /** + * Creates a new instance of ConfigArray. + * @param {Iterable|Function|Object} configs An iterable yielding config + * objects, or a config function, or a config object. + * @param {string} [options.basePath=""] The path of the config file + * @param {boolean} [options.normalized=false] Flag indicating if the + * configs have already been normalized. + * @param {Object} [options.schema] The additional schema + * definitions to use for the ConfigArray schema. + */ + constructor(configs, { basePath = '', normalized = false, schema: customSchema } = {}) { + super(); + + /** + * Tracks if the array has been normalized. + * @property isNormalized + * @type boolean + * @private + */ + this[ConfigArraySymbol.isNormalized] = normalized; + + /** + * The schema used for validating and merging configs. + * @property schema + * @type ObjectSchema + * @private + */ + this[ConfigArraySymbol.schema] = new objectSchema.ObjectSchema({ + ...customSchema, + ...baseSchema + }); + + /** + * The path of the config file that this array was loaded from. + * This is used to calculate filename matches. 
+ * @property basePath + * @type string + */ + this.basePath = basePath; + + /** + * A cache to store calculated configs for faster repeat lookup. + * @property configCache + * @type Map + * @private + */ + this[ConfigArraySymbol.configCache] = new Map(); + + // load the configs into this array + if (Array.isArray(configs)) { + this.push(...configs); + } else { + this.push(configs); + } + + } + + /** + * Prevent normal array methods from creating a new `ConfigArray` instance. + * This is to ensure that methods such as `slice()` won't try to create a + * new instance of `ConfigArray` behind the scenes as doing so may throw + * an error due to the different constructor signature. + * @returns {Function} The `Array` constructor. + */ + static get [Symbol.species]() { + return Array; + } + + /** + * Returns the `files` globs from every config object in the array. + * Negated patterns (those beginning with `!`) are not returned. + * This can be used to determine which files will be matched by a + * config array or to use as a glob pattern when no patterns are provided + * for a command line interface. + * @returns {string[]} An array of string patterns. + */ + get files() { + + assertNormalized(this); + + const result = []; + + for (const config of this) { + if (config.files) { + config.files.forEach(filePattern => { + if (Array.isArray(filePattern)) { + result.push(...filePattern.filter(pattern => { + return isString(pattern) && !pattern.startsWith('!'); + })); + } else if (isString(filePattern) && !filePattern.startsWith('!')) { + result.push(filePattern); + } + }); + } + } + + return result; + } + + /** + * Returns the file globs that should always be ignored regardless of + * the matching `files` fields in any configs. This is necessary to mimic + * the behavior of things like .gitignore and .eslintignore, allowing a + * globbing operation to be faster. + * @returns {string[]} An array of string patterns to be ignored. 
+ */ + get ignores() { + + assertNormalized(this); + + const result = []; + + for (const config of this) { + if (config.ignores && !config.files) { + result.push(...config.ignores.filter(isString)); + } + } + + return result; + } + + /** + * Indicates if the config array has been normalized. + * @returns {boolean} True if the config array is normalized, false if not. + */ + isNormalized() { + return this[ConfigArraySymbol.isNormalized]; + } + + /** + * Normalizes a config array by flattening embedded arrays and executing + * config functions. + * @param {ConfigContext} context The context object for config functions. + * @returns {ConfigArray} A new ConfigArray instance that is normalized. + */ + async normalize(context = {}) { + + if (!this.isNormalized()) { + const normalizedConfigs = await normalize(this, context); + this.length = 0; + this.push(...normalizedConfigs.map(this[ConfigArraySymbol.preprocessConfig])); + this[ConfigArraySymbol.isNormalized] = true; + + // prevent further changes + Object.freeze(this); + } + + return this; + } + + /** + * Finalizes the state of a config before being cached and returned by + * `getConfig()`. Does nothing by default but is provided to be + * overridden by subclasses as necessary. + * @param {Object} config The config to finalize. + * @returns {Object} The finalized config. + */ + [ConfigArraySymbol.finalizeConfig](config) { + return config; + } + + /** + * Preprocesses a config during the normalization process. This is the + * method to override if you want to convert an array item before it is + * validated for the first time. For example, if you want to replace a + * string with an object, this is the method to override. + * @param {Object} config The config to preprocess. + * @returns {Object} The config to use in place of the argument. + */ + [ConfigArraySymbol.preprocessConfig](config) { + return config; + } + + /** + * Returns the config object for a given file path. 
+ * @param {string} filePath The complete path of a file to get a config for. + * @returns {Object} The config object for this file. + */ + getConfig(filePath) { + + assertNormalized(this); + + // first check the cache to avoid duplicate work + let finalConfig = this[ConfigArraySymbol.configCache].get(filePath); + + if (finalConfig) { + return finalConfig; + } + + // No config found in cache, so calculate a new one + + const matchingConfigs = []; + + for (const config of this) { + if (pathMatches(filePath, this.basePath, config)) { + debug(`Matching config found for ${filePath}`); + matchingConfigs.push(config); + } else { + debug(`No matching config found for ${filePath}`); + } + } + + finalConfig = matchingConfigs.reduce((result, config) => { + return this[ConfigArraySymbol.schema].merge(result, config); + }, {}, this); + + finalConfig = this[ConfigArraySymbol.finalizeConfig](finalConfig); + + this[ConfigArraySymbol.configCache].set(filePath, finalConfig); + + return finalConfig; + } + +} + +exports.ConfigArray = ConfigArray; +exports.ConfigArraySymbol = ConfigArraySymbol; diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/package.json b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/package.json new file mode 100644 index 00000000000000..4dc3a4e5d48ab4 --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/package.json @@ -0,0 +1,61 @@ +{ + "name": "@humanwhocodes/config-array", + "version": "0.5.0", + "description": "Glob-based configuration matching.", + "author": "Nicholas C. 
Zakas", + "main": "api.js", + "files": [ + "api.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/humanwhocodes/config-array.git" + }, + "bugs": { + "url": "https://github.com/humanwhocodes/config-array/issues" + }, + "homepage": "https://github.com/humanwhocodes/config-array#readme", + "scripts": { + "build": "rollup -c", + "format": "nitpik", + "lint": "eslint *.config.js src/*.js tests/*.js", + "prepublish": "npm run build", + "test:coverage": "nyc --include src/*.js npm run test", + "test": "mocha -r esm tests/ --recursive" + }, + "gitHooks": { + "pre-commit": "lint-staged" + }, + "lint-staged": { + "*.js": [ + "nitpik", + "eslint --fix --ignore-pattern '!.eslintrc.js'" + ] + }, + "keywords": [ + "configuration", + "configarray", + "config file" + ], + "license": "Apache-2.0", + "engines": { + "node": ">=10.10.0" + }, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "devDependencies": { + "@nitpik/javascript": "^0.3.3", + "@nitpik/node": "0.0.5", + "chai": "^4.2.0", + "eslint": "^6.7.1", + "esm": "^3.2.25", + "lint-staged": "^10.2.8", + "mocha": "^6.1.4", + "nyc": "^14.1.1", + "rollup": "^1.12.3", + "yorkie": "^2.0.0" + } +} \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/LICENSE b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/LICENSE new file mode 100644 index 00000000000000..a5e3ae46fdfc2b --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2019, Human Who Codes +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/README.md b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/README.md new file mode 100644 index 00000000000000..2163797f8fe15a --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/README.md @@ -0,0 +1,234 @@ +# JavaScript ObjectSchema Package + +by [Nicholas C. Zakas](https://humanwhocodes.com) + +If you find this useful, please consider supporting my work with a [donation](https://humanwhocodes.com/donate). + +## Overview + +A JavaScript object merge/validation utility where you can define a different merge and validation strategy for each key. 
This is helpful when you need to validate complex data structures and then merge them in a way that is more complex than `Object.assign()`. + +## Installation + +You can install using either npm: + +``` +npm install @humanwhocodes/object-schema +``` + +Or Yarn: + +``` +yarn add @humanwhocodes/object-schema +``` + +## Usage + +Use CommonJS to get access to the `ObjectSchema` constructor: + +```js +const { ObjectSchema } = require("@humanwhocodes/object-schema"); + +const schema = new ObjectSchema({ + + // define a definition for the "downloads" key + downloads: { + required: true, + merge(value1, value2) { + return value1 + value2; + }, + validate(value) { + if (typeof value !== "number") { + throw new Error("Expected downloads to be a number."); + } + } + }, + + // define a strategy for the "versions" key + version: { + required: true, + merge(value1, value2) { + return value1.concat(value2); + }, + validate(value) { + if (!Array.isArray(value)) { + throw new Error("Expected versions to be an array."); + } + } + } +}); + +const record1 = { + downloads: 25, + versions: [ + "v1.0.0", + "v1.1.0", + "v1.2.0" + ] +}; + +const record2 = { + downloads: 125, + versions: [ + "v2.0.0", + "v2.1.0", + "v3.0.0" + ] +}; + +// make sure the records are valid +schema.validate(record1); +schema.validate(record2); + +// merge together (schema.merge() accepts any number of objects) +const result = schema.merge(record1, record2); + +// result looks like this: + +const result = { + downloads: 75, + versions: [ + "v1.0.0", + "v1.1.0", + "v1.2.0", + "v2.0.0", + "v2.1.0", + "v3.0.0" + ] +}; +``` + +## Tips and Tricks + +### Named merge strategies + +Instead of specifying a `merge()` method, you can specify one of the following strings to use a default merge strategy: + +* `"assign"` - use `Object.assign()` to merge the two values into one object. +* `"overwrite"` - the second value always replaces the first. 
+* `"replace"` - the second value replaces the first if the second is not `undefined`. + +For example: + +```js +const schema = new ObjectSchema({ + name: { + merge: "replace", + validate() {} + } +}); +``` + +### Named validation strategies + +Instead of specifying a `validate()` method, you can specify one of the following strings to use a default validation strategy: + +* `"array"` - value must be an array. +* `"boolean"` - value must be a boolean. +* `"number"` - value must be a number. +* `"object"` - value must be an object. +* `"object?"` - value must be an object or null. +* `"string"` - value must be a string. +* `"string!"` - value must be a non-empty string. + +For example: + +```js +const schema = new ObjectSchema({ + name: { + merge: "replace", + validate: "string" + } +}); +``` + +### Subschemas + +If you are defining a key that is, itself, an object, you can simplify the process by using a subschema. Instead of defining `merge()` and `validate()`, assign a `schema` key that contains a schema definition, like this: + +```js +const schema = new ObjectSchema({ + name: { + schema: { + first: { + merge: "replace", + validate: "string" + }, + last: { + merge: "replace", + validate: "string" + } + } + } +}); + +schema.validate({ + name: { + first: "n", + last: "z" + } +}); +``` + +### Remove Keys During Merge + +If the merge strategy for a key returns `undefined`, then the key will not appear in the final object. 
For example: + +```js +const schema = new ObjectSchema({ + date: { + merge() { + return undefined; + }, + validate(value) { + Date.parse(value); // throws an error when invalid + } + } +}); + +const object1 = { date: "5/5/2005" }; +const object2 = { date: "6/6/2006" }; + +const result = schema.merge(object1, object2); + +console.log("date" in result); // false +``` + +### Requiring Another Key Be Present + +If you'd like the presence of one key to require the presence of another key, you can use the `requires` property to specify an array of other properties that any key requires. For example: + +```js +const schema = new ObjectSchema(); + +const schema = new ObjectSchema({ + date: { + merge() { + return undefined; + }, + validate(value) { + Date.parse(value); // throws an error when invalid + } + }, + time: { + requires: ["date"], + merge(first, second) { + return second; + }, + validate(value) { + // ... + } + } +}); + +// throws error: Key "time" requires keys "date" +schema.validate({ + time: "13:45" +}); +``` + +In this example, even though `date` is an optional key, it is required to be present whenever `time` is present. + +## License + +BSD 3-Clause diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/package.json b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/package.json new file mode 100644 index 00000000000000..ba829090e55bd4 --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/package.json @@ -0,0 +1,33 @@ +{ + "name": "@humanwhocodes/object-schema", + "version": "1.2.0", + "description": "An object schema merger/validator", + "main": "src/index.js", + "directories": { + "test": "tests" + }, + "scripts": { + "test": "mocha tests/" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/humanwhocodes/object-schema.git" + }, + "keywords": [ + "object", + "validation", + "schema", + "merge" + ], + "author": "Nicholas C. 
Zakas", + "license": "BSD-3-Clause", + "bugs": { + "url": "https://github.com/humanwhocodes/object-schema/issues" + }, + "homepage": "https://github.com/humanwhocodes/object-schema#readme", + "devDependencies": { + "chai": "^4.2.0", + "eslint": "^5.13.0", + "mocha": "^5.2.0" + } +} \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/index.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/index.js new file mode 100644 index 00000000000000..b2bc4fb96e3cb3 --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/index.js @@ -0,0 +1,7 @@ +/** + * @filedescription Object Schema Package + */ + +exports.ObjectSchema = require("./object-schema").ObjectSchema; +exports.MergeStrategy = require("./merge-strategy").MergeStrategy; +exports.ValidationStrategy = require("./validation-strategy").ValidationStrategy; diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js new file mode 100644 index 00000000000000..82174492764a9d --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js @@ -0,0 +1,53 @@ +/** + * @filedescription Merge Strategy + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Container class for several different merge strategies. + */ +class MergeStrategy { + + /** + * Merges two keys by overwriting the first with the second. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} The second value. 
+ */ + static overwrite(value1, value2) { + return value2; + } + + /** + * Merges two keys by replacing the first with the second only if the + * second is defined. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} The second value if it is defined. + */ + static replace(value1, value2) { + if (typeof value2 !== "undefined") { + return value2; + } + + return value1; + } + + /** + * Merges two properties by assigning properties from the second to the first. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} A new object containing properties from both value1 and + * value2. + */ + static assign(value1, value2) { + return Object.assign({}, value1, value2); + } +} + +exports.MergeStrategy = MergeStrategy; diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/object-schema.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/object-schema.js new file mode 100644 index 00000000000000..25864f5a280cbb --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/object-schema.js @@ -0,0 +1,239 @@ +/** + * @filedescription Object Schema + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const { MergeStrategy } = require("./merge-strategy"); +const { ValidationStrategy } = require("./validation-strategy"); + +//----------------------------------------------------------------------------- +// Private +//----------------------------------------------------------------------------- + +const strategies = Symbol("strategies"); +const requiredKeys = Symbol("requiredKeys"); + +/** + * Validates a schema strategy. 
+ * @param {string} name The name of the key this strategy is for. + * @param {Object} strategy The strategy for the object key. + * @param {boolean} [strategy.required=true] Whether the key is required. + * @param {string[]} [strategy.requires] Other keys that are required when + * this key is present. + * @param {Function} strategy.merge A method to call when merging two objects + * with the same key. + * @param {Function} strategy.validate A method to call when validating an + * object with the key. + * @returns {void} + * @throws {Error} When the strategy is missing a name. + * @throws {Error} When the strategy is missing a merge() method. + * @throws {Error} When the strategy is missing a validate() method. + */ +function validateDefinition(name, strategy) { + + let hasSchema = false; + if (strategy.schema) { + if (typeof strategy.schema === "object") { + hasSchema = true; + } else { + throw new TypeError("Schema must be an object."); + } + } + + if (typeof strategy.merge === "string") { + if (!(strategy.merge in MergeStrategy)) { + throw new TypeError(`Definition for key "${name}" missing valid merge strategy.`); + } + } else if (!hasSchema && typeof strategy.merge !== "function") { + throw new TypeError(`Definition for key "${name}" must have a merge property.`); + } + + if (typeof strategy.validate === "string") { + if (!(strategy.validate in ValidationStrategy)) { + throw new TypeError(`Definition for key "${name}" missing valid validation strategy.`); + } + } else if (!hasSchema && typeof strategy.validate !== "function") { + throw new TypeError(`Definition for key "${name}" must have a validate() method.`); + } +} + + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Represents an object validation/merging schema. + */ +class ObjectSchema { + + /** + * Creates a new instance. 
+ */ + constructor(definitions) { + + if (!definitions) { + throw new Error("Schema definitions missing."); + } + + /** + * Track all strategies in the schema by key. + * @type {Map} + * @property strategies + */ + this[strategies] = new Map(); + + /** + * Separately track any keys that are required for faster validation. + * @type {Map} + * @property requiredKeys + */ + this[requiredKeys] = new Map(); + + // add in all strategies + for (const key of Object.keys(definitions)) { + validateDefinition(key, definitions[key]); + + // normalize merge and validate methods if subschema is present + if (typeof definitions[key].schema === "object") { + const schema = new ObjectSchema(definitions[key].schema); + definitions[key] = { + ...definitions[key], + merge(first, second) { + if (first && second) { + return schema.merge(first, second); + } + + return MergeStrategy.assign(first, second); + }, + validate(value) { + ValidationStrategy.object(value); + schema.validate(value); + } + }; + } + + // normalize the merge method in case there's a string + if (typeof definitions[key].merge === "string") { + definitions[key] = { + ...definitions[key], + merge: MergeStrategy[definitions[key].merge] + }; + }; + + // normalize the validate method in case there's a string + if (typeof definitions[key].validate === "string") { + definitions[key] = { + ...definitions[key], + validate: ValidationStrategy[definitions[key].validate] + }; + }; + + this[strategies].set(key, definitions[key]); + + if (definitions[key].required) { + this[requiredKeys].set(key, definitions[key]); + } + } + } + + /** + * Determines if a strategy has been registered for the given object key. + * @param {string} key The object key to find a strategy for. + * @returns {boolean} True if the key has a strategy registered, false if not. + */ + hasKey(key) { + return this[strategies].has(key); + } + + /** + * Merges objects together to create a new object comprised of the keys + * of the all objects. 
Keys are merged based on the each key's merge + * strategy. + * @param {...Object} objects The objects to merge. + * @returns {Object} A new object with a mix of all objects' keys. + * @throws {Error} If any object is invalid. + */ + merge(...objects) { + + // double check arguments + if (objects.length < 2) { + throw new Error("merge() requires at least two arguments."); + } + + if (objects.some(object => (object == null || typeof object !== "object"))) { + throw new Error("All arguments must be objects."); + } + + return objects.reduce((result, object) => { + + this.validate(object); + + for (const [key, strategy] of this[strategies]) { + try { + if (key in result || key in object) { + const value = strategy.merge.call(this, result[key], object[key]); + if (value !== undefined) { + result[key] = value; + } + } + } catch (ex) { + ex.message = `Key "${key}": ` + ex.message; + throw ex; + } + } + return result; + }, {}); + } + + /** + * Validates an object's keys based on the validate strategy for each key. + * @param {Object} object The object to validate. + * @returns {void} + * @throws {Error} When the object is invalid. 
+ */ + validate(object) { + + // check existing keys first + for (const key of Object.keys(object)) { + + // check to see if the key is defined + if (!this.hasKey(key)) { + throw new Error(`Unexpected key "${key}" found.`); + } + + // validate existing keys + const strategy = this[strategies].get(key); + + // first check to see if any other keys are required + if (Array.isArray(strategy.requires)) { + if (!strategy.requires.every(otherKey => otherKey in object)) { + throw new Error(`Key "${key}" requires keys "${strategy.requires.join("\", \"")}".`); + } + } + + // now apply remaining validation strategy + try { + strategy.validate.call(strategy, object[key]); + } catch (ex) { + ex.message = `Key "${key}": ` + ex.message; + throw ex; + } + } + + // ensure required keys aren't missing + for (const [key] of this[requiredKeys]) { + if (!(key in object)) { + throw new Error(`Missing required key "${key}".`); + } + } + + } +} + +exports.ObjectSchema = ObjectSchema; diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js new file mode 100644 index 00000000000000..ecf918bdd17b7f --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js @@ -0,0 +1,102 @@ +/** + * @filedescription Validation Strategy + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Container class for several different validation strategies. + */ +class ValidationStrategy { + + /** + * Validates that a value is an array. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. 
+ */ + static array(value) { + if (!Array.isArray(value)) { + throw new TypeError("Expected an array."); + } + } + + /** + * Validates that a value is a boolean. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static boolean(value) { + if (typeof value !== "boolean") { + throw new TypeError("Expected a Boolean."); + } + } + + /** + * Validates that a value is a number. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static number(value) { + if (typeof value !== "number") { + throw new TypeError("Expected a number."); + } + } + + /** + * Validates that a value is a object. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static object(value) { + if (!value || typeof value !== "object") { + throw new TypeError("Expected an object."); + } + } + + /** + * Validates that a value is a object or null. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static "object?"(value) { + if (typeof value !== "object") { + throw new TypeError("Expected an object or null."); + } + } + + /** + * Validates that a value is a string. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static string(value) { + if (typeof value !== "string") { + throw new TypeError("Expected a string."); + } + } + + /** + * Validates that a value is a non-empty string. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. 
+ */ + static "string!"(value) { + if (typeof value !== "string" || value.length === 0) { + throw new TypeError("Expected a non-empty string."); + } + } + +} + +exports.ValidationStrategy = ValidationStrategy; diff --git a/tools/node_modules/eslint/node_modules/flatted/README.md b/tools/node_modules/eslint/node_modules/flatted/README.md index 0c1627f7d2dda8..8fd5b4d82f4459 100644 --- a/tools/node_modules/eslint/node_modules/flatted/README.md +++ b/tools/node_modules/eslint/node_modules/flatted/README.md @@ -18,10 +18,10 @@ Usable via [CDN](https://unpkg.com/flatted) or as regular module. ```js // ESM -import {parse, stringify} from 'flatted'; +import {parse, stringify, toJSON, fromJSON} from 'flatted'; // CJS -const {parse, stringify} = require('flatted'); +const {parse, stringify, toJSON, fromJSON} = require('flatted'); const a = [{}]; a[0].a = a; @@ -30,6 +30,34 @@ a.push(a); stringify(a); // [["1","0"],{"a":"0"}] ``` +## toJSON and from JSON + +If you'd like to implicitly survive JSON serialization, these two helpers helps: + +```js +import {toJSON, fromJSON} from 'flatted'; + +class RecursiveMap extends Map { + static fromJSON(any) { + return new this(fromJSON(any)); + } + toJSON() { + return toJSON([...this.entries()]); + } +} + +const recursive = new RecursiveMap; +const same = {}; +same.same = same; +recursive.set('same', same); + +const asString = JSON.stringify(recursive); +const asMap = RecursiveMap.fromJSON(JSON.parse(asString)); +asMap.get('same') === asMap.get('same').same; +// true +``` + + ## Flatted VS JSON As it is for every other specialized format capable of serializing and deserializing circular data, you should never `JSON.parse(Flatted.stringify(data))`, and you should never `Flatted.parse(JSON.stringify(data))`. 
diff --git a/tools/node_modules/eslint/node_modules/flatted/cjs/index.js b/tools/node_modules/eslint/node_modules/flatted/cjs/index.js index ea7ac895154e46..c2c94866d33659 100644 --- a/tools/node_modules/eslint/node_modules/flatted/cjs/index.js +++ b/tools/node_modules/eslint/node_modules/flatted/cjs/index.js @@ -92,3 +92,8 @@ const stringify = (value, replacer, space) => { } }; exports.stringify = stringify; + +const toJSON = any => $parse(stringify(any)); +exports.toJSON = toJSON; +const fromJSON = any => parse($stringify(any)); +exports.fromJSON = fromJSON; diff --git a/tools/node_modules/eslint/node_modules/flatted/es.js b/tools/node_modules/eslint/node_modules/flatted/es.js index aee7158d2f34f2..8e4cf0cf4aeb53 100644 --- a/tools/node_modules/eslint/node_modules/flatted/es.js +++ b/tools/node_modules/eslint/node_modules/flatted/es.js @@ -1,2 +1,2 @@ self.Flatted=function(t){"use strict"; -/*! (c) 2020 Andrea Giammarchi */const{parse:e,stringify:n}=JSON,{keys:r}=Object,s=String,c="string",l={},o="object",a=(t,e)=>e,i=t=>t instanceof s?s(t):t,f=(t,e)=>typeof e===c?new s(e):e,u=(t,e,n,c)=>{const a=[];for(let i=r(n),{length:f}=i,u=0;u{const r=s(e.push(n)-1);return t.set(n,r),r};return t.parse=(t,n)=>{const r=e(t,f).map(i),s=r[0],c=n||a,l=typeof s===o&&s?u(r,new Set,s,c):s;return c.call({"":l},"",l)},t.stringify=(t,e,r)=>{const s=e&&typeof e===o?(t,n)=>""===t||-1e,f=t=>t instanceof s?s(t):t,i=(t,e)=>typeof e===o?new s(e):e,u=(t,e,n,o)=>{const a=[];for(let f=r(n),{length:i}=f,u=0;u{const r=s(e.push(n)-1);return t.set(n,r),r},y=(t,n)=>{const r=e(t,i).map(f),s=r[0],o=n||a,c=typeof s===l&&s?u(r,new Set,s,o):s;return o.call({"":c},"",c)},g=(t,e,r)=>{const s=e&&typeof e===l?(t,n)=>""===t||-1y(n(t)),t.parse=y,t.stringify=g,t.toJSON=t=>e(g(t)),t}({}); diff --git a/tools/node_modules/eslint/node_modules/flatted/esm/index.js b/tools/node_modules/eslint/node_modules/flatted/esm/index.js index f220bbf7a31109..9a8c239c203ce9 100644 --- 
a/tools/node_modules/eslint/node_modules/flatted/esm/index.js +++ b/tools/node_modules/eslint/node_modules/flatted/esm/index.js @@ -89,3 +89,6 @@ export const stringify = (value, replacer, space) => { return after; } }; + +export const toJSON = any => $parse(stringify(any)); +export const fromJSON = any => parse($stringify(any)); diff --git a/tools/node_modules/eslint/node_modules/flatted/index.js b/tools/node_modules/eslint/node_modules/flatted/index.js index 2cf0eeb52abc8a..d170879e50ccf8 100644 --- a/tools/node_modules/eslint/node_modules/flatted/index.js +++ b/tools/node_modules/eslint/node_modules/flatted/index.js @@ -110,9 +110,17 @@ self.Flatted = (function (exports) { return after; } }; + var toJSON = function toJSON(any) { + return $parse(stringify(any)); + }; + var fromJSON = function fromJSON(any) { + return parse($stringify(any)); + }; + exports.fromJSON = fromJSON; exports.parse = parse; exports.stringify = stringify; + exports.toJSON = toJSON; return exports; diff --git a/tools/node_modules/eslint/node_modules/flatted/min.js b/tools/node_modules/eslint/node_modules/flatted/min.js index 64372fe4081620..a822de22434f24 100644 --- a/tools/node_modules/eslint/node_modules/flatted/min.js +++ b/tools/node_modules/eslint/node_modules/flatted/min.js @@ -1,2 +1,2 @@ self.Flatted=function(n){"use strict"; -/*! 
(c) 2020 Andrea Giammarchi */var t=JSON.parse,r=JSON.stringify,e=Object.keys,a=String,u="string",f={},i="object",c=function(n,t){return t},l=function(n){return n instanceof a?a(n):n},o=function(n,t){return typeof t===u?new a(t):t},s=function(n,t,r){var e=a(t.push(r)-1);return n.set(r,e),e};return n.parse=function(n,r){var u=t(n,o).map(l),s=u[0],p=r||c,v=typeof s===i&&s?function n(t,r,u,c){for(var l=[],o=e(u),s=o.length,p=0;pvalue : $value; } @@ -76,42 +76,25 @@ private static function keys(&$value) { $obj = new ReflectionObject($value); $props = $obj->getProperties(); $keys = array(); - foreach ($props as $prop) { + foreach ($props as $prop) $keys[] = $prop->getName(); - } return $keys; } private static function loop($obj, $keys, &$input, &$set, &$output) { foreach ($keys as $key) { $value = $obj ? $output->$key : $output[$key]; - if ($value instanceof FlattedString) { + if ($value instanceof FlattedString) Flatted::ref($obj, $key, $input[$value->value], $input, $set, $output); - } } return $output; } private static function relate(&$known, &$input, &$value) { - if (is_string($value)) { - $key = array_search($value, $known->key, true); - if ($key !== false) { - return $known->value[$key]; - } - return Flatted::index($known, $input, $value); - } - if (is_array($value)) { + if (is_string($value) || is_array($value) || is_object($value)) { $key = array_search($value, $known->key, true); - if ($key !== false) { + if ($key !== false) return $known->value[$key]; - } - return Flatted::index($known, $input, $value); - } - if (is_object($value)) { - $key = array_search($value, $known->key, true); - if ($key !== false) { - return $known->value[$key]; - } return Flatted::index($known, $input, $value); } return $value; @@ -137,7 +120,7 @@ private static function ref($obj, &$key, &$value, &$input, &$set, &$output) { private static function transform(&$known, &$input, &$value) { if (is_array($value)) { return array_map( - function (&$value) use(&$known, &$input) { + function 
($value) use(&$known, &$input) { return Flatted::relate($known, $input, $value); }, $value @@ -146,15 +129,14 @@ function (&$value) use(&$known, &$input) { if (is_object($value)) { $object = new stdClass; $keys = Flatted::keys($value); - foreach ($keys as $key) { + foreach ($keys as $key) $object->$key = Flatted::relate($known, $input, $value->$key); - } return $object; } return $value; } - private static function wrap(&$value) { + private static function wrap($value) { if (is_string($value)) { return new FlattedString($value); } diff --git a/tools/node_modules/eslint/package.json b/tools/node_modules/eslint/package.json index f556a5503561e2..ac3a78cc3b0f30 100644 --- a/tools/node_modules/eslint/package.json +++ b/tools/node_modules/eslint/package.json @@ -1,6 +1,6 @@ { "name": "eslint", - "version": "7.29.0", + "version": "7.30.0", "author": "Nicholas C. Zakas ", "description": "An AST-based pattern checker for JavaScript.", "bin": { @@ -45,6 +45,7 @@ "dependencies": { "@babel/code-frame": "7.12.11", "@eslint/eslintrc": "^0.4.2", + "@humanwhocodes/config-array": "^0.5.0", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -95,7 +96,7 @@ "ejs": "^3.0.2", "eslint": "file:.", "eslint-config-eslint": "file:packages/eslint-config-eslint", - "eslint-plugin-eslint-plugin": "^3.0.3", + "eslint-plugin-eslint-plugin": "^3.2.0", "eslint-plugin-internal-rules": "file:tools/internal-rules", "eslint-plugin-jsdoc": "^25.4.3", "eslint-plugin-node": "^11.1.0", diff --git a/tools/search_files.py b/tools/search_files.py new file mode 100644 index 00000000000000..e236eee47dc852 --- /dev/null +++ b/tools/search_files.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python + +""" +This is a utility for recursively searching files under +a specified directory +""" + +import argparse +import utils + +def main(): + parser = argparse.ArgumentParser( + description='Search files with a specific extension under a directory', + fromfile_prefix_chars='@' + ) + parser.add_argument('--ext', 
required=True, help='extension to search for') + parser.add_argument('directory', help='input directory') + options = parser.parse_args() + print('\n'.join(utils.SearchFiles(options.directory, options.ext))) + +if __name__ == "__main__": + main() diff --git a/tools/test.py b/tools/test.py index c3a7ff07e23b5d..5d4f48700c427b 100755 --- a/tools/test.py +++ b/tools/test.py @@ -896,8 +896,7 @@ def GetTestStatus(self, context, sections, defs): TIMEOUT_SCALEFACTOR = { - 'armv6' : { 'debug' : 12, 'release' : 3 }, # The ARM buildbots are slow. - 'arm' : { 'debug' : 8, 'release' : 2 }, + 'arm' : { 'debug' : 8, 'release' : 2 }, # The ARM buildbots are slow. 'ia32' : { 'debug' : 4, 'release' : 1 }, 'ppc' : { 'debug' : 4, 'release' : 1 }, 's390' : { 'debug' : 4, 'release' : 1 } } diff --git a/tools/utils.py b/tools/utils.py index 9734836db7f06e..9d53bd56bd1036 100644 --- a/tools/utils.py +++ b/tools/utils.py @@ -26,8 +26,10 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +import glob import platform import re +import sys # Reads a .list file into an array of strings @@ -81,9 +83,7 @@ def GuessOS(): def GuessArchitecture(): id = platform.machine() id = id.lower() # Windows 7 capitalizes 'AMD64'. - if id.startswith('armv6'): # Can return 'armv6l'. - return 'armv6' - elif id.startswith('arm') or id == 'aarch64': + if id.startswith('arm') or id == 'aarch64': return 'arm' elif (not id) or (not re.match('(x|i[3-6])86$', id) is None): return 'ia32' @@ -106,3 +106,10 @@ def GuessArchitecture(): def IsWindows(): return GuessOS() == 'win32' + + +def SearchFiles(dir, ext): + list = glob.glob(dir+ '/**/*.' + ext, recursive=True) + if sys.platform == 'win32': + list = [ x.replace('\\', '/')for x in list] + return list diff --git a/tools/v8/fetch_deps.py b/tools/v8/fetch_deps.py new file mode 100755 index 00000000000000..ee5b629e2b0e59 --- /dev/null +++ b/tools/v8/fetch_deps.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python +# Copyright 2017 the V8 project authors. 
All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Use this script to fetch all dependencies for V8 to run build_gn.py. + +Usage: fetch_deps.py +""" + +# for py2/py3 compatibility +from __future__ import print_function + +import os +import subprocess +import sys + +import node_common + +GCLIENT_SOLUTION = [ + { "name" : "v8", + "url" : "https://chromium.googlesource.com/v8/v8.git", + "deps_file" : "DEPS", + "managed" : False, + "custom_deps" : { + # These deps are already part of Node.js. + "v8/base/trace_event/common" : None, + "v8/third_party/googletest/src" : None, + # These deps are unnecessary for building. + "v8/test/benchmarks/data" : None, + "v8/testing/gmock" : None, + "v8/test/mozilla/data" : None, + "v8/test/test262/data" : None, + "v8/test/test262/harness" : None, + "v8/third_party/android_ndk" : None, + "v8/third_party/android_sdk" : None, + "v8/third_party/catapult" : None, + "v8/third_party/colorama/src" : None, + "v8/third_party/fuchsia-sdk" : None, + "v8/third_party/instrumented_libraries" : None, + "v8/tools/luci-go" : None, + "v8/tools/swarming_client" : None, + "v8/third_party/qemu-linux-x64" : None, + }, + }, +] + +def EnsureGit(v8_path): + def git(args): + # shell=True needed on Windows to resolve git.bat. + return subprocess.check_output( + "git " + args, cwd=v8_path, shell=True).strip() + + expected_git_dir = os.path.join(v8_path, ".git") + actual_git_dir = git("rev-parse --absolute-git-dir") + if expected_git_dir == actual_git_dir: + print("V8 is tracked stand-alone by git.") + return False + print("Initializing temporary git repository in v8.") + git("init") + git("config user.name \"Ada Lovelace\"") + git("config user.email ada@lovela.ce") + git("commit --allow-empty -m init") + return True + +def FetchDeps(v8_path): + # Verify path. + v8_path = os.path.abspath(v8_path) + assert os.path.isdir(v8_path) + + # Check out depot_tools if necessary. 
+ depot_tools = node_common.EnsureDepotTools(v8_path, True) + + temporary_git = EnsureGit(v8_path) + try: + print("Fetching dependencies.") + env = os.environ.copy() + # gclient needs to have depot_tools in the PATH. + env["PATH"] = depot_tools + os.pathsep + env["PATH"] + gclient = os.path.join(depot_tools, "gclient.py") + spec = "solutions = %s" % GCLIENT_SOLUTION + subprocess.check_call([sys.executable, gclient, "sync", "--spec", spec], + cwd=os.path.join(v8_path, os.path.pardir), + env=env) + except: + raise + finally: + if temporary_git: + node_common.UninitGit(v8_path) + # Clean up .gclient_entries file. + gclient_entries = os.path.normpath( + os.path.join(v8_path, os.pardir, ".gclient_entries")) + if os.path.isfile(gclient_entries): + os.remove(gclient_entries) + + return depot_tools + + +if __name__ == "__main__": + FetchDeps(sys.argv[1]) diff --git a/tools/v8/node_common.py b/tools/v8/node_common.py new file mode 100755 index 00000000000000..2efb21860e3cb9 --- /dev/null +++ b/tools/v8/node_common.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# Copyright 2017 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# for py2/py3 compatibility +from __future__ import print_function + +import os +import pipes +import shutil +import stat +import subprocess +import sys + +DEPOT_TOOLS_URL = \ + "https://chromium.googlesource.com/chromium/tools/depot_tools.git" + +def EnsureDepotTools(v8_path, fetch_if_not_exist): + def _Get(v8_path): + depot_tools = os.path.join(v8_path, "_depot_tools") + try: + gclient_path = os.path.join(depot_tools, "gclient.py") + if os.path.isfile(gclient_path): + return depot_tools + except: + pass + if fetch_if_not_exist: + print("Checking out depot_tools.") + # shell=True needed on Windows to resolve git.bat. 
+ subprocess.check_call("git clone {} {}".format( + pipes.quote(DEPOT_TOOLS_URL), + pipes.quote(depot_tools)), shell=True) + # Using check_output to hide warning messages. + subprocess.check_output( + [sys.executable, gclient_path, "metrics", "--opt-out"], + cwd=depot_tools) + return depot_tools + return None + depot_tools = _Get(v8_path) + assert depot_tools is not None + print("Using depot tools in %s" % depot_tools) + return depot_tools + +def UninitGit(v8_path): + print("Uninitializing temporary git repository") + target = os.path.join(v8_path, ".git") + if os.path.isdir(target): + print(">> Cleaning up %s" % target) + def OnRmError(func, path, exec_info): + # This might happen on Windows + os.chmod(path, stat.S_IWRITE) + os.unlink(path) + shutil.rmtree(target, onerror=OnRmError) diff --git a/typings/primordials.d.ts b/typings/primordials.d.ts index 0436e92b1d9b53..beed1d7b83c4c9 100644 --- a/typings/primordials.d.ts +++ b/typings/primordials.d.ts @@ -1,3 +1,5 @@ +import { AsyncIterator } from "internal/webstreams/util"; + type UncurryThis unknown> = (self: ThisParameterType, ...args: Parameters) => ReturnType; type UncurryThisStaticApply unknown> = @@ -9,15 +11,15 @@ type StaticApply unknown> = * Primordials are a way to safely use globals without fear of global mutation * Generally, this means removing `this` parameter usage and instead using * a regular parameter: - * + * * @example - * + * * ```js * 'thing'.startsWith('hello'); * ``` - * + * * becomes - * + * * ```js * primordials.StringPrototypeStartsWith('thing', 'hello') * ``` @@ -142,6 +144,7 @@ declare namespace primordials { export const ArrayBufferPrototype: typeof ArrayBuffer.prototype export const ArrayBufferIsView: typeof ArrayBuffer.isView export const ArrayBufferPrototypeSlice: UncurryThis + export const AsyncIteratorPrototype: UncurryThis export const BigInt: typeof globalThis.BigInt; export const BigIntLength: typeof BigInt.length export const BigIntName: typeof BigInt.name @@ -522,5 +525,5 @@ 
declare namespace primordials { export const PromiseAny: typeof Promise.any export const PromisePrototypeThen: UncurryThis export const PromisePrototypeCatch: UncurryThis - export const PromisePrototypeFinally: UncurryThis + export const PromisePrototypeFinally: UncurryThis }