diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f347898aeb3252..247b5c17b54805 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,57 +5,57 @@ # 3. PRs touching any code with a codeowner must be signed off by at least one # person on the code owner team. -./.github/CODEOWNERS @nodejs/tsc +/.github/CODEOWNERS @nodejs/tsc # net -# ./deps/cares @nodejs/net -# ./doc/api/dns.md @nodejs/net -# ./doc/api/dgram.md @nodejs/net -# ./doc/api/net.md @nodejs/net -# ./lib/dgram.js @nodejs/net -# ./lib/dns.js @nodejs/net -# ./lib/net.js @nodejs/net @nodejs/quic -# ./lib/internal/dgram.js @nodejs/net -# ./lib/internal/dns/* @nodejs/net -# ./lib/internal/net.js @nodejs/net -# ./lib/internal/socket_list.js @nodejs/net -# ./lib/internal/js_stream_socket.js @nodejs/net -# ./src/cares_wrap.h @nodejs/net -# ./src/connect_wrap.* @nodejs/net -# ./src/connection_wrap.* @nodejs/net -# ./src/node_sockaddr* @nodejs/net -# ./src/tcp_wrap.* @nodejs/net -# ./src/udp_wrap.* @nodejs/net +/deps/cares @nodejs/net +/doc/api/dns.md @nodejs/net +/doc/api/dgram.md @nodejs/net +/doc/api/net.md @nodejs/net +/lib/dgram.js @nodejs/net +/lib/dns.js @nodejs/net +/lib/net.js @nodejs/net @nodejs/quic +/lib/internal/dgram.js @nodejs/net +/lib/internal/dns/* @nodejs/net +/lib/internal/net.js @nodejs/net +/lib/internal/socket_list.js @nodejs/net +/lib/internal/js_stream_socket.js @nodejs/net +/src/cares_wrap.h @nodejs/net +/src/connect_wrap.* @nodejs/net +/src/connection_wrap.* @nodejs/net +/src/node_sockaddr* @nodejs/net +/src/tcp_wrap.* @nodejs/net +/src/udp_wrap.* @nodejs/net # tls/crypto -# ./lib/internal/crypto/* @nodejs/crypto -# ./lib/internal/tls.js @nodejs/crypto @nodejs/net -# ./lib/crypto.js @nodejs/crypto -# ./lib/tls.js @nodejs/crypto @nodejs/net -# ./src/node_crypto* @nodejs/crypto -# ./src/node_crypto_common* @nodejs/crypto @nodejs/quic +/lib/internal/crypto/* @nodejs/crypto +/lib/internal/tls.js @nodejs/crypto @nodejs/net +/lib/crypto.js @nodejs/crypto +/lib/tls.js @nodejs/crypto @nodejs/net +/src/node_crypto* @nodejs/crypto +/src/node_crypto_common* @nodejs/crypto @nodejs/quic # http -# ./deps/llhttp/* @nodejs/http @nodejs/net -# ./doc/api/http.md @nodejs/http @nodejs/net -# ./doc/api/http2.md @nodejs/http @nodejs/net -# ./lib/_http_* @nodejs/http @nodejs/net -# ./lib/http.js @nodejs/http @nodejs/net -# ./lib/https.js @nodejs/crypto @nodejs/net @nodejs/http -# ./src/node_http_common* @nodejs/http @nodejs/http2 @nodejs/quic @nodejs/net -# ./src/node_http_parser.cc @nodejs/http @nodejs/net +/deps/llhttp/* @nodejs/http @nodejs/net +/doc/api/http.md @nodejs/http @nodejs/net +/doc/api/http2.md @nodejs/http @nodejs/net +/lib/_http_* @nodejs/http @nodejs/net +/lib/http.js @nodejs/http @nodejs/net +/lib/https.js @nodejs/crypto @nodejs/net @nodejs/http +/src/node_http_common* @nodejs/http @nodejs/http2 @nodejs/quic @nodejs/net +/src/node_http_parser.cc @nodejs/http @nodejs/net # http2 -# ./deps/nghttp2/* @nodejs/http2 @nodejs/net -# ./doc/api/http2.md @nodejs/http2 @nodejs/net -# ./lib/http2.js @nodejs/http2 @nodejs/net -# ./lib/internal/http2/* @nodejs/http2 @nodejs/net -# ./src/node_http2* @nodejs/http2 @nodejs/net -# ./src/node_mem* @nodejs/http2 +/deps/nghttp2/* @nodejs/http2 @nodejs/net +/doc/api/http2.md @nodejs/http2 @nodejs/net +/lib/http2.js @nodejs/http2 @nodejs/net +/lib/internal/http2/* @nodejs/http2 @nodejs/net +/src/node_http2* @nodejs/http2 @nodejs/net +/src/node_mem* @nodejs/http2 # quic @@ -68,16 +68,16 @@ # modules -# ./doc/api/modules.md @nodejs/modules -# ./doc/api/esm.md @nodejs/modules 
-# ./lib/module.js @nodejs/modules -# ./lib/internal/modules/* @nodejs/modules -# ./lib/internal/bootstrap/loaders.js @nodejs/modules -# ./src/module_wrap* @nodejs/modules @nodejs/vm +/doc/api/modules.md @nodejs/modules +/doc/api/esm.md @nodejs/modules +/lib/module.js @nodejs/modules +/lib/internal/modules/* @nodejs/modules +/lib/internal/bootstrap/loaders.js @nodejs/modules +/src/module_wrap* @nodejs/modules @nodejs/vm # N-API -# /src/node_api* @nodejs/n-api -# /src/js_native_api* @nodejs/n-api -# /doc/guides/adding-new-napi-api.md @nodejs/n-api -# /doc/api/n-api.md @nodejs/n-api +/src/node_api* @nodejs/n-api +/src/js_native_api* @nodejs/n-api +/doc/guides/adding-new-napi-api.md @nodejs/n-api +/doc/api/n-api.md @nodejs/n-api diff --git a/.github/workflows/auto-start-ci.yml b/.github/workflows/auto-start-ci.yml new file mode 100644 index 00000000000000..0bc9e7c1c65fc2 --- /dev/null +++ b/.github/workflows/auto-start-ci.yml @@ -0,0 +1,65 @@ +--- +name: Auto Start CI + +on: + schedule: + # `schedule` event is used instead of `pull_request` because when a + # `pull_requesst` event is triggered on a PR from a fork, GITHUB_TOKEN will + # be read-only, and the Action won't have access to any other repository + # secrets, which it needs to access Jenkins API. Runs every five minutes + # (fastest the scheduler can run). Five minutes is optimistic, it can take + # longer to run. + - cron: "*/5 * * * *" + +jobs: + startCI: + if: github.repository == 'nodejs/node' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + + # Install dependencies + - name: Install jq + run: sudo apt-get install jq -y + - name: Install Node.js + uses: actions/setup-node@v2-beta + with: + node-version: '12' + - name: Install node-core-utils + run: npm install -g node-core-utils + + - name: Set variables + run: | + echo "::set-env name=REPOSITORY::$(echo ${{ github.repository }} | cut -d/ -f2)" + echo "::set-env name=OWNER::${{ github.repository_owner }}" + + # Get Pull Requests + - name: Get Pull Requests + uses: octokit/graphql-action@v2.x + id: get_prs_for_ci + with: + query: | + query prs($owner:String!, $repo:String!) 
{ + repository(owner:$owner, name:$repo) { + pullRequests(labels: ["request-ci"], states: OPEN, last: 100) { + nodes { + number + } + } + } + } + owner: ${{ env.OWNER }} + repo: ${{ env.REPOSITORY }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup node-core-utils + run: | + ncu-config set username ${{ secrets.JENKINS_USER }} + ncu-config set token none + ncu-config set jenkins_token ${{ secrets.JENKINS_TOKEN }} + ncu-config set owner ${{ env.OWNER }} + ncu-config set repo ${{ env.REPOSITORY }} + + - name: Start CI + run: ./tools/actions/start-ci.sh ${{ secrets.GITHUB_TOKEN }} ${{ env.OWNER }} ${{ env.REPOSITORY }} $(echo '${{ steps.get_prs_for_ci.outputs.data }}' | jq '.repository.pullRequests.nodes | map(.number) | .[]') diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 699efb39d17e35..0caf0bdf961914 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -5,6 +5,8 @@ on: push: branches: - master + - v[0-9]+.x-staging + - v[0-9]+.x env: FLAKY_TESTS: dontcare diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml index 76ab1ec8ced2cf..ff6788e06b7858 100644 --- a/.github/workflows/build-windows.yml +++ b/.github/workflows/build-windows.yml @@ -5,6 +5,9 @@ on: push: branches: - master + - canary + - v[0-9]+.x-staging + - v[0-9]+.x env: PYTHON_VERSION: 3.8 diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index afab3284879499..2a9a722e9c0bfc 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -5,6 +5,8 @@ on: push: branches: - master + - v[0-9]+.x-staging + - v[0-9]+.x env: PYTHON_VERSION: 3.8 diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml index 4b4b305e7b25c9..403a47614f8005 100644 --- a/.github/workflows/misc.yml +++ b/.github/workflows/misc.yml @@ -5,6 +5,8 @@ on: push: branches: - master + - v[0-9]+.x-staging + - v[0-9]+.x env: NODE_VERSION: 12.x diff --git a/.github/workflows/test-asan.yml b/.github/workflows/test-asan.yml index d2d619f786cb0e..96975024e399f5 100644 --- a/.github/workflows/test-asan.yml +++ b/.github/workflows/test-asan.yml @@ -3,12 +3,15 @@ name: test-asan on: push: branches: - - master + - master + - canary + - v[0-9]+.x-staging + - v[0-9]+.x paths-ignore: - - 'doc/**' + - 'doc/**' pull_request: paths-ignore: - - 'doc/**' + - 'doc/**' env: PYTHON_VERSION: 3.8 diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index 163f5a1fb7aa90..f5bedead2afa01 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -5,6 +5,9 @@ on: push: branches: - master + - canary + - v[0-9]+.x-staging + - v[0-9]+.x env: PYTHON_VERSION: 3.8 diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index 97074c5238659c..f4a2454e37f635 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -5,6 +5,9 @@ on: push: branches: - master + - canary + - v[0-9]+.x-staging + - v[0-9]+.x env: PYTHON_VERSION: 3.8 diff --git a/.mailmap b/.mailmap index 4a73a2b352a2e7..64f74f4b08a4b9 100644 --- a/.mailmap +++ b/.mailmap @@ -248,10 +248,11 @@ Marti Martz Martial James Jefferson Martijn Schrage Oblosys Masato Ohba -Matheus Marchini -Matheus Marchini -Matheus Marchini -Matheus Marchini +Mary Marchini +Mary Marchini +Mary Marchini +Mary Marchini +Mary Marchini Matt Lang matt-in-a-hat Matt Reed matthewreed26 Matteo Collina diff --git a/AUTHORS b/AUTHORS index e6af564013f31a..839039c541e56d 
100644 --- a/AUTHORS +++ b/AUTHORS @@ -1983,7 +1983,7 @@ Pierre-Loic Doulcet Fran Herrero Francois KY suman-mitra -Matheus Marchini +Mary Marchini neta Whien Chiahao Lin diff --git a/CHANGELOG.md b/CHANGELOG.md index c7f796691c38d1..446edd44508f3e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,7 +31,8 @@ release. -14.7.0
+14.8.0
+14.7.0
14.6.0
14.5.0
14.4.0
diff --git a/README.md b/README.md index 84ab9cd5862138..75ecb02a2b85e0 100644 --- a/README.md +++ b/README.md @@ -184,7 +184,7 @@ For information about the governance of the Node.js project, see * [mhdawson](https://github.com/mhdawson) - **Michael Dawson** <michael_dawson@ca.ibm.com> (he/him) * [mmarchini](https://github.com/mmarchini) - -**Matheus Marchini** <mat@mmarchini.me> +**Mary Marchini** <oss@mmarchini.me> (she/her) * [MylesBorins](https://github.com/MylesBorins) - **Myles Borins** <myles.borins@gmail.com> (he/him) * [targos](https://github.com/targos) - @@ -283,6 +283,8 @@ For information about the governance of the Node.js project, see **Danielle Adams** <adamzdanielle@gmail.com> (she/her) * [davisjam](https://github.com/davisjam) - **Jamie Davis** <davisjam@vt.edu> (he/him) +* [DerekNonGeneric](https://github.com/DerekNonGeneric) - +**Derek Lewis** <DerekNonGeneric@inf.is> (he/him) * [devnexen](https://github.com/devnexen) - **David Carlier** <devnexen@gmail.com> * [devsnek](https://github.com/devsnek) - @@ -360,7 +362,7 @@ For information about the governance of the Node.js project, see * [misterdjules](https://github.com/misterdjules) - **Julien Gilli** <jgilli@nodejs.org> * [mmarchini](https://github.com/mmarchini) - -**Matheus Marchini** <mat@mmarchini.me> +**Mary Marchini** <oss@mmarchini.me> (she/her) * [mscdex](https://github.com/mscdex) - **Brian White** <mscdex@mscdex.net> * [MylesBorins](https://github.com/MylesBorins) - @@ -381,6 +383,8 @@ For information about the governance of the Node.js project, see **Pranshu Srivastava** <rexagod@gmail.com> (he/him) * [richardlau](https://github.com/richardlau) - **Richard Lau** <riclau@uk.ibm.com> +* [rickyes](https://github.com/rickyes) - +**Ricky Zhou** <0x19951125@gmail.com> (he/him) * [ronag](https://github.com/ronag) - **Robert Nagy** <ronagy@icloud.com> * [ronkorving](https://github.com/ronkorving) - @@ -583,6 +587,8 @@ Primary GPG keys for Node.js Releasers (some Releasers sign with subkeys): `DD8F2338BAE7501E3DD5AC78C273792F7D83545D` * **Ruben Bridgewater** <ruben@bridgewater.de> `A48C2BEE680E841632CD4E44F07496B3EB3C1762` +* **Ruy Adorno** <ruyadorno@hotmail.com> +`108F52B48DB57BB0CC439B2997B01419BD92F80A` * **Shelley Vohr** <shelley.vohr@gmail.com> `B9E2F5981AA6E0CD28160D9FF13993A75599653C` @@ -597,6 +603,7 @@ gpg --keyserver pool.sks-keyservers.net --recv-keys C4F0DFFF4E8C1A8236409D08E73B gpg --keyserver pool.sks-keyservers.net --recv-keys C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C gpg --keyserver pool.sks-keyservers.net --recv-keys DD8F2338BAE7501E3DD5AC78C273792F7D83545D gpg --keyserver pool.sks-keyservers.net --recv-keys A48C2BEE680E841632CD4E44F07496B3EB3C1762 +gpg --keyserver pool.sks-keyservers.net --recv-keys 108F52B48DB57BB0CC439B2997B01419BD92F80A gpg --keyserver pool.sks-keyservers.net --recv-keys B9E2F5981AA6E0CD28160D9FF13993A75599653C ``` diff --git a/SECURITY.md b/SECURITY.md index 64714043db7e3b..e121072ffe4381 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,6 +1,6 @@ # Security -## Reporting a Bug in Node.js +## Reporting a bug in Node.js Report security bugs in Node.js via [HackerOne](https://hackerone.com/nodejs). @@ -13,13 +13,13 @@ you informed of the progress being made towards a fix and full announcement, and may ask for additional information or guidance surrounding the reported issue. -### Node.js Bug Bounty Program +### Node.js bug bounty program The Node.js project engages in an official bug bounty program for security researchers and responsible public disclosures. 
The program is managed through the HackerOne platform. See for further details. -## Reporting a Bug in a third party module +## Reporting a bug in a third party module Security bugs in third party modules should be reported to their respective maintainers and should also be coordinated through the Node.js Ecosystem @@ -31,7 +31,7 @@ Details regarding this process can be found in the Thank you for improving the security of Node.js and its ecosystem. Your efforts and responsible disclosure are greatly appreciated and will be acknowledged. -## Disclosure Policy +## Disclosure policy Here is the security disclosure policy for Node.js @@ -60,14 +60,14 @@ Here is the security disclosure policy for Node.js the release process above to ensure that the disclosure is handled in a consistent manner. -## Receiving Security Updates +## Receiving security updates Security notifications will be distributed via the following methods. * * -## Comments on this Policy +## Comments on this policy If you have suggestions on how this process could be improved please submit a [pull request](https://github.com/nodejs/nodejs.org) or diff --git a/benchmark/async_hooks/promises.js b/benchmark/async_hooks/promises.js index 9927ec0dc504e9..d60ae70192c8cb 100644 --- a/benchmark/async_hooks/promises.js +++ b/benchmark/async_hooks/promises.js @@ -37,10 +37,11 @@ const bench = common.createBenchmark(main, { ] }); +const err = new Error('foobar'); async function run(n) { for (let i = 0; i < n; i++) { await new Promise((resolve) => resolve()) - .then(() => { throw new Error('foobar'); }) + .then(() => { throw err; }) .catch((e) => e); } } diff --git a/benchmark/napi/type-tag-check/binding.gyp b/benchmark/napi/type-tag-check/binding.gyp new file mode 100644 index 00000000000000..595ab325233661 --- /dev/null +++ b/benchmark/napi/type-tag-check/binding.gyp @@ -0,0 +1,8 @@ +{ + 'targets': [ + { + 'target_name': 'binding', + 'sources': [ '../type-tag/binding.c' ] + } + ] +} diff --git a/benchmark/napi/type-tag-check/index.js b/benchmark/napi/type-tag-check/index.js new file mode 100644 index 00000000000000..346dfb7812dea1 --- /dev/null +++ b/benchmark/napi/type-tag-check/index.js @@ -0,0 +1,18 @@ +'use strict'; +const common = require('../../common.js'); + +let binding; +try { + binding = require(`./build/${common.buildType}/binding`); +} catch { + console.error(`${__filename}: Binding failed to load`); + process.exit(0); +} + +const bench = common.createBenchmark(main, { + n: [1e5, 1e6, 1e7], +}); + +function main({ n }) { + binding.checkObjectTag(n, bench, bench.start, bench.end); +} diff --git a/benchmark/napi/type-tag/binding.c b/benchmark/napi/type-tag/binding.c new file mode 100644 index 00000000000000..7bc8b5d7502e8b --- /dev/null +++ b/benchmark/napi/type-tag/binding.c @@ -0,0 +1,84 @@ +#include +#define NAPI_EXPERIMENTAL +#include + +#define NAPI_CALL(call) \ + do { \ + napi_status status = call; \ + assert(status == napi_ok && #call " failed"); \ + } while (0); + +#define EXPORT_FUNC(env, exports, name, func) \ + do { \ + napi_value js_func; \ + NAPI_CALL(napi_create_function((env), \ + (name), \ + NAPI_AUTO_LENGTH, \ + (func), \ + NULL, \ + &js_func)); \ + NAPI_CALL(napi_set_named_property((env), \ + (exports), \ + (name), \ + js_func)); \ + } while (0); + +static const napi_type_tag tag = { + 0xe7ecbcd5954842f6, 0x9e75161c9bf27282 +}; + +static napi_value TagObject(napi_env env, napi_callback_info info) { + size_t argc = 4; + napi_value argv[4]; + uint32_t n; + uint32_t index; + napi_handle_scope scope; + + 
NAPI_CALL(napi_get_cb_info(env, info, &argc, argv, NULL, NULL)); + NAPI_CALL(napi_get_value_uint32(env, argv[0], &n)); + NAPI_CALL(napi_open_handle_scope(env, &scope)); + napi_value objects[n]; + for (index = 0; index < n; index++) { + NAPI_CALL(napi_create_object(env, &objects[index])); + } + + // Time the object tag creation. + NAPI_CALL(napi_call_function(env, argv[1], argv[2], 0, NULL, NULL)); + for (index = 0; index < n; index++) { + NAPI_CALL(napi_type_tag_object(env, objects[index], &tag)); + } + NAPI_CALL(napi_call_function(env, argv[1], argv[3], 1, &argv[0], NULL)); + + NAPI_CALL(napi_close_handle_scope(env, scope)); + return NULL; +} + +static napi_value CheckObjectTag(napi_env env, napi_callback_info info) { + size_t argc = 4; + napi_value argv[4]; + uint32_t n; + uint32_t index; + bool is_of_type; + + NAPI_CALL(napi_get_cb_info(env, info, &argc, argv, NULL, NULL)); + NAPI_CALL(napi_get_value_uint32(env, argv[0], &n)); + napi_value object; + NAPI_CALL(napi_create_object(env, &object)); + NAPI_CALL(napi_type_tag_object(env, object, &tag)); + + // Time the object tag checking. + NAPI_CALL(napi_call_function(env, argv[1], argv[2], 0, NULL, NULL)); + for (index = 0; index < n; index++) { + NAPI_CALL(napi_check_object_type_tag(env, object, &tag, &is_of_type)); + assert(is_of_type && " type mismatch"); + } + NAPI_CALL(napi_call_function(env, argv[1], argv[3], 1, &argv[0], NULL)); + + return NULL; +} + +NAPI_MODULE_INIT() { + EXPORT_FUNC(env, exports, "tagObject", TagObject); + EXPORT_FUNC(env, exports, "checkObjectTag", CheckObjectTag); + return exports; +} diff --git a/benchmark/napi/type-tag/binding.gyp b/benchmark/napi/type-tag/binding.gyp new file mode 100644 index 00000000000000..413621ade335a1 --- /dev/null +++ b/benchmark/napi/type-tag/binding.gyp @@ -0,0 +1,8 @@ +{ + 'targets': [ + { + 'target_name': 'binding', + 'sources': [ 'binding.c' ] + } + ] +} diff --git a/benchmark/napi/type-tag/check-object-tag.js b/benchmark/napi/type-tag/check-object-tag.js new file mode 100644 index 00000000000000..346dfb7812dea1 --- /dev/null +++ b/benchmark/napi/type-tag/check-object-tag.js @@ -0,0 +1,18 @@ +'use strict'; +const common = require('../../common.js'); + +let binding; +try { + binding = require(`./build/${common.buildType}/binding`); +} catch { + console.error(`${__filename}: Binding failed to load`); + process.exit(0); +} + +const bench = common.createBenchmark(main, { + n: [1e5, 1e6, 1e7], +}); + +function main({ n }) { + binding.checkObjectTag(n, bench, bench.start, bench.end); +} diff --git a/benchmark/napi/type-tag/index.js b/benchmark/napi/type-tag/index.js new file mode 100644 index 00000000000000..3f85b9af8e7d59 --- /dev/null +++ b/benchmark/napi/type-tag/index.js @@ -0,0 +1,18 @@ +'use strict'; +const common = require('../../common.js'); + +let binding; +try { + binding = require(`./build/${common.buildType}/binding`); +} catch { + console.error(`${__filename}: Binding failed to load`); + process.exit(0); +} + +const bench = common.createBenchmark(main, { + n: [1e3, 1e4, 1e5], +}); + +function main({ n }) { + binding.tagObject(n, bench, bench.start, bench.end); +} diff --git a/deps/uvwasi/include/uvwasi.h b/deps/uvwasi/include/uvwasi.h index 0090313c8af2eb..9a0f8aa3c61711 100644 --- a/deps/uvwasi/include/uvwasi.h +++ b/deps/uvwasi/include/uvwasi.h @@ -10,7 +10,7 @@ extern "C" { #define UVWASI_VERSION_MAJOR 0 #define UVWASI_VERSION_MINOR 0 -#define UVWASI_VERSION_PATCH 9 +#define UVWASI_VERSION_PATCH 10 #define UVWASI_VERSION_HEX ((UVWASI_VERSION_MAJOR << 16) | \ 
(UVWASI_VERSION_MINOR << 8) | \ (UVWASI_VERSION_PATCH)) @@ -50,8 +50,8 @@ typedef struct uvwasi_s { } uvwasi_t; typedef struct uvwasi_preopen_s { - char* mapped_path; - char* real_path; + const char* mapped_path; + const char* real_path; } uvwasi_preopen_t; typedef struct uvwasi_options_s { @@ -70,6 +70,7 @@ typedef struct uvwasi_options_s { /* Embedder API. */ uvwasi_errno_t uvwasi_init(uvwasi_t* uvwasi, uvwasi_options_t* options); void uvwasi_destroy(uvwasi_t* uvwasi); +void uvwasi_options_init(uvwasi_options_t* options); /* Use int instead of uv_file to avoid needing uv.h */ uvwasi_errno_t uvwasi_embedder_remap_fd(uvwasi_t* uvwasi, const uvwasi_fd_t fd, diff --git a/deps/uvwasi/include/wasi_serdes.h b/deps/uvwasi/include/wasi_serdes.h index f927b82bac9cbc..ed80e4a88e6ee4 100644 --- a/deps/uvwasi/include/wasi_serdes.h +++ b/deps/uvwasi/include/wasi_serdes.h @@ -5,21 +5,20 @@ /* Basic uint{8,16,32,64}_t read/write functions. */ -#define BASIC_TYPE_(name, type) \ +#define BASIC_TYPE(name, type) \ void uvwasi_serdes_write_##name(void* ptr, size_t offset, type value); \ type uvwasi_serdes_read_##name(const void* ptr, size_t offset); \ -#define BASIC_TYPE(type) BASIC_TYPE_(type, type) -#define BASIC_TYPE_UVWASI(type) BASIC_TYPE_(type, uvwasi_##type) +#define BASIC_TYPE_UVWASI(type) BASIC_TYPE(type, uvwasi_##type) #define UVWASI_SERDES_SIZE_uint8_t sizeof(uint8_t) -BASIC_TYPE(uint8_t) +BASIC_TYPE(uint8_t, uint8_t) #define UVWASI_SERDES_SIZE_uint16_t sizeof(uint16_t) -BASIC_TYPE(uint16_t) +BASIC_TYPE(uint16_t, uint16_t) #define UVWASI_SERDES_SIZE_uint32_t sizeof(uint32_t) -BASIC_TYPE(uint32_t) +BASIC_TYPE(uint32_t, uint32_t) #define UVWASI_SERDES_SIZE_uint64_t sizeof(uint64_t) -BASIC_TYPE(uint64_t) +BASIC_TYPE(uint64_t, uint64_t) #define UVWASI_SERDES_SIZE_advice_t sizeof(uvwasi_advice_t) BASIC_TYPE_UVWASI(advice_t) @@ -80,7 +79,6 @@ BASIC_TYPE_UVWASI(whence_t) #undef BASIC_TYPE_UVWASI #undef BASIC_TYPE -#undef BASIC_TYPE_ /* WASI structure read/write functions. */ diff --git a/deps/uvwasi/src/debug.h b/deps/uvwasi/src/debug.h index 16bc2732ec90cd..8ef5a99a231518 100644 --- a/deps/uvwasi/src/debug.h +++ b/deps/uvwasi/src/debug.h @@ -2,12 +2,14 @@ #define __UVWASI_DEBUG_H__ #ifdef UVWASI_DEBUG_LOG +#ifndef __STDC_FORMAT_MACROS # define __STDC_FORMAT_MACROS +#endif # include -# define DEBUG(fmt, ...) \ +# define UVWASI_DEBUG(fmt, ...) \ do { fprintf(stderr, fmt, __VA_ARGS__); } while (0) #else -# define DEBUG(fmt, ...) +# define UVWASI_DEBUG(fmt, ...) #endif #endif /* __UVWASI_DEBUG_H__ */ diff --git a/deps/uvwasi/src/uvwasi.c b/deps/uvwasi/src/uvwasi.c index fc8f0ee4844b9e..acc25c3dba2f73 100644 --- a/deps/uvwasi/src/uvwasi.c +++ b/deps/uvwasi/src/uvwasi.c @@ -29,6 +29,78 @@ # undef POSIX_FADV_NORMAL #endif +#define VALIDATE_FSTFLAGS_OR_RETURN(flags) \ + do { \ + if ((flags) & ~(UVWASI_FILESTAT_SET_ATIM | \ + UVWASI_FILESTAT_SET_ATIM_NOW | \ + UVWASI_FILESTAT_SET_MTIM | \ + UVWASI_FILESTAT_SET_MTIM_NOW)) { \ + return UVWASI_EINVAL; \ + } \ + } while (0) + +static uvwasi_errno_t uvwasi__get_filestat_set_times( + uvwasi_timestamp_t* st_atim, + uvwasi_timestamp_t* st_mtim, + uvwasi_fstflags_t fst_flags, + uv_file* fd, + char* path + ) { + uvwasi_filestat_t stat; + uvwasi_timestamp_t now; + uvwasi_errno_t err; + uv_fs_t req; + int r; + + /* Check if either value requires the current time. 
*/ + if ((fst_flags & + (UVWASI_FILESTAT_SET_ATIM_NOW | UVWASI_FILESTAT_SET_MTIM_NOW)) != 0) { + err = uvwasi__clock_gettime_realtime(&now); + if (err != UVWASI_ESUCCESS) + return err; + } + + /* Check if either value is omitted. libuv doesn't have an 'omitted' option, + so get the current stats for the file. This approach isn't perfect, but it + will do until libuv can get better support here. */ + if ((fst_flags & + (UVWASI_FILESTAT_SET_ATIM | UVWASI_FILESTAT_SET_ATIM_NOW)) == 0 || + (fst_flags & + (UVWASI_FILESTAT_SET_MTIM | UVWASI_FILESTAT_SET_MTIM_NOW)) == 0) { + + if (fd != NULL) + r = uv_fs_fstat(NULL, &req, *fd, NULL); + else + r = uv_fs_lstat(NULL, &req, path, NULL); + + if (r != 0) { + uv_fs_req_cleanup(&req); + return uvwasi__translate_uv_error(r); + } + + uvwasi__stat_to_filestat(&req.statbuf, &stat); + uv_fs_req_cleanup(&req); + } + + /* Choose the provided time or 'now' and convert WASI timestamps from + nanoseconds to seconds due to libuv. */ + if ((fst_flags & UVWASI_FILESTAT_SET_ATIM_NOW) != 0) + *st_atim = now / NANOS_PER_SEC; + else if ((fst_flags & UVWASI_FILESTAT_SET_ATIM) != 0) + *st_atim = *st_atim / NANOS_PER_SEC; + else + *st_atim = stat.st_atim / NANOS_PER_SEC; + + if ((fst_flags & UVWASI_FILESTAT_SET_MTIM_NOW) != 0) + *st_mtim = now / NANOS_PER_SEC; + else if ((fst_flags & UVWASI_FILESTAT_SET_MTIM) != 0) + *st_mtim = *st_mtim / NANOS_PER_SEC; + else + *st_mtim = stat.st_mtim / NANOS_PER_SEC; + + return UVWASI_ESUCCESS; +} + static void* default_malloc(size_t size, void* mem_user_data) { return malloc(size); } @@ -308,6 +380,23 @@ void uvwasi_destroy(uvwasi_t* uvwasi) { } +void uvwasi_options_init(uvwasi_options_t* options) { + if (options == NULL) + return; + + options->in = 0; + options->out = 1; + options->err = 2; + options->fd_table_size = 3; + options->argc = 0; + options->argv = NULL; + options->envp = NULL; + options->preopenc = 0; + options->preopens = NULL; + options->allocator = NULL; +} + + uvwasi_errno_t uvwasi_embedder_remap_fd(uvwasi_t* uvwasi, const uvwasi_fd_t fd, uv_file new_host_fd) { @@ -330,10 +419,10 @@ uvwasi_errno_t uvwasi_embedder_remap_fd(uvwasi_t* uvwasi, uvwasi_errno_t uvwasi_args_get(uvwasi_t* uvwasi, char** argv, char* argv_buf) { uvwasi_size_t i; - DEBUG("uvwasi_args_get(uvwasi=%p, argv=%p, argv_buf=%p)\n", - uvwasi, - argv, - argv_buf); + UVWASI_DEBUG("uvwasi_args_get(uvwasi=%p, argv=%p, argv_buf=%p)\n", + uvwasi, + argv, + argv_buf); if (uvwasi == NULL || argv == NULL || argv_buf == NULL) return UVWASI_EINVAL; @@ -350,10 +439,10 @@ uvwasi_errno_t uvwasi_args_get(uvwasi_t* uvwasi, char** argv, char* argv_buf) { uvwasi_errno_t uvwasi_args_sizes_get(uvwasi_t* uvwasi, uvwasi_size_t* argc, uvwasi_size_t* argv_buf_size) { - DEBUG("uvwasi_args_sizes_get(uvwasi=%p, argc=%p, argv_buf_size=%p)\n", - uvwasi, - argc, - argv_buf_size); + UVWASI_DEBUG("uvwasi_args_sizes_get(uvwasi=%p, argc=%p, argv_buf_size=%p)\n", + uvwasi, + argc, + argv_buf_size); if (uvwasi == NULL || argc == NULL || argv_buf_size == NULL) return UVWASI_EINVAL; @@ -367,10 +456,10 @@ uvwasi_errno_t uvwasi_args_sizes_get(uvwasi_t* uvwasi, uvwasi_errno_t uvwasi_clock_res_get(uvwasi_t* uvwasi, uvwasi_clockid_t clock_id, uvwasi_timestamp_t* resolution) { - DEBUG("uvwasi_clock_res_get(uvwasi=%p, clock_id=%d, resolution=%p)\n", - uvwasi, - clock_id, - resolution); + UVWASI_DEBUG("uvwasi_clock_res_get(uvwasi=%p, clock_id=%d, resolution=%p)\n", + uvwasi, + clock_id, + resolution); if (uvwasi == NULL || resolution == NULL) return UVWASI_EINVAL; @@ -394,12 +483,12 @@ uvwasi_errno_t 
uvwasi_clock_time_get(uvwasi_t* uvwasi, uvwasi_clockid_t clock_id, uvwasi_timestamp_t precision, uvwasi_timestamp_t* time) { - DEBUG("uvwasi_clock_time_get(uvwasi=%p, clock_id=%d, " - "precision=%"PRIu64", time=%p)\n", - uvwasi, - clock_id, - precision, - time); + UVWASI_DEBUG("uvwasi_clock_time_get(uvwasi=%p, clock_id=%d, " + "precision=%"PRIu64", time=%p)\n", + uvwasi, + clock_id, + precision, + time); if (uvwasi == NULL || time == NULL) return UVWASI_EINVAL; @@ -425,10 +514,11 @@ uvwasi_errno_t uvwasi_environ_get(uvwasi_t* uvwasi, char* environ_buf) { uvwasi_size_t i; - DEBUG("uvwasi_environ_get(uvwasi=%p, environment=%p, environ_buf=%p)\n", - uvwasi, - environment, - environ_buf); + UVWASI_DEBUG("uvwasi_environ_get(uvwasi=%p, environment=%p, " + "environ_buf=%p)\n", + uvwasi, + environment, + environ_buf); if (uvwasi == NULL || environment == NULL || environ_buf == NULL) return UVWASI_EINVAL; @@ -445,11 +535,11 @@ uvwasi_errno_t uvwasi_environ_get(uvwasi_t* uvwasi, uvwasi_errno_t uvwasi_environ_sizes_get(uvwasi_t* uvwasi, uvwasi_size_t* environ_count, uvwasi_size_t* environ_buf_size) { - DEBUG("uvwasi_environ_sizes_get(uvwasi=%p, environ_count=%p, " - "environ_buf_size=%p)\n", - uvwasi, - environ_count, - environ_buf_size); + UVWASI_DEBUG("uvwasi_environ_sizes_get(uvwasi=%p, environ_count=%p, " + "environ_buf_size=%p)\n", + uvwasi, + environ_count, + environ_buf_size); if (uvwasi == NULL || environ_count == NULL || environ_buf_size == NULL) return UVWASI_EINVAL; @@ -472,13 +562,13 @@ uvwasi_errno_t uvwasi_fd_advise(uvwasi_t* uvwasi, int r; #endif /* POSIX_FADV_NORMAL */ - DEBUG("uvwasi_fd_advise(uvwasi=%p, fd=%d, offset=%"PRIu64", len=%"PRIu64", " - "advice=%d)\n", - uvwasi, - fd, - offset, - len, - advice); + UVWASI_DEBUG("uvwasi_fd_advise(uvwasi=%p, fd=%d, offset=%"PRIu64", " + "len=%"PRIu64", advice=%d)\n", + uvwasi, + fd, + offset, + len, + advice); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -546,12 +636,12 @@ uvwasi_errno_t uvwasi_fd_allocate(uvwasi_t* uvwasi, uvwasi_errno_t err; int r; - DEBUG("uvwasi_fd_allocate(uvwasi=%p, fd=%d, offset=%"PRIu64", " - "len=%"PRIu64")\n", - uvwasi, - fd, - offset, - len); + UVWASI_DEBUG("uvwasi_fd_allocate(uvwasi=%p, fd=%d, offset=%"PRIu64", " + "len=%"PRIu64")\n", + uvwasi, + fd, + offset, + len); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -603,7 +693,7 @@ uvwasi_errno_t uvwasi_fd_close(uvwasi_t* uvwasi, uvwasi_fd_t fd) { uv_fs_t req; int r; - DEBUG("uvwasi_fd_close(uvwasi=%p, fd=%d)\n", uvwasi, fd); + UVWASI_DEBUG("uvwasi_fd_close(uvwasi=%p, fd=%d)\n", uvwasi, fd); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -637,7 +727,7 @@ uvwasi_errno_t uvwasi_fd_datasync(uvwasi_t* uvwasi, uvwasi_fd_t fd) { uv_fs_t req; int r; - DEBUG("uvwasi_fd_datasync(uvwasi=%p, fd=%d)\n", uvwasi, fd); + UVWASI_DEBUG("uvwasi_fd_datasync(uvwasi=%p, fd=%d)\n", uvwasi, fd); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -670,7 +760,10 @@ uvwasi_errno_t uvwasi_fd_fdstat_get(uvwasi_t* uvwasi, int r; #endif - DEBUG("uvwasi_fd_fdstat_get(uvwasi=%p, fd=%d, buf=%p)\n", uvwasi, fd, buf); + UVWASI_DEBUG("uvwasi_fd_fdstat_get(uvwasi=%p, fd=%d, buf=%p)\n", + uvwasi, + fd, + buf); if (uvwasi == NULL || buf == NULL) return UVWASI_EINVAL; @@ -703,10 +796,10 @@ uvwasi_errno_t uvwasi_fd_fdstat_set_flags(uvwasi_t* uvwasi, uvwasi_fd_t fd, uvwasi_fdflags_t flags) { #ifdef _WIN32 - DEBUG("uvwasi_fd_fdstat_set_flags(uvwasi=%p, fd=%d, flags=%d)\n", - uvwasi, - fd, - flags); + UVWASI_DEBUG("uvwasi_fd_fdstat_set_flags(uvwasi=%p, fd=%d, flags=%d)\n", + uvwasi, + fd, + flags); /* TODO(cjihrig): 
Windows is not supported. */ return UVWASI_ENOSYS; @@ -716,10 +809,10 @@ uvwasi_errno_t uvwasi_fd_fdstat_set_flags(uvwasi_t* uvwasi, int mapped_flags; int r; - DEBUG("uvwasi_fd_fdstat_set_flags(uvwasi=%p, fd=%d, flags=%d)\n", - uvwasi, - fd, - flags); + UVWASI_DEBUG("uvwasi_fd_fdstat_set_flags(uvwasi=%p, fd=%d, flags=%d)\n", + uvwasi, + fd, + flags); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -777,12 +870,12 @@ uvwasi_errno_t uvwasi_fd_fdstat_set_rights(uvwasi_t* uvwasi, struct uvwasi_fd_wrap_t* wrap; uvwasi_errno_t err; - DEBUG("uvwasi_fd_fdstat_set_rights(uvwasi=%p, fd=%d, " - "fs_rights_base=%"PRIu64", fs_rights_inheriting=%"PRIu64")\n", - uvwasi, - fd, - fs_rights_base, - fs_rights_inheriting); + UVWASI_DEBUG("uvwasi_fd_fdstat_set_rights(uvwasi=%p, fd=%d, " + "fs_rights_base=%"PRIu64", fs_rights_inheriting=%"PRIu64")\n", + uvwasi, + fd, + fs_rights_base, + fs_rights_inheriting); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -820,7 +913,10 @@ uvwasi_errno_t uvwasi_fd_filestat_get(uvwasi_t* uvwasi, uvwasi_errno_t err; int r; - DEBUG("uvwasi_fd_filestat_get(uvwasi=%p, fd=%d, buf=%p)\n", uvwasi, fd, buf); + UVWASI_DEBUG("uvwasi_fd_filestat_get(uvwasi=%p, fd=%d, buf=%p)\n", + uvwasi, + fd, + buf); if (uvwasi == NULL || buf == NULL) return UVWASI_EINVAL; @@ -857,10 +953,11 @@ uvwasi_errno_t uvwasi_fd_filestat_set_size(uvwasi_t* uvwasi, uvwasi_errno_t err; int r; - DEBUG("uvwasi_fd_filestat_set_size(uvwasi=%p, fd=%d, st_size=%"PRIu64")\n", - uvwasi, - fd, - st_size); + UVWASI_DEBUG("uvwasi_fd_filestat_set_size(uvwasi=%p, fd=%d, " + "st_size=%"PRIu64")\n", + uvwasi, + fd, + st_size); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -889,27 +986,25 @@ uvwasi_errno_t uvwasi_fd_filestat_set_times(uvwasi_t* uvwasi, uvwasi_timestamp_t st_atim, uvwasi_timestamp_t st_mtim, uvwasi_fstflags_t fst_flags) { - /* TODO(cjihrig): libuv does not currently support nanosecond precision. */ struct uvwasi_fd_wrap_t* wrap; + uvwasi_timestamp_t atim; + uvwasi_timestamp_t mtim; uv_fs_t req; uvwasi_errno_t err; int r; - DEBUG("uvwasi_fd_filestat_set_times(uvwasi=%p, fd=%d, st_atim=%"PRIu64", " - "st_mtim=%"PRIu64", fst_flags=%d)\n", - uvwasi, - fd, - st_atim, - st_mtim, - fst_flags); + UVWASI_DEBUG("uvwasi_fd_filestat_set_times(uvwasi=%p, fd=%d, " + "st_atim=%"PRIu64", st_mtim=%"PRIu64", fst_flags=%d)\n", + uvwasi, + fd, + st_atim, + st_mtim, + fst_flags); if (uvwasi == NULL) return UVWASI_EINVAL; - if (fst_flags & ~(UVWASI_FILESTAT_SET_ATIM | UVWASI_FILESTAT_SET_ATIM_NOW | - UVWASI_FILESTAT_SET_MTIM | UVWASI_FILESTAT_SET_MTIM_NOW)) { - return UVWASI_EINVAL; - } + VALIDATE_FSTFLAGS_OR_RETURN(fst_flags); err = uvwasi_fd_table_get(uvwasi->fds, fd, @@ -919,8 +1014,20 @@ uvwasi_errno_t uvwasi_fd_filestat_set_times(uvwasi_t* uvwasi, if (err != UVWASI_ESUCCESS) return err; - /* TODO(cjihrig): st_atim and st_mtim should not be unconditionally passed. */ - r = uv_fs_futime(NULL, &req, wrap->fd, st_atim, st_mtim, NULL); + atim = st_atim; + mtim = st_mtim; + err = uvwasi__get_filestat_set_times(&atim, + &mtim, + fst_flags, + &wrap->fd, + NULL); + if (err != UVWASI_ESUCCESS) { + uv_mutex_unlock(&wrap->mutex); + return err; + } + + /* libuv does not currently support nanosecond precision. 
*/ + r = uv_fs_futime(NULL, &req, wrap->fd, atim, mtim, NULL); uv_mutex_unlock(&wrap->mutex); uv_fs_req_cleanup(&req); @@ -944,14 +1051,14 @@ uvwasi_errno_t uvwasi_fd_pread(uvwasi_t* uvwasi, size_t uvread; int r; - DEBUG("uvwasi_fd_pread(uvwasi=%p, fd=%d, iovs=%p, iovs_len=%zu, " - "offset=%"PRIu64", nread=%p)\n", - uvwasi, - fd, - iovs, - iovs_len, - offset, - nread); + UVWASI_DEBUG("uvwasi_fd_pread(uvwasi=%p, fd=%d, iovs=%p, iovs_len=%d, " + "offset=%"PRIu64", nread=%p)\n", + uvwasi, + fd, + iovs, + iovs_len, + offset, + nread); if (uvwasi == NULL || iovs == NULL || nread == NULL) return UVWASI_EINVAL; @@ -990,10 +1097,10 @@ uvwasi_errno_t uvwasi_fd_prestat_get(uvwasi_t* uvwasi, struct uvwasi_fd_wrap_t* wrap; uvwasi_errno_t err; - DEBUG("uvwasi_fd_prestat_get(uvwasi=%p, fd=%d, buf=%p)\n", - uvwasi, - fd, - buf); + UVWASI_DEBUG("uvwasi_fd_prestat_get(uvwasi=%p, fd=%d, buf=%p)\n", + uvwasi, + fd, + buf); if (uvwasi == NULL || buf == NULL) return UVWASI_EINVAL; @@ -1023,11 +1130,12 @@ uvwasi_errno_t uvwasi_fd_prestat_dir_name(uvwasi_t* uvwasi, uvwasi_errno_t err; size_t size; - DEBUG("uvwasi_fd_prestat_dir_name(uvwasi=%p, fd=%d, path=%p, path_len=%zu)\n", - uvwasi, - fd, - path, - path_len); + UVWASI_DEBUG("uvwasi_fd_prestat_dir_name(uvwasi=%p, fd=%d, path=%p, " + "path_len=%d)\n", + uvwasi, + fd, + path, + path_len); if (uvwasi == NULL || path == NULL) return UVWASI_EINVAL; @@ -1067,14 +1175,14 @@ uvwasi_errno_t uvwasi_fd_pwrite(uvwasi_t* uvwasi, size_t uvwritten; int r; - DEBUG("uvwasi_fd_pwrite(uvwasi=%p, fd=%d, iovs=%p, iovs_len=%zu, " - "offset=%"PRIu64", nwritten=%p)\n", - uvwasi, - fd, - iovs, - iovs_len, - offset, - nwritten); + UVWASI_DEBUG("uvwasi_fd_pwrite(uvwasi=%p, fd=%d, iovs=%p, iovs_len=%d, " + "offset=%"PRIu64", nwritten=%p)\n", + uvwasi, + fd, + iovs, + iovs_len, + offset, + nwritten); if (uvwasi == NULL || iovs == NULL || nwritten == NULL) return UVWASI_EINVAL; @@ -1119,12 +1227,13 @@ uvwasi_errno_t uvwasi_fd_read(uvwasi_t* uvwasi, size_t uvread; int r; - DEBUG("uvwasi_fd_read(uvwasi=%p, fd=%d, iovs=%p, iovs_len=%zu, nread=%p)\n", - uvwasi, - fd, - iovs, - iovs_len, - nread); + UVWASI_DEBUG("uvwasi_fd_read(uvwasi=%p, fd=%d, iovs=%p, iovs_len=%d, " + "nread=%p)\n", + uvwasi, + fd, + iovs, + iovs_len, + nread); if (uvwasi == NULL || iovs == NULL || nread == NULL) return UVWASI_EINVAL; @@ -1174,14 +1283,14 @@ uvwasi_errno_t uvwasi_fd_readdir(uvwasi_t* uvwasi, int i; int r; - DEBUG("uvwasi_fd_readdir(uvwasi=%p, fd=%d, buf=%p, buf_len=%zu, " - "cookie=%"PRIu64", bufused=%p)\n", - uvwasi, - fd, - buf, - buf_len, - cookie, - bufused); + UVWASI_DEBUG("uvwasi_fd_readdir(uvwasi=%p, fd=%d, buf=%p, buf_len=%d, " + "cookie=%"PRIu64", bufused=%p)\n", + uvwasi, + fd, + buf, + buf_len, + cookie, + bufused); if (uvwasi == NULL || buf == NULL || bufused == NULL) return UVWASI_EINVAL; @@ -1305,7 +1414,10 @@ uvwasi_errno_t uvwasi_fd_readdir(uvwasi_t* uvwasi, uvwasi_errno_t uvwasi_fd_renumber(uvwasi_t* uvwasi, uvwasi_fd_t from, uvwasi_fd_t to) { - DEBUG("uvwasi_fd_renumber(uvwasi=%p, from=%d, to=%d)\n", uvwasi, from, to); + UVWASI_DEBUG("uvwasi_fd_renumber(uvwasi=%p, from=%d, to=%d)\n", + uvwasi, + from, + to); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -1322,13 +1434,13 @@ uvwasi_errno_t uvwasi_fd_seek(uvwasi_t* uvwasi, struct uvwasi_fd_wrap_t* wrap; uvwasi_errno_t err; - DEBUG("uvwasi_fd_seek(uvwasi=%p, fd=%d, offset=%"PRId64", " - "whence=%d, newoffset=%p)\n", - uvwasi, - fd, - offset, - whence, - newoffset); + UVWASI_DEBUG("uvwasi_fd_seek(uvwasi=%p, fd=%d, offset=%"PRId64", " + 
"whence=%d, newoffset=%p)\n", + uvwasi, + fd, + offset, + whence, + newoffset); if (uvwasi == NULL || newoffset == NULL) return UVWASI_EINVAL; @@ -1349,7 +1461,7 @@ uvwasi_errno_t uvwasi_fd_sync(uvwasi_t* uvwasi, uvwasi_fd_t fd) { uvwasi_errno_t err; int r; - DEBUG("uvwasi_fd_sync(uvwasi=%p, fd=%d)\n", uvwasi, fd); + UVWASI_DEBUG("uvwasi_fd_sync(uvwasi=%p, fd=%d)\n", uvwasi, fd); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -1379,7 +1491,10 @@ uvwasi_errno_t uvwasi_fd_tell(uvwasi_t* uvwasi, struct uvwasi_fd_wrap_t* wrap; uvwasi_errno_t err; - DEBUG("uvwasi_fd_tell(uvwasi=%p, fd=%d, offset=%p)\n", uvwasi, fd, offset); + UVWASI_DEBUG("uvwasi_fd_tell(uvwasi=%p, fd=%d, offset=%p)\n", + uvwasi, + fd, + offset); if (uvwasi == NULL || offset == NULL) return UVWASI_EINVAL; @@ -1406,13 +1521,13 @@ uvwasi_errno_t uvwasi_fd_write(uvwasi_t* uvwasi, size_t uvwritten; int r; - DEBUG("uvwasi_fd_write(uvwasi=%p, fd=%d, iovs=%p, iovs_len=%zu, " - "nwritten=%p)\n", - uvwasi, - fd, - iovs, - iovs_len, - nwritten); + UVWASI_DEBUG("uvwasi_fd_write(uvwasi=%p, fd=%d, iovs=%p, iovs_len=%d, " + "nwritten=%p)\n", + uvwasi, + fd, + iovs, + iovs_len, + nwritten); if (uvwasi == NULL || iovs == NULL || nwritten == NULL) return UVWASI_EINVAL; @@ -1451,12 +1566,12 @@ uvwasi_errno_t uvwasi_path_create_directory(uvwasi_t* uvwasi, uvwasi_errno_t err; int r; - DEBUG("uvwasi_path_create_directory(uvwasi=%p, fd=%d, path='%s', " - "path_len=%zu)\n", - uvwasi, - fd, - path, - path_len); + UVWASI_DEBUG("uvwasi_path_create_directory(uvwasi=%p, fd=%d, path='%s', " + "path_len=%d)\n", + uvwasi, + fd, + path, + path_len); if (uvwasi == NULL || path == NULL) return UVWASI_EINVAL; @@ -1501,14 +1616,14 @@ uvwasi_errno_t uvwasi_path_filestat_get(uvwasi_t* uvwasi, uvwasi_errno_t err; int r; - DEBUG("uvwasi_path_filestat_get(uvwasi=%p, fd=%d, flags=%d, path='%s', " - "path_len=%zu, buf=%p)\n", - uvwasi, - fd, - flags, - path, - path_len, - buf); + UVWASI_DEBUG("uvwasi_path_filestat_get(uvwasi=%p, fd=%d, flags=%d, " + "path='%s', path_len=%d, buf=%p)\n", + uvwasi, + fd, + flags, + path, + path_len, + buf); if (uvwasi == NULL || path == NULL || buf == NULL) return UVWASI_EINVAL; @@ -1530,7 +1645,7 @@ uvwasi_errno_t uvwasi_path_filestat_get(uvwasi_t* uvwasi, if (err != UVWASI_ESUCCESS) goto exit; - r = uv_fs_stat(NULL, &req, resolved_path, NULL); + r = uv_fs_lstat(NULL, &req, resolved_path, NULL); uvwasi__free(uvwasi, resolved_path); if (r != 0) { uv_fs_req_cleanup(&req); @@ -1555,31 +1670,30 @@ uvwasi_errno_t uvwasi_path_filestat_set_times(uvwasi_t* uvwasi, uvwasi_timestamp_t st_atim, uvwasi_timestamp_t st_mtim, uvwasi_fstflags_t fst_flags) { - /* TODO(cjihrig): libuv does not currently support nanosecond precision. 
*/ char* resolved_path; struct uvwasi_fd_wrap_t* wrap; + uvwasi_timestamp_t atim; + uvwasi_timestamp_t mtim; uv_fs_t req; uvwasi_errno_t err; int r; - DEBUG("uvwasi_path_filestat_set_times(uvwasi=%p, fd=%d, flags=%d, path='%s', " - "path_len=%zu, st_atim=%"PRIu64", st_mtim=%"PRIu64", fst_flags=%d)\n", - uvwasi, - fd, - flags, - path, - path_len, - st_atim, - st_mtim, - fst_flags); + UVWASI_DEBUG("uvwasi_path_filestat_set_times(uvwasi=%p, fd=%d, " + "flags=%d, path='%s', path_len=%d, " + "st_atim=%"PRIu64", st_mtim=%"PRIu64", fst_flags=%d)\n", + uvwasi, + fd, + flags, + path, + path_len, + st_atim, + st_mtim, + fst_flags); if (uvwasi == NULL || path == NULL) return UVWASI_EINVAL; - if (fst_flags & ~(UVWASI_FILESTAT_SET_ATIM | UVWASI_FILESTAT_SET_ATIM_NOW | - UVWASI_FILESTAT_SET_MTIM | UVWASI_FILESTAT_SET_MTIM_NOW)) { - return UVWASI_EINVAL; - } + VALIDATE_FSTFLAGS_OR_RETURN(fst_flags); err = uvwasi_fd_table_get(uvwasi->fds, fd, @@ -1598,8 +1712,20 @@ uvwasi_errno_t uvwasi_path_filestat_set_times(uvwasi_t* uvwasi, if (err != UVWASI_ESUCCESS) goto exit; - /* TODO(cjihrig): st_atim and st_mtim should not be unconditionally passed. */ - r = uv_fs_utime(NULL, &req, resolved_path, st_atim, st_mtim, NULL); + atim = st_atim; + mtim = st_mtim; + err = uvwasi__get_filestat_set_times(&atim, + &mtim, + fst_flags, + NULL, + resolved_path); + if (err != UVWASI_ESUCCESS) { + uvwasi__free(uvwasi, resolved_path); + goto exit; + } + + /* libuv does not currently support nanosecond precision. */ + r = uv_fs_lutime(NULL, &req, resolved_path, atim, mtim, NULL); uvwasi__free(uvwasi, resolved_path); uv_fs_req_cleanup(&req); @@ -1631,16 +1757,17 @@ uvwasi_errno_t uvwasi_path_link(uvwasi_t* uvwasi, uv_fs_t req; int r; - DEBUG("uvwasi_path_link(uvwasi=%p, old_fd=%d, old_flags=%d, old_path='%s', " - "old_path_len=%zu, new_fd=%d, new_path='%s', new_path_len=%zu)\n", - uvwasi, - old_fd, - old_flags, - old_path, - old_path_len, - new_fd, - new_path, - new_path_len); + UVWASI_DEBUG("uvwasi_path_link(uvwasi=%p, old_fd=%d, old_flags=%d, " + "old_path='%s', old_path_len=%d, new_fd=%d, new_path='%s', " + "new_path_len=%d)\n", + uvwasi, + old_fd, + old_flags, + old_path, + old_path_len, + new_fd, + new_path, + new_path_len); if (uvwasi == NULL || old_path == NULL || new_path == NULL) return UVWASI_EINVAL; @@ -1745,19 +1872,19 @@ uvwasi_errno_t uvwasi_path_open(uvwasi_t* uvwasi, int write; int r; - DEBUG("uvwasi_path_open(uvwasi=%p, dirfd=%d, dirflags=%d, path='%s', " - "path_len=%zu, o_flags=%d, fs_rights_base=%"PRIu64", " - "fs_rights_inheriting=%"PRIu64", fs_flags=%d, fd=%p)\n", - uvwasi, - dirfd, - dirflags, - path, - path_len, - o_flags, - fs_rights_base, - fs_rights_inheriting, - fs_flags, - fd); + UVWASI_DEBUG("uvwasi_path_open(uvwasi=%p, dirfd=%d, dirflags=%d, path='%s', " + "path_len=%d, o_flags=%d, fs_rights_base=%"PRIu64", " + "fs_rights_inheriting=%"PRIu64", fs_flags=%d, fd=%p)\n", + uvwasi, + dirfd, + dirflags, + path, + path_len, + o_flags, + fs_rights_base, + fs_rights_inheriting, + fs_flags, + fd); if (uvwasi == NULL || path == NULL || fd == NULL) return UVWASI_EINVAL; @@ -1892,15 +2019,15 @@ uvwasi_errno_t uvwasi_path_readlink(uvwasi_t* uvwasi, size_t len; int r; - DEBUG("uvwasi_path_readlink(uvwasi=%p, fd=%d, path='%s', path_len=%zu, " - "buf=%p, buf_len=%zu, bufused=%p)\n", - uvwasi, - fd, - path, - path_len, - buf, - buf_len, - bufused); + UVWASI_DEBUG("uvwasi_path_readlink(uvwasi=%p, fd=%d, path='%s', path_len=%d, " + "buf=%p, buf_len=%d, bufused=%p)\n", + uvwasi, + fd, + path, + path_len, + buf, + 
buf_len, + bufused); if (uvwasi == NULL || path == NULL || buf == NULL || bufused == NULL) return UVWASI_EINVAL; @@ -1951,12 +2078,12 @@ uvwasi_errno_t uvwasi_path_remove_directory(uvwasi_t* uvwasi, uvwasi_errno_t err; int r; - DEBUG("uvwasi_path_remove_directory(uvwasi=%p, fd=%d, path='%s', " - "path_len=%zu)\n", - uvwasi, - fd, - path, - path_len); + UVWASI_DEBUG("uvwasi_path_remove_directory(uvwasi=%p, fd=%d, path='%s', " + "path_len=%d)\n", + uvwasi, + fd, + path, + path_len); if (uvwasi == NULL || path == NULL) return UVWASI_EINVAL; @@ -2002,15 +2129,15 @@ uvwasi_errno_t uvwasi_path_rename(uvwasi_t* uvwasi, uv_fs_t req; int r; - DEBUG("uvwasi_path_rename(uvwasi=%p, old_fd=%d, old_path='%s', " - "old_path_len=%zu, new_fd=%d, new_path='%s', new_path_len=%zu)\n", - uvwasi, - old_fd, - old_path, - old_path_len, - new_fd, - new_path, - new_path_len); + UVWASI_DEBUG("uvwasi_path_rename(uvwasi=%p, old_fd=%d, old_path='%s', " + "old_path_len=%d, new_fd=%d, new_path='%s', new_path_len=%d)\n", + uvwasi, + old_fd, + old_path, + old_path_len, + new_fd, + new_path, + new_path_len); if (uvwasi == NULL || old_path == NULL || new_path == NULL) return UVWASI_EINVAL; @@ -2102,14 +2229,14 @@ uvwasi_errno_t uvwasi_path_symlink(uvwasi_t* uvwasi, uv_fs_t req; int r; - DEBUG("uvwasi_path_symlink(uvwasi=%p, old_path='%s', old_path_len=%zu, " - "fd=%d, new_path='%s', new_path_len=%zu)\n", - uvwasi, - old_path, - old_path_len, - fd, - new_path, - new_path_len); + UVWASI_DEBUG("uvwasi_path_symlink(uvwasi=%p, old_path='%s', old_path_len=%d, " + "fd=%d, new_path='%s', new_path_len=%d)\n", + uvwasi, + old_path, + old_path_len, + fd, + new_path, + new_path_len); if (uvwasi == NULL || old_path == NULL || new_path == NULL) return UVWASI_EINVAL; @@ -2155,11 +2282,12 @@ uvwasi_errno_t uvwasi_path_unlink_file(uvwasi_t* uvwasi, uvwasi_errno_t err; int r; - DEBUG("uvwasi_path_unlink_file(uvwasi=%p, fd=%d, path='%s', path_len=%zu)\n", - uvwasi, - fd, - path, - path_len); + UVWASI_DEBUG("uvwasi_path_unlink_file(uvwasi=%p, fd=%d, path='%s', " + "path_len=%d)\n", + uvwasi, + fd, + path, + path_len); if (uvwasi == NULL || path == NULL) return UVWASI_EINVAL; @@ -2207,13 +2335,13 @@ uvwasi_errno_t uvwasi_poll_oneoff(uvwasi_t* uvwasi, int has_timeout; uvwasi_size_t i; - DEBUG("uvwasi_poll_oneoff(uvwasi=%p, in=%p, out=%p, nsubscriptions=%zu, " - "nevents=%p)\n", - uvwasi, - in, - out, - nsubscriptions, - nevents); + UVWASI_DEBUG("uvwasi_poll_oneoff(uvwasi=%p, in=%p, out=%p, " + "nsubscriptions=%d, nevents=%p)\n", + uvwasi, + in, + out, + nsubscriptions, + nevents); if (uvwasi == NULL || in == NULL || out == NULL || nsubscriptions == 0 || nevents == NULL) { @@ -2313,7 +2441,7 @@ uvwasi_errno_t uvwasi_poll_oneoff(uvwasi_t* uvwasi, uvwasi_errno_t uvwasi_proc_exit(uvwasi_t* uvwasi, uvwasi_exitcode_t rval) { - DEBUG("uvwasi_proc_exit(uvwasi=%p, rval=%d)\n", uvwasi, rval); + UVWASI_DEBUG("uvwasi_proc_exit(uvwasi=%p, rval=%d)\n", uvwasi, rval); exit(rval); return UVWASI_ESUCCESS; /* This doesn't happen. 
*/ } @@ -2322,7 +2450,7 @@ uvwasi_errno_t uvwasi_proc_exit(uvwasi_t* uvwasi, uvwasi_exitcode_t rval) { uvwasi_errno_t uvwasi_proc_raise(uvwasi_t* uvwasi, uvwasi_signal_t sig) { int r; - DEBUG("uvwasi_proc_raise(uvwasi=%p, sig=%d)\n", uvwasi, sig); + UVWASI_DEBUG("uvwasi_proc_raise(uvwasi=%p, sig=%d)\n", uvwasi, sig); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -2344,10 +2472,10 @@ uvwasi_errno_t uvwasi_random_get(uvwasi_t* uvwasi, uvwasi_size_t buf_len) { int r; - DEBUG("uvwasi_random_get(uvwasi=%p, buf=%p, buf_len=%zu)\n", - uvwasi, - buf, - buf_len); + UVWASI_DEBUG("uvwasi_random_get(uvwasi=%p, buf=%p, buf_len=%d)\n", + uvwasi, + buf, + buf_len); if (uvwasi == NULL || buf == NULL) return UVWASI_EINVAL; @@ -2361,7 +2489,7 @@ uvwasi_errno_t uvwasi_random_get(uvwasi_t* uvwasi, uvwasi_errno_t uvwasi_sched_yield(uvwasi_t* uvwasi) { - DEBUG("uvwasi_sched_yield(uvwasi=%p)\n", uvwasi); + UVWASI_DEBUG("uvwasi_sched_yield(uvwasi=%p)\n", uvwasi); if (uvwasi == NULL) return UVWASI_EINVAL; @@ -2386,7 +2514,7 @@ uvwasi_errno_t uvwasi_sock_recv(uvwasi_t* uvwasi, uvwasi_roflags_t* ro_flags) { /* TODO(cjihrig): Waiting to implement, pending https://github.com/WebAssembly/WASI/issues/4 */ - DEBUG("uvwasi_sock_recv(uvwasi=%p, unimplemented)\n", uvwasi); + UVWASI_DEBUG("uvwasi_sock_recv(uvwasi=%p, unimplemented)\n", uvwasi); return UVWASI_ENOTSUP; } @@ -2399,7 +2527,7 @@ uvwasi_errno_t uvwasi_sock_send(uvwasi_t* uvwasi, uvwasi_size_t* so_datalen) { /* TODO(cjihrig): Waiting to implement, pending https://github.com/WebAssembly/WASI/issues/4 */ - DEBUG("uvwasi_sock_send(uvwasi=%p, unimplemented)\n", uvwasi); + UVWASI_DEBUG("uvwasi_sock_send(uvwasi=%p, unimplemented)\n", uvwasi); return UVWASI_ENOTSUP; } @@ -2409,7 +2537,7 @@ uvwasi_errno_t uvwasi_sock_shutdown(uvwasi_t* uvwasi, uvwasi_sdflags_t how) { /* TODO(cjihrig): Waiting to implement, pending https://github.com/WebAssembly/WASI/issues/4 */ - DEBUG("uvwasi_sock_shutdown(uvwasi=%p, unimplemented)\n", uvwasi); + UVWASI_DEBUG("uvwasi_sock_shutdown(uvwasi=%p, unimplemented)\n", uvwasi); return UVWASI_ENOTSUP; } diff --git a/doc/api/addons.md b/doc/api/addons.md index fca5d2a154db4f..e470ee40bc615f 100644 --- a/doc/api/addons.md +++ b/doc/api/addons.md @@ -3,16 +3,16 @@ -Addons are dynamically-linked shared objects written in C++. The -[`require()`][require] function can load Addons as ordinary Node.js modules. +_Addons_ are dynamically-linked shared objects written in C++. The +[`require()`][require] function can load addons as ordinary Node.js modules. Addons provide an interface between JavaScript and C/C++ libraries. -There are three options for implementing Addons: N-API, nan, or direct +There are three options for implementing addons: N-API, nan, or direct use of internal V8, libuv and Node.js libraries. Unless there is a need for direct access to functionality which is not exposed by N-API, use N-API. -Refer to [C/C++ Addons with N-API](n-api.html) for more information on N-API. +Refer to [C/C++ addons with N-API](n-api.html) for more information on N-API. -When not using N-API, implementing Addons is complicated, +When not using N-API, implementing addons is complicated, involving knowledge of several components and APIs: * V8: the C++ library Node.js uses to provide the @@ -27,27 +27,27 @@ involving knowledge of several components and APIs: access across all major operating systems to many common system tasks, such as interacting with the filesystem, sockets, timers, and system events. 
libuv also provides a pthreads-like threading abstraction that may be used to - power more sophisticated asynchronous Addons that need to move beyond the + power more sophisticated asynchronous addons that need to move beyond the standard event loop. Addon authors are encouraged to think about how to avoid blocking the event loop with I/O or other time-intensive tasks by off-loading work via libuv to non-blocking system operations, worker threads or a custom use of libuv's threads. -* Internal Node.js libraries. Node.js itself exports C++ APIs that Addons can +* Internal Node.js libraries. Node.js itself exports C++ APIs that addons can use, the most important of which is the `node::ObjectWrap` class. * Node.js includes other statically linked libraries including OpenSSL. These other libraries are located in the `deps/` directory in the Node.js source tree. Only the libuv, OpenSSL, V8 and zlib symbols are purposefully - re-exported by Node.js and may be used to various extents by Addons. See + re-exported by Node.js and may be used to various extents by addons. See [Linking to libraries included with Node.js][] for additional information. All of the following examples are available for [download][] and may -be used as the starting-point for an Addon. +be used as the starting-point for an addon. ## Hello world -This "Hello world" example is a simple Addon, written in C++, that is the +This "Hello world" example is a simple addon, written in C++, that is the equivalent of the following JavaScript code: ```js @@ -84,7 +84,7 @@ NODE_MODULE(NODE_GYP_MODULE_NAME, Initialize) } // namespace demo ``` -All Node.js Addons must export an initialization function following +All Node.js addons must export an initialization function following the pattern: ```cpp @@ -232,6 +232,12 @@ NODE_MODULE_INIT(/* exports, module, context */) { ``` #### Worker support + In order to be loaded from multiple Node.js environments, such as a main thread and a Worker thread, an add-on needs to either: @@ -250,9 +256,14 @@ void AddEnvironmentCleanupHook(v8::Isolate* isolate, ``` This function adds a hook that will run before a given Node.js instance shuts -down. If necessary, such hooks can be removed using -`RemoveEnvironmentCleanupHook()` before they are run, which has the same -signature. Callbacks are run in last-in first-out order. +down. If necessary, such hooks can be removed before they are run using +`RemoveEnvironmentCleanupHook()`, which has the same signature. Callbacks are +run in last-in first-out order. + +If necessary, there is an additional pair of `AddEnvironmentCleanupHook()` +and `RemoveEnvironmentCleanupHook()` overloads, where the cleanup hook takes a +callback function. This can be used for shutting down asynchronous resources, +such as any libuv handles registered by the addon. The following `addon.cc` uses `AddEnvironmentCleanupHook`: @@ -315,7 +326,7 @@ Once the source code has been written, it must be compiled into the binary `addon.node` file. To do so, create a file called `binding.gyp` in the top-level of the project describing the build configuration of the module using a JSON-like format. This file is used by [node-gyp][], a tool written -specifically to compile Node.js Addons. +specifically to compile Node.js addons. ```json { @@ -331,7 +342,7 @@ specifically to compile Node.js Addons. A version of the `node-gyp` utility is bundled and distributed with Node.js as part of `npm`. 
This version is not made directly available for developers to use and is intended only to support the ability to use the -`npm install` command to compile and install Addons. Developers who wish to +`npm install` command to compile and install addons. Developers who wish to use `node-gyp` directly can install it using the command `npm install -g node-gyp`. See the `node-gyp` [installation instructions][] for more information, including platform-specific requirements. @@ -344,11 +355,11 @@ will generate either a `Makefile` (on Unix platforms) or a `vcxproj` file Next, invoke the `node-gyp build` command to generate the compiled `addon.node` file. This will be put into the `build/Release/` directory. -When using `npm install` to install a Node.js Addon, npm uses its own bundled +When using `npm install` to install a Node.js addon, npm uses its own bundled version of `node-gyp` to perform this same set of actions, generating a -compiled version of the Addon for the user's platform on demand. +compiled version of the addon for the user's platform on demand. -Once built, the binary Addon can be used from within Node.js by pointing +Once built, the binary addon can be used from within Node.js by pointing [`require()`][require] to the built `addon.node` module: ```js @@ -359,12 +370,12 @@ console.log(addon.hello()); // Prints: 'world' ``` -Because the exact path to the compiled Addon binary can vary depending on how -it is compiled (i.e. sometimes it may be in `./build/Debug/`), Addons can use +Because the exact path to the compiled addon binary can vary depending on how +it is compiled (i.e. sometimes it may be in `./build/Debug/`), addons can use the [bindings][] package to load the compiled module. While the `bindings` package implementation is more sophisticated in how it -locates Addon modules, it is essentially using a `try…catch` pattern similar to: +locates addon modules, it is essentially using a `try…catch` pattern similar to: ```js try { @@ -377,7 +388,7 @@ try { ### Linking to libraries included with Node.js Node.js uses statically linked libraries such as V8, libuv and OpenSSL. All -Addons are required to link to V8 and may link to any of the other dependencies +addons are required to link to V8 and may link to any of the other dependencies as well. Typically, this is as simple as including the appropriate `#include <...>` statements (e.g. `#include `) and `node-gyp` will locate the appropriate headers automatically. However, there are a few caveats to be @@ -385,23 +396,23 @@ aware of: * When `node-gyp` runs, it will detect the specific release version of Node.js and download either the full source tarball or just the headers. If the full -source is downloaded, Addons will have complete access to the full set of +source is downloaded, addons will have complete access to the full set of Node.js dependencies. However, if only the Node.js headers are downloaded, then only the symbols exported by Node.js will be available. * `node-gyp` can be run using the `--nodedir` flag pointing at a local Node.js -source image. Using this option, the Addon will have access to the full set of +source image. Using this option, the addon will have access to the full set of dependencies. ### Loading addons using `require()` -The filename extension of the compiled Addon binary is `.node` (as opposed +The filename extension of the compiled addon binary is `.node` (as opposed to `.dll` or `.so`). 
The [`require()`][require] function is written to look for files with the `.node` file extension and initialize those as dynamically-linked libraries. When calling [`require()`][require], the `.node` extension can usually be -omitted and Node.js will still find and initialize the Addon. One caveat, +omitted and Node.js will still find and initialize the addon. One caveat, however, is that Node.js will first attempt to locate and load modules or JavaScript files that happen to share the same base name. For instance, if there is a file `addon.js` in the same directory as the binary `addon.node`, @@ -411,15 +422,15 @@ and load it instead. ## Native abstractions for Node.js Each of the examples illustrated in this document make direct use of the -Node.js and V8 APIs for implementing Addons. The V8 API can, and has, changed +Node.js and V8 APIs for implementing addons. The V8 API can, and has, changed dramatically from one V8 release to the next (and one major Node.js release to -the next). With each change, Addons may need to be updated and recompiled in +the next). With each change, addons may need to be updated and recompiled in order to continue functioning. The Node.js release schedule is designed to minimize the frequency and impact of such changes but there is little that Node.js can do to ensure stability of the V8 APIs. The [Native Abstractions for Node.js][] (or `nan`) provide a set of tools that -Addon developers are recommended to use to keep compatibility between past and +addon developers are recommended to use to keep compatibility between past and future releases of V8 and Node.js. See the `nan` [examples][] for an illustration of how it can be used. @@ -427,10 +438,10 @@ illustration of how it can be used. > Stability: 2 - Stable -N-API is an API for building native Addons. It is independent from +N-API is an API for building native addons. It is independent from the underlying JavaScript runtime (e.g. V8) and is maintained as part of Node.js itself. This API will be Application Binary Interface (ABI) stable -across versions of Node.js. It is intended to insulate Addons from +across versions of Node.js. It is intended to insulate addons from changes in the underlying JavaScript engine and allow modules compiled for one version to run on later versions of Node.js without recompilation. Addons are built/packaged with the same approach/tools @@ -479,11 +490,11 @@ NAPI_MODULE(NODE_GYP_MODULE_NAME, init) ``` The functions available and how to use them are documented in -[C/C++ Addons with N-API](n-api.html). +[C/C++ addons with N-API](n-api.html). ## Addon examples -Following are some example Addons intended to help developers get started. The +Following are some example addons intended to help developers get started. The examples make use of the V8 APIs. 
Refer to the online [V8 reference][v8-docs] for help with the various V8 calls, and V8's [Embedder's Guide][] for an explanation of several concepts used such as handles, scopes, function @@ -509,7 +520,7 @@ filename to the `sources` array: "sources": ["addon.cc", "myexample.cc"] ``` -Once the `binding.gyp` file is ready, the example Addons can be configured and +Once the `binding.gyp` file is ready, the example addons can be configured and built using `node-gyp`: ```console @@ -583,7 +594,7 @@ NODE_MODULE(NODE_GYP_MODULE_NAME, Init) } // namespace demo ``` -Once compiled, the example Addon can be required and used from within Node.js: +Once compiled, the example addon can be required and used from within Node.js: ```js // test.js @@ -594,7 +605,7 @@ console.log('This should be eight:', addon.add(3, 5)); ### Callbacks -It is common practice within Addons to pass JavaScript functions to a C++ +It is common practice within addons to pass JavaScript functions to a C++ function and execute them from there. The following example illustrates how to invoke such callbacks: @@ -635,7 +646,7 @@ NODE_MODULE(NODE_GYP_MODULE_NAME, Init) ``` This example uses a two-argument form of `Init()` that receives the full -`module` object as the second argument. This allows the Addon to completely +`module` object as the second argument. This allows the addon to completely overwrite `exports` with a single function instead of adding the function as a property of `exports`. diff --git a/doc/api/async_hooks.md b/doc/api/async_hooks.md index 0471bc5ccef5ad..74abab4fa789be 100644 --- a/doc/api/async_hooks.md +++ b/doc/api/async_hooks.md @@ -727,6 +727,32 @@ class DBQuery extends AsyncResource { } ``` +#### Static method: `AsyncResource.bind(fn[, type])` + + +* `fn` {Function} The function to bind to the current execution context. +* `type` {string} An optional name to associate with the underlying + `AsyncResource`. + +Binds the given function to the current execution context. + +The returned function will have an `asyncResource` property referencing +the `AsyncResource` to which the function is bound. + +#### `asyncResource.bind(fn)` + + +* `fn` {Function} The function to bind to the current `AsyncResource`. + +Binds the given function to execute to this `AsyncResource`'s scope. + +The returned function will have an `asyncResource` property referencing +the `AsyncResource` to which the function is bound. + #### `asyncResource.runInAsyncScope(fn[, thisArg, ...args])` @@ -446,7 +446,7 @@ socket.on('readable', () => { A `TypeError` will be thrown if `size` is not a number. -### Class Method: `Buffer.byteLength(string[, encoding])` +### Static method: `Buffer.byteLength(string[, encoding])` @@ -581,7 +581,7 @@ appropriate for `Buffer.from()` variants. `Buffer.from(array)` and [`Buffer.from(string)`][] may also use the internal `Buffer` pool like [`Buffer.allocUnsafe()`][] does. -### Class Method: `Buffer.from(arrayBuffer[, byteOffset[, length]])` +### Static method: `Buffer.from(arrayBuffer[, byteOffset[, length]])` @@ -632,7 +632,7 @@ A `TypeError` will be thrown if `arrayBuffer` is not an [`ArrayBuffer`][] or a [`SharedArrayBuffer`][] or another type appropriate for `Buffer.from()` variants. -### Class Method: `Buffer.from(buffer)` +### Static method: `Buffer.from(buffer)` @@ -657,7 +657,7 @@ console.log(buf2.toString()); A `TypeError` will be thrown if `buffer` is not a `Buffer` or another type appropriate for `Buffer.from()` variants. 
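As an aside on the `AsyncResource.bind()` and `asyncResource.bind()` additions documented in the async_hooks changes above, the following is a minimal sketch of the new utility; the callback name and the timeout are made up for illustration:

```js
const { AsyncResource, executionAsyncId } = require('async_hooks');

// A callback whose async execution context should be preserved.
function onSettled() {
  console.log('called with execution async ID', executionAsyncId());
}

// Static form: bind to whatever execution context is current right now.
const bound = AsyncResource.bind(onSettled, 'BoundExample');

// The backing resource is exposed on the returned function.
console.log(bound.asyncResource instanceof AsyncResource); // Prints: true

setTimeout(bound, 10);
```

Calling the returned function runs the original one through `runInAsyncScope()` of the backing resource, which matches the implementation added to `lib/async_hooks.js` later in this diff.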
-### Class Method: `Buffer.from(object[, offsetOrEncoding[, length]])` +### Static method: `Buffer.from(object[, offsetOrEncoding[, length]])` @@ -691,7 +691,7 @@ const buf = Buffer.from(new Foo(), 'utf8'); A `TypeError` will be thrown if `object` does not have the mentioned methods or is not of another type appropriate for `Buffer.from()` variants. -### Class Method: `Buffer.from(string[, encoding])` +### Static method: `Buffer.from(string[, encoding])` @@ -717,7 +717,7 @@ console.log(buf1.toString('latin1')); A `TypeError` will be thrown if `string` is not a string or another type appropriate for `Buffer.from()` variants. -### Class Method: `Buffer.isBuffer(obj)` +### Static method: `Buffer.isBuffer(obj)` @@ -727,7 +727,7 @@ added: v0.1.101 Returns `true` if `obj` is a `Buffer`, `false` otherwise. -### Class Method: `Buffer.isEncoding(encoding)` +### Static method: `Buffer.isEncoding(encoding)` @@ -3206,13 +3206,13 @@ introducing security vulnerabilities into an application. [RFC 4648, Section 5]: https://tools.ietf.org/html/rfc4648#section-5 [WHATWG Encoding Standard]: https://encoding.spec.whatwg.org/ [`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer -[`Buffer.alloc()`]: #buffer_class_method_buffer_alloc_size_fill_encoding -[`Buffer.allocUnsafe()`]: #buffer_class_method_buffer_allocunsafe_size -[`Buffer.allocUnsafeSlow()`]: #buffer_class_method_buffer_allocunsafeslow_size -[`Buffer.from(array)`]: #buffer_class_method_buffer_from_array -[`Buffer.from(arrayBuf)`]: #buffer_class_method_buffer_from_arraybuffer_byteoffset_length -[`Buffer.from(buffer)`]: #buffer_class_method_buffer_from_buffer -[`Buffer.from(string)`]: #buffer_class_method_buffer_from_string_encoding +[`Buffer.alloc()`]: #buffer_static_method_buffer_alloc_size_fill_encoding +[`Buffer.allocUnsafe()`]: #buffer_static_method_buffer_allocunsafe_size +[`Buffer.allocUnsafeSlow()`]: #buffer_static_method_buffer_allocunsafeslow_size +[`Buffer.from(array)`]: #buffer_static_method_buffer_from_array +[`Buffer.from(arrayBuf)`]: #buffer_static_method_buffer_from_arraybuffer_byteoffset_length +[`Buffer.from(buffer)`]: #buffer_static_method_buffer_from_buffer +[`Buffer.from(string)`]: #buffer_static_method_buffer_from_string_encoding [`Buffer.poolSize`]: #buffer_class_property_buffer_poolsize [`DataView`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView [`ERR_INVALID_BUFFER_SIZE`]: errors.html#ERR_INVALID_BUFFER_SIZE diff --git a/doc/api/cli.md b/doc/api/cli.md index ac7d46a6a703c0..92e6568d7a9360 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -239,16 +239,6 @@ the ability to import a directory that has an index file. Please see [customizing ESM specifier resolution][] for example usage. -### `--experimental-top-level-await` - - -Enable experimental top-level `await` keyword support, available only in ES -module scripts. - -(See also `--experimental-repl-await`.) - ### `--experimental-vm-modules` @@ -3184,6 +3184,11 @@ the `crypto`, `tls`, and `https` modules and are generally specific to OpenSSL. https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html for detail. + + SSL_OP_ALLOW_NO_DHE_KEX + Instructs OpenSSL to allow a non-[EC]DHE-based key exchange mode + for TLS v1.3 + SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION Allows legacy insecure renegotiation between OpenSSL and unpatched @@ -3256,10 +3261,18 @@ the `crypto`, `tls`, and `https` modules and are generally specific to OpenSSL. 
SSL_OP_NO_COMPRESSION Instructs OpenSSL to disable support for SSL/TLS compression. + + SSL_OP_NO_ENCRYPT_THEN_MAC + Instructs OpenSSL to disable encrypt-then-MAC. + SSL_OP_NO_QUERY_MTU + + SSL_OP_NO_RENEGOTIATION + Instructs OpenSSL to disable renegotiation. + SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION Instructs OpenSSL to always start a new session when performing @@ -3288,6 +3301,10 @@ the `crypto`, `tls`, and `https` modules and are generally specific to OpenSSL. SSL_OP_NO_TLSv1_2 Instructs OpenSSL to turn off TLS v1.2 + + + SSL_OP_NO_TLSv1_3 + Instructs OpenSSL to turn off TLS v1.3 SSL_OP_PKCS1_CHECK_1 @@ -3296,6 +3313,14 @@ the `crypto`, `tls`, and `https` modules and are generally specific to OpenSSL. SSL_OP_PKCS1_CHECK_2 + + SSL_OP_PRIORITIZE_CHACHA + Instructs OpenSSL server to prioritize ChaCha20Poly1305 + when client does. + This option has no effect if + SSL_OP_CIPHER_SERVER_PREFERENCE + is not enabled. + SSL_OP_SINGLE_DH_USE Instructs OpenSSL to always create a new key when using diff --git a/doc/api/deprecations.md b/doc/api/deprecations.md index 0c68842fe68299..c7583e3a9e97ba 100644 --- a/doc/api/deprecations.md +++ b/doc/api/deprecations.md @@ -2755,10 +2755,10 @@ Type: Documentation-only [`--pending-deprecation`]: cli.html#cli_pending_deprecation [`--throw-deprecation`]: cli.html#cli_throw_deprecation -[`Buffer.allocUnsafeSlow(size)`]: buffer.html#buffer_class_method_buffer_allocunsafeslow_size -[`Buffer.from(array)`]: buffer.html#buffer_class_method_buffer_from_array -[`Buffer.from(buffer)`]: buffer.html#buffer_class_method_buffer_from_buffer -[`Buffer.isBuffer()`]: buffer.html#buffer_class_method_buffer_isbuffer_obj +[`Buffer.allocUnsafeSlow(size)`]: buffer.html#buffer_static_method_buffer_allocunsafeslow_size +[`Buffer.from(array)`]: buffer.html#buffer_static_method_buffer_from_array +[`Buffer.from(buffer)`]: buffer.html#buffer_static_method_buffer_from_buffer +[`Buffer.isBuffer()`]: buffer.html#buffer_static_method_buffer_isbuffer_obj [`Cipher`]: crypto.html#crypto_class_cipher [`Decipher`]: crypto.html#crypto_class_decipher [`EventEmitter.listenerCount(emitter, eventName)`]: events.html#events_eventemitter_listenercount_emitter_eventname @@ -2870,8 +2870,8 @@ Type: Documentation-only [NIST SP 800-38D]: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38d.pdf [RFC 6066]: https://tools.ietf.org/html/rfc6066#section-3 [WHATWG URL API]: url.html#url_the_whatwg_url_api -[alloc]: buffer.html#buffer_class_method_buffer_alloc_size_fill_encoding -[alloc_unsafe_size]: buffer.html#buffer_class_method_buffer_allocunsafe_size -[from_arraybuffer]: buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length -[from_string_encoding]: buffer.html#buffer_class_method_buffer_from_string_encoding +[alloc]: buffer.html#buffer_static_method_buffer_alloc_size_fill_encoding +[alloc_unsafe_size]: buffer.html#buffer_static_method_buffer_allocunsafe_size +[from_arraybuffer]: buffer.html#buffer_static_method_buffer_from_arraybuffer_byteoffset_length +[from_string_encoding]: buffer.html#buffer_static_method_buffer_from_string_encoding [legacy `urlObject`]: url.html#url_legacy_urlobject diff --git a/doc/api/dgram.md b/doc/api/dgram.md index 3cc2ea78297228..462f71ed74c775 100644 --- a/doc/api/dgram.md +++ b/doc/api/dgram.md @@ -802,4 +802,4 @@ and `udp6` sockets). 
The bound address and port can be retrieved using [`socket.bind()`]: #dgram_socket_bind_port_address_callback [IPv6 Zone Indices]: https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses [RFC 4007]: https://tools.ietf.org/html/rfc4007 -[byte length]: buffer.html#buffer_class_method_buffer_bytelength_string_encoding +[byte length]: buffer.html#buffer_static_method_buffer_bytelength_string_encoding diff --git a/doc/api/esm.md b/doc/api/esm.md index c9bb7473a8e689..551eef604eac0e 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -1146,9 +1146,8 @@ would provide the exports interface for the instantiation of `module.wasm`. ## Experimental top-level `await` -When the `--experimental-top-level-await` flag is provided, `await` may be used -in the top level (outside of async functions) within modules. This implements -the [ECMAScript Top-Level `await` proposal][]. +The `await` keyword may be used in the top level (outside of async functions) +within modules as per the [ECMAScript Top-Level `await` proposal][]. Assuming an `a.mjs` with @@ -1166,8 +1165,7 @@ console.log(five); // Logs `5` ``` ```bash -node b.mjs # fails -node --experimental-top-level-await b.mjs # works +node b.mjs # works ``` ## Experimental loaders @@ -1184,11 +1182,19 @@ CommonJS modules loaded. ### Hooks -#### resolve hook +#### `resolve(specifier, context, defaultResolve)` > Note: The loaders API is being redesigned. This hook may disappear or its > signature may change. Do not rely on the API described below. +* `specifier` {string} +* `context` {Object} + * `conditions` {string[]} + * `parentURL` {string} +* `defaultResolve` {Function} +* Returns: {Object} + * `url` {string} + The `resolve` hook returns the resolved file URL for a given module specifier and parent URL. The module specifier is the string in an `import` statement or `import()` expression, and the parent URL is the URL of the module that imported @@ -1209,11 +1215,11 @@ Node.js module specifier resolution behavior_ when calling `defaultResolve`, the /** * @param {string} specifier * @param {{ + * conditions: !Array, * parentURL: !(string | undefined), - * conditions: !(Array), * }} context * @param {Function} defaultResolve - * @returns {!(Promise<{ url: string }>)} + * @returns {Promise<{ url: string }>} */ export async function resolve(specifier, context, defaultResolve) { const { parentURL = null } = context; @@ -1239,29 +1245,34 @@ export async function resolve(specifier, context, defaultResolve) { } ``` -#### getFormat hook +#### `getFormat(url, context, defaultGetFormat)` > Note: The loaders API is being redesigned. This hook may disappear or its > signature may change. Do not rely on the API described below. +* `url` {string} +* `context` {Object} +* `defaultGetFormat` {Function} +* Returns: {Object} + * `format` {string} + The `getFormat` hook provides a way to define a custom method of determining how a URL should be interpreted. The `format` returned also affects what the acceptable forms of source values are for a module when parsing. 
This can be one of the following: -| `format` | Description | Acceptable Types For `source` Returned by `getSource` or `transformSource` | -| --- | --- | --- | -| `'builtin'` | Load a Node.js builtin module | Not applicable | -| `'commonjs'` | Load a Node.js CommonJS module | Not applicable | -| `'json'` | Load a JSON file | { [ArrayBuffer][], [string][], [TypedArray][] } | -| `'module'` | Load an ES module | { [ArrayBuffer][], [string][], [TypedArray][] } | -| `'wasm'` | Load a WebAssembly module | { [ArrayBuffer][], [string][], [TypedArray][] } | +| `format` | Description | Acceptable Types For `source` Returned by `getSource` or `transformSource` | +| ------------ | ------------------------------ | -------------------------------------------------------------------------- | +| `'builtin'` | Load a Node.js builtin module | Not applicable | +| `'commonjs'` | Load a Node.js CommonJS module | Not applicable | +| `'json'` | Load a JSON file | { [`string`][], [`ArrayBuffer`][], [`TypedArray`][] } | +| `'module'` | Load an ES module | { [`string`][], [`ArrayBuffer`][], [`TypedArray`][] } | +| `'wasm'` | Load a WebAssembly module | { [`ArrayBuffer`][], [`TypedArray`][] } | Note: These types all correspond to classes defined in ECMAScript. -* The specific [ArrayBuffer][] object is a [SharedArrayBuffer][]. -* The specific [string][] object is not the class constructor, but an instance. -* The specific [TypedArray][] object is a [Uint8Array][]. +* The specific [`ArrayBuffer`][] object is a [`SharedArrayBuffer`][]. +* The specific [`TypedArray`][] object is a [`Uint8Array`][]. Note: If the source value of a text-based format (i.e., `'json'`, `'module'`) is not a string, it will be converted to a string using [`util.TextDecoder`][]. @@ -1287,11 +1298,18 @@ export async function getFormat(url, context, defaultGetFormat) { } ``` -#### getSource hook +#### `getSource(url, context, defaultGetSource)` > Note: The loaders API is being redesigned. This hook may disappear or its > signature may change. Do not rely on the API described below. +* `url` {string} +* `context` {Object} + * `format` {string} +* `defaultGetSource` {Function} +* Returns: {Object} + * `source` {string|SharedArrayBuffer|Uint8Array} + The `getSource` hook provides a way to define a custom method for retrieving the source code of an ES module specifier. This would allow a loader to potentially avoid reading files from disk. @@ -1301,7 +1319,7 @@ potentially avoid reading files from disk. * @param {string} url * @param {{ format: string }} context * @param {Function} defaultGetSource - * @returns {Promise<{ source: !(SharedArrayBuffer | string | Uint8Array) }>} + * @returns {Promise<{ source: !(string | SharedArrayBuffer | Uint8Array) }>} */ export async function getSource(url, context, defaultGetSource) { const { format } = context; @@ -1317,11 +1335,18 @@ export async function getSource(url, context, defaultGetSource) { } ``` -#### transformSource hook +#### `transformSource(source, context, defaultTransformSource)` > Note: The loaders API is being redesigned. This hook may disappear or its > signature may change. Do not rely on the API described below. +* `source` {string|SharedArrayBuffer|Uint8Array} +* `context` {Object} + * `format` {string} + * `url` {string} +* Returns: {Object} + * `source` {string|SharedArrayBuffer|Uint8Array} + The `transformSource` hook provides a way to modify the source code of a loaded ES module file after the source string has been loaded but before Node.js has done anything with it. 
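For orientation, the loader hooks renamed in these hunks (`resolve`, `getFormat`, `getSource`, `transformSource`) are all exported from a single loader module that is handed to Node.js at startup; a hypothetical invocation, with made-up file names, looks like:

```bash
node --experimental-loader ./my-loader.mjs main.mjs
```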
@@ -1332,13 +1357,13 @@ unknown-to-Node.js file extensions. See the [transpiler loader example][] below. ```js /** - * @param {!(SharedArrayBuffer | string | Uint8Array)} source + * @param {!(string | SharedArrayBuffer | Uint8Array)} source * @param {{ - * url: string, * format: string, + * url: string, * }} context * @param {Function} defaultTransformSource - * @returns {Promise<{ source: !(SharedArrayBuffer | string | Uint8Array) }>} + * @returns {Promise<{ source: !(string | SharedArrayBuffer | Uint8Array) }>} */ export async function transformSource(source, context, defaultTransformSource) { const { url, format } = context; @@ -1354,11 +1379,13 @@ export async function transformSource(source, context, defaultTransformSource) { } ``` -#### getGlobalPreloadCode hook +#### `getGlobalPreloadCode()` > Note: The loaders API is being redesigned. This hook may disappear or its > signature may change. Do not rely on the API described below. +* Returns: {string} + Sometimes it can be necessary to run some code inside of the same global scope that the application will run in. This hook allows to return a string that will be ran as sloppy-mode script on startup. @@ -1909,12 +1936,12 @@ success! [`import`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import [`module.createRequire()`]: modules.html#modules_module_createrequire_filename [`module.syncBuiltinESMExports()`]: modules.html#modules_module_syncbuiltinesmexports -[`transformSource` hook]: #esm_code_transformsource_code_hook -[ArrayBuffer]: https://www.ecma-international.org/ecma-262/6.0/#sec-arraybuffer-constructor -[SharedArrayBuffer]: https://tc39.es/ecma262/#sec-sharedarraybuffer-constructor -[string]: https://www.ecma-international.org/ecma-262/6.0/#sec-string-constructor -[TypedArray]: https://www.ecma-international.org/ecma-262/6.0/#sec-typedarray-objects -[Uint8Array]: https://www.ecma-international.org/ecma-262/6.0/#sec-uint8array +[`transformSource` hook]: #esm_transformsource_source_context_defaulttransformsource +[`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer +[`SharedArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer +[`string`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String +[`TypedArray`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray +[`Uint8Array`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array [`util.TextDecoder`]: util.html#util_class_util_textdecoder [import an ES or CommonJS module for its side effects only]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import#Import_a_module_for_its_side_effects_only [special scheme]: https://url.spec.whatwg.org/#special-scheme diff --git a/doc/api/fs.md b/doc/api/fs.md index c31a7279f376d1..180e89b116528d 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -3996,6 +3996,7 @@ to be notified of filesystem changes. * On SunOS systems (including Solaris and SmartOS), this uses [`event ports`][]. * On Windows systems, this feature depends on [`ReadDirectoryChangesW`][]. * On Aix systems, this feature depends on [`AHAFS`][], which must be enabled. +* On IBM i systems, this feature is not supported. If the underlying functionality is not available for some reason, then `fs.watch()` will not be able to function and may thrown an exception. 
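Given the availability caveats listed above (including the IBM i note added in this hunk), a defensive sketch of calling `fs.watch()`; the directory path is a placeholder:

```js
const fs = require('fs');

let watcher;
try {
  // fs.watch() may throw when the underlying facility (inotify, FSEvents,
  // ReadDirectoryChangesW, event ports, AHAFS) is unavailable on the platform.
  watcher = fs.watch('./some-directory', (eventType, filename) => {
    console.log(`${eventType}: ${filename}`);
  });
} catch (err) {
  console.error('File watching is not available:', err.message);
}
```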
@@ -5854,7 +5855,7 @@ A call to `fs.ftruncate()` or `filehandle.truncate()` can be used to reset the file contents. [`AHAFS`]: https://www.ibm.com/developerworks/aix/library/au-aix_event_infrastructure/ -[`Buffer.byteLength`]: buffer.html#buffer_class_method_buffer_bytelength_string_encoding +[`Buffer.byteLength`]: buffer.html#buffer_static_method_buffer_bytelength_string_encoding [`Buffer`]: buffer.html#buffer_buffer [`FSEvents`]: https://developer.apple.com/documentation/coreservices/file_system_events [`Number.MAX_SAFE_INTEGER`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER diff --git a/doc/api/http.md b/doc/api/http.md index 46067b777484ea..fa60f0c9687cf5 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -2627,7 +2627,7 @@ try { [`'response'`]: #http_event_response [`'upgrade'`]: #http_event_upgrade [`Agent`]: #http_class_http_agent -[`Buffer.byteLength()`]: buffer.html#buffer_class_method_buffer_bytelength_string_encoding +[`Buffer.byteLength()`]: buffer.html#buffer_static_method_buffer_bytelength_string_encoding [`Duplex`]: stream.html#stream_class_stream_duplex [`HPE_HEADER_OVERFLOW`]: errors.html#errors_hpe_header_overflow [`TypeError`]: errors.html#errors_class_typeerror diff --git a/doc/api/http2.md b/doc/api/http2.md index 48aa027992291e..6b7f002cba0a49 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ -1727,6 +1727,20 @@ the request body. When this event is emitted and handled, the [`'request'`][] event will not be emitted. +### Event: `'connection'` + + +* `socket` {stream.Duplex} + +This event is emitted when a new TCP stream is established. `socket` is +typically an object of type [`net.Socket`][]. Usually users will not want to +access this event. + +This event can also be explicitly emitted by users to inject connections +into the HTTP server. In that case, any [`Duplex`][] stream can be passed. + #### Event: `'request'` + +* `socket` {stream.Duplex} + +This event is emitted when a new TCP stream is established, before the TLS +handshake begins. `socket` is typically an object of type [`net.Socket`][]. +Usually users will not want to access this event. + +This event can also be explicitly emitted by users to inject connections +into the HTTP server. In that case, any [`Duplex`][] stream can be passed. + #### Event: `'request'` + +A 128-bit value stored as two unsigned 64-bit integers. It serves as a UUID +with which JavaScript objects can be "tagged" in order to ensure that they are +of a certain type. This is a stronger check than [`napi_instanceof`][], because +the latter can report a false positive if the object's prototype has been +manipulated. Type-tagging is most useful in conjunction with [`napi_wrap`][] +because it ensures that the pointer retrieved from a wrapped object can be +safely cast to the native type corresponding to the type tag that had been +previously applied to the JavaScript object. + +```c +typedef struct { + uint64_t lower; + uint64_t upper; +} napi_type_tag; +``` + ### N-API callback types #### napi_callback_info @@ -1525,10 +1550,12 @@ and will lead the process to abort. The hooks will be called in reverse order, i.e. the most recently added one will be called first. -Removing this hook can be done by using `napi_remove_env_cleanup_hook`. +Removing this hook can be done by using [`napi_remove_env_cleanup_hook`][]. Typically, that happens when the resource for which this hook was added is being torn down anyway. 
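To illustrate the synchronous variant described above, here is a sketch (not taken from the documentation being changed) of registering an environment cleanup hook from a module's init function; the allocated state is hypothetical and `napi_status` checks are omitted for brevity:

```c
#include <stdlib.h>
#include <node_api.h>

// Hypothetical per-environment state released at teardown.
static void cleanup_state(void* arg) {
  free(arg);
}

static napi_value Init(napi_env env, napi_value exports) {
  void* state = malloc(128);
  // Run cleanup_state(state) just before this Node.js environment exits.
  napi_add_env_cleanup_hook(env, cleanup_state, state);
  return exports;
}

NAPI_MODULE(NODE_GYP_MODULE_NAME, Init)
```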
+For asynchronous cleanup, [`napi_add_async_cleanup_hook`][] is available. + #### napi_remove_env_cleanup_hook + +> Stability: 1 - Experimental + +```c +NAPI_EXTERN napi_status napi_add_async_cleanup_hook( + napi_env env, + void (*fun)(void* arg, void(* cb)(void*), void* cbarg), + void* arg, + napi_async_cleanup_hook_handle* remove_handle); +``` + +Registers `fun` as a function to be run with the `arg` parameter once the +current Node.js environment exits. Unlike [`napi_add_env_cleanup_hook`][], +the hook is allowed to be asynchronous in this case, and must invoke the passed +`cb()` function with `cbarg` once all asynchronous activity is finished. + +Otherwise, behavior generally matches that of [`napi_add_env_cleanup_hook`][]. + +If `remove_handle` is not `NULL`, an opaque value will be stored in it +that must later be passed to [`napi_remove_async_cleanup_hook`][], +regardless of whether the hook has already been invoked. +Typically, that happens when the resource for which this hook was added +is being torn down anyway. + +#### napi_remove_async_cleanup_hook + + +> Stability: 1 - Experimental + +```c +NAPI_EXTERN napi_status napi_remove_async_cleanup_hook( + napi_env env, + napi_async_cleanup_hook_handle remove_handle); +``` + +Unregisters the cleanup hook corresponding to `remove_handle`. This will prevent +the hook from being executed, unless it has already started executing. +This must be called on any `napi_async_cleanup_hook_handle` value retrieved +from [`napi_add_async_cleanup_hook`][]. + ## Module registration N-API modules are registered in a manner similar to other modules except that instead of using the `NODE_MODULE` macro the following @@ -3140,7 +3213,12 @@ Returns `napi_ok` if the API succeeded. This API represents behavior similar to invoking the `typeof` Operator on the object as defined in [Section 12.5.5][] of the ECMAScript Language -Specification. However, it has support for detecting an External value. +Specification. However, there are some differences: + +1. It has support for detecting an External value. +2. It detects `null` as a separate type, while ECMAScript `typeof` would detect + `object`. + If `value` has a type that is invalid, an error is returned. ### napi_instanceof @@ -4276,6 +4354,143 @@ if (is_instance) { The reference must be freed once it is no longer needed. +There are occasions where `napi_instanceof()` is insufficient for ensuring that +a JavaScript object is a wrapper for a certain native type. This is the case +especially when wrapped JavaScript objects are passed back into the addon via +static methods rather than as the `this` value of prototype methods. In such +cases there is a chance that they may be unwrapped incorrectly. + +```js +const myAddon = require('./build/Release/my_addon.node'); + +// `openDatabase()` returns a JavaScript object that wraps a native database +// handle. +const dbHandle = myAddon.openDatabase(); + +// `query()` returns a JavaScript object that wraps a native query handle. +const queryHandle = myAddon.query(dbHandle, 'Gimme ALL the things!'); + +// There is an accidental error in the line below. 
The first parameter to +// `myAddon.queryHasRecords()` should be the database handle (`dbHandle`), not +// the query handle (`query`), so the correct condition for the while-loop +// should be +// +// myAddon.queryHasRecords(dbHandle, queryHandle) +// +while (myAddon.queryHasRecords(queryHandle, dbHandle)) { + // retrieve records +} +``` + +In the above example `myAddon.queryHasRecords()` is a method that accepts two +arguments. The first is a database handle and the second is a query handle. +Internally, it unwraps the first argument and casts the resulting pointer to a +native database handle. It then unwraps the second argument and casts the +resulting pointer to a query handle. If the arguments are passed in the wrong +order, the casts will work, however, there is a good chance that the underlying +database operation will fail, or will even cause an invalid memory access. + +To ensure that the pointer retrieved from the first argument is indeed a pointer +to a database handle and, similarly, that the pointer retrieved from the second +argument is indeed a pointer to a query handle, the implementation of +`queryHasRecords()` has to perform a type validation. Retaining the JavaScript +class constructor from which the database handle was instantiated and the +constructor from which the query handle was instantiated in `napi_ref`s can +help, because `napi_instanceof()` can then be used to ensure that the instances +passed into `queryHashRecords()` are indeed of the correct type. + +Unfortunately, `napi_instanceof()` does not protect against prototype +manipulation. For example, the prototype of the database handle instance can be +set to the prototype of the constructor for query handle instances. In this +case, the database handle instance can appear as a query handle instance, and it +will pass the `napi_instanceof()` test for a query handle instance, while still +containing a pointer to a database handle. + +To this end, N-API provides type-tagging capabilities. + +A type tag is a 128-bit integer unique to the addon. N-API provides the +`napi_type_tag` structure for storing a type tag. When such a value is passed +along with a JavaScript object stored in a `napi_value` to +`napi_type_tag_object()`, the JavaScript object will be "marked" with the +type tag. The "mark" is invisible on the JavaScript side. When a JavaScript +object arrives into a native binding, `napi_check_object_type_tag()` can be used +along with the original type tag to determine whether the JavaScript object was +previously "marked" with the type tag. This creates a type-checking capability +of a higher fidelity than `napi_instanceof()` can provide, because such type- +tagging survives prototype manipulation and addon unloading/reloading. + +Continuing the above example, the following skeleton addon implementation +illustrates the use of `napi_type_tag_object()` and +`napi_check_object_type_tag()`. + +```c +// This value is the type tag for a database handle. The command +// +// uuidgen | sed -r -e 's/-//g' -e 's/(.{16})(.*)/0x\1, 0x\2/' +// +// can be used to obtain the two values with which to initialize the structure. +static const napi_type_tag DatabaseHandleTypeTag = { + 0x1edf75a38336451d, 0xa5ed9ce2e4c00c38 +}; + +// This value is the type tag for a query handle. 
+static const napi_type_tag QueryHandleTypeTag = { + 0x9c73317f9fad44a3, 0x93c3920bf3b0ad6a +}; + +static napi_value +openDatabase(napi_env env, napi_callback_info info) { + napi_status status; + napi_value result; + + // Perform the underlying action which results in a database handle. + DatabaseHandle* dbHandle = open_database(); + + // Create a new, empty JS object. + status = napi_create_object(env, &result); + if (status != napi_ok) return NULL; + + // Tag the object to indicate that it holds a pointer to a `DatabaseHandle`. + status = napi_type_tag_object(env, result, &DatabaseHandleTypeTag); + if (status != napi_ok) return NULL; + + // Store the pointer to the `DatabaseHandle` structure inside the JS object. + status = napi_wrap(env, result, dbHandle, NULL, NULL, NULL); + if (status != napi_ok) return NULL; + + return result; +} + +// Later when we receive a JavaScript object purporting to be a database handle +// we can use `napi_check_object_type_tag()` to ensure that it is indeed such a +// handle. + +static napi_value +query(napi_env env, napi_callback_info info) { + napi_status status; + size_t argc = 2; + napi_value argv[2]; + bool is_db_handle; + + status = napi_get_cb_info(env, info, &argc, argv, NULL, NULL); + if (status != napi_ok) return NULL; + + // Check that the object passed as the first parameter has the previously + // applied tag. + status = napi_check_object_type_tag(env, + argv[0], + &DatabaseHandleTypeTag, + &is_db_handle); + if (status != napi_ok) return NULL; + + // Throw a `TypeError` if it doesn't. + if (!is_db_handle) { + // Throw a TypeError. + return NULL; + } +} +``` + ### napi_define_class + +> Stability: 1 - Experimental + +```c +napi_status napi_type_tag_object(napi_env env, + napi_value js_object, + const napi_type_tag* type_tag); +``` + +* `[in] env`: The environment that the API is invoked under. +* `[in] js_object`: The JavaScript object to be marked. +* `[in] type_tag`: The tag with which the object is to be marked. + +Returns `napi_ok` if the API succeeded. + +Associates the value of the `type_tag` pointer with the JavaScript object. +`napi_check_object_type_tag()` can then be used to compare the tag that was +attached to the object with one owned by the addon to ensure that the object +has the right type. + +If the object already has an associated type tag, this API will return +`napi_invalid_arg`. + +### napi_check_object_type_tag + + +> Stability: 1 - Experimental + +```c +napi_status napi_check_object_type_tag(napi_env env, + napi_value js_object, + const napi_type_tag* type_tag, + bool* result); +``` + +* `[in] env`: The environment that the API is invoked under. +* `[in] js_object`: The JavaScript object whose type tag to examine. +* `[in] type_tag`: The tag with which to compare any tag found on the object. +* `[out] result`: Whether the type tag given matched the type tag on the +object. `false` is also returned if no type tag was found on the object. + +Returns `napi_ok` if the API succeeded. + +Compares the pointer given as `type_tag` with any that can be found on +`js_object`. If no tag is found on `js_object` or, if a tag is found but it does +not match `type_tag`, then `result` is set to `false`. If a tag is found and it +matches `type_tag`, then `result` is set to `true`. + ### napi_add_finalizer + +* `socket` {stream.Duplex} + +This event is emitted when a new TCP stream is established, before the TLS +handshake begins. `socket` is typically an object of type [`net.Socket`][]. +Usually users will not want to access this event. 
+ +This event can also be explicitly emitted by users to inject connections +into the TLS server. In that case, any [`Duplex`][] stream can be passed. + ### Event: `'keylog'` diff --git a/doc/api/util.md b/doc/api/util.md index f21f957bba3eeb..b14c5969f0b416 100644 --- a/doc/api/util.md +++ b/doc/api/util.md @@ -2414,7 +2414,7 @@ util.log('Timestamped message.'); [`Array.isArray()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/isArray [`ArrayBuffer.isView()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView [`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer -[`Buffer.isBuffer()`]: buffer.html#buffer_class_method_buffer_isbuffer_obj +[`Buffer.isBuffer()`]: buffer.html#buffer_static_method_buffer_isbuffer_obj [`DataView`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView [`Date`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date [`Error`]: errors.html#errors_class_error diff --git a/doc/api/worker_threads.md b/doc/api/worker_threads.md index 7d1f3509c374c3..9b57d28a3e6dd2 100644 --- a/doc/api/worker_threads.md +++ b/doc/api/worker_threads.md @@ -892,7 +892,7 @@ active handle in the event system. If the worker is already `unref()`ed calling [`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer [`AsyncResource`]: async_hooks.html#async_hooks_class_asyncresource [`Buffer`]: buffer.html -[`Buffer.allocUnsafe()`]: buffer.html#buffer_class_method_buffer_allocunsafe_size +[`Buffer.allocUnsafe()`]: buffer.html#buffer_static_method_buffer_allocunsafe_size [`ERR_MISSING_MESSAGE_PORT_IN_TRANSFER_LIST`]: errors.html#errors_err_missing_message_port_in_transfer_list [`ERR_WORKER_NOT_RUNNING`]: errors.html#ERR_WORKER_NOT_RUNNING [`EventEmitter`]: events.html diff --git a/doc/changelogs/CHANGELOG_V14.md b/doc/changelogs/CHANGELOG_V14.md index 9df37eb9ad3ec2..dc30920df2c631 100644 --- a/doc/changelogs/CHANGELOG_V14.md +++ b/doc/changelogs/CHANGELOG_V14.md @@ -10,6 +10,7 @@ +14.8.0
14.7.0
14.6.0
14.5.0
@@ -38,6 +39,101 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2020-08-11, Version 14.8.0 (Current), @codebytere + +### Notable Changes + +* [[`16aa927216`](https://github.com/nodejs/node/commit/16aa927216)] - **(SEMVER-MINOR)** **async_hooks**: add AsyncResource.bind utility (James M Snell) [#34574](https://github.com/nodejs/node/pull/34574) +* [[`dc49561e8d`](https://github.com/nodejs/node/commit/dc49561e8d)] - **deps**: update to uvwasi 0.0.10 (Colin Ihrig) [#34623](https://github.com/nodejs/node/pull/34623) +* [[`6cd1c41604`](https://github.com/nodejs/node/commit/6cd1c41604)] - **doc**: add Ricky Zhou to collaborators (rickyes) [#34676](https://github.com/nodejs/node/pull/34676) +* [[`f0a41b2530`](https://github.com/nodejs/node/commit/f0a41b2530)] - **doc**: add release key for Ruy Adorno (Ruy Adorno) [#34628](https://github.com/nodejs/node/pull/34628) +* [[`10dd7a0eda`](https://github.com/nodejs/node/commit/10dd7a0eda)] - **doc**: add DerekNonGeneric to collaborators (Derek Lewis) [#34602](https://github.com/nodejs/node/pull/34602) +* [[`62bb2e757f`](https://github.com/nodejs/node/commit/62bb2e757f)] - **(SEMVER-MINOR)** **module**: unflag Top-Level Await (Myles Borins) [#34558](https://github.com/nodejs/node/pull/34558) +* [[`8cc9e5eb52`](https://github.com/nodejs/node/commit/8cc9e5eb52)] - **(SEMVER-MINOR)** **n-api**: support type-tagging objects (Gabriel Schulhof) [#28237](https://github.com/nodejs/node/pull/28237) +* [[`e89ec46ba9`](https://github.com/nodejs/node/commit/e89ec46ba9)] - **(SEMVER-MINOR)** **n-api,src**: provide asynchronous cleanup hooks (Anna Henningsen) [#34572](https://github.com/nodejs/node/pull/34572) + +### Commits + +* [[`650248922b`](https://github.com/nodejs/node/commit/650248922b)] - **async_hooks**: avoid GC tracking of AsyncResource in ALS (Gerhard Stoebich) [#34653](https://github.com/nodejs/node/pull/34653) +* [[`0a51aa8fdb`](https://github.com/nodejs/node/commit/0a51aa8fdb)] - **async_hooks**: avoid unneeded AsyncResource creation (Gerhard Stoebich) [#34616](https://github.com/nodejs/node/pull/34616) +* [[`0af9bee4c3`](https://github.com/nodejs/node/commit/0af9bee4c3)] - **async_hooks**: improve property descriptors in als.bind (Gerhard Stoebich) [#34620](https://github.com/nodejs/node/pull/34620) +* [[`16aa927216`](https://github.com/nodejs/node/commit/16aa927216)] - **(SEMVER-MINOR)** **async_hooks**: add AsyncResource.bind utility (James M Snell) [#34574](https://github.com/nodejs/node/pull/34574) +* [[`e45c68af27`](https://github.com/nodejs/node/commit/e45c68af27)] - **async_hooks**: don't read resource if ALS is disabled (Gerhard Stoebich) [#34617](https://github.com/nodejs/node/pull/34617) +* [[`e9aebc3a8f`](https://github.com/nodejs/node/commit/e9aebc3a8f)] - **async_hooks**: fix id assignment in fast-path promise hook (Andrey Pechkurov) [#34548](https://github.com/nodejs/node/pull/34548) +* [[`5aed83c77f`](https://github.com/nodejs/node/commit/5aed83c77f)] - **async_hooks**: fix resource stack for deep stacks (Anna Henningsen) [#34573](https://github.com/nodejs/node/pull/34573) +* [[`9af62641c6`](https://github.com/nodejs/node/commit/9af62641c6)] - **async_hooks**: execute destroy hooks earlier (Gerhard Stoebich) [#34342](https://github.com/nodejs/node/pull/34342) +* [[`14656e1703`](https://github.com/nodejs/node/commit/14656e1703)] - **async_hooks**: don't reuse resource in HttpAgent when queued (Andrey Pechkurov) [#34439](https://github.com/nodejs/node/pull/34439) +* 
[[`c4457d873f`](https://github.com/nodejs/node/commit/c4457d873f)] - **benchmark**: always throw the same Error instance (Anna Henningsen) [#34523](https://github.com/nodejs/node/pull/34523) +* [[`6a129d0cf5`](https://github.com/nodejs/node/commit/6a129d0cf5)] - **build**: do not run auto-start-ci on forks (Evan Lucas) [#34650](https://github.com/nodejs/node/pull/34650) +* [[`2cd299b217`](https://github.com/nodejs/node/commit/2cd299b217)] - **build**: run CI on release branches (Shelley Vohr) [#34649](https://github.com/nodejs/node/pull/34649) +* [[`9ed9ccc5b3`](https://github.com/nodejs/node/commit/9ed9ccc5b3)] - **build**: enable build for node-v8 push (gengjiawen) [#34634](https://github.com/nodejs/node/pull/34634) +* [[`10f29e7550`](https://github.com/nodejs/node/commit/10f29e7550)] - **build**: increase startCI verbosity and fix job name (Mary Marchini) [#34635](https://github.com/nodejs/node/pull/34635) +* [[`befbaf384e`](https://github.com/nodejs/node/commit/befbaf384e)] - **build**: don't run auto-start-ci on push (Mary Marchini) [#34588](https://github.com/nodejs/node/pull/34588) +* [[`4af5dbd3bf`](https://github.com/nodejs/node/commit/4af5dbd3bf)] - **build**: fix auto-start-ci script path (Mary Marchini) [#34588](https://github.com/nodejs/node/pull/34588) +* [[`70cf3cbdfa`](https://github.com/nodejs/node/commit/70cf3cbdfa)] - **build**: auto start Jenkins CI via PR labels (Mary Marchini) [#34089](https://github.com/nodejs/node/pull/34089) +* [[`70e9eceeee`](https://github.com/nodejs/node/commit/70e9eceeee)] - **build**: toolchain.gypi and node\_gyp.py cleanup (iandrc) [#34268](https://github.com/nodejs/node/pull/34268) +* [[`465968c5f8`](https://github.com/nodejs/node/commit/465968c5f8)] - **console**: document the behavior of console.assert() (iandrc) [#34501](https://github.com/nodejs/node/pull/34501) +* [[`a7b4318df9`](https://github.com/nodejs/node/commit/a7b4318df9)] - **crypto**: add OP flag constants added in OpenSSL v1.1.1 (Mateusz Krawczuk) [#33929](https://github.com/nodejs/node/pull/33929) +* [[`dc49561e8d`](https://github.com/nodejs/node/commit/dc49561e8d)] - **deps**: update to uvwasi 0.0.10 (Colin Ihrig) [#34623](https://github.com/nodejs/node/pull/34623) +* [[`8b1ec43da4`](https://github.com/nodejs/node/commit/8b1ec43da4)] - **doc**: use \_Static method\_ instead of \_Class Method\_ (Rich Trott) [#34659](https://github.com/nodejs/node/pull/34659) +* [[`a1b9d7f42e`](https://github.com/nodejs/node/commit/a1b9d7f42e)] - **doc**: tidy some addons.md text (Rich Trott) [#34654](https://github.com/nodejs/node/pull/34654) +* [[`b78278b922`](https://github.com/nodejs/node/commit/b78278b922)] - **doc**: use \_Class Method\_ in async\_hooks.md (Rich Trott) [#34626](https://github.com/nodejs/node/pull/34626) +* [[`6cd1c41604`](https://github.com/nodejs/node/commit/6cd1c41604)] - **doc**: add Ricky Zhou to collaborators (rickyes) [#34676](https://github.com/nodejs/node/pull/34676) +* [[`d8e0deaa7c`](https://github.com/nodejs/node/commit/d8e0deaa7c)] - **doc**: edit process.title note for brevity and clarity (Rich Trott) [#34627](https://github.com/nodejs/node/pull/34627) +* [[`dd6bf20e8f`](https://github.com/nodejs/node/commit/dd6bf20e8f)] - **doc**: update fs.watch() availability for IBM i (iandrc) [#34611](https://github.com/nodejs/node/pull/34611) +* [[`f260bdd57b`](https://github.com/nodejs/node/commit/f260bdd57b)] - **doc**: fix typo in path.md (aetheryx) [#34550](https://github.com/nodejs/node/pull/34550) +* 
[[`f0a41b2530`](https://github.com/nodejs/node/commit/f0a41b2530)] - **doc**: add release key for Ruy Adorno (Ruy Adorno) [#34628](https://github.com/nodejs/node/pull/34628) +* [[`3f55dcd723`](https://github.com/nodejs/node/commit/3f55dcd723)] - **doc**: clarify process.title inconsistencies (Corey Butler) [#34557](https://github.com/nodejs/node/pull/34557) +* [[`6cd9ea82f6`](https://github.com/nodejs/node/commit/6cd9ea82f6)] - **doc**: document the connection event for HTTP2 & TLS servers (Tim Perry) [#34531](https://github.com/nodejs/node/pull/34531) +* [[`0a9389bb1a`](https://github.com/nodejs/node/commit/0a9389bb1a)] - **doc**: mention null special-case for `napi\_typeof` (Renée Kooi) [#34577](https://github.com/nodejs/node/pull/34577) +* [[`10dd7a0eda`](https://github.com/nodejs/node/commit/10dd7a0eda)] - **doc**: add DerekNonGeneric to collaborators (Derek Lewis) [#34602](https://github.com/nodejs/node/pull/34602) +* [[`d7eaf3a027`](https://github.com/nodejs/node/commit/d7eaf3a027)] - **doc**: revise N-API versions matrix text (Rich Trott) [#34566](https://github.com/nodejs/node/pull/34566) +* [[`e2bea73b03`](https://github.com/nodejs/node/commit/e2bea73b03)] - **doc**: clarify N-API version 1 (Michael Dawson) [#34344](https://github.com/nodejs/node/pull/34344) +* [[`be23e23361`](https://github.com/nodejs/node/commit/be23e23361)] - **doc**: use consistent spelling for "falsy" (Rich Trott) [#34545](https://github.com/nodejs/node/pull/34545) +* [[`f393ae9296`](https://github.com/nodejs/node/commit/f393ae9296)] - **doc**: simplify and clarify console.assert() documentation (Rich Trott) [#34544](https://github.com/nodejs/node/pull/34544) +* [[`b69ff2ff60`](https://github.com/nodejs/node/commit/b69ff2ff60)] - **doc**: use consistent capitalization for addons (Rich Trott) [#34536](https://github.com/nodejs/node/pull/34536) +* [[`212d17fa06`](https://github.com/nodejs/node/commit/212d17fa06)] - **doc**: add mmarchini pronouns (Mary Marchini) [#34586](https://github.com/nodejs/node/pull/34586) +* [[`7a28c3d543`](https://github.com/nodejs/node/commit/7a28c3d543)] - **doc**: update mmarchini contact info (Mary Marchini) [#34586](https://github.com/nodejs/node/pull/34586) +* [[`c8104f3d10`](https://github.com/nodejs/node/commit/c8104f3d10)] - **doc**: update .mailmap for mmarchini (Mary Marchini) [#34586](https://github.com/nodejs/node/pull/34586) +* [[`692a735881`](https://github.com/nodejs/node/commit/692a735881)] - **doc**: use sentence-case for headers in SECURITY.md (Rich Trott) [#34525](https://github.com/nodejs/node/pull/34525) +* [[`44e6c010b4`](https://github.com/nodejs/node/commit/44e6c010b4)] - **esm**: fix hook mistypes and links to types (Derek Lewis) [#34240](https://github.com/nodejs/node/pull/34240) +* [[`7322e58d11`](https://github.com/nodejs/node/commit/7322e58d11)] - **http**: reset headers timeout on headers complete (Robert Nagy) [#34578](https://github.com/nodejs/node/pull/34578) +* [[`36fd3daae6`](https://github.com/nodejs/node/commit/36fd3daae6)] - **http**: provide keep-alive timeout response header (Robert Nagy) [#34561](https://github.com/nodejs/node/pull/34561) +* [[`d0efaf2fe3`](https://github.com/nodejs/node/commit/d0efaf2fe3)] - **lib**: use non-symbols in isURLInstance check (Shelley Vohr) [#34622](https://github.com/nodejs/node/pull/34622) +* [[`335cb0d1d1`](https://github.com/nodejs/node/commit/335cb0d1d1)] - **lib**: absorb `path` error cases (Gireesh Punathil) [#34519](https://github.com/nodejs/node/pull/34519) +* 
[[`521e620533`](https://github.com/nodejs/node/commit/521e620533)] - **meta**: uncomment all codeowners (Mary Marchini) [#34670](https://github.com/nodejs/node/pull/34670) +* [[`650adeca22`](https://github.com/nodejs/node/commit/650adeca22)] - **meta**: enable http2 team for CODEOWNERS (Rich Trott) [#34534](https://github.com/nodejs/node/pull/34534) +* [[`35ef9907aa`](https://github.com/nodejs/node/commit/35ef9907aa)] - **module**: handle Top-Level Await non-fulfills better (Anna Henningsen) [#34640](https://github.com/nodejs/node/pull/34640) +* [[`62bb2e757f`](https://github.com/nodejs/node/commit/62bb2e757f)] - **(SEMVER-MINOR)** **module**: unflag Top-Level Await (Myles Borins) [#34558](https://github.com/nodejs/node/pull/34558) +* [[`fbd411d28a`](https://github.com/nodejs/node/commit/fbd411d28a)] - **n-api**: fix use-after-free with napi\_remove\_async\_cleanup\_hook (Anna Henningsen) [#34662](https://github.com/nodejs/node/pull/34662) +* [[`8cc9e5eb52`](https://github.com/nodejs/node/commit/8cc9e5eb52)] - **(SEMVER-MINOR)** **n-api**: support type-tagging objects (Gabriel Schulhof) [#28237](https://github.com/nodejs/node/pull/28237) +* [[`2703fe498e`](https://github.com/nodejs/node/commit/2703fe498e)] - **n-api**: simplify bigint-from-word creation (Gabriel Schulhof) [#34554](https://github.com/nodejs/node/pull/34554) +* [[`e89ec46ba9`](https://github.com/nodejs/node/commit/e89ec46ba9)] - **(SEMVER-MINOR)** **n-api,src**: provide asynchronous cleanup hooks (Anna Henningsen) [#34572](https://github.com/nodejs/node/pull/34572) +* [[`b1890e0866`](https://github.com/nodejs/node/commit/b1890e0866)] - **net**: don't return the stream object from onStreamRead (Robey Pointer) [#34375](https://github.com/nodejs/node/pull/34375) +* [[`35fdfb44a2`](https://github.com/nodejs/node/commit/35fdfb44a2)] - **policy**: increase tests via permutation matrix (Bradley Meck) [#34404](https://github.com/nodejs/node/pull/34404) +* [[`ddd339ff45`](https://github.com/nodejs/node/commit/ddd339ff45)] - **repl**: use \_Node.js\_ in user-facing REPL text (Rich Trott) [#34644](https://github.com/nodejs/node/pull/34644) +* [[`276e2980e2`](https://github.com/nodejs/node/commit/276e2980e2)] - **repl**: use \_REPL\_ in user-facing text (Rich Trott) [#34643](https://github.com/nodejs/node/pull/34643) +* [[`465c262ac6`](https://github.com/nodejs/node/commit/465c262ac6)] - **repl**: improve static import error message in repl (Myles Borins) [#33588](https://github.com/nodejs/node/pull/33588) +* [[`12cb0fb8a0`](https://github.com/nodejs/node/commit/12cb0fb8a0)] - **repl**: give repl entries unique names (Bradley Meck) [#34372](https://github.com/nodejs/node/pull/34372) +* [[`2dbd15a075`](https://github.com/nodejs/node/commit/2dbd15a075)] - **src**: fix linter failures (Anna Henningsen) [#34582](https://github.com/nodejs/node/pull/34582) +* [[`2761f349ec`](https://github.com/nodejs/node/commit/2761f349ec)] - **src**: spin shutdown loop while immediates are pending (Anna Henningsen) [#34662](https://github.com/nodejs/node/pull/34662) +* [[`39ca48c840`](https://github.com/nodejs/node/commit/39ca48c840)] - **src**: fix `size` underflow in CallbackQueue (Anna Henningsen) [#34662](https://github.com/nodejs/node/pull/34662) +* [[`c1abc8d3e5`](https://github.com/nodejs/node/commit/c1abc8d3e5)] - **src**: fix unused namespace member in node\_util (Andrey Pechkurov) [#34565](https://github.com/nodejs/node/pull/34565) +* [[`e146686972`](https://github.com/nodejs/node/commit/e146686972)] - **test**: fix wrong method call (gengjiawen) 
[#34629](https://github.com/nodejs/node/pull/34629) +* [[`ca89c375f7`](https://github.com/nodejs/node/commit/ca89c375f7)] - **test**: add debugging for callbacks in test-https-foafssl.js (Rich Trott) [#34603](https://github.com/nodejs/node/pull/34603) +* [[`2133b18bee`](https://github.com/nodejs/node/commit/2133b18bee)] - **test**: add debugging for test-https-foafssl.js (Rich Trott) [#34603](https://github.com/nodejs/node/pull/34603) +* [[`b9fb0c63b3`](https://github.com/nodejs/node/commit/b9fb0c63b3)] - **test**: convert most N-API tests from C++ to C (Gabriel Schulhof) [#34615](https://github.com/nodejs/node/pull/34615) +* [[`54a4c6a39c`](https://github.com/nodejs/node/commit/54a4c6a39c)] - **test**: replace flaky pummel regression tests (Anna Henningsen) [#34530](https://github.com/nodejs/node/pull/34530) +* [[`bd55236788`](https://github.com/nodejs/node/commit/bd55236788)] - **test**: change Fixes: to Refs: (Rich Trott) [#34568](https://github.com/nodejs/node/pull/34568) +* [[`a340587cfd`](https://github.com/nodejs/node/commit/a340587cfd)] - **test**: fix flaky http-parser-timeout-reset (Robert Nagy) [#34609](https://github.com/nodejs/node/pull/34609) +* [[`9c442f9786`](https://github.com/nodejs/node/commit/9c442f9786)] - **test**: remove unneeded flag check in test-vm-memleak (Rich Trott) [#34528](https://github.com/nodejs/node/pull/34528) +* [[`05100e1eec`](https://github.com/nodejs/node/commit/05100e1eec)] - **tools**: fix C++ import checker argument expansion (Anna Henningsen) [#34582](https://github.com/nodejs/node/pull/34582) +* [[`bf6c8aaae3`](https://github.com/nodejs/node/commit/bf6c8aaae3)] - **tools**: update ESLint to 7.6.0 (Colin Ihrig) [#34589](https://github.com/nodejs/node/pull/34589) +* [[`0b1616c2f0`](https://github.com/nodejs/node/commit/0b1616c2f0)] - **tools**: add meta.fixable to fixable lint rules (Colin Ihrig) [#34589](https://github.com/nodejs/node/pull/34589) +* [[`f46649bc5b`](https://github.com/nodejs/node/commit/f46649bc5b)] - **util**: print External address from inspect (unknown) [#34398](https://github.com/nodejs/node/pull/34398) +* [[`2fa24c0ccc`](https://github.com/nodejs/node/commit/2fa24c0ccc)] - **wasi**: add \_\_wasi\_fd\_filestat\_set\_times() test (Colin Ihrig) [#34623](https://github.com/nodejs/node/pull/34623) + ## 2020-07-29, Version 14.7.0 (Current), @MylesBorins prepared by @ruyadorno diff --git a/doc/node.1 b/doc/node.1 index d9285cb54a36e4..3da44752bd38eb 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -157,9 +157,6 @@ keyword support in REPL. .It Fl -experimental-specifier-resolution Select extension resolution algorithm for ES Modules; either 'explicit' (default) or 'node' . -.It Fl -experimental-top-level-await -Enable experimental top-level await support in ES modules. -. .It Fl -experimental-vm-modules Enable experimental ES module support in VM module. . diff --git a/lib/_http_agent.js b/lib/_http_agent.js index 2d52ea3143b3cd..c4430e86ec3aca 100644 --- a/lib/_http_agent.js +++ b/lib/_http_agent.js @@ -34,6 +34,7 @@ const EventEmitter = require('events'); let debug = require('internal/util/debuglog').debuglog('http', (fn) => { debug = fn; }); +const { AsyncResource } = require('async_hooks'); const { async_id_symbol } = require('internal/async_hooks').symbols; const { codes: { @@ -47,6 +48,7 @@ const { validateNumber } = require('internal/validators'); const kOnKeylog = Symbol('onkeylog'); const kRequestOptions = Symbol('requestOptions'); +const kRequestAsyncResource = Symbol('requestAsyncResource'); // New Agent code. 
// The largest departure from the previous implementation is that @@ -127,7 +129,17 @@ function Agent(options) { const requests = this.requests[name]; if (requests && requests.length) { const req = requests.shift(); - setRequestSocket(this, req, socket); + const reqAsyncRes = req[kRequestAsyncResource]; + if (reqAsyncRes) { + // Run request within the original async context. + reqAsyncRes.runInAsyncScope(() => { + asyncResetHandle(socket); + setRequestSocket(this, req, socket); + }); + req[kRequestAsyncResource] = null; + } else { + setRequestSocket(this, req, socket); + } if (requests.length === 0) { delete this.requests[name]; } @@ -253,14 +265,7 @@ Agent.prototype.addRequest = function addRequest(req, options, port/* legacy */, const sockLen = freeLen + this.sockets[name].length; if (socket) { - // Guard against an uninitialized or user supplied Socket. - const handle = socket._handle; - if (handle && typeof handle.asyncReset === 'function') { - // Assign the handle a new asyncId and run any destroy()/init() hooks. - handle.asyncReset(new ReusedHandle(handle.getProviderType(), handle)); - socket[async_id_symbol] = handle.getAsyncId(); - } - + asyncResetHandle(socket); this.reuseSocket(socket, req); setRequestSocket(this, req, socket); this.sockets[name].push(socket); @@ -284,6 +289,8 @@ Agent.prototype.addRequest = function addRequest(req, options, port/* legacy */, // Used to create sockets for pending requests from different origin req[kRequestOptions] = options; + // Used to capture the original async context. + req[kRequestAsyncResource] = new AsyncResource('QueuedRequest'); this.requests[name].push(req); } @@ -493,6 +500,16 @@ function setRequestSocket(agent, req, socket) { socket.setTimeout(req.timeout); } +function asyncResetHandle(socket) { + // Guard against an uninitialized or user supplied Socket. + const handle = socket._handle; + if (handle && typeof handle.asyncReset === 'function') { + // Assign the handle a new asyncId and run any destroy()/init() hooks. 
+ handle.asyncReset(new ReusedHandle(handle.getProviderType(), handle)); + socket[async_id_symbol] = handle.getAsyncId(); + } +} + module.exports = { Agent, globalAgent: new Agent() diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index aba05590dc25c3..f1392b6335ccad 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -28,6 +28,7 @@ const { ObjectKeys, ObjectPrototypeHasOwnProperty, ObjectSetPrototypeOf, + MathFloor, Symbol, } = primordials; @@ -118,6 +119,8 @@ function OutgoingMessage() { this._header = null; this[kOutHeaders] = null; + this._keepAliveTimeout = 0; + this._onPendingData = noopPendingOutput; } ObjectSetPrototypeOf(OutgoingMessage.prototype, Stream.prototype); @@ -419,6 +422,10 @@ function _storeHeader(firstLine, headers) { (state.contLen || this.useChunkedEncodingByDefault || this.agent); if (shouldSendKeepAlive) { header += 'Connection: keep-alive\r\n'; + if (this._keepAliveTimeout) { + const timeoutSeconds = MathFloor(this._keepAliveTimeout) / 1000; + header += `Keep-Alive: timeout=${timeoutSeconds}\r\n`; + } } else { this._last = true; header += 'Connection: close\r\n'; diff --git a/lib/_http_server.js b/lib/_http_server.js index c34f8092ecb6ce..fa0c922a61093f 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -743,6 +743,7 @@ function parserOnIncoming(server, socket, state, req, keepAlive) { } const res = new server[kServerResponse](req); + res._keepAliveTimeout = server.keepAliveTimeout; res._onPendingData = updateOutgoingData.bind(undefined, socket, state); res.shouldKeepAlive = keepAlive; diff --git a/lib/async_hooks.js b/lib/async_hooks.js index 9e287405f8af0b..7dd888b61f79f9 100644 --- a/lib/async_hooks.js +++ b/lib/async_hooks.js @@ -2,6 +2,8 @@ const { NumberIsSafeInteger, + ObjectDefineProperties, + ObjectIs, ReflectApply, Symbol, } = primordials; @@ -9,6 +11,7 @@ const { const { ERR_ASYNC_CALLBACK, ERR_ASYNC_TYPE, + ERR_INVALID_ARG_TYPE, ERR_INVALID_ASYNC_ID } = require('internal/errors').codes; const { validateString } = require('internal/validators'); @@ -211,6 +214,32 @@ class AsyncResource { triggerAsyncId() { return this[trigger_async_id_symbol]; } + + bind(fn) { + if (typeof fn !== 'function') + throw new ERR_INVALID_ARG_TYPE('fn', 'Function', fn); + const ret = this.runInAsyncScope.bind(this, fn); + ObjectDefineProperties(ret, { + 'length': { + configurable: true, + enumerable: false, + value: fn.length, + writable: false, + }, + 'asyncResource': { + configurable: true, + enumerable: true, + value: this, + writable: true, + } + }); + return ret; + } + + static bind(fn, type) { + type = type || fn.name; + return (new AsyncResource(type || 'bound-anonymous-fn')).bind(fn); + } } const storageList = []; @@ -224,6 +253,7 @@ const storageHook = createHook({ } }); +const defaultAlsResourceOpts = { requireManualDestroy: true }; class AsyncLocalStorage { constructor() { this.kResourceStore = Symbol('kResourceStore'); @@ -260,8 +290,15 @@ class AsyncLocalStorage { } run(store, callback, ...args) { - const resource = new AsyncResource('AsyncLocalStorage'); - return resource.runInAsyncScope(() => { + // Avoid creation of an AsyncResource if store is already active + if (ObjectIs(store, this.getStore())) { + return callback(...args); + } + const resource = new AsyncResource('AsyncLocalStorage', + defaultAlsResourceOpts); + // Calling emitDestroy before runInAsyncScope avoids a try/finally + // It is ok because emitDestroy only schedules calling the hook + return resource.emitDestroy().runInAsyncScope(() => { this.enterWith(store); 
return callback(...args); }); @@ -280,8 +317,8 @@ class AsyncLocalStorage { } getStore() { - const resource = executionAsyncResource(); if (this.enabled) { + const resource = executionAsyncResource(); return resource[this.kResourceStore]; } } diff --git a/lib/internal/async_hooks.js b/lib/internal/async_hooks.js index f4d4f1da49c4ca..9463d1d3348e67 100644 --- a/lib/internal/async_hooks.js +++ b/lib/internal/async_hooks.js @@ -499,11 +499,11 @@ function hasAsyncIdStack() { // This is the equivalent of the native push_async_ids() call. function pushAsyncContext(asyncId, triggerAsyncId, resource) { const offset = async_hook_fields[kStackLength]; + execution_async_resources[offset] = resource; if (offset * 2 >= async_wrap.async_ids_stack.length) return pushAsyncContext_(asyncId, triggerAsyncId); async_wrap.async_ids_stack[offset * 2] = async_id_fields[kExecutionAsyncId]; async_wrap.async_ids_stack[offset * 2 + 1] = async_id_fields[kTriggerAsyncId]; - execution_async_resources[offset] = resource; async_hook_fields[kStackLength]++; async_id_fields[kExecutionAsyncId] = asyncId; async_id_fields[kTriggerAsyncId] = triggerAsyncId; diff --git a/lib/internal/bootstrap/pre_execution.js b/lib/internal/bootstrap/pre_execution.js index e227e7853f5462..dfbefa955cab8a 100644 --- a/lib/internal/bootstrap/pre_execution.js +++ b/lib/internal/bootstrap/pre_execution.js @@ -92,7 +92,9 @@ function patchProcessObject(expandArgv1) { if (expandArgv1 && process.argv[1] && !process.argv[1].startsWith('-')) { // Expand process.argv[1] into a full path. const path = require('path'); - process.argv[1] = path.resolve(process.argv[1]); + try { + process.argv[1] = path.resolve(process.argv[1]); + } catch {} } // TODO(joyeecheung): most of these should be deprecated and removed, diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index 83a49599469b19..01803211fe0223 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -262,18 +262,13 @@ function readPackage(requestPath) { const existing = packageJsonCache.get(jsonPath); if (existing !== undefined) return existing; - const result = packageJsonReader.read(path.toNamespacedPath(jsonPath)); + const result = packageJsonReader.read(jsonPath); const json = result.containsKeys === false ? '{}' : result.string; if (json === undefined) { packageJsonCache.set(jsonPath, false); return false; } - if (manifest) { - const jsonURL = pathToFileURL(jsonPath); - manifest.assertIntegrity(jsonURL, json); - } - try { const parsed = JSONParse(json); const filtered = { diff --git a/lib/internal/modules/esm/get_source.js b/lib/internal/modules/esm/get_source.js index 7b2a2f42eabed9..5e2cc3e09e7687 100644 --- a/lib/internal/modules/esm/get_source.js +++ b/lib/internal/modules/esm/get_source.js @@ -1,5 +1,10 @@ 'use strict'; +const { getOptionValue } = require('internal/options'); +const manifest = getOptionValue('--experimental-policy') ? 
+ require('internal/process/policy').manifest : + null; + const { Buffer } = require('buffer'); const fs = require('fs'); @@ -15,20 +20,22 @@ const DATA_URL_PATTERN = /^[^/]+\/[^,;]+(?:[^,]*?)(;base64)?,([\s\S]*)$/; async function defaultGetSource(url, { format } = {}, defaultGetSource) { const parsed = new URL(url); + let source; if (parsed.protocol === 'file:') { - return { - source: await readFileAsync(parsed) - }; + source = await readFileAsync(parsed); } else if (parsed.protocol === 'data:') { const match = DATA_URL_PATTERN.exec(parsed.pathname); if (!match) { throw new ERR_INVALID_URL(url); } const [ , base64, body ] = match; - return { - source: Buffer.from(body, base64 ? 'base64' : 'utf8') - }; + source = Buffer.from(body, base64 ? 'base64' : 'utf8'); + } else { + throw new ERR_INVALID_URL_SCHEME(['file', 'data']); + } + if (manifest) { + manifest.assertIntegrity(parsed, source); } - throw new ERR_INVALID_URL_SCHEME(['file', 'data']); + return { source }; } exports.defaultGetSource = defaultGetSource; diff --git a/lib/internal/modules/package_json_reader.js b/lib/internal/modules/package_json_reader.js index 066047b55eb9d8..25edfee027c35b 100644 --- a/lib/internal/modules/package_json_reader.js +++ b/lib/internal/modules/package_json_reader.js @@ -2,21 +2,35 @@ const { SafeMap } = primordials; const { internalModuleReadJSON } = internalBinding('fs'); +const { pathToFileURL } = require('url'); +const { toNamespacedPath } = require('path'); const cache = new SafeMap(); /** * - * @param {string} path + * @param {string} jsonPath */ -function read(path) { - if (cache.has(path)) { - return cache.get(path); +function read(jsonPath) { + if (cache.has(jsonPath)) { + return cache.get(jsonPath); } - const [string, containsKeys] = internalModuleReadJSON(path); + const [string, containsKeys] = internalModuleReadJSON( + toNamespacedPath(jsonPath) + ); const result = { string, containsKeys }; - cache.set(path, result); + const { getOptionValue } = require('internal/options'); + if (string !== undefined) { + const manifest = getOptionValue('--experimental-policy') ? + require('internal/process/policy').manifest : + null; + if (manifest) { + const jsonURL = pathToFileURL(jsonPath); + manifest.assertIntegrity(jsonURL, string); + } + } + cache.set(jsonPath, result); return result; } diff --git a/lib/internal/modules/run_main.js b/lib/internal/modules/run_main.js index 0967ef539ca20e..d7b0ee56a1e8a5 100644 --- a/lib/internal/modules/run_main.js +++ b/lib/internal/modules/run_main.js @@ -40,11 +40,23 @@ function shouldUseESMLoader(mainPath) { function runMainESM(mainPath) { const esmLoader = require('internal/process/esm_loader'); const { pathToFileURL } = require('internal/url'); - esmLoader.loadESM((ESMLoader) => { + handleMainPromise(esmLoader.loadESM((ESMLoader) => { const main = path.isAbsolute(mainPath) ? pathToFileURL(mainPath).href : mainPath; return ESMLoader.import(main); - }); + })); +} + +function handleMainPromise(promise) { + // Handle a Promise from running code that potentially does Top-Level Await. + // In that case, it makes sense to set the exit code to a specific non-zero + // value if the main code never finishes running. 
+ function handler() { + if (process.exitCode === undefined) + process.exitCode = 13; + } + process.on('exit', handler); + return promise.finally(() => process.off('exit', handler)); } // For backwards compatibility, we have to run a bunch of @@ -62,5 +74,6 @@ function executeUserEntryPoint(main = process.argv[1]) { } module.exports = { - executeUserEntryPoint + executeUserEntryPoint, + handleMainPromise, }; diff --git a/lib/internal/process/execution.js b/lib/internal/process/execution.js index 08d6ec8c6ea906..f8d9e043efda82 100644 --- a/lib/internal/process/execution.js +++ b/lib/internal/process/execution.js @@ -2,7 +2,6 @@ const { JSONStringify, - PromiseResolve, } = primordials; const path = require('path'); @@ -43,20 +42,15 @@ function evalModule(source, print) { if (print) { throw new ERR_EVAL_ESM_CANNOT_PRINT(); } - const { log, error } = require('internal/console/global'); - const { decorateErrorStack } = require('internal/util'); - const asyncESM = require('internal/process/esm_loader'); - PromiseResolve(asyncESM.ESMLoader).then(async (loader) => { + const { log } = require('internal/console/global'); + const { loadESM } = require('internal/process/esm_loader'); + const { handleMainPromise } = require('internal/modules/run_main'); + handleMainPromise(loadESM(async (loader) => { const { result } = await loader.eval(source); if (print) { log(result); } - }) - .catch((e) => { - decorateErrorStack(e); - error(e); - process.exit(1); - }); + })); } function evalScript(name, body, breakFirstLine, print) { diff --git a/lib/internal/stream_base_commons.js b/lib/internal/stream_base_commons.js index 55b9c3d4bb03e0..233741285d2c72 100644 --- a/lib/internal/stream_base_commons.js +++ b/lib/internal/stream_base_commons.js @@ -204,7 +204,9 @@ function onStreamRead(arrayBuffer) { } if (nread !== UV_EOF) { - return stream.destroy(errnoException(nread, 'read')); + // #34375 CallJSOnreadMethod expects the return value to be a buffer. + stream.destroy(errnoException(nread, 'read')); + return; } // Defer this until we actually emit end @@ -221,8 +223,11 @@ function onStreamRead(arrayBuffer) { // test-https-truncate test. if (handle.readStop) { const err = handle.readStop(); - if (err) - return stream.destroy(errnoException(err, 'read')); + if (err) { + // #34375 CallJSOnreadMethod expects the return value to be a buffer. + stream.destroy(errnoException(err, 'read')); + return; + } } // Push a null to signal the end of data. 
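The handleMainPromise() addition in lib/internal/modules/run_main.js above is what gives ES module entry points a dedicated exit status when their top-level await never settles: if the event loop drains while the main promise is still pending, the 'exit' handler sets process.exitCode to 13. A minimal sketch of the observable behaviour (the file name and shell session are illustrative, not part of this diff):

    // never-settles.mjs -- top-level await on a promise that never resolves
    await new Promise(() => {});

    // $ node never-settles.mjs
    // $ echo $?
    // 13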
diff --git a/lib/internal/url.js b/lib/internal/url.js index c0b8c17d098708..71b0dd26e2735a 100644 --- a/lib/internal/url.js +++ b/lib/internal/url.js @@ -1391,8 +1391,7 @@ function pathToFileURL(filepath) { } function isURLInstance(fileURLOrPath) { - return fileURLOrPath != null && fileURLOrPath[searchParams] && - fileURLOrPath[searchParams][searchParams]; + return fileURLOrPath != null && fileURLOrPath.href && fileURLOrPath.origin; } function toPathIfFileURL(fileURLOrPath) { diff --git a/lib/internal/util/inspect.js b/lib/internal/util/inspect.js index 495271fa62eecb..e3245010761164 100644 --- a/lib/internal/util/inspect.js +++ b/lib/internal/util/inspect.js @@ -63,6 +63,7 @@ const { kRejected, previewEntries, getConstructorName: internalGetConstructorName, + getExternalValue, propertyFilter: { ALL_PROPERTIES, ONLY_ENUMERABLE @@ -977,8 +978,10 @@ function formatRaw(ctx, value, recurseTimes, typedArray) { } } else { if (keys.length === 0 && protoProps === undefined) { - if (isExternal(value)) - return ctx.stylize('[External]', 'special'); + if (isExternal(value)) { + const address = getExternalValue(value).toString(16); + return ctx.stylize(`[External: ${address}]`, 'special'); + } return `${getCtxStyle(value, constructor, tag)}{}`; } braces[0] = `${getCtxStyle(value, constructor, tag)}{`; diff --git a/lib/internal/worker.js b/lib/internal/worker.js index a0608277158735..b62cc9382f7aea 100644 --- a/lib/internal/worker.js +++ b/lib/internal/worker.js @@ -205,6 +205,9 @@ class Worker extends EventEmitter { cwdCounter: cwdCounter || workerIo.sharedCwdCounter, workerData: options.workerData, publicPort: port2, + manifestURL: getOptionValue('--experimental-policy') ? + require('internal/process/policy').url : + null, manifestSrc: getOptionValue('--experimental-policy') ? require('internal/process/policy').src : null, diff --git a/lib/repl.js b/lib/repl.js index 26376a43c56d66..b13fece0713701 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -19,7 +19,7 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -/* A repl library that you can include in your own code to get a runtime +/* A REPL library that you can include in your own code to get a runtime * interface to your program. * * const repl = require("repl"); @@ -58,6 +58,7 @@ const { PromiseRace, RegExp, Set, + StringPrototypeIncludes, Symbol, WeakSet, } = primordials; @@ -128,6 +129,12 @@ const { } = internalBinding('contextify'); const history = require('internal/repl/history'); +let nextREPLResourceNumber = 1; +// This prevents v8 code cache from getting confused and using a different +// cache from a resource of the same name +function getREPLResourceName() { + return `REPL${nextREPLResourceNumber++}`; +} // Lazy-loaded. let processTopLevelAwait; @@ -578,10 +585,18 @@ function REPLServer(prompt, if (e.name === 'SyntaxError') { // Remove stack trace. 
e.stack = e.stack - .replace(/^repl:\d+\r?\n/, '') + .replace(/^REPL\d+:\d+\r?\n/, '') .replace(/^\s+at\s.*\n?/gm, ''); + const importErrorStr = 'Cannot use import statement outside a ' + + 'module'; + if (StringPrototypeIncludes(e.message, importErrorStr)) { + e.message = 'Cannot use import statement inside the Node.js ' + + 'REPL, alternatively use dynamic import'; + e.stack = e.stack.replace(/SyntaxError:.*\n/, + `SyntaxError: ${e.message}\n`); + } } else if (self.replMode === module.exports.REPL_MODE_STRICT) { - e.stack = e.stack.replace(/(\s+at\s+repl:)(\d+)/, + e.stack = e.stack.replace(/(\s+at\s+REPL\d+:)(\d+)/, (_, pre, line) => pre + (line - 1)); } } @@ -791,7 +806,7 @@ function REPLServer(prompt, const evalCmd = self[kBufferedCommandSymbol] + cmd + '\n'; debug('eval %j', evalCmd); - self.eval(evalCmd, self.context, 'repl', finish); + self.eval(evalCmd, self.context, getREPLResourceName(), finish); function finish(e, ret) { debug('finish', e, ret); @@ -799,7 +814,7 @@ function REPLServer(prompt, if (e && !self[kBufferedCommandSymbol] && cmd.trim().startsWith('npm ')) { self.output.write('npm should be run outside of the ' + - 'node repl, in your normal shell.\n' + + 'Node.js REPL, in your normal shell.\n' + '(Press Control-D to exit.)\n'); self.displayPrompt(); return; @@ -1277,7 +1292,7 @@ function complete(line, callback) { const memberGroups = []; const evalExpr = `try { ${expr} } catch {}`; - this.eval(evalExpr, this.context, 'repl', (e, obj) => { + this.eval(evalExpr, this.context, getREPLResourceName(), (e, obj) => { try { let p; if ((typeof obj === 'object' && obj !== null) || @@ -1506,7 +1521,7 @@ function defineDefaultCommands(repl) { }); repl.defineCommand('exit', { - help: 'Exit the repl', + help: 'Exit the REPL', action: function() { this.close(); } @@ -1528,7 +1543,7 @@ function defineDefaultCommands(repl) { this.output.write(line); } this.output.write('\nPress ^C to abort current expression, ' + - '^D to exit the repl\n'); + '^D to exit the REPL\n'); this.displayPrompt(); } }); diff --git a/src/api/hooks.cc b/src/api/hooks.cc index 037bdda6f41c82..e1536193f95730 100644 --- a/src/api/hooks.cc +++ b/src/api/hooks.cc @@ -10,10 +10,10 @@ using v8::HandleScope; using v8::Integer; using v8::Isolate; using v8::Local; +using v8::NewStringType; using v8::Object; using v8::String; using v8::Value; -using v8::NewStringType; void RunAtExit(Environment* env) { env->RunAtExitCallbacks(); @@ -73,8 +73,35 @@ int EmitExit(Environment* env) { .ToChecked(); } +typedef void (*CleanupHook)(void* arg); +typedef void (*AsyncCleanupHook)(void* arg, void(*)(void*), void*); + +struct AsyncCleanupHookInfo final { + Environment* env; + AsyncCleanupHook fun; + void* arg; + bool started = false; + // Use a self-reference to make sure the storage is kept alive while the + // cleanup hook is registered but not yet finished. + std::shared_ptr self; +}; + +// Opaque type that is basically an alias for `shared_ptr` +// (but not publicly so for easier ABI/API changes). In particular, +// std::shared_ptr does not generally maintain a consistent ABI even on a +// specific platform. +struct ACHHandle final { + std::shared_ptr info; +}; +// This is implemented as an operator on a struct because otherwise you can't +// default-initialize AsyncCleanupHookHandle, because in C++ for a +// std::unique_ptr to be default-initializable the deleter type also needs +// to be default-initializable; in particular, function types don't satisfy +// this. 
+void DeleteACHHandle::operator ()(ACHHandle* handle) const { delete handle; } + void AddEnvironmentCleanupHook(Isolate* isolate, - void (*fun)(void* arg), + CleanupHook fun, void* arg) { Environment* env = Environment::GetCurrent(isolate); CHECK_NOT_NULL(env); @@ -82,13 +109,50 @@ void AddEnvironmentCleanupHook(Isolate* isolate, } void RemoveEnvironmentCleanupHook(Isolate* isolate, - void (*fun)(void* arg), + CleanupHook fun, void* arg) { Environment* env = Environment::GetCurrent(isolate); CHECK_NOT_NULL(env); env->RemoveCleanupHook(fun, arg); } +static void FinishAsyncCleanupHook(void* arg) { + AsyncCleanupHookInfo* info = static_cast(arg); + std::shared_ptr keep_alive = info->self; + + info->env->DecreaseWaitingRequestCounter(); + info->self.reset(); +} + +static void RunAsyncCleanupHook(void* arg) { + AsyncCleanupHookInfo* info = static_cast(arg); + info->env->IncreaseWaitingRequestCounter(); + info->started = true; + info->fun(info->arg, FinishAsyncCleanupHook, info); +} + +AsyncCleanupHookHandle AddEnvironmentCleanupHook( + Isolate* isolate, + AsyncCleanupHook fun, + void* arg) { + Environment* env = Environment::GetCurrent(isolate); + CHECK_NOT_NULL(env); + auto info = std::make_shared(); + info->env = env; + info->fun = fun; + info->arg = arg; + info->self = info; + env->AddCleanupHook(RunAsyncCleanupHook, info.get()); + return AsyncCleanupHookHandle(new ACHHandle { info }); +} + +void RemoveEnvironmentCleanupHook( + AsyncCleanupHookHandle handle) { + if (handle->info->started) return; + handle->info->self.reset(); + handle->info->env->RemoveCleanupHook(RunAsyncCleanupHook, handle->info.get()); +} + async_id AsyncHooksGetExecutionAsyncId(Isolate* isolate) { Environment* env = Environment::GetCurrent(isolate); if (env == nullptr) return -1; diff --git a/src/async_wrap.cc b/src/async_wrap.cc index d206c33562470e..d6511a1dd8b3c3 100644 --- a/src/async_wrap.cc +++ b/src/async_wrap.cc @@ -38,9 +38,12 @@ using v8::Global; using v8::HandleScope; using v8::Integer; using v8::Isolate; +using v8::Just; using v8::Local; +using v8::Maybe; using v8::MaybeLocal; using v8::Name; +using v8::Nothing; using v8::Number; using v8::Object; using v8::ObjectTemplate; @@ -188,6 +191,21 @@ void AsyncWrap::EmitAfter(Environment* env, double async_id) { env->async_hooks_after_function()); } +// TODO(addaleax): Remove once we're on C++17. +constexpr double AsyncWrap::kInvalidAsyncId; + +static Maybe GetAssignedPromiseAsyncId(Environment* env, + Local promise, + Local id_symbol) { + Local maybe_async_id; + if (!promise->Get(env->context(), id_symbol).ToLocal(&maybe_async_id)) { + return Nothing(); + } + return maybe_async_id->IsNumber() + ? 
maybe_async_id->NumberValue(env->context()) + : Just(AsyncWrap::kInvalidAsyncId); +} + class PromiseWrap : public AsyncWrap { public: PromiseWrap(Environment* env, Local object, bool silent) @@ -230,18 +248,17 @@ PromiseWrap* PromiseWrap::New(Environment* env, // Skip for init events if (silent) { - Local maybe_async_id = promise - ->Get(context, env->async_id_symbol()) - .ToLocalChecked(); - - Local maybe_trigger_async_id = promise - ->Get(context, env->trigger_async_id_symbol()) - .ToLocalChecked(); - - if (maybe_async_id->IsNumber() && maybe_trigger_async_id->IsNumber()) { - double async_id = maybe_async_id.As()->Value(); - double trigger_async_id = maybe_trigger_async_id.As()->Value(); - return new PromiseWrap(env, obj, async_id, trigger_async_id); + double async_id; + double trigger_async_id; + if (!GetAssignedPromiseAsyncId(env, promise, env->async_id_symbol()) + .To(&async_id)) return nullptr; + if (!GetAssignedPromiseAsyncId(env, promise, env->trigger_async_id_symbol()) + .To(&trigger_async_id)) return nullptr; + + if (async_id != AsyncWrap::kInvalidAsyncId && + trigger_async_id != AsyncWrap::kInvalidAsyncId) { + return new PromiseWrap( + env, obj, async_id, trigger_async_id); } } @@ -320,46 +337,35 @@ static void FastPromiseHook(PromiseHookType type, Local promise, if (type == PromiseHookType::kBefore && env->async_hooks()->fields()[AsyncHooks::kBefore] == 0) { - Local maybe_async_id; - if (!promise->Get(context, env->async_id_symbol()) - .ToLocal(&maybe_async_id)) { - return; - } - - Local maybe_trigger_async_id; - if (!promise->Get(context, env->trigger_async_id_symbol()) - .ToLocal(&maybe_trigger_async_id)) { - return; - } - - if (maybe_async_id->IsNumber() && maybe_trigger_async_id->IsNumber()) { - double async_id = maybe_async_id.As()->Value(); - double trigger_async_id = maybe_trigger_async_id.As()->Value(); + double async_id; + double trigger_async_id; + if (!GetAssignedPromiseAsyncId(env, promise, env->async_id_symbol()) + .To(&async_id)) return; + if (!GetAssignedPromiseAsyncId(env, promise, env->trigger_async_id_symbol()) + .To(&trigger_async_id)) return; + + if (async_id != AsyncWrap::kInvalidAsyncId && + trigger_async_id != AsyncWrap::kInvalidAsyncId) { env->async_hooks()->push_async_context( async_id, trigger_async_id, promise); + return; } - - return; } if (type == PromiseHookType::kAfter && env->async_hooks()->fields()[AsyncHooks::kAfter] == 0) { - Local maybe_async_id; - if (!promise->Get(context, env->async_id_symbol()) - .ToLocal(&maybe_async_id)) { - return; - } + double async_id; + if (!GetAssignedPromiseAsyncId(env, promise, env->async_id_symbol()) + .To(&async_id)) return; - if (maybe_async_id->IsNumber()) { - double async_id = maybe_async_id.As()->Value(); + if (async_id != AsyncWrap::kInvalidAsyncId) { if (env->execution_async_id() == async_id) { // This condition might not be true if async_hooks was enabled during // the promise callback execution. env->async_hooks()->pop_async_context(async_id); } + return; } - - return; } if (type == PromiseHookType::kResolve && @@ -836,6 +842,18 @@ void AsyncWrap::EmitDestroy(Environment* env, double async_id) { env->SetImmediate(&DestroyAsyncIdsCallback, CallbackFlags::kUnrefed); } + // If the list gets very large empty it faster using a Microtask. + // Microtasks can't be added in GC context therefore we use an + // interrupt to get this Microtask scheduled as fast as possible. 
+ if (env->destroy_async_id_list()->size() == 16384) { + env->RequestInterrupt([](Environment* env) { + env->isolate()->EnqueueMicrotask( + [](void* arg) { + DestroyAsyncIdsCallback(static_cast(arg)); + }, env); + }); + } + env->destroy_async_id_list()->push_back(async_id); } diff --git a/src/callback_queue-inl.h b/src/callback_queue-inl.h index 13561864027316..9e46ae48699320 100644 --- a/src/callback_queue-inl.h +++ b/src/callback_queue-inl.h @@ -22,8 +22,8 @@ CallbackQueue::Shift() { head_ = ret->get_next(); if (!head_) tail_ = nullptr; // The queue is now empty. + size_--; } - size_--; return ret; } diff --git a/src/env.cc b/src/env.cc index b92ea0ac99986c..331712f1c9db71 100644 --- a/src/env.cc +++ b/src/env.cc @@ -593,7 +593,10 @@ void Environment::RunCleanup() { initial_base_object_count_ = 0; CleanupHandles(); - while (!cleanup_hooks_.empty()) { + while (!cleanup_hooks_.empty() || + native_immediates_.size() > 0 || + native_immediates_threadsafe_.size() > 0 || + native_immediates_interrupts_.size() > 0) { // Copy into a vector, since we can't sort an unordered_set in-place. std::vector callbacks( cleanup_hooks_.begin(), cleanup_hooks_.end()); diff --git a/src/env.h b/src/env.h index e256a30e90f8ee..bc222804010a03 100644 --- a/src/env.h +++ b/src/env.h @@ -150,6 +150,7 @@ constexpr size_t kFsStatsBufferLength = V(contextify_context_private_symbol, "node:contextify:context") \ V(contextify_global_private_symbol, "node:contextify:global") \ V(decorated_private_symbol, "node:decorated") \ + V(napi_type_tag, "node:napi:type_tag") \ V(napi_wrapper, "node:napi:wrapper") \ V(untransferable_object_private_symbol, "node:untransferableObject") \ diff --git a/src/inspector/main_thread_interface.cc b/src/inspector/main_thread_interface.cc index a15cd52d239e40..0cf75a37146729 100644 --- a/src/inspector/main_thread_interface.cc +++ b/src/inspector/main_thread_interface.cc @@ -14,8 +14,8 @@ namespace node { namespace inspector { namespace { -using v8_inspector::StringView; using v8_inspector::StringBuffer; +using v8_inspector::StringView; template class DeletableWrapper : public Deletable { diff --git a/src/js_native_api.h b/src/js_native_api.h index 00dedfbfc71c38..d0d975c2b454a8 100644 --- a/src/js_native_api.h +++ b/src/js_native_api.h @@ -537,6 +537,16 @@ NAPI_EXTERN napi_status napi_detach_arraybuffer(napi_env env, NAPI_EXTERN napi_status napi_is_detached_arraybuffer(napi_env env, napi_value value, bool* result); +// Type tagging +NAPI_EXTERN napi_status napi_type_tag_object(napi_env env, + napi_value value, + const napi_type_tag* type_tag); + +NAPI_EXTERN napi_status +napi_check_object_type_tag(napi_env env, + napi_value value, + const napi_type_tag* type_tag, + bool* result); #endif // NAPI_EXPERIMENTAL EXTERN_C_END diff --git a/src/js_native_api_types.h b/src/js_native_api_types.h index 7254019dd78750..115ccebf26132e 100644 --- a/src/js_native_api_types.h +++ b/src/js_native_api_types.h @@ -140,4 +140,11 @@ typedef enum { } napi_key_conversion; #endif // NAPI_VERSION >= 6 +#ifdef NAPI_EXPERIMENTAL +typedef struct { + uint64_t lower; + uint64_t upper; +} napi_type_tag; +#endif // NAPI_EXPERIMENTAL + #endif // SRC_JS_NATIVE_API_TYPES_H_ diff --git a/src/js_native_api_v8.cc b/src/js_native_api_v8.cc index e99333a6a362d1..b8455eb3a69b3e 100644 --- a/src/js_native_api_v8.cc +++ b/src/js_native_api_v8.cc @@ -10,6 +10,9 @@ #define CHECK_MAYBE_NOTHING(env, maybe, status) \ RETURN_STATUS_IF_FALSE((env), !((maybe).IsNothing()), (status)) +#define CHECK_MAYBE_NOTHING_WITH_PREAMBLE(env, maybe, status) 
\ + RETURN_STATUS_IF_FALSE_WITH_PREAMBLE((env), !((maybe).IsNothing()), (status)) + #define CHECK_TO_NUMBER(env, context, result, src) \ CHECK_TO_TYPE((env), Number, (context), (result), (src), napi_number_expected) @@ -1602,13 +1605,10 @@ napi_status napi_create_bigint_words(napi_env env, v8::MaybeLocal b = v8::BigInt::NewFromWords( context, sign_bit, word_count, words); - if (try_catch.HasCaught()) { - return napi_set_last_error(env, napi_pending_exception); - } else { - CHECK_MAYBE_EMPTY(env, b, napi_generic_failure); - *result = v8impl::JsValueFromV8LocalValue(b.ToLocalChecked()); - return napi_clear_last_error(env); - } + CHECK_MAYBE_EMPTY_WITH_PREAMBLE(env, b, napi_generic_failure); + + *result = v8impl::JsValueFromV8LocalValue(b.ToLocalChecked()); + return GET_RETURN_STATUS(env); } napi_status napi_get_boolean(napi_env env, bool value, napi_value* result) { @@ -2359,6 +2359,72 @@ napi_status napi_create_external(napi_env env, return napi_clear_last_error(env); } +NAPI_EXTERN napi_status napi_type_tag_object(napi_env env, + napi_value object, + const napi_type_tag* type_tag) { + NAPI_PREAMBLE(env); + v8::Local context = env->context(); + v8::Local obj; + CHECK_TO_OBJECT_WITH_PREAMBLE(env, context, obj, object); + CHECK_ARG_WITH_PREAMBLE(env, type_tag); + + auto key = NAPI_PRIVATE_KEY(context, type_tag); + auto maybe_has = obj->HasPrivate(context, key); + CHECK_MAYBE_NOTHING_WITH_PREAMBLE(env, maybe_has, napi_generic_failure); + RETURN_STATUS_IF_FALSE_WITH_PREAMBLE(env, + !maybe_has.FromJust(), + napi_invalid_arg); + + auto tag = v8::BigInt::NewFromWords(context, + 0, + 2, + reinterpret_cast(type_tag)); + CHECK_MAYBE_EMPTY_WITH_PREAMBLE(env, tag, napi_generic_failure); + + auto maybe_set = obj->SetPrivate(context, key, tag.ToLocalChecked()); + CHECK_MAYBE_NOTHING_WITH_PREAMBLE(env, maybe_set, napi_generic_failure); + RETURN_STATUS_IF_FALSE_WITH_PREAMBLE(env, + maybe_set.FromJust(), + napi_generic_failure); + + return GET_RETURN_STATUS(env); +} + +NAPI_EXTERN napi_status +napi_check_object_type_tag(napi_env env, + napi_value object, + const napi_type_tag* type_tag, + bool* result) { + NAPI_PREAMBLE(env); + v8::Local context = env->context(); + v8::Local obj; + CHECK_TO_OBJECT_WITH_PREAMBLE(env, context, obj, object); + CHECK_ARG_WITH_PREAMBLE(env, type_tag); + CHECK_ARG_WITH_PREAMBLE(env, result); + + auto maybe_value = obj->GetPrivate(context, + NAPI_PRIVATE_KEY(context, type_tag)); + CHECK_MAYBE_EMPTY_WITH_PREAMBLE(env, maybe_value, napi_generic_failure); + v8::Local val = maybe_value.ToLocalChecked(); + + // We consider the type check to have failed unless we reach the line below + // where we set whether the type check succeeded or not based on the + // comparison of the two type tags. 
+ *result = false; + if (val->IsBigInt()) { + int sign; + int size = 2; + napi_type_tag tag; + val.As()->ToWordsArray(&sign, + &size, + reinterpret_cast(&tag)); + if (size == 2 && sign == 0) + *result = (tag.lower == type_tag->lower && tag.upper == type_tag->upper); + } + + return GET_RETURN_STATUS(env); +} + napi_status napi_get_value_external(napi_env env, napi_value value, void** result) { diff --git a/src/js_native_api_v8.h b/src/js_native_api_v8.h index 83e6a0bd02e23c..06b8049ec46db0 100644 --- a/src/js_native_api_v8.h +++ b/src/js_native_api_v8.h @@ -148,6 +148,14 @@ napi_status napi_set_last_error(napi_env env, napi_status error_code, } \ } while (0) +#define RETURN_STATUS_IF_FALSE_WITH_PREAMBLE(env, condition, status) \ + do { \ + if (!(condition)) { \ + return napi_set_last_error( \ + (env), try_catch.HasCaught() ? napi_pending_exception : (status)); \ + } \ + } while (0) + #define CHECK_ENV(env) \ do { \ if ((env) == nullptr) { \ @@ -158,9 +166,17 @@ napi_status napi_set_last_error(napi_env env, napi_status error_code, #define CHECK_ARG(env, arg) \ RETURN_STATUS_IF_FALSE((env), ((arg) != nullptr), napi_invalid_arg) +#define CHECK_ARG_WITH_PREAMBLE(env, arg) \ + RETURN_STATUS_IF_FALSE_WITH_PREAMBLE((env), \ + ((arg) != nullptr), \ + napi_invalid_arg) + #define CHECK_MAYBE_EMPTY(env, maybe, status) \ RETURN_STATUS_IF_FALSE((env), !((maybe).IsEmpty()), (status)) +#define CHECK_MAYBE_EMPTY_WITH_PREAMBLE(env, maybe, status) \ + RETURN_STATUS_IF_FALSE_WITH_PREAMBLE((env), !((maybe).IsEmpty()), (status)) + // NAPI_PREAMBLE is not wrapped in do..while: try_catch must have function scope #define NAPI_PREAMBLE(env) \ CHECK_ENV((env)); \ @@ -178,6 +194,14 @@ napi_status napi_set_last_error(napi_env env, napi_status error_code, (result) = maybe.ToLocalChecked(); \ } while (0) +#define CHECK_TO_TYPE_WITH_PREAMBLE(env, type, context, result, src, status) \ + do { \ + CHECK_ARG_WITH_PREAMBLE((env), (src)); \ + auto maybe = v8impl::V8LocalValueFromJsValue((src))->To##type((context)); \ + CHECK_MAYBE_EMPTY_WITH_PREAMBLE((env), maybe, (status)); \ + (result) = maybe.ToLocalChecked(); \ + } while (0) + #define CHECK_TO_FUNCTION(env, result, src) \ do { \ CHECK_ARG((env), (src)); \ @@ -189,6 +213,14 @@ napi_status napi_set_last_error(napi_env env, napi_status error_code, #define CHECK_TO_OBJECT(env, context, result, src) \ CHECK_TO_TYPE((env), Object, (context), (result), (src), napi_object_expected) +#define CHECK_TO_OBJECT_WITH_PREAMBLE(env, context, result, src) \ + CHECK_TO_TYPE_WITH_PREAMBLE((env), \ + Object, \ + (context), \ + (result), \ + (src), \ + napi_object_expected) + #define CHECK_TO_STRING(env, context, result, src) \ CHECK_TO_TYPE((env), String, (context), (result), (src), napi_string_expected) diff --git a/src/node.cc b/src/node.cc index c2befefefd8130..0dc70403818895 100644 --- a/src/node.cc +++ b/src/node.cc @@ -779,6 +779,13 @@ int ProcessGlobalArgs(std::vector* args, return 12; } + // TODO(mylesborins): remove this when the harmony-top-level-await flag + // is removed in V8 + if (std::find(v8_args.begin(), v8_args.end(), + "--no-harmony-top-level-await") == v8_args.end()) { + v8_args.push_back("--harmony-top-level-await"); + } + auto env_opts = per_process::cli_options->per_isolate->per_env; if (std::find(v8_args.begin(), v8_args.end(), "--abort-on-uncaught-exception") != v8_args.end() || diff --git a/src/node.h b/src/node.h index 71273d6db9c6ed..1914e72ee8fa43 100644 --- a/src/node.h +++ b/src/node.h @@ -872,6 +872,20 @@ NODE_EXTERN void 
RemoveEnvironmentCleanupHook(v8::Isolate* isolate, void (*fun)(void* arg), void* arg); +/* These are async equivalents of the above. After the cleanup hook is invoked, + * `cb(cbarg)` *must* be called, and attempting to remove the cleanup hook will + * have no effect. */ +struct ACHHandle; +struct NODE_EXTERN DeleteACHHandle { void operator()(ACHHandle*) const; }; +typedef std::unique_ptr AsyncCleanupHookHandle; + +NODE_EXTERN AsyncCleanupHookHandle AddEnvironmentCleanupHook( + v8::Isolate* isolate, + void (*fun)(void* arg, void (*cb)(void*), void* cbarg), + void* arg); + +NODE_EXTERN void RemoveEnvironmentCleanupHook(AsyncCleanupHookHandle holder); + /* Returns the id of the current execution context. If the return value is * zero then no execution has been set. This will happen if the user handles * I/O from native code. */ diff --git a/src/node_api.cc b/src/node_api.cc index bb203fc03c310f..4fbab771d58400 100644 --- a/src/node_api.cc +++ b/src/node_api.cc @@ -518,6 +518,44 @@ napi_status napi_remove_env_cleanup_hook(napi_env env, return napi_ok; } +struct napi_async_cleanup_hook_handle__ { + node::AsyncCleanupHookHandle handle; +}; + +napi_status napi_add_async_cleanup_hook( + napi_env env, + void (*fun)(void* arg, void(* cb)(void*), void* cbarg), + void* arg, + napi_async_cleanup_hook_handle* remove_handle) { + CHECK_ENV(env); + CHECK_ARG(env, fun); + + auto handle = node::AddEnvironmentCleanupHook(env->isolate, fun, arg); + if (remove_handle != nullptr) { + *remove_handle = new napi_async_cleanup_hook_handle__ { std::move(handle) }; + env->Ref(); + } + + return napi_clear_last_error(env); +} + +napi_status napi_remove_async_cleanup_hook( + napi_env env, + napi_async_cleanup_hook_handle remove_handle) { + CHECK_ENV(env); + CHECK_ARG(env, remove_handle); + + node::RemoveEnvironmentCleanupHook(std::move(remove_handle->handle)); + delete remove_handle; + + // Release the `env` handle asynchronously since it would be surprising if + // a call to a N-API function would destroy `env` synchronously. 
+ static_cast(env)->node_env() + ->SetImmediate([env](node::Environment*) { env->Unref(); }); + + return napi_clear_last_error(env); +} + napi_status napi_fatal_exception(napi_env env, napi_value err) { NAPI_PREAMBLE(env); CHECK_ARG(env, err); diff --git a/src/node_api.h b/src/node_api.h index 2f1b45572d8130..4f3eb8f2caae63 100644 --- a/src/node_api.h +++ b/src/node_api.h @@ -250,6 +250,20 @@ napi_ref_threadsafe_function(napi_env env, napi_threadsafe_function func); #endif // NAPI_VERSION >= 4 +#ifdef NAPI_EXPERIMENTAL + +NAPI_EXTERN napi_status napi_add_async_cleanup_hook( + napi_env env, + void (*fun)(void* arg, void(* cb)(void*), void* cbarg), + void* arg, + napi_async_cleanup_hook_handle* remove_handle); + +NAPI_EXTERN napi_status napi_remove_async_cleanup_hook( + napi_env env, + napi_async_cleanup_hook_handle remove_handle); + +#endif // NAPI_EXPERIMENTAL + EXTERN_C_END #endif // SRC_NODE_API_H_ diff --git a/src/node_api_types.h b/src/node_api_types.h index 1c9a2b8aa21889..b8711d3eddc408 100644 --- a/src/node_api_types.h +++ b/src/node_api_types.h @@ -41,4 +41,8 @@ typedef struct { const char* release; } napi_node_version; +#ifdef NAPI_EXPERIMENTAL +typedef struct napi_async_cleanup_hook_handle__* napi_async_cleanup_hook_handle; +#endif // NAPI_EXPERIMENTAL + #endif // SRC_NODE_API_TYPES_H_ diff --git a/src/node_constants.cc b/src/node_constants.cc index 5d99fa181a0472..38c8f2738b4bad 100644 --- a/src/node_constants.cc +++ b/src/node_constants.cc @@ -806,6 +806,10 @@ void DefineCryptoConstants(Local target) { NODE_DEFINE_CONSTANT(target, SSL_OP_ALL); #endif +#ifdef SSL_OP_ALLOW_NO_DHE_KEX + NODE_DEFINE_CONSTANT(target, SSL_OP_ALLOW_NO_DHE_KEX); +#endif + #ifdef SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION NODE_DEFINE_CONSTANT(target, SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION); #endif @@ -870,10 +874,18 @@ void DefineCryptoConstants(Local target) { NODE_DEFINE_CONSTANT(target, SSL_OP_NO_COMPRESSION); #endif +#ifdef SSL_OP_NO_ENCRYPT_THEN_MAC + NODE_DEFINE_CONSTANT(target, SSL_OP_NO_ENCRYPT_THEN_MAC); +#endif + #ifdef SSL_OP_NO_QUERY_MTU NODE_DEFINE_CONSTANT(target, SSL_OP_NO_QUERY_MTU); #endif +#ifdef SSL_OP_NO_RENEGOTIATION + NODE_DEFINE_CONSTANT(target, SSL_OP_NO_RENEGOTIATION); +#endif + #ifdef SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION NODE_DEFINE_CONSTANT(target, SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION); #endif @@ -902,6 +914,10 @@ void DefineCryptoConstants(Local target) { NODE_DEFINE_CONSTANT(target, SSL_OP_NO_TLSv1_2); #endif +#ifdef SSL_OP_NO_TLSv1_3 + NODE_DEFINE_CONSTANT(target, SSL_OP_NO_TLSv1_3); +#endif + #ifdef SSL_OP_PKCS1_CHECK_1 NODE_DEFINE_CONSTANT(target, SSL_OP_PKCS1_CHECK_1); #endif @@ -910,6 +926,10 @@ void DefineCryptoConstants(Local target) { NODE_DEFINE_CONSTANT(target, SSL_OP_PKCS1_CHECK_2); #endif +#ifdef SSL_OP_PRIORITIZE_CHACHA + NODE_DEFINE_CONSTANT(target, SSL_OP_PRIORITIZE_CHACHA); +#endif + #ifdef SSL_OP_SINGLE_DH_USE NODE_DEFINE_CONSTANT(target, SSL_OP_SINGLE_DH_USE); #endif diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc index c7a3df8d067af4..b409d007307877 100644 --- a/src/node_http_parser.cc +++ b/src/node_http_parser.cc @@ -280,6 +280,7 @@ class Parser : public AsyncWrap, public StreamListener { int on_headers_complete() { header_nread_ = 0; + header_parsing_start_time_ = 0; // Arguments for the on-headers-complete javascript callback. 
This // list needs to be kept in sync with the actual argument list for diff --git a/src/node_options.cc b/src/node_options.cc index 14689db81dc73a..17617d57bd8e67 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -613,12 +613,13 @@ PerIsolateOptionsParser::PerIsolateOptionsParser( Implies("--report-signal", "--report-on-signal"); AddOption("--experimental-top-level-await", - "enable experimental support for ECMAScript Top-Level Await", + "", &PerIsolateOptions::experimental_top_level_await, kAllowedInEnvironment); AddOption("--harmony-top-level-await", "", V8Option{}); Implies("--experimental-top-level-await", "--harmony-top-level-await"); Implies("--harmony-top-level-await", "--experimental-top-level-await"); + ImpliesNot("--no-harmony-top-level-await", "--experimental-top-level-await"); Insert(eop, &PerIsolateOptions::get_per_env_options); } diff --git a/src/node_options.h b/src/node_options.h index 7f8c223f755a24..fe20022bb2f357 100644 --- a/src/node_options.h +++ b/src/node_options.h @@ -188,7 +188,7 @@ class PerIsolateOptions : public Options { bool no_node_snapshot = false; bool report_uncaught_exception = false; bool report_on_signal = false; - bool experimental_top_level_await = false; + bool experimental_top_level_await = true; std::string report_signal = "SIGUSR2"; inline EnvironmentOptions* get_per_env_options(); void CheckOptions(std::vector* errors) override; diff --git a/src/node_report.cc b/src/node_report.cc index c93e03afe63918..e7bfe7fef09d14 100644 --- a/src/node_report.cc +++ b/src/node_report.cc @@ -43,13 +43,11 @@ using v8::HeapSpaceStatistics; using v8::HeapStatistics; using v8::Isolate; using v8::Local; -using v8::Number; using v8::Object; -using v8::StackTrace; using v8::String; using v8::TryCatch; -using v8::Value; using v8::V8; +using v8::Value; namespace per_process = node::per_process; diff --git a/src/node_util.cc b/src/node_util.cc index 22a372ad09a3bd..eac09f8d44fcbd 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -8,8 +8,10 @@ namespace util { using v8::ALL_PROPERTIES; using v8::Array; using v8::ArrayBufferView; +using v8::BigInt; using v8::Boolean; using v8::Context; +using v8::External; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Global; @@ -67,6 +69,18 @@ static void GetConstructorName( args.GetReturnValue().Set(name); } +static void GetExternalValue( + const FunctionCallbackInfo& args) { + CHECK(args[0]->IsExternal()); + Isolate* isolate = args.GetIsolate(); + Local external = args[0].As(); + + void* ptr = external->Value(); + uint64_t value = reinterpret_cast(ptr); + Local ret = BigInt::NewFromUnsigned(isolate, value); + args.GetReturnValue().Set(ret); +} + static void GetPromiseDetails(const FunctionCallbackInfo& args) { // Return undefined if it's not a Promise. 
if (!args[0]->IsPromise()) @@ -296,6 +310,7 @@ void Initialize(Local target, env->SetMethodNoSideEffect(target, "getOwnNonIndexProperties", GetOwnNonIndexProperties); env->SetMethodNoSideEffect(target, "getConstructorName", GetConstructorName); + env->SetMethodNoSideEffect(target, "getExternalValue", GetExternalValue); env->SetMethod(target, "sleep", Sleep); env->SetMethod(target, "arrayBufferViewHasBuffer", ArrayBufferViewHasBuffer); diff --git a/src/node_version.h b/src/node_version.h index 38be0bb7120cfd..b7f38a2c037b84 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 14 -#define NODE_MINOR_VERSION 7 -#define NODE_PATCH_VERSION 1 +#define NODE_MINOR_VERSION 8 +#define NODE_PATCH_VERSION 0 #define NODE_VERSION_IS_LTS 0 #define NODE_VERSION_LTS_CODENAME "" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n) diff --git a/src/node_wasi.cc b/src/node_wasi.cc index 48ef82bd088d6a..80182724afea9c 100644 --- a/src/node_wasi.cc +++ b/src/node_wasi.cc @@ -175,6 +175,8 @@ void WASI::New(const FunctionCallbackInfo& args) { const uint32_t argc = argv->Length(); uvwasi_options_t options; + uvwasi_options_init(&options); + Local stdio = args[3].As(); CHECK_EQ(stdio->Length(), 3); options.in = stdio->Get(context, 0).ToLocalChecked()-> @@ -244,8 +246,8 @@ void WASI::New(const FunctionCallbackInfo& args) { if (options.preopens != nullptr) { for (uint32_t i = 0; i < options.preopenc; i++) { - free(options.preopens[i].mapped_path); - free(options.preopens[i].real_path); + free(const_cast(options.preopens[i].mapped_path)); + free(const_cast(options.preopens[i].real_path)); } free(options.preopens); diff --git a/test/addons/async-cleanup-hook/binding.cc b/test/addons/async-cleanup-hook/binding.cc new file mode 100644 index 00000000000000..d18da7a094f71a --- /dev/null +++ b/test/addons/async-cleanup-hook/binding.cc @@ -0,0 +1,59 @@ +#include +#include +#include + +void MustNotCall(void* arg, void(*cb)(void*), void* cbarg) { + assert(0); +} + +struct AsyncData { + uv_async_t async; + v8::Isolate* isolate; + node::AsyncCleanupHookHandle handle; + void (*done_cb)(void*); + void* done_arg; +}; + +void AsyncCleanupHook(void* arg, void(*cb)(void*), void* cbarg) { + AsyncData* data = static_cast(arg); + uv_loop_t* loop = node::GetCurrentEventLoop(data->isolate); + assert(loop != nullptr); + int err = uv_async_init(loop, &data->async, [](uv_async_t* async) { + AsyncData* data = static_cast(async->data); + // Attempting to remove the cleanup hook here should be a no-op since it + // has already been started. 
+ node::RemoveEnvironmentCleanupHook(std::move(data->handle)); + + uv_close(reinterpret_cast(async), [](uv_handle_t* handle) { + AsyncData* data = static_cast(handle->data); + data->done_cb(data->done_arg); + delete data; + }); + }); + assert(err == 0); + + data->async.data = data; + data->done_cb = cb; + data->done_arg = cbarg; + uv_async_send(&data->async); +} + +void Initialize(v8::Local exports, + v8::Local module, + v8::Local context) { + AsyncData* data = new AsyncData(); + data->isolate = context->GetIsolate(); + auto handle = node::AddEnvironmentCleanupHook( + context->GetIsolate(), + AsyncCleanupHook, + data); + data->handle = std::move(handle); + + auto must_not_call_handle = node::AddEnvironmentCleanupHook( + context->GetIsolate(), + MustNotCall, + nullptr); + node::RemoveEnvironmentCleanupHook(std::move(must_not_call_handle)); +} + +NODE_MODULE_CONTEXT_AWARE(NODE_GYP_MODULE_NAME, Initialize) diff --git a/test/addons/async-cleanup-hook/binding.gyp b/test/addons/async-cleanup-hook/binding.gyp new file mode 100644 index 00000000000000..55fbe7050f18e4 --- /dev/null +++ b/test/addons/async-cleanup-hook/binding.gyp @@ -0,0 +1,9 @@ +{ + 'targets': [ + { + 'target_name': 'binding', + 'sources': [ 'binding.cc' ], + 'includes': ['../common.gypi'], + } + ] +} diff --git a/test/addons/async-cleanup-hook/test.js b/test/addons/async-cleanup-hook/test.js new file mode 100644 index 00000000000000..55cc2517a59bc8 --- /dev/null +++ b/test/addons/async-cleanup-hook/test.js @@ -0,0 +1,8 @@ +'use strict'; +const common = require('../../common'); +const path = require('path'); +const { Worker } = require('worker_threads'); +const binding = path.resolve(__dirname, `./build/${common.buildType}/binding`); + +const w = new Worker(`require(${JSON.stringify(binding)})`, { eval: true }); +w.on('exit', common.mustCall(() => require(binding))); diff --git a/test/addons/async-hooks-promise/binding.cc b/test/addons/async-hooks-promise/binding.cc index 452cbda8793aa1..8fe6b9471bcee5 100644 --- a/test/addons/async-hooks-promise/binding.cc +++ b/test/addons/async-hooks-promise/binding.cc @@ -6,7 +6,6 @@ namespace { using v8::FunctionCallbackInfo; using v8::Isolate; using v8::Local; -using v8::NewStringType; using v8::Object; using v8::Promise; using v8::String; diff --git a/test/addons/dlopen-ping-pong/binding.cc b/test/addons/dlopen-ping-pong/binding.cc index c8711f09aedac6..be33708c9317d3 100644 --- a/test/addons/dlopen-ping-pong/binding.cc +++ b/test/addons/dlopen-ping-pong/binding.cc @@ -17,7 +17,6 @@ using v8::FunctionCallbackInfo; using v8::Isolate; using v8::Local; using v8::Object; -using v8::NewStringType; using v8::String; using v8::Value; diff --git a/test/addons/non-node-context/binding.cc b/test/addons/non-node-context/binding.cc index 6fe776b7c60c3d..8423d2b1d7a5a5 100644 --- a/test/addons/non-node-context/binding.cc +++ b/test/addons/non-node-context/binding.cc @@ -5,12 +5,9 @@ namespace { using v8::Context; -using v8::Function; -using v8::FunctionTemplate; using v8::Isolate; using v8::Local; using v8::MaybeLocal; -using v8::NewStringType; using v8::Object; using v8::Script; using v8::String; diff --git a/test/addons/repl-domain-abort/binding.cc b/test/addons/repl-domain-abort/binding.cc index 3e716443540229..f5e82d3718ccac 100644 --- a/test/addons/repl-domain-abort/binding.cc +++ b/test/addons/repl-domain-abort/binding.cc @@ -25,8 +25,8 @@ using v8::Boolean; using v8::Function; using v8::FunctionCallbackInfo; -using v8::Local; using v8::Isolate; +using v8::Local; using v8::Object; using v8::Value; 
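The AsyncResource.bind() helper added to lib/async_hooks.js earlier in this diff returns a function that always runs inside an AsyncResource captured at bind time. A minimal usage sketch, not taken from this diff:

    const { AsyncResource, executionAsyncId } = require('async_hooks');

    // Create a function permanently tied to its own AsyncResource.
    const bound = AsyncResource.bind(function flushQueue() {
      // Inside the call, the bound resource is the current execution context.
      console.log(executionAsyncId() === bound.asyncResource.asyncId()); // true
    });

    // The resource is exposed on the returned function.
    console.log(bound.asyncResource instanceof AsyncResource); // true

    setTimeout(bound, 10);

Note that, as added here, arguments passed to the bound function are forwarded to runInAsyncScope(), whose first parameter is the thisArg, so the sketch calls it without arguments.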
diff --git a/test/addons/uv-handle-leak/binding.cc b/test/addons/uv-handle-leak/binding.cc index 1b769b141c0076..20ddfac6802e97 100644 --- a/test/addons/uv-handle-leak/binding.cc +++ b/test/addons/uv-handle-leak/binding.cc @@ -6,7 +6,6 @@ using v8::Context; using v8::FunctionCallbackInfo; using v8::Isolate; using v8::Local; -using v8::Object; using v8::Value; // Give these things names in the public namespace so that we can see diff --git a/test/addons/worker-addon/binding.cc b/test/addons/worker-addon/binding.cc index 01c857c43ebcfc..d5a07ff20959e8 100644 --- a/test/addons/worker-addon/binding.cc +++ b/test/addons/worker-addon/binding.cc @@ -6,8 +6,6 @@ #include using v8::Context; -using v8::HandleScope; -using v8::Isolate; using v8::Local; using v8::Object; using v8::Value; diff --git a/test/async-hooks/test-async-local-storage-http-agent.js b/test/async-hooks/test-async-local-storage-http-agent.js new file mode 100644 index 00000000000000..1de535aa709687 --- /dev/null +++ b/test/async-hooks/test-async-local-storage-http-agent.js @@ -0,0 +1,35 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const { AsyncLocalStorage } = require('async_hooks'); +const http = require('http'); + +const asyncLocalStorage = new AsyncLocalStorage(); + +const agent = new http.Agent({ + maxSockets: 1 +}); + +const N = 3; +let responses = 0; + +const server = http.createServer(common.mustCall((req, res) => { + res.end('ok'); +}, N)); + +server.listen(0, common.mustCall(() => { + const port = server.address().port; + + for (let i = 0; i < N; i++) { + asyncLocalStorage.run(i, () => { + http.get({ agent, port }, common.mustCall((res) => { + assert.strictEqual(asyncLocalStorage.getStore(), i); + if (++responses === N) { + server.close(); + agent.destroy(); + } + res.resume(); + })); + }); + } +})); diff --git a/test/async-hooks/test-async-local-storage-run-resource.js b/test/async-hooks/test-async-local-storage-run-resource.js index 9a7479f699246c..21bc70051bd718 100644 --- a/test/async-hooks/test-async-local-storage-run-resource.js +++ b/test/async-hooks/test-async-local-storage-run-resource.js @@ -10,8 +10,21 @@ const asyncLocalStorage = new AsyncLocalStorage(); const outerResource = executionAsyncResource(); -asyncLocalStorage.run(new Map(), () => { - assert.notStrictEqual(executionAsyncResource(), outerResource); +const store = new Map(); +asyncLocalStorage.run(store, () => { + assert.strictEqual(asyncLocalStorage.getStore(), store); + const innerResource = executionAsyncResource(); + assert.notStrictEqual(innerResource, outerResource); + asyncLocalStorage.run(store, () => { + assert.strictEqual(asyncLocalStorage.getStore(), store); + assert.strictEqual(executionAsyncResource(), innerResource); + const otherStore = new Map(); + asyncLocalStorage.run(otherStore, () => { + assert.strictEqual(asyncLocalStorage.getStore(), otherStore); + assert.notStrictEqual(executionAsyncResource(), innerResource); + assert.notStrictEqual(executionAsyncResource(), outerResource); + }); + }); }); assert.strictEqual(executionAsyncResource(), outerResource); diff --git a/test/async-hooks/test-destroy-not-blocked.js b/test/async-hooks/test-destroy-not-blocked.js new file mode 100644 index 00000000000000..aa467f30143806 --- /dev/null +++ b/test/async-hooks/test-destroy-not-blocked.js @@ -0,0 +1,97 @@ +'use strict'; +// Flags: --expose_gc + +const common = require('../common'); +const assert = require('assert'); +const tick = require('../common/tick'); + +const { createHook, AsyncResource } = 
require('async_hooks'); + +// Test priority of destroy hook relative to nextTick,... and +// verify a microtask is scheduled in case a lot items are queued + +const resType = 'MyResource'; +let activeId = -1; +createHook({ + init(id, type) { + if (type === resType) { + assert.strictEqual(activeId, -1); + activeId = id; + } + }, + destroy(id) { + if (activeId === id) { + activeId = -1; + } + } +}).enable(); + +function testNextTick() { + assert.strictEqual(activeId, -1); + const res = new AsyncResource(resType); + assert.strictEqual(activeId, res.asyncId()); + res.emitDestroy(); + // nextTick has higher prio than emit destroy + process.nextTick(common.mustCall(() => + assert.strictEqual(activeId, res.asyncId())) + ); +} + +function testQueueMicrotask() { + assert.strictEqual(activeId, -1); + const res = new AsyncResource(resType); + assert.strictEqual(activeId, res.asyncId()); + res.emitDestroy(); + // queueMicrotask has higher prio than emit destroy + queueMicrotask(common.mustCall(() => + assert.strictEqual(activeId, res.asyncId())) + ); +} + +function testImmediate() { + assert.strictEqual(activeId, -1); + const res = new AsyncResource(resType); + assert.strictEqual(activeId, res.asyncId()); + res.emitDestroy(); + setImmediate(common.mustCall(() => + assert.strictEqual(activeId, -1)) + ); +} + +function testPromise() { + assert.strictEqual(activeId, -1); + const res = new AsyncResource(resType); + assert.strictEqual(activeId, res.asyncId()); + res.emitDestroy(); + // Promise has higher prio than emit destroy + Promise.resolve().then(common.mustCall(() => + assert.strictEqual(activeId, res.asyncId())) + ); +} + +async function testAwait() { + assert.strictEqual(activeId, -1); + const res = new AsyncResource(resType); + assert.strictEqual(activeId, res.asyncId()); + res.emitDestroy(); + + for (let i = 0; i < 5000; i++) { + await Promise.resolve(); + } + global.gc(); + await Promise.resolve(); + // Limit to trigger a microtask not yet reached + assert.strictEqual(activeId, res.asyncId()); + for (let i = 0; i < 5000; i++) { + await Promise.resolve(); + } + global.gc(); + await Promise.resolve(); + assert.strictEqual(activeId, -1); +} + +testNextTick(); +tick(2, testQueueMicrotask); +tick(4, testImmediate); +tick(6, testPromise); +tick(8, () => testAwait().then(common.mustCall())); diff --git a/test/async-hooks/test-http-agent-handle-reuse-parallel.js b/test/async-hooks/test-http-agent-handle-reuse-parallel.js new file mode 100644 index 00000000000000..cd73b3ed2cb61c --- /dev/null +++ b/test/async-hooks/test-http-agent-handle-reuse-parallel.js @@ -0,0 +1,92 @@ +'use strict'; +// Flags: --expose-internals +const common = require('../common'); +const initHooks = require('./init-hooks'); +const { checkInvocations } = require('./hook-checks'); +const assert = require('assert'); +const { async_id_symbol } = require('internal/async_hooks').symbols; +const http = require('http'); + +// Checks that the async resource used in init in case of a reused handle +// is not reused. Test is based on parallel\test-async-hooks-http-agent.js. + +const hooks = initHooks(); +hooks.enable(); + +const reqAsyncIds = []; +let socket; +let responses = 0; + +// Make sure a single socket is transparently reused for 2 requests. 
+const agent = new http.Agent({ + keepAlive: true, + keepAliveMsecs: Infinity, + maxSockets: 1 +}); + +const verifyRequest = (idx) => (res) => { + reqAsyncIds[idx] = res.socket[async_id_symbol]; + assert.ok(reqAsyncIds[idx] > 0, `${reqAsyncIds[idx]} > 0`); + if (socket) { + // Check that both requests share their socket. + assert.strictEqual(res.socket, socket); + } else { + socket = res.socket; + } + + res.on('data', common.mustCallAtLeast(() => {})); + res.on('end', common.mustCall(() => { + if (++responses === 2) { + // Clean up to let the event loop stop. + server.close(); + agent.destroy(); + } + })); +}; + +const server = http.createServer(common.mustCall((req, res) => { + req.once('data', common.mustCallAtLeast(() => { + res.writeHead(200, { 'Content-Type': 'text/plain' }); + res.write('foo'); + })); + req.on('end', common.mustCall(() => { + res.end('bar'); + })); +}, 2)).listen(0, common.mustCall(() => { + const port = server.address().port; + const payload = 'hello world'; + + // First request. + const r1 = http.request({ + agent, port, method: 'POST' + }, common.mustCall(verifyRequest(0))); + r1.end(payload); + + // Second request. Sent in parallel with the first one. + const r2 = http.request({ + agent, port, method: 'POST' + }, common.mustCall(verifyRequest(1))); + r2.end(payload); +})); + + +process.on('exit', onExit); + +function onExit() { + hooks.disable(); + hooks.sanityCheck(); + const activities = hooks.activities; + + // Verify both invocations + const first = activities.filter((x) => x.uid === reqAsyncIds[0])[0]; + checkInvocations(first, { init: 1, destroy: 1 }, 'when process exits'); + + const second = activities.filter((x) => x.uid === reqAsyncIds[1])[0]; + checkInvocations(second, { init: 1, destroy: 1 }, 'when process exits'); + + // Verify reuse handle has been wrapped + assert.strictEqual(first.type, second.type); + assert.ok(first.handle !== second.handle, 'Resource reused'); + assert.ok(first.handle === second.handle.handle, + 'Resource not wrapped correctly'); +} diff --git a/test/async-hooks/test-http-agent-handle-reuse.js b/test/async-hooks/test-http-agent-handle-reuse-serial.js similarity index 98% rename from test/async-hooks/test-http-agent-handle-reuse.js rename to test/async-hooks/test-http-agent-handle-reuse-serial.js index 4db83bec54a7bf..bbc7183d6e72ca 100644 --- a/test/async-hooks/test-http-agent-handle-reuse.js +++ b/test/async-hooks/test-http-agent-handle-reuse-serial.js @@ -7,7 +7,7 @@ const assert = require('assert'); const { async_id_symbol } = require('internal/async_hooks').symbols; const http = require('http'); -// Checks that the async resource used in init in case of a resused handle +// Checks that the async resource used in init in case of a reused handle // is not reused. Test is based on parallel\test-async-hooks-http-agent.js. 
const hooks = initHooks(); diff --git a/test/cctest/test_base64.cc b/test/cctest/test_base64.cc index 5e39a1052bc057..be61618f9086ed 100644 --- a/test/cctest/test_base64.cc +++ b/test/cctest/test_base64.cc @@ -5,8 +5,8 @@ #include "gtest/gtest.h" -using node::base64_encode; using node::base64_decode; +using node::base64_encode; TEST(Base64Test, Encode) { auto test = [](const char* string, const char* base64_string) { diff --git a/test/cctest/test_util.cc b/test/cctest/test_util.cc index 6cfd5d317f7982..85c5a6a7fe43ae 100644 --- a/test/cctest/test_util.cc +++ b/test/cctest/test_util.cc @@ -3,6 +3,16 @@ #include "env-inl.h" #include "gtest/gtest.h" +using node::Calloc; +using node::Malloc; +using node::MaybeStackBuffer; +using node::SPrintF; +using node::StringEqualNoCase; +using node::StringEqualNoCaseN; +using node::ToLower; +using node::UncheckedCalloc; +using node::UncheckedMalloc; + TEST(UtilTest, ListHead) { struct Item { node::ListNode node_; }; typedef node::ListHead List; @@ -58,7 +68,6 @@ TEST(UtilTest, ListHead) { } TEST(UtilTest, StringEqualNoCase) { - using node::StringEqualNoCase; EXPECT_FALSE(StringEqualNoCase("a", "b")); EXPECT_TRUE(StringEqualNoCase("", "")); EXPECT_TRUE(StringEqualNoCase("equal", "equal")); @@ -69,7 +78,6 @@ TEST(UtilTest, StringEqualNoCase) { } TEST(UtilTest, StringEqualNoCaseN) { - using node::StringEqualNoCaseN; EXPECT_FALSE(StringEqualNoCaseN("a", "b", strlen("a"))); EXPECT_TRUE(StringEqualNoCaseN("", "", strlen(""))); EXPECT_TRUE(StringEqualNoCaseN("equal", "equal", strlen("equal"))); @@ -84,7 +92,6 @@ TEST(UtilTest, StringEqualNoCaseN) { } TEST(UtilTest, ToLower) { - using node::ToLower; EXPECT_EQ('0', ToLower('0')); EXPECT_EQ('a', ToLower('a')); EXPECT_EQ('a', ToLower('A')); @@ -98,7 +105,6 @@ TEST(UtilTest, ToLower) { } while (0) TEST(UtilTest, Malloc) { - using node::Malloc; TEST_AND_FREE(Malloc(0)); TEST_AND_FREE(Malloc(1)); TEST_AND_FREE(Malloc(0)); @@ -106,7 +112,6 @@ TEST(UtilTest, Malloc) { } TEST(UtilTest, Calloc) { - using node::Calloc; TEST_AND_FREE(Calloc(0)); TEST_AND_FREE(Calloc(1)); TEST_AND_FREE(Calloc(0)); @@ -114,7 +119,6 @@ TEST(UtilTest, Calloc) { } TEST(UtilTest, UncheckedMalloc) { - using node::UncheckedMalloc; TEST_AND_FREE(UncheckedMalloc(0)); TEST_AND_FREE(UncheckedMalloc(1)); TEST_AND_FREE(UncheckedMalloc(0)); @@ -122,7 +126,6 @@ TEST(UtilTest, UncheckedMalloc) { } TEST(UtilTest, UncheckedCalloc) { - using node::UncheckedCalloc; TEST_AND_FREE(UncheckedCalloc(0)); TEST_AND_FREE(UncheckedCalloc(1)); TEST_AND_FREE(UncheckedCalloc(0)); @@ -131,8 +134,6 @@ TEST(UtilTest, UncheckedCalloc) { template static void MaybeStackBufferBasic() { - using node::MaybeStackBuffer; - MaybeStackBuffer buf; size_t old_length; size_t old_capacity; @@ -211,8 +212,6 @@ static void MaybeStackBufferBasic() { } TEST(UtilTest, MaybeStackBuffer) { - using node::MaybeStackBuffer; - MaybeStackBufferBasic(); MaybeStackBufferBasic(); @@ -254,8 +253,6 @@ TEST(UtilTest, MaybeStackBuffer) { } TEST(UtilTest, SPrintF) { - using node::SPrintF; - // %d, %u and %s all do the same thing. The actual C++ type is used to infer // the right representation. EXPECT_EQ(SPrintF("%s", false), "false"); diff --git a/test/doctool/test-doctool-html.js b/test/doctool/test-doctool-html.js index 30221a7fe18b1a..b03bada053761f 100644 --- a/test/doctool/test-doctool-html.js +++ b/test/doctool/test-doctool-html.js @@ -65,9 +65,9 @@ const testData = [ }, { file: fixtures.path('order_of_end_tags_5873.md'), - html: '

ClassMethod: Buffer.from(array) ' +
-      '#' +
+    html: 'Static method: Buffer.from(array) ' +
+      '#
' + '' diff --git a/test/doctool/test-doctool-json.js b/test/doctool/test-doctool-json.js index 78afc818e9fae0..36d76cfec8236b 100644 --- a/test/doctool/test-doctool-json.js +++ b/test/doctool/test-doctool-json.js @@ -69,7 +69,7 @@ const testData = [ textRaw: 'Subsection', name: 'subsection', classMethods: [{ - textRaw: 'Class Method: Buffer.from(array)', + textRaw: 'Static method: Buffer.from(array)', type: 'classMethod', name: 'from', signatures: [ @@ -181,7 +181,7 @@ const testData = [ params: [] } ], - textRaw: 'Class Method: `Fhqwhgads.again()`', + textRaw: 'Static method: `Fhqwhgads.again()`', type: 'classMethod' } ], diff --git a/test/embedding/embedtest.cc b/test/embedding/embedtest.cc index a927167ea6a921..21e5ac713ed017 100644 --- a/test/embedding/embedtest.cc +++ b/test/embedding/embedtest.cc @@ -16,8 +16,8 @@ using v8::Local; using v8::Locker; using v8::MaybeLocal; using v8::SealHandleScope; -using v8::Value; using v8::V8; +using v8::Value; static int RunNodeInstance(MultiIsolatePlatform* platform, const std::vector& args, diff --git a/test/es-module/test-esm-tla-unfinished.mjs b/test/es-module/test-esm-tla-unfinished.mjs new file mode 100644 index 00000000000000..7d35c86285ac81 --- /dev/null +++ b/test/es-module/test-esm-tla-unfinished.mjs @@ -0,0 +1,82 @@ +import '../common/index.mjs'; +import assert from 'assert'; +import child_process from 'child_process'; +import fixtures from '../common/fixtures.js'; + +{ + // Unresolved TLA promise, --eval + const { status, stdout, stderr } = child_process.spawnSync( + process.execPath, + ['--input-type=module', '--eval', 'await new Promise(() => {})'], + { encoding: 'utf8' }); + assert.deepStrictEqual([status, stdout, stderr], [13, '', '']); +} + +{ + // Rejected TLA promise, --eval + const { status, stdout, stderr } = child_process.spawnSync( + process.execPath, + ['--input-type=module', '-e', 'await Promise.reject(new Error("Xyz"))'], + { encoding: 'utf8' }); + assert.deepStrictEqual([status, stdout], [1, '']); + assert.match(stderr, /Error: Xyz/); +} + +{ + // Unresolved TLA promise with explicit exit code, --eval + const { status, stdout, stderr } = child_process.spawnSync( + process.execPath, + ['--input-type=module', '--eval', + 'process.exitCode = 42;await new Promise(() => {})'], + { encoding: 'utf8' }); + assert.deepStrictEqual([status, stdout, stderr], [42, '', '']); +} + +{ + // Rejected TLA promise with explicit exit code, --eval + const { status, stdout, stderr } = child_process.spawnSync( + process.execPath, + ['--input-type=module', '-e', + 'process.exitCode = 42;await Promise.reject(new Error("Xyz"))'], + { encoding: 'utf8' }); + assert.deepStrictEqual([status, stdout], [1, '']); + assert.match(stderr, /Error: Xyz/); +} + +{ + // Unresolved TLA promise, module file + const { status, stdout, stderr } = child_process.spawnSync( + process.execPath, + [fixtures.path('es-modules/tla/unresolved.mjs')], + { encoding: 'utf8' }); + assert.deepStrictEqual([status, stdout, stderr], [13, '', '']); +} + +{ + // Rejected TLA promise, module file + const { status, stdout, stderr } = child_process.spawnSync( + process.execPath, + [fixtures.path('es-modules/tla/rejected.mjs')], + { encoding: 'utf8' }); + assert.deepStrictEqual([status, stdout], [1, '']); + assert.match(stderr, /Error: Xyz/); +} + +{ + // Unresolved TLA promise, module file + const { status, stdout, stderr } = child_process.spawnSync( + process.execPath, + [fixtures.path('es-modules/tla/unresolved-withexitcode.mjs')], + { encoding: 'utf8' }); + 
assert.deepStrictEqual([status, stdout, stderr], [42, '', '']); +} + +{ + // Rejected TLA promise, module file + const { status, stdout, stderr } = child_process.spawnSync( + process.execPath, + [fixtures.path('es-modules/tla/rejected-withexitcode.mjs')], + { encoding: 'utf8' }); + assert.deepStrictEqual([status, stdout], [1, '']); + assert.match(stderr, /Error: Xyz/); +} diff --git a/test/es-module/test-esm-tla.mjs b/test/es-module/test-esm-tla.mjs index 816f88dd80a74e..68a9d8954c5e2e 100644 --- a/test/es-module/test-esm-tla.mjs +++ b/test/es-module/test-esm-tla.mjs @@ -1,5 +1,3 @@ -// Flags: --experimental-top-level-await - import '../common/index.mjs'; import fixtures from '../common/fixtures.js'; import assert from 'assert'; diff --git a/test/fixtures/doc_with_backticks_in_headings.md b/test/fixtures/doc_with_backticks_in_headings.md index 74b4f3fd7ff39d..8b4fe5efaf4d37 100644 --- a/test/fixtures/doc_with_backticks_in_headings.md +++ b/test/fixtures/doc_with_backticks_in_headings.md @@ -8,6 +8,6 @@ ## Constructor: `new Fhqwhgads()` -## Class Method: `Fhqwhgads.again()` +## Static method: `Fhqwhgads.again()` ## `Fqhqwhgads.fullName` diff --git a/test/fixtures/es-modules/tla/rejected-withexitcode.mjs b/test/fixtures/es-modules/tla/rejected-withexitcode.mjs new file mode 100644 index 00000000000000..34e98e0147f134 --- /dev/null +++ b/test/fixtures/es-modules/tla/rejected-withexitcode.mjs @@ -0,0 +1,2 @@ +process.exitCode = 42; +await Promise.reject(new Error('Xyz')); diff --git a/test/fixtures/es-modules/tla/rejected.mjs b/test/fixtures/es-modules/tla/rejected.mjs new file mode 100644 index 00000000000000..752a3b91ff6534 --- /dev/null +++ b/test/fixtures/es-modules/tla/rejected.mjs @@ -0,0 +1 @@ +await Promise.reject(new Error('Xyz')); diff --git a/test/fixtures/es-modules/tla/unresolved-withexitcode.mjs b/test/fixtures/es-modules/tla/unresolved-withexitcode.mjs new file mode 100644 index 00000000000000..1cb982311080b8 --- /dev/null +++ b/test/fixtures/es-modules/tla/unresolved-withexitcode.mjs @@ -0,0 +1,2 @@ +process.exitCode = 42; +await new Promise(() => {}); diff --git a/test/fixtures/es-modules/tla/unresolved.mjs b/test/fixtures/es-modules/tla/unresolved.mjs new file mode 100644 index 00000000000000..231a8cd634825c --- /dev/null +++ b/test/fixtures/es-modules/tla/unresolved.mjs @@ -0,0 +1 @@ +await new Promise(() => {}); diff --git a/test/fixtures/order_of_end_tags_5873.md b/test/fixtures/order_of_end_tags_5873.md index 3eb7dadcb32b1a..888fe231802019 100644 --- a/test/fixtures/order_of_end_tags_5873.md +++ b/test/fixtures/order_of_end_tags_5873.md @@ -2,5 +2,5 @@ ## Subsection -### Class Method: Buffer.from(array) +### Static method: Buffer.from(array) * `array` {Array} diff --git a/test/js-native-api/test_bigint/test.js b/test/js-native-api/test_bigint/test.js index 85a183171743c7..bf9ce5066d6d2a 100644 --- a/test/js-native-api/test_bigint/test.js +++ b/test/js-native-api/test_bigint/test.js @@ -7,6 +7,7 @@ const { TestUint64, TestWords, CreateTooBigBigInt, + MakeBigIntWordsThrow, } = require(`./build/${common.buildType}/test_bigint`); [ @@ -43,3 +44,9 @@ assert.throws(CreateTooBigBigInt, { name: 'Error', message: 'Invalid argument', }); + +// Test that we correctly forward exceptions from the engine. 
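As a side note (not part of the change), the RangeError the new MakeBigIntWordsThrow test expects is the same one the engine raises when plain JavaScript asks for an impossibly large BigInt, so the behaviour can be sanity-checked without any native code. A small sketch, assuming V8's usual size cap:

'use strict';
const assert = require('assert');

// A shift count this large is far beyond the engine's BigInt size limit, so
// V8 throws a RangeError ('Maximum BigInt size exceeded'), the same error the
// N-API test expects napi_create_bigint_words to leave pending.
assert.throws(() => 1n << (1n << 62n), RangeError);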
+assert.throws(MakeBigIntWordsThrow, { + name: 'RangeError', + message: 'Maximum BigInt size exceeded' +}); diff --git a/test/js-native-api/test_bigint/test_bigint.c b/test/js-native-api/test_bigint/test_bigint.c index c62a0a6a6c2bbc..181f9103fa3399 100644 --- a/test/js-native-api/test_bigint/test_bigint.c +++ b/test/js-native-api/test_bigint/test_bigint.c @@ -1,3 +1,4 @@ +#include #include #include #include @@ -122,6 +123,22 @@ static napi_value CreateTooBigBigInt(napi_env env, napi_callback_info info) { return output; } +// Test that we correctly forward exceptions from the engine. +static napi_value MakeBigIntWordsThrow(napi_env env, napi_callback_info info) { + uint64_t words[10]; + napi_value output; + + napi_status status = napi_create_bigint_words(env, + 0, + INT_MAX, + words, + &output); + if (status != napi_pending_exception) + napi_throw_error(env, NULL, "Expected status `napi_pending_exception`"); + + return NULL; +} + EXTERN_C_START napi_value Init(napi_env env, napi_value exports) { napi_property_descriptor descriptors[] = { @@ -130,6 +147,7 @@ napi_value Init(napi_env env, napi_value exports) { DECLARE_NAPI_PROPERTY("TestUint64", TestUint64), DECLARE_NAPI_PROPERTY("TestWords", TestWords), DECLARE_NAPI_PROPERTY("CreateTooBigBigInt", CreateTooBigBigInt), + DECLARE_NAPI_PROPERTY("MakeBigIntWordsThrow", MakeBigIntWordsThrow), }; NAPI_CALL(env, napi_define_properties( diff --git a/test/js-native-api/test_object/test.js b/test/js-native-api/test_object/test.js index 2cd65af6b34de6..b78666995271ff 100644 --- a/test/js-native-api/test_object/test.js +++ b/test/js-native-api/test_object/test.js @@ -159,6 +159,24 @@ assert.strictEqual(newObject.test_string, 'test string'); assert(wrapper.protoB, true); } +{ + // Verify that objects can be type-tagged and type-tag-checked. + const obj1 = test_object.TypeTaggedInstance(0); + const obj2 = test_object.TypeTaggedInstance(1); + + // Verify that type tags are correctly accepted. + assert.strictEqual(test_object.CheckTypeTag(0, obj1), true); + assert.strictEqual(test_object.CheckTypeTag(1, obj2), true); + + // Verify that wrongly tagged objects are rejected. + assert.strictEqual(test_object.CheckTypeTag(0, obj2), false); + assert.strictEqual(test_object.CheckTypeTag(1, obj1), false); + + // Verify that untagged objects are rejected. + assert.strictEqual(test_object.CheckTypeTag(0, {}), false); + assert.strictEqual(test_object.CheckTypeTag(1, {}), false); +} + { // Verify that normal and nonexistent properties can be deleted. const sym = Symbol(); diff --git a/test/js-native-api/test_object/test_object.c b/test/js-native-api/test_object/test_object.c index 08f619bf7ffc88..f2ea89d6c60943 100644 --- a/test/js-native-api/test_object/test_object.c +++ b/test/js-native-api/test_object/test_object.c @@ -1,3 +1,4 @@ +#define NAPI_EXPERIMENTAL #include #include "../common.h" #include @@ -471,6 +472,44 @@ static napi_value TestGetProperty(napi_env env, return object; } +// We create two type tags. They are basically 128-bit UUIDs. 
+static const napi_type_tag type_tags[2] = { + { 0xdaf987b3cc62481a, 0xb745b0497f299531 }, + { 0xbb7936c374084d9b, 0xa9548d0762eeedb9 } +}; + +static napi_value +TypeTaggedInstance(napi_env env, napi_callback_info info) { + size_t argc = 1; + uint32_t type_index; + napi_value instance, which_type; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &which_type, NULL, NULL)); + NAPI_CALL(env, napi_get_value_uint32(env, which_type, &type_index)); + NAPI_CALL(env, napi_create_object(env, &instance)); + NAPI_CALL(env, napi_type_tag_object(env, instance, &type_tags[type_index])); + + return instance; +} + +static napi_value +CheckTypeTag(napi_env env, napi_callback_info info) { + size_t argc = 2; + bool result; + napi_value argv[2], js_result; + uint32_t type_index; + + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, argv, NULL, NULL)); + NAPI_CALL(env, napi_get_value_uint32(env, argv[0], &type_index)); + NAPI_CALL(env, napi_check_object_type_tag(env, + argv[1], + &type_tags[type_index], + &result)); + NAPI_CALL(env, napi_get_boolean(env, result, &js_result)); + + return js_result; +} + EXTERN_C_START napi_value Init(napi_env env, napi_value exports) { napi_property_descriptor descriptors[] = { @@ -490,6 +529,8 @@ napi_value Init(napi_env env, napi_value exports) { DECLARE_NAPI_PROPERTY("Unwrap", Unwrap), DECLARE_NAPI_PROPERTY("TestSetProperty", TestSetProperty), DECLARE_NAPI_PROPERTY("TestHasProperty", TestHasProperty), + DECLARE_NAPI_PROPERTY("TypeTaggedInstance", TypeTaggedInstance), + DECLARE_NAPI_PROPERTY("CheckTypeTag", CheckTypeTag), DECLARE_NAPI_PROPERTY("TestGetProperty", TestGetProperty), }; diff --git a/test/message/esm_display_syntax_error.mjs b/test/message/esm_display_syntax_error.mjs index bda4a7e6ebe3a3..0b9a30c2d054f7 100644 --- a/test/message/esm_display_syntax_error.mjs +++ b/test/message/esm_display_syntax_error.mjs @@ -1,2 +1,4 @@ +// Flags: --no-harmony-top-level-await + 'use strict'; await async () => 0; diff --git a/test/message/esm_display_syntax_error.out b/test/message/esm_display_syntax_error.out index b7d2008540adf3..775448d4b68afd 100644 --- a/test/message/esm_display_syntax_error.out +++ b/test/message/esm_display_syntax_error.out @@ -1,4 +1,4 @@ -file:///*/test/message/esm_display_syntax_error.mjs:2 +file:///*/test/message/esm_display_syntax_error.mjs:4 await async () => 0; ^^^^^ diff --git a/test/message/esm_display_syntax_error_module.out b/test/message/esm_display_syntax_error_module.out index 708257fcaf5792..c45a5008e691ca 100644 --- a/test/message/esm_display_syntax_error_module.out +++ b/test/message/esm_display_syntax_error_module.out @@ -1,6 +1,6 @@ file:///*/test/fixtures/es-module-loaders/syntax-error.mjs:2 await async () => 0; -^^^^^ +^^^^^^^^^^^^^ -SyntaxError: Unexpected reserved word +SyntaxError: Malformed arrow function parameter list at Loader.moduleStrategy (internal/modules/esm/translators.js:*:*) \ No newline at end of file diff --git a/test/message/esm_loader_syntax_error.out b/test/message/esm_loader_syntax_error.out index 3aee72d423b1a1..6201de95208ff3 100644 --- a/test/message/esm_loader_syntax_error.out +++ b/test/message/esm_loader_syntax_error.out @@ -2,8 +2,8 @@ (Use `node --trace-warnings ...` to show where the warning was created) file://*/test/fixtures/es-module-loaders/syntax-error.mjs:2 await async () => 0; -^^^^^ +^^^^^^^^^^^^^ -SyntaxError: Unexpected reserved word +SyntaxError: Malformed arrow function parameter list at Loader.moduleStrategy (internal/modules/esm/translators.js:*:*) at async link 
(internal/modules/esm/module_job.js:*:*) diff --git a/test/node-api/test_async/binding.gyp b/test/node-api/test_async/binding.gyp index cf8beb70c68e78..d8436e2b1d189f 100644 --- a/test/node-api/test_async/binding.gyp +++ b/test/node-api/test_async/binding.gyp @@ -2,7 +2,7 @@ "targets": [ { "target_name": "test_async", - "sources": [ "test_async.cc" ] + "sources": [ "test_async.c" ] } ] } diff --git a/test/node-api/test_async/test_async.cc b/test/node-api/test_async/test_async.c similarity index 78% rename from test/node-api/test_async/test_async.cc rename to test/node-api/test_async/test_async.c index ff3d2749a9d184..44ad08366b1908 100644 --- a/test/node-api/test_async/test_async.cc +++ b/test/node-api/test_async/test_async.c @@ -19,32 +19,32 @@ typedef struct { napi_async_work _request; } carrier; -carrier the_carrier; -carrier async_carrier[MAX_CANCEL_THREADS]; +static carrier the_carrier; +static carrier async_carrier[MAX_CANCEL_THREADS]; -void Execute(napi_env env, void* data) { +static void Execute(napi_env env, void* data) { #if defined _WIN32 Sleep(1000); #else sleep(1); #endif - carrier* c = static_cast(data); + carrier* c = (carrier*)(data); assert(c == &the_carrier); c->_output = c->_input * 2; } -void Complete(napi_env env, napi_status status, void* data) { - carrier* c = static_cast(data); +static void Complete(napi_env env, napi_status status, void* data) { + carrier* c = (carrier*)(data); if (c != &the_carrier) { - napi_throw_type_error(env, nullptr, "Wrong data parameter to Complete."); + napi_throw_type_error(env, NULL, "Wrong data parameter to Complete."); return; } if (status != napi_ok) { - napi_throw_type_error(env, nullptr, "Execute callback failed."); + napi_throw_type_error(env, NULL, "Execute callback failed."); return; } @@ -66,7 +66,7 @@ void Complete(napi_env env, napi_status status, void* data) { NAPI_CALL_RETURN_VOID(env, napi_delete_async_work(env, c->_request)); } -napi_value Test(napi_env env, napi_callback_info info) { +static napi_value Test(napi_env env, napi_callback_info info) { size_t argc = 3; napi_value argv[3]; napi_value _this; @@ -101,16 +101,16 @@ napi_value Test(napi_env env, napi_callback_info info) { NAPI_CALL(env, napi_queue_async_work(env, the_carrier._request)); - return nullptr; + return NULL; } -void BusyCancelComplete(napi_env env, napi_status status, void* data) { - carrier* c = static_cast(data); +static void BusyCancelComplete(napi_env env, napi_status status, void* data) { + carrier* c = (carrier*)(data); NAPI_CALL_RETURN_VOID(env, napi_delete_async_work(env, c->_request)); } -void CancelComplete(napi_env env, napi_status status, void* data) { - carrier* c = static_cast(data); +static void CancelComplete(napi_env env, napi_status status, void* data) { + carrier* c = (carrier*)(data); if (status == napi_cancelled) { // ok we got the status we expected so make the callback to @@ -122,14 +122,14 @@ void CancelComplete(napi_env env, napi_status status, void* data) { NAPI_CALL_RETURN_VOID(env, napi_get_global(env, &global)); napi_value result; NAPI_CALL_RETURN_VOID(env, - napi_call_function(env, global, callback, 0, nullptr, &result)); + napi_call_function(env, global, callback, 0, NULL, &result)); } NAPI_CALL_RETURN_VOID(env, napi_delete_async_work(env, c->_request)); NAPI_CALL_RETURN_VOID(env, napi_delete_reference(env, c->_callback)); } -void CancelExecute(napi_env env, void* data) { +static void CancelExecute(napi_env env, void* data) { #if defined _WIN32 Sleep(1000); #else @@ -137,7 +137,7 @@ void CancelExecute(napi_env env, void* 
data) { #endif } -napi_value TestCancel(napi_env env, napi_callback_info info) { +static napi_value TestCancel(napi_env env, napi_callback_info info) { size_t argc = 1; napi_value argv[1]; napi_value _this; @@ -150,7 +150,7 @@ napi_value TestCancel(napi_env env, napi_callback_info info) { // make sure the work we are going to cancel will not be // able to start by using all the threads in the pool for (int i = 1; i < MAX_CANCEL_THREADS; i++) { - NAPI_CALL(env, napi_create_async_work(env, nullptr, resource_name, + NAPI_CALL(env, napi_create_async_work(env, NULL, resource_name, CancelExecute, BusyCancelComplete, &async_carrier[i], &async_carrier[i]._request)); NAPI_CALL(env, napi_queue_async_work(env, async_carrier[i]._request)); @@ -162,20 +162,20 @@ napi_value TestCancel(napi_env env, napi_callback_info info) { // workers above. NAPI_CALL(env, napi_get_cb_info(env, info, &argc, argv, &_this, &data)); - NAPI_CALL(env, napi_create_async_work(env, nullptr, resource_name, + NAPI_CALL(env, napi_create_async_work(env, NULL, resource_name, CancelExecute, CancelComplete, &async_carrier[0], &async_carrier[0]._request)); NAPI_CALL(env, napi_create_reference(env, argv[0], 1, &async_carrier[0]._callback)); NAPI_CALL(env, napi_queue_async_work(env, async_carrier[0]._request)); NAPI_CALL(env, napi_cancel_async_work(env, async_carrier[0]._request)); - return nullptr; + return NULL; } struct { napi_ref ref; napi_async_work work; -} repeated_work_info = { nullptr, nullptr }; +} repeated_work_info = { NULL, NULL }; static void RepeatedWorkerThread(napi_env env, void* data) {} @@ -187,33 +187,33 @@ static void RepeatedWorkComplete(napi_env env, napi_status status, void* data) { napi_delete_async_work(env, repeated_work_info.work)); NAPI_CALL_RETURN_VOID(env, napi_delete_reference(env, repeated_work_info.ref)); - repeated_work_info.work = nullptr; - repeated_work_info.ref = nullptr; + repeated_work_info.work = NULL; + repeated_work_info.ref = NULL; NAPI_CALL_RETURN_VOID(env, napi_create_uint32(env, (uint32_t)status, &js_status)); NAPI_CALL_RETURN_VOID(env, - napi_call_function(env, cb, cb, 1, &js_status, nullptr)); + napi_call_function(env, cb, cb, 1, &js_status, NULL)); } static napi_value DoRepeatedWork(napi_env env, napi_callback_info info) { size_t argc = 1; napi_value cb, name; - NAPI_ASSERT(env, repeated_work_info.ref == nullptr, + NAPI_ASSERT(env, repeated_work_info.ref == NULL, "Reference left over from previous work"); - NAPI_ASSERT(env, repeated_work_info.work == nullptr, + NAPI_ASSERT(env, repeated_work_info.work == NULL, "Work pointer left over from previous work"); - NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &cb, nullptr, nullptr)); + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, &cb, NULL, NULL)); NAPI_CALL(env, napi_create_reference(env, cb, 1, &repeated_work_info.ref)); NAPI_CALL(env, napi_create_string_utf8(env, "Repeated Work", NAPI_AUTO_LENGTH, &name)); NAPI_CALL(env, - napi_create_async_work(env, nullptr, name, RepeatedWorkerThread, + napi_create_async_work(env, NULL, name, RepeatedWorkerThread, RepeatedWorkComplete, &repeated_work_info, &repeated_work_info.work)); NAPI_CALL(env, napi_queue_async_work(env, repeated_work_info.work)); - return nullptr; + return NULL; } -napi_value Init(napi_env env, napi_value exports) { +static napi_value Init(napi_env env, napi_value exports) { napi_property_descriptor properties[] = { DECLARE_NAPI_PROPERTY("Test", Test), DECLARE_NAPI_PROPERTY("TestCancel", TestCancel), diff --git a/test/node-api/test_async_cleanup_hook/binding.c 
b/test/node-api/test_async_cleanup_hook/binding.c new file mode 100644 index 00000000000000..f0c9cd97a26c48 --- /dev/null +++ b/test/node-api/test_async_cleanup_hook/binding.c @@ -0,0 +1,82 @@ +#define NAPI_EXPERIMENTAL +#include "node_api.h" +#include "assert.h" +#include "uv.h" +#include +#include "../../js-native-api/common.h" + +void MustNotCall(void* arg, void(*cb)(void*), void* cbarg) { + assert(0); +} + +struct AsyncData { + uv_async_t async; + napi_env env; + napi_async_cleanup_hook_handle handle; + void (*done_cb)(void*); + void* done_arg; +}; + +struct AsyncData* CreateAsyncData() { + struct AsyncData* data = (struct AsyncData*) malloc(sizeof(struct AsyncData)); + data->handle = NULL; + return data; +} + +void AfterCleanupHookTwo(uv_handle_t* handle) { + struct AsyncData* data = (struct AsyncData*) handle->data; + data->done_cb(data->done_arg); + free(data); +} + +void AfterCleanupHookOne(uv_async_t* async) { + struct AsyncData* data = (struct AsyncData*) async->data; + if (data->handle != NULL) { + // Verify that removing the hook is okay between starting and finishing + // of its execution. + napi_status status = + napi_remove_async_cleanup_hook(data->env, data->handle); + assert(status == napi_ok); + } + + uv_close((uv_handle_t*) async, AfterCleanupHookTwo); +} + +void AsyncCleanupHook(void* arg, void(*cb)(void*), void* cbarg) { + struct AsyncData* data = (struct AsyncData*) arg; + uv_loop_t* loop; + napi_status status = napi_get_uv_event_loop(data->env, &loop); + assert(status == napi_ok); + int err = uv_async_init(loop, &data->async, AfterCleanupHookOne); + assert(err == 0); + + data->async.data = data; + data->done_cb = cb; + data->done_arg = cbarg; + uv_async_send(&data->async); +} + +napi_value Init(napi_env env, napi_value exports) { + { + struct AsyncData* data = CreateAsyncData(); + data->env = env; + napi_add_async_cleanup_hook(env, AsyncCleanupHook, data, &data->handle); + } + + { + struct AsyncData* data = CreateAsyncData(); + data->env = env; + napi_add_async_cleanup_hook(env, AsyncCleanupHook, data, NULL); + } + + { + napi_async_cleanup_hook_handle must_not_call_handle; + napi_add_async_cleanup_hook( + env, MustNotCall, NULL, &must_not_call_handle); + napi_remove_async_cleanup_hook(env, must_not_call_handle); + } + + return NULL; +} + +NAPI_MODULE(NODE_GYP_MODULE_NAME, Init) diff --git a/test/node-api/test_async_cleanup_hook/binding.gyp b/test/node-api/test_async_cleanup_hook/binding.gyp new file mode 100644 index 00000000000000..23daf507916ff6 --- /dev/null +++ b/test/node-api/test_async_cleanup_hook/binding.gyp @@ -0,0 +1,9 @@ +{ + 'targets': [ + { + 'target_name': 'binding', + 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], + 'sources': [ 'binding.c' ] + } + ] +} diff --git a/test/node-api/test_async_cleanup_hook/test.js b/test/node-api/test_async_cleanup_hook/test.js new file mode 100644 index 00000000000000..55cc2517a59bc8 --- /dev/null +++ b/test/node-api/test_async_cleanup_hook/test.js @@ -0,0 +1,8 @@ +'use strict'; +const common = require('../../common'); +const path = require('path'); +const { Worker } = require('worker_threads'); +const binding = path.resolve(__dirname, `./build/${common.buildType}/binding`); + +const w = new Worker(`require(${JSON.stringify(binding)})`, { eval: true }); +w.on('exit', common.mustCall(() => require(binding))); diff --git a/test/node-api/test_callback_scope/binding.cc b/test/node-api/test_callback_scope/binding.c similarity index 76% rename from test/node-api/test_callback_scope/binding.cc rename to 
test/node-api/test_callback_scope/binding.c index a8a755a016a7ee..d512219e7bfa58 100644 --- a/test/node-api/test_callback_scope/binding.cc +++ b/test/node-api/test_callback_scope/binding.c @@ -1,17 +1,16 @@ +#include #include "node_api.h" #include "uv.h" #include "../../js-native-api/common.h" -namespace { - -napi_value RunInCallbackScope(napi_env env, napi_callback_info info) { +static napi_value RunInCallbackScope(napi_env env, napi_callback_info info) { size_t argc; napi_value args[3]; - NAPI_CALL(env, napi_get_cb_info(env, info, &argc, nullptr, nullptr, nullptr)); + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, NULL, NULL, NULL)); NAPI_ASSERT(env, argc == 3 , "Wrong number of arguments"); - NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, nullptr, nullptr)); + NAPI_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL)); napi_valuetype valuetype; NAPI_CALL(env, napi_typeof(env, args[0], &valuetype)); @@ -29,7 +28,7 @@ napi_value RunInCallbackScope(napi_env env, napi_callback_info info) { napi_async_context context; NAPI_CALL(env, napi_async_init(env, args[0], args[1], &context)); - napi_callback_scope scope = nullptr; + napi_callback_scope scope = NULL; NAPI_CALL( env, napi_open_callback_scope(env, @@ -39,9 +38,9 @@ napi_value RunInCallbackScope(napi_env env, napi_callback_info info) { // if the function has an exception pending after the call that is ok // so we don't use NAPI_CALL as we must close the callback scope regardless - napi_value result = nullptr; + napi_value result = NULL; napi_status function_call_result = - napi_call_function(env, args[0], args[2], 0, nullptr, &result); + napi_call_function(env, args[0], args[2], 0, NULL, &result); if (function_call_result != napi_ok) { GET_AND_THROW_LAST_ERROR((env)); } @@ -52,13 +51,13 @@ napi_value RunInCallbackScope(napi_env env, napi_callback_info info) { return result; } -static napi_env shared_env = nullptr; -static napi_deferred deferred = nullptr; +static napi_env shared_env = NULL; +static napi_deferred deferred = NULL; static void Callback(uv_work_t* req, int ignored) { napi_env env = shared_env; - napi_handle_scope handle_scope = nullptr; + napi_handle_scope handle_scope = NULL; NAPI_CALL_RETURN_VOID(env, napi_open_handle_scope(env, &handle_scope)); napi_value resource_name; @@ -66,7 +65,7 @@ static void Callback(uv_work_t* req, int ignored) { env, "test", NAPI_AUTO_LENGTH, &resource_name)); napi_async_context context; NAPI_CALL_RETURN_VOID(env, - napi_async_init(env, nullptr, resource_name, &context)); + napi_async_init(env, NULL, resource_name, &context)); napi_value resource_object; NAPI_CALL_RETURN_VOID(env, napi_create_object(env, &resource_object)); @@ -74,7 +73,7 @@ static void Callback(uv_work_t* req, int ignored) { napi_value undefined_value; NAPI_CALL_RETURN_VOID(env, napi_get_undefined(env, &undefined_value)); - napi_callback_scope scope = nullptr; + napi_callback_scope scope = NULL; NAPI_CALL_RETURN_VOID(env, napi_open_callback_scope(env, resource_object, context, @@ -87,28 +86,30 @@ static void Callback(uv_work_t* req, int ignored) { NAPI_CALL_RETURN_VOID(env, napi_close_handle_scope(env, handle_scope)); NAPI_CALL_RETURN_VOID(env, napi_async_destroy(env, context)); - delete req; + free(req); } -napi_value TestResolveAsync(napi_env env, napi_callback_info info) { - napi_value promise = nullptr; - if (deferred == nullptr) { +static void NoopWork(uv_work_t* work) { (void) work; } + +static napi_value TestResolveAsync(napi_env env, napi_callback_info info) { + napi_value promise = NULL; + if 
(deferred == NULL) { shared_env = env; NAPI_CALL(env, napi_create_promise(env, &deferred, &promise)); - uv_loop_t* loop = nullptr; + uv_loop_t* loop = NULL; NAPI_CALL(env, napi_get_uv_event_loop(env, &loop)); - uv_work_t* req = new uv_work_t(); + uv_work_t* req = malloc(sizeof(*req)); uv_queue_work(loop, req, - [](uv_work_t*) {}, + NoopWork, Callback); } return promise; } -napi_value Init(napi_env env, napi_value exports) { +static napi_value Init(napi_env env, napi_value exports) { napi_property_descriptor descriptors[] = { DECLARE_NAPI_PROPERTY("runInCallbackScope", RunInCallbackScope), DECLARE_NAPI_PROPERTY("testResolveAsync", TestResolveAsync) @@ -120,6 +121,4 @@ napi_value Init(napi_env env, napi_value exports) { return exports; } -} // anonymous namespace - NAPI_MODULE(NODE_GYP_MODULE_NAME, Init) diff --git a/test/node-api/test_callback_scope/binding.gyp b/test/node-api/test_callback_scope/binding.gyp index 7ede63d94a0d77..23daf507916ff6 100644 --- a/test/node-api/test_callback_scope/binding.gyp +++ b/test/node-api/test_callback_scope/binding.gyp @@ -3,7 +3,7 @@ { 'target_name': 'binding', 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], - 'sources': [ 'binding.cc' ] + 'sources': [ 'binding.c' ] } ] } diff --git a/test/node-api/test_cleanup_hook/binding.cc b/test/node-api/test_cleanup_hook/binding.c similarity index 56% rename from test/node-api/test_cleanup_hook/binding.cc rename to test/node-api/test_cleanup_hook/binding.c index 9426716e674d22..3e0ddfe9e3c59b 100644 --- a/test/node-api/test_cleanup_hook/binding.cc +++ b/test/node-api/test_cleanup_hook/binding.c @@ -2,23 +2,19 @@ #include "uv.h" #include "../../js-native-api/common.h" -namespace { - -void cleanup(void* arg) { - printf("cleanup(%d)\n", *static_cast(arg)); +static void cleanup(void* arg) { + printf("cleanup(%d)\n", *(int*)(arg)); } -int secret = 42; -int wrong_secret = 17; +static int secret = 42; +static int wrong_secret = 17; -napi_value Init(napi_env env, napi_value exports) { +static napi_value Init(napi_env env, napi_value exports) { napi_add_env_cleanup_hook(env, cleanup, &wrong_secret); napi_add_env_cleanup_hook(env, cleanup, &secret); napi_remove_env_cleanup_hook(env, cleanup, &wrong_secret); - return nullptr; + return NULL; } -} // anonymous namespace - NAPI_MODULE(NODE_GYP_MODULE_NAME, Init) diff --git a/test/node-api/test_cleanup_hook/binding.gyp b/test/node-api/test_cleanup_hook/binding.gyp index 7ede63d94a0d77..23daf507916ff6 100644 --- a/test/node-api/test_cleanup_hook/binding.gyp +++ b/test/node-api/test_cleanup_hook/binding.gyp @@ -3,7 +3,7 @@ { 'target_name': 'binding', 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], - 'sources': [ 'binding.cc' ] + 'sources': [ 'binding.c' ] } ] } diff --git a/test/node-api/test_make_callback_recurse/binding.cc b/test/node-api/test_make_callback_recurse/binding.c similarity index 75% rename from test/node-api/test_make_callback_recurse/binding.cc rename to test/node-api/test_make_callback_recurse/binding.c index 93440ee4ce54c2..78f27e370babde 100644 --- a/test/node-api/test_make_callback_recurse/binding.cc +++ b/test/node-api/test_make_callback_recurse/binding.c @@ -1,10 +1,7 @@ #include #include "../../js-native-api/common.h" -#include -namespace { - -napi_value MakeCallback(napi_env env, napi_callback_info info) { +static napi_value MakeCallback(napi_env env, napi_callback_info info) { size_t argc = 2; napi_value args[2]; // NOLINTNEXTLINE (readability/null_usage) @@ -13,8 +10,8 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { napi_value recv = args[0]; 
napi_value func = args[1]; - napi_status status = napi_make_callback(env, nullptr /* async_context */, - recv, func, 0 /* argc */, nullptr /* argv */, nullptr /* result */); + napi_status status = napi_make_callback(env, NULL /* async_context */, + recv, func, 0 /* argc */, NULL /* argv */, NULL /* result */); bool isExceptionPending; NAPI_CALL(env, napi_is_exception_pending(env, &isExceptionPending)); @@ -25,14 +22,14 @@ napi_value MakeCallback(napi_env env, napi_callback_info info) { status = napi_get_and_clear_last_exception(env, &pending_error); NAPI_CALL(env, napi_throw_error((env), - nullptr, + NULL, "error when only pending exception expected")); } return recv; } -napi_value Init(napi_env env, napi_value exports) { +static napi_value Init(napi_env env, napi_value exports) { napi_value fn; NAPI_CALL(env, napi_create_function( // NOLINTNEXTLINE (readability/null_usage) @@ -41,6 +38,4 @@ napi_value Init(napi_env env, napi_value exports) { return exports; } -} // anonymous namespace - NAPI_MODULE(NODE_GYP_MODULE_NAME, Init) diff --git a/test/node-api/test_make_callback_recurse/binding.gyp b/test/node-api/test_make_callback_recurse/binding.gyp index 7ede63d94a0d77..23daf507916ff6 100644 --- a/test/node-api/test_make_callback_recurse/binding.gyp +++ b/test/node-api/test_make_callback_recurse/binding.gyp @@ -3,7 +3,7 @@ { 'target_name': 'binding', 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], - 'sources': [ 'binding.cc' ] + 'sources': [ 'binding.c' ] } ] } diff --git a/test/parallel/test-async-hooks-enable-before-promise-resolve.js b/test/parallel/test-async-hooks-enable-before-promise-resolve.js new file mode 100644 index 00000000000000..c96c9e5dd96655 --- /dev/null +++ b/test/parallel/test-async-hooks-enable-before-promise-resolve.js @@ -0,0 +1,25 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const async_hooks = require('async_hooks'); + +// This test ensures that fast-path PromiseHook assigns async ids +// to already created promises when the native hook function is +// triggered on before event. 
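Put differently (this sketch is illustrative and separate from the test below): once any hook with a before or after callback is enabled, a promise continuation runs with its own non-zero executionAsyncId(), even when the hook was installed only after the surrounding code had already started running.

'use strict';
const assert = require('assert');
const { createHook, executionAsyncId } = require('async_hooks');

setTimeout(() => {
  const timerId = executionAsyncId();
  // Enable promise tracking only now, from inside the timer callback.
  createHook({ before() {}, after() {} }).enable();

  Promise.resolve().then(() => {
    const id = executionAsyncId();
    assert.ok(id > 0);                  // the continuation received an async id
    assert.notStrictEqual(id, timerId); // and it is not the timer's id
  });
}, 0);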
+ +let initialAsyncId; +const promise = new Promise((resolve) => { + setTimeout(() => { + initialAsyncId = async_hooks.executionAsyncId(); + async_hooks.createHook({ + after: common.mustCall(() => {}, 2) + }).enable(); + resolve(); + }, 0); +}); + +promise.then(common.mustCall(() => { + const id = async_hooks.executionAsyncId(); + assert.notStrictEqual(id, initialAsyncId); + assert.ok(id > 0); +})); diff --git a/test/parallel/test-async-local-storage-deep-stack.js b/test/parallel/test-async-local-storage-deep-stack.js new file mode 100644 index 00000000000000..b5e1048d94a4ed --- /dev/null +++ b/test/parallel/test-async-local-storage-deep-stack.js @@ -0,0 +1,15 @@ +'use strict'; +const common = require('../common'); +const { AsyncLocalStorage } = require('async_hooks'); + +// Regression test for: https://github.com/nodejs/node/issues/34556 + +const als = new AsyncLocalStorage(); + +const done = common.mustCall(); + +function run(count) { + if (count !== 0) return als.run({}, run, --count); + done(); +} +run(1000); diff --git a/test/parallel/test-asyncresource-bind.js b/test/parallel/test-asyncresource-bind.js new file mode 100644 index 00000000000000..3aeed475aee126 --- /dev/null +++ b/test/parallel/test-asyncresource-bind.js @@ -0,0 +1,35 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { AsyncResource, executionAsyncId } = require('async_hooks'); + +const fn = common.mustCall(AsyncResource.bind(() => { + return executionAsyncId(); +})); + +setImmediate(() => { + const asyncId = executionAsyncId(); + assert.notStrictEqual(asyncId, fn()); +}); + +const asyncResource = new AsyncResource('test'); + +[1, false, '', {}, []].forEach((i) => { + assert.throws(() => asyncResource.bind(i), { + code: 'ERR_INVALID_ARG_TYPE' + }); +}); + +const fn2 = asyncResource.bind((a, b) => { + return executionAsyncId(); +}); + +assert.strictEqual(fn2.asyncResource, asyncResource); +assert.strictEqual(fn2.length, 2); + +setImmediate(() => { + const asyncId = executionAsyncId(); + assert.strictEqual(asyncResource.asyncId(), fn2()); + assert.notStrictEqual(asyncId, fn2()); +}); diff --git a/test/parallel/test-cli-node-options.js b/test/parallel/test-cli-node-options.js index 98acf353c4ee43..8fb15d3ba505c1 100644 --- a/test/parallel/test-cli-node-options.js +++ b/test/parallel/test-cli-node-options.js @@ -79,7 +79,7 @@ if (!['arm', 'arm64'].includes(process.arch)) expect('--interpreted-frames-native-stack', 'B\n'); // Workers can't eval as ES Modules. https://github.com/nodejs/node/issues/30682 -expectNoWorker('--experimental-top-level-await --input-type=module', +expectNoWorker('--input-type=module', 'B\n', 'console.log(await "B")'); function expectNoWorker(opt, want, command, wantsError) { diff --git a/test/parallel/test-fs-write-reuse-callback.js b/test/parallel/test-fs-write-reuse-callback.js new file mode 100644 index 00000000000000..45844f836d858d --- /dev/null +++ b/test/parallel/test-fs-write-reuse-callback.js @@ -0,0 +1,39 @@ +// Flags: --expose-gc +'use strict'; +const common = require('../common'); +const tmpdir = require('../common/tmpdir'); +const assert = require('assert'); +const path = require('path'); + +// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/814: +// Make sure that Buffers passed to fs.write() are not garbage-collected +// even when the callback is being reused. 
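For readers unfamiliar with the original issue, the guarantee under test is that fs.write() keeps its data alive on its own, even if the caller drops every reference before the operation completes. A minimal sketch (the temporary file name is made up for illustration):

'use strict';
const fs = require('fs');
const os = require('os');
const path = require('path');
const assert = require('assert');

const file = path.join(os.tmpdir(), `fs-write-gc-sketch-${process.pid}.txt`);
const fd = fs.openSync(file, 'w');

let buf = Buffer.alloc(1024, 'x');
fs.write(fd, buf, 0, buf.length, null, (err, written) => {
  assert.ifError(err);
  assert.strictEqual(written, 1024); // the data survived without our reference
  fs.closeSync(fd);
  fs.unlinkSync(file);
});
buf = null; // from here on, only the in-flight write request holds the buffer

The regression test below goes further by reusing a single callback across a thousand writes and forcing garbage collection in between, which is the combination that originally broke.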
+ +const fs = require('fs'); + +tmpdir.refresh(); +const filename = path.join(tmpdir.path, 'test.txt'); +const fd = fs.openSync(filename, 'w'); + +const size = 16 * 1024; +const writes = 1000; +let done = 0; + +const ondone = common.mustCall((err) => { + assert.ifError(err); + if (++done < writes) { + if (done % 25 === 0) global.gc(); + setImmediate(write); + } else { + assert.strictEqual( + fs.readFileSync(filename, 'utf8'), + 'x'.repeat(writes * size)); + fs.closeSync(fd); + } +}, writes); + +write(); +function write() { + const buf = Buffer.alloc(size, 'x'); + fs.write(fd, buf, 0, buf.length, -1, ondone); +} diff --git a/test/parallel/test-http-keep-alive-timeout.js b/test/parallel/test-http-keep-alive-timeout.js new file mode 100644 index 00000000000000..fccb267b8e9ee2 --- /dev/null +++ b/test/parallel/test-http-keep-alive-timeout.js @@ -0,0 +1,28 @@ +'use strict'; + +const common = require('../common'); +const http = require('http'); +const assert = require('assert'); + +const server = http.createServer(common.mustCall((req, res) => { + const body = 'hello world\n'; + + res.writeHead(200, { 'Content-Length': body.length }); + res.write(body); + res.end(); +})); +server.keepAliveTimeout = 12000; + +const agent = new http.Agent({ maxSockets: 1, keepAlive: true }); + +server.listen(0, common.mustCall(function() { + http.get({ + path: '/', port: this.address().port, agent: agent + }, common.mustCall((response) => { + response.resume(); + assert.strictEqual( + response.headers['keep-alive'], 'timeout=12'); + server.close(); + agent.destroy(); + })); +})); diff --git a/test/parallel/test-http-outgoing-message-inheritance.js b/test/parallel/test-http-outgoing-message-inheritance.js index 335a9a28956108..d0da4c68c3118f 100644 --- a/test/parallel/test-http-outgoing-message-inheritance.js +++ b/test/parallel/test-http-outgoing-message-inheritance.js @@ -6,8 +6,8 @@ const { Writable } = require('stream'); const assert = require('assert'); // Check that OutgoingMessage can be used without a proper Socket -// Fixes: https://github.com/nodejs/node/issues/14386 -// Fixes: https://github.com/nodejs/node/issues/14381 +// Refs: https://github.com/nodejs/node/issues/14386 +// Refs: https://github.com/nodejs/node/issues/14381 class Response extends OutgoingMessage { _implicitHeader() {} diff --git a/test/parallel/test-http-parser-timeout-reset.js b/test/parallel/test-http-parser-timeout-reset.js new file mode 100644 index 00000000000000..1ba72d865f73fb --- /dev/null +++ b/test/parallel/test-http-parser-timeout-reset.js @@ -0,0 +1,46 @@ +'use strict'; +const common = require('../common'); + +const net = require('net'); +const { HTTPParser } = process.binding('http_parser'); + +const server = net.createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n'); + socket.write('Transfer-Encoding: chunked\r\n\r\n'); + setTimeout(() => { + socket.write('1\r\n'); + socket.write('\n\r\n'); + setTimeout(() => { + socket.write('1\r\n'); + socket.write('\n\r\n'); + setImmediate(() => { + socket.destroy(); + server.close(); + }); + }, 500); + }, 500); +}).listen(0, () => { + const socket = net.connect(server.address().port); + const parser = new HTTPParser(HTTPParser.RESPONSE, false); + parser.initialize( + HTTPParser.RESPONSE, + {}, + 0, + false, + 1e3 + ); + + parser[HTTPParser.kOnTimeout] = common.mustNotCall(); + + parser[HTTPParser.kOnHeaders] = common.mustNotCall(); + + parser[HTTPParser.kOnExecute] = common.mustCallAtLeast(3); + + parser[HTTPParser.kOnHeadersComplete] = common.mustCall(); + + 
parser[HTTPParser.kOnBody] = common.mustCall(2); + + parser[HTTPParser.kOnMessageComplete] = common.mustNotCall(); + + parser.consume(socket._handle); +}); diff --git a/test/parallel/test-http-server-response-standalone.js b/test/parallel/test-http-server-response-standalone.js index 3c91dd0889b066..ec6d1e89e38525 100644 --- a/test/parallel/test-http-server-response-standalone.js +++ b/test/parallel/test-http-server-response-standalone.js @@ -6,8 +6,8 @@ const { Writable } = require('stream'); const assert = require('assert'); // Check that ServerResponse can be used without a proper Socket -// Fixes: https://github.com/nodejs/node/issues/14386 -// Fixes: https://github.com/nodejs/node/issues/14381 +// Refs: https://github.com/nodejs/node/issues/14386 +// Refs: https://github.com/nodejs/node/issues/14381 const res = new ServerResponse({ method: 'GET', diff --git a/test/parallel/test-https-foafssl.js b/test/parallel/test-https-foafssl.js index 43057817043789..ff2c7d4387ee1b 100644 --- a/test/parallel/test-https-foafssl.js +++ b/test/parallel/test-https-foafssl.js @@ -56,7 +56,8 @@ const server = https.createServer(options, common.mustCall(function(req, res) { assert.strictEqual(cert.exponent, exponent); assert.strictEqual(cert.modulus, modulus); res.writeHead(200, { 'content-type': 'text/plain' }); - res.end(body); + res.end(body, () => { console.log('stream finished'); }); + console.log('sent response'); })); server.listen(0, function() { @@ -69,10 +70,15 @@ server.listen(0, function() { const client = spawn(common.opensslCli, args); client.stdout.on('data', function(data) { + console.log('response received'); const message = data.toString(); const contents = message.split(CRLF + CRLF).pop(); assert.strictEqual(body, contents); - server.close(); + server.close((e) => { + assert.ifError(e); + console.log('server closed'); + }); + console.log('server.close() called'); }); client.stdin.write('GET /\n\n'); diff --git a/test/parallel/test-no-harmony-top-level-await.mjs b/test/parallel/test-no-harmony-top-level-await.mjs new file mode 100644 index 00000000000000..d48fbbe7f45246 --- /dev/null +++ b/test/parallel/test-no-harmony-top-level-await.mjs @@ -0,0 +1,11 @@ +// Flags: --no-harmony-top-level-await + +import { + mustCall, + disableCrashOnUnhandledRejection +} from '../common/index.mjs'; + +disableCrashOnUnhandledRejection(); + +process.on('unhandledRejection', mustCall()); +Promise.reject(new Error('should not be fatal error')); diff --git a/test/parallel/test-policy-integrity.js b/test/parallel/test-policy-integrity.js deleted file mode 100644 index 2cc1f1280c8059..00000000000000 --- a/test/parallel/test-policy-integrity.js +++ /dev/null @@ -1,414 +0,0 @@ -'use strict'; - -const common = require('../common'); -if (!common.hasCrypto) - common.skip('missing crypto'); - -const tmpdir = require('../common/tmpdir'); -const assert = require('assert'); -const { spawnSync } = require('child_process'); -const crypto = require('crypto'); -const fs = require('fs'); -const path = require('path'); -const { pathToFileURL } = require('url'); - -tmpdir.refresh(); - -function hash(algo, body) { - const h = crypto.createHash(algo); - h.update(body); - return h.digest('base64'); -} - -const policyFilepath = path.join(tmpdir.path, 'policy'); - -const packageFilepath = path.join(tmpdir.path, 'package.json'); -const packageURL = pathToFileURL(packageFilepath); -const packageBody = '{"main": "dep.js"}'; -const policyToPackageRelativeURLString = `./${ - path.relative(path.dirname(policyFilepath), packageFilepath) 
-}`; - -const parentFilepath = path.join(tmpdir.path, 'parent.js'); -const parentURL = pathToFileURL(parentFilepath); -const parentBody = 'require(\'./dep.js\')'; - -const workerSpawningFilepath = path.join(tmpdir.path, 'worker_spawner.js'); -const workerSpawningURL = pathToFileURL(workerSpawningFilepath); -const workerSpawningBody = ` -const { Worker } = require('worker_threads'); -// make sure this is gone to ensure we don't do another fs read of it -// will error out if we do -require('fs').unlinkSync(${JSON.stringify(policyFilepath)}); -const w = new Worker(${JSON.stringify(parentFilepath)}); -w.on('exit', process.exit); -`; - -const depFilepath = path.join(tmpdir.path, 'dep.js'); -const depURL = pathToFileURL(depFilepath); -const depBody = ''; -const policyToDepRelativeURLString = `./${ - path.relative(path.dirname(policyFilepath), depFilepath) -}`; - -fs.writeFileSync(parentFilepath, parentBody); -fs.writeFileSync(depFilepath, depBody); - -const tmpdirURL = pathToFileURL(tmpdir.path); -if (!tmpdirURL.pathname.endsWith('/')) { - tmpdirURL.pathname += '/'; -} -function test({ - shouldFail = false, - preload = [], - entry, - onerror = undefined, - resources = {} -}) { - const manifest = { - onerror, - resources: {} - }; - for (const [url, { body, match }] of Object.entries(resources)) { - manifest.resources[url] = { - integrity: `sha256-${hash('sha256', match ? body : body + '\n')}`, - dependencies: true - }; - fs.writeFileSync(new URL(url, tmpdirURL.href), body); - } - fs.writeFileSync(policyFilepath, JSON.stringify(manifest, null, 2)); - const { status } = spawnSync(process.execPath, [ - '--experimental-policy', policyFilepath, - ...preload.map((m) => ['-r', m]).flat(), - entry - ]); - if (shouldFail) { - assert.notStrictEqual(status, 0); - } else { - assert.strictEqual(status, 0); - } -} - -{ - const { status } = spawnSync(process.execPath, [ - '--experimental-policy', policyFilepath, - '--experimental-policy', policyFilepath - ], { - stdio: 'pipe' - }); - assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); -} -{ - const enoentFilepath = path.join(tmpdir.path, 'enoent'); - try { fs.unlinkSync(enoentFilepath); } catch {} - const { status } = spawnSync(process.execPath, [ - '--experimental-policy', enoentFilepath, '-e', '' - ], { - stdio: 'pipe' - }); - assert.notStrictEqual(status, 0, 'Should not allow missing policies'); -} - -test({ - shouldFail: true, - entry: parentFilepath, - resources: { - } -}); -test({ - shouldFail: false, - entry: parentFilepath, - onerror: 'log', -}); -test({ - shouldFail: true, - entry: parentFilepath, - onerror: 'exit', -}); -test({ - shouldFail: true, - entry: parentFilepath, - onerror: 'throw', -}); -test({ - shouldFail: true, - entry: parentFilepath, - onerror: 'unknown-onerror-value', -}); -test({ - shouldFail: true, - entry: path.dirname(packageFilepath), - resources: { - } -}); -test({ - shouldFail: true, - entry: path.dirname(packageFilepath), - resources: { - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: false, - entry: path.dirname(packageFilepath), - onerror: 'log', - resources: { - [packageURL]: { - body: packageBody, - match: false, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: true, - entry: path.dirname(packageFilepath), - resources: { - [packageURL]: { - body: packageBody, - match: false, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: true, - entry: path.dirname(packageFilepath), - resources: { - [packageURL]: { 
- body: packageBody, - match: true, - }, - [depURL]: { - body: depBody, - match: false, - } - } -}); -test({ - shouldFail: false, - entry: path.dirname(packageFilepath), - resources: { - [packageURL]: { - body: packageBody, - match: true, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: false, - entry: parentFilepath, - resources: { - [packageURL]: { - body: packageBody, - match: true, - }, - [parentURL]: { - body: parentBody, - match: true, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: false, - preload: [depFilepath], - entry: parentFilepath, - resources: { - [packageURL]: { - body: packageBody, - match: true, - }, - [parentURL]: { - body: parentBody, - match: true, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: true, - entry: parentFilepath, - resources: { - [parentURL]: { - body: parentBody, - match: false, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: true, - entry: parentFilepath, - resources: { - [parentURL]: { - body: parentBody, - match: true, - }, - [depURL]: { - body: depBody, - match: false, - } - } -}); -test({ - shouldFail: true, - entry: parentFilepath, - resources: { - [parentURL]: { - body: parentBody, - match: true, - } - } -}); -test({ - shouldFail: false, - entry: depFilepath, - resources: { - [packageURL]: { - body: packageBody, - match: true, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: false, - entry: depFilepath, - resources: { - [packageURL]: { - body: packageBody, - match: true, - }, - [policyToDepRelativeURLString]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: true, - entry: depFilepath, - resources: { - [policyToDepRelativeURLString]: { - body: depBody, - match: false, - } - } -}); -test({ - shouldFail: false, - entry: depFilepath, - resources: { - [packageURL]: { - body: packageBody, - match: true, - }, - [policyToDepRelativeURLString]: { - body: depBody, - match: true, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: true, - entry: depFilepath, - resources: { - [policyToPackageRelativeURLString]: { - body: packageBody, - match: true, - }, - [packageURL]: { - body: packageBody, - match: true, - }, - [depURL]: { - body: depBody, - match: false, - } - } -}); -test({ - shouldFail: true, - entry: workerSpawningFilepath, - resources: { - [workerSpawningURL]: { - body: workerSpawningBody, - match: true, - }, - } -}); -test({ - shouldFail: false, - entry: workerSpawningFilepath, - resources: { - [packageURL]: { - body: packageBody, - match: true, - }, - [workerSpawningURL]: { - body: workerSpawningBody, - match: true, - }, - [parentURL]: { - body: parentBody, - match: true, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); -test({ - shouldFail: false, - entry: workerSpawningFilepath, - preload: [parentFilepath], - resources: { - [packageURL]: { - body: packageBody, - match: true, - }, - [workerSpawningURL]: { - body: workerSpawningBody, - match: true, - }, - [parentURL]: { - body: parentBody, - match: true, - }, - [depURL]: { - body: depBody, - match: true, - } - } -}); diff --git a/test/parallel/test-policy-parse-integrity.js b/test/parallel/test-policy-parse-integrity.js index 6fa95416390693..2443d9691c2a51 100644 --- a/test/parallel/test-policy-parse-integrity.js +++ b/test/parallel/test-policy-parse-integrity.js @@ -19,24 +19,28 @@ function hash(algo, body) { return h.digest('base64'); } -const 
policyFilepath = path.join(tmpdir.path, 'policy'); +const tmpdirPath = path.join(tmpdir.path, 'test-policy-parse-integrity'); +fs.rmdirSync(tmpdirPath, { maxRetries: 3, recursive: true }); +fs.mkdirSync(tmpdirPath, { recursive: true }); -const parentFilepath = path.join(tmpdir.path, 'parent.js'); +const policyFilepath = path.join(tmpdirPath, 'policy'); + +const parentFilepath = path.join(tmpdirPath, 'parent.js'); const parentBody = "require('./dep.js')"; -const depFilepath = path.join(tmpdir.path, 'dep.js'); +const depFilepath = path.join(tmpdirPath, 'dep.js'); const depURL = pathToFileURL(depFilepath); const depBody = ''; fs.writeFileSync(parentFilepath, parentBody); fs.writeFileSync(depFilepath, depBody); -const tmpdirURL = pathToFileURL(tmpdir.path); +const tmpdirURL = pathToFileURL(tmpdirPath); if (!tmpdirURL.pathname.endsWith('/')) { tmpdirURL.pathname += '/'; } -const packageFilepath = path.join(tmpdir.path, 'package.json'); +const packageFilepath = path.join(tmpdirPath, 'package.json'); const packageURL = pathToFileURL(packageFilepath); const packageBody = '{"main": "dep.js"}'; diff --git a/test/parallel/test-repl-dynamic-import.js b/test/parallel/test-repl-dynamic-import.js new file mode 100644 index 00000000000000..1f7a01575aa89b --- /dev/null +++ b/test/parallel/test-repl-dynamic-import.js @@ -0,0 +1,20 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const child_process = require('child_process'); +const child = child_process.spawn(process.execPath, [ + '--interactive', + '--expose-gc' +], { + stdio: 'pipe' +}); +child.stdin.write('\nimport("fs");\n_.then(gc);\n'); +// Wait for concurrent GC to finish +setTimeout(() => { + child.stdin.write('\nimport("fs");\n'); + child.stdin.write('\nprocess.exit(0);\n'); +}, common.platformTimeout(50)); +child.on('exit', (code, signal) => { + assert.strictEqual(code, 0); + assert.strictEqual(signal, null); +}); diff --git a/test/parallel/test-repl-pretty-custom-stack.js b/test/parallel/test-repl-pretty-custom-stack.js index 8f633a4d4808c5..d04a394a2e249e 100644 --- a/test/parallel/test-repl-pretty-custom-stack.js +++ b/test/parallel/test-repl-pretty-custom-stack.js @@ -5,7 +5,7 @@ const fixtures = require('../common/fixtures'); const assert = require('assert'); const repl = require('repl'); -const stackRegExp = /repl:[0-9]+:[0-9]+/g; +const stackRegExp = /(REPL\d+):[0-9]+:[0-9]+/g; function run({ command, expected }) { let accum = ''; @@ -25,8 +25,8 @@ function run({ command, expected }) { r.write(`${command}\n`); assert.strictEqual( - accum.replace(stackRegExp, 'repl:*:*'), - expected.replace(stackRegExp, 'repl:*:*') + accum.replace(stackRegExp, '$1:*:*'), + expected.replace(stackRegExp, '$1:*:*') ); r.close(); } @@ -48,8 +48,8 @@ const tests = [ { // test .load for a file that throws command: `.load ${fixtures.path('repl-pretty-stack.js')}`, - expected: 'Uncaught Error: Whoops!--->\nrepl:*:*--->\nd (repl:*:*)' + - '--->\nc (repl:*:*)--->\nb (repl:*:*)--->\na (repl:*:*)\n' + expected: 'Uncaught Error: Whoops!--->\nREPL1:*:*--->\nd (REPL1:*:*)' + + '--->\nc (REPL1:*:*)--->\nb (REPL1:*:*)--->\na (REPL1:*:*)\n' }, { command: 'let x y;', @@ -67,7 +67,7 @@ const tests = [ // test anonymous IIFE { command: '(function() { throw new Error(\'Whoops!\'); })()', - expected: 'Uncaught Error: Whoops!--->\nrepl:*:*\n' + expected: 'Uncaught Error: Whoops!--->\nREPL5:*:*\n' } ]; diff --git a/test/parallel/test-repl-pretty-stack.js b/test/parallel/test-repl-pretty-stack.js index 456e866b7b20f9..8ab3fef2aaa033 100644 
--- a/test/parallel/test-repl-pretty-stack.js +++ b/test/parallel/test-repl-pretty-stack.js @@ -5,7 +5,7 @@ const fixtures = require('../common/fixtures'); const assert = require('assert'); const repl = require('repl'); -const stackRegExp = /(at .*repl:)[0-9]+:[0-9]+/g; +const stackRegExp = /(at .*REPL\d+:)[0-9]+:[0-9]+/g; function run({ command, expected, ...extraREPLOptions }, i) { let accum = ''; @@ -37,9 +37,9 @@ const tests = [ { // Test .load for a file that throws. command: `.load ${fixtures.path('repl-pretty-stack.js')}`, - expected: 'Uncaught Error: Whoops!\n at repl:*:*\n' + - ' at d (repl:*:*)\n at c (repl:*:*)\n' + - ' at b (repl:*:*)\n at a (repl:*:*)\n' + expected: 'Uncaught Error: Whoops!\n at REPL1:*:*\n' + + ' at d (REPL1:*:*)\n at c (REPL1:*:*)\n' + + ' at b (REPL1:*:*)\n at a (REPL1:*:*)\n' }, { command: 'let x y;', @@ -53,12 +53,12 @@ const tests = [ { command: '(() => { const err = Error(\'Whoops!\'); ' + 'err.foo = \'bar\'; throw err; })()', - expected: "Uncaught Error: Whoops!\n at repl:*:* {\n foo: 'bar'\n}\n", + expected: "Uncaught Error: Whoops!\n at REPL4:*:* {\n foo: 'bar'\n}\n", }, { command: '(() => { const err = Error(\'Whoops!\'); ' + 'err.foo = \'bar\'; throw err; })()', - expected: 'Uncaught Error: Whoops!\n at repl:*:* {\n foo: ' + + expected: 'Uncaught Error: Whoops!\n at REPL5:*:* {\n foo: ' + "\u001b[32m'bar'\u001b[39m\n}\n", useColors: true }, @@ -69,7 +69,7 @@ const tests = [ // Test anonymous IIFE. { command: '(function() { throw new Error(\'Whoops!\'); })()', - expected: 'Uncaught Error: Whoops!\n at repl:*:*\n' + expected: 'Uncaught Error: Whoops!\n at REPL7:*:*\n' } ]; diff --git a/test/parallel/test-repl.js b/test/parallel/test-repl.js index b653e27750b27d..8e3d1173b51131 100644 --- a/test/parallel/test-repl.js +++ b/test/parallel/test-repl.js @@ -372,7 +372,7 @@ const errorTests = [ { send: 'npm install foobar', expect: [ - 'npm should be run outside of the node repl, in your normal shell.', + 'npm should be run outside of the Node.js REPL, in your normal shell.', '(Press Control-D to exit.)' ] }, @@ -453,7 +453,7 @@ const errorTests = [ /\.load/, /\.save/, '', - 'Press ^C to abort current expression, ^D to exit the repl', + 'Press ^C to abort current expression, ^D to exit the REPL', /'thefourtheye'/ ] }, @@ -805,6 +805,16 @@ const tcpTests = [ { send: `require(${JSON.stringify(moduleFilename)}).number`, expect: '42' + }, + { + send: 'import comeOn from \'fhqwhgads\'', + expect: [ + kSource, + kArrow, + '', + 'Uncaught:', + /^SyntaxError: .* dynamic import/ + ] } ]; diff --git a/test/parallel/test-util-inspect.js b/test/parallel/test-util-inspect.js index 55dcb851154ecf..ffdf121dd50fb1 100644 --- a/test/parallel/test-util-inspect.js +++ b/test/parallel/test-util-inspect.js @@ -147,8 +147,8 @@ assert.strictEqual( "[String: 'hello'] { [length]: 5, [Symbol(foo)]: 123 }" ); -assert.strictEqual(util.inspect((new JSStream())._externalStream), - '[External]'); +assert.match(util.inspect((new JSStream())._externalStream), + /^\[External: [0-9a-f]+\]$/); { const regexp = /regexp/; diff --git a/test/pummel/test-policy-integrity.js b/test/pummel/test-policy-integrity.js new file mode 100644 index 00000000000000..998d1d43355c5d --- /dev/null +++ b/test/pummel/test-policy-integrity.js @@ -0,0 +1,390 @@ +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) common.skip('missing crypto'); + +const { debuglog } = require('util'); +const debug = debuglog('test'); +const tmpdir = require('../common/tmpdir'); +const assert = 
require('assert'); +const { spawnSync, spawn } = require('child_process'); +const crypto = require('crypto'); +const fs = require('fs'); +const path = require('path'); +const { pathToFileURL } = require('url'); + +function hash(algo, body) { + const values = []; + { + const h = crypto.createHash(algo); + h.update(body); + values.push(`${algo}-${h.digest('base64')}`); + } + { + const h = crypto.createHash(algo); + h.update(body.replace('\n', '\r\n')); + values.push(`${algo}-${h.digest('base64')}`); + } + return values; +} + +const policyPath = './policy.json'; +const parentBody = { + commonjs: ` + if (!process.env.DEP_FILE) { + console.error( + 'missing required DEP_FILE env to determine dependency' + ); + process.exit(33); + } + require(process.env.DEP_FILE) + `, + module: ` + if (!process.env.DEP_FILE) { + console.error( + 'missing required DEP_FILE env to determine dependency' + ); + process.exit(33); + } + import(process.env.DEP_FILE) + `, +}; +const workerSpawningBody = ` + const path = require('path'); + const { Worker } = require('worker_threads'); + if (!process.env.PARENT_FILE) { + console.error( + 'missing required PARENT_FILE env to determine worker entry point' + ); + process.exit(33); + } + if (!process.env.DELETABLE_POLICY_FILE) { + console.error( + 'missing required DELETABLE_POLICY_FILE env to check reloading' + ); + process.exit(33); + } + const w = new Worker(path.resolve(process.env.PARENT_FILE)); + w.on('exit', (status) => process.exit(status === 0 ? 0 : 1)); +`; + +let nextTestId = 1; +function newTestId() { + return nextTestId++; +} +tmpdir.refresh(); + +let spawned = 0; +const toSpawn = []; +function queueSpawn(opts) { + toSpawn.push(opts); + drainQueue(); +} + +function drainQueue() { + if (spawned > 50) { + return; + } + if (toSpawn.length) { + const config = toSpawn.shift(); + const { + shouldSucceed, // = (() => { throw new Error('required')})(), + preloads, // = (() =>{ throw new Error('required')})(), + entryPath, // = (() => { throw new Error('required')})(), + willDeletePolicy, // = (() => { throw new Error('required')})(), + onError, // = (() => { throw new Error('required')})(), + resources, // = (() => { throw new Error('required')})(), + parentPath, + depPath, + } = config; + const testId = newTestId(); + const configDirPath = path.join( + tmpdir.path, + `test-policy-integrity-permutation-${testId}` + ); + const tmpPolicyPath = path.join( + tmpdir.path, + `deletable-policy-${testId}.json` + ); + const cliPolicy = willDeletePolicy ? 
tmpPolicyPath : policyPath; + fs.rmdirSync(configDirPath, { maxRetries: 3, recursive: true }); + fs.mkdirSync(configDirPath, { recursive: true }); + const manifest = { + onerror: onError, + resources: {}, + }; + const manifestPath = path.join(configDirPath, policyPath); + for (const [resourcePath, { body, integrities }] of Object.entries( + resources + )) { + const filePath = path.join(configDirPath, resourcePath); + if (integrities !== null) { + manifest.resources[pathToFileURL(filePath).href] = { + integrity: integrities.join(' '), + dependencies: true, + }; + } + fs.writeFileSync(filePath, body, 'utf8'); + } + const manifestBody = JSON.stringify(manifest); + fs.writeFileSync(manifestPath, manifestBody); + if (cliPolicy === tmpPolicyPath) { + fs.writeFileSync(tmpPolicyPath, manifestBody); + } + const spawnArgs = [ + process.execPath, + [ + '--unhandled-rejections=strict', + '--experimental-policy', + cliPolicy, + ...preloads.flatMap((m) => ['-r', m]), + entryPath, + '--', + testId, + configDirPath, + ], + { + env: { + ...process.env, + DELETABLE_POLICY_FILE: tmpPolicyPath, + PARENT_FILE: parentPath, + DEP_FILE: depPath, + }, + cwd: configDirPath, + stdio: 'pipe', + }, + ]; + spawned++; + const stdout = []; + const stderr = []; + const child = spawn(...spawnArgs); + child.stdout.on('data', (d) => stdout.push(d)); + child.stderr.on('data', (d) => stderr.push(d)); + child.on('exit', (status, signal) => { + spawned--; + try { + if (shouldSucceed) { + assert.strictEqual(status, 0); + } else { + assert.notStrictEqual(status, 0); + } + } catch (e) { + console.log( + 'permutation', + testId, + 'failed' + ); + console.dir( + { config, manifest }, + { depth: null } + ); + console.log('exit code:', status, 'signal:', signal); + console.log(`stdout: ${Buffer.concat(stdout)}`); + console.log(`stderr: ${Buffer.concat(stderr)}`); + throw e; + } + fs.rmdirSync(configDirPath, { maxRetries: 3, recursive: true }); + drainQueue(); + }); + } +} + +{ + const { status } = spawnSync( + process.execPath, + ['--experimental-policy', policyPath, '--experimental-policy', policyPath], + { + stdio: 'pipe', + } + ); + assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); +} +{ + const enoentFilepath = path.join(tmpdir.path, 'enoent'); + try { + fs.unlinkSync(enoentFilepath); + } catch { } + const { status } = spawnSync( + process.execPath, + ['--experimental-policy', enoentFilepath, '-e', ''], + { + stdio: 'pipe', + } + ); + assert.notStrictEqual(status, 0, 'Should not allow missing policies'); +} + +/** + * @template {Record>} T + * @param {T} configurations + * @param {object} path + * @returns {Array<{[key: keyof T]: T[keyof configurations]}>} + */ +function permutations(configurations, path = {}) { + const keys = Object.keys(configurations); + if (keys.length === 0) { + return path; + } + const config = keys[0]; + const { [config]: values, ...otherConfigs } = configurations; + return values.flatMap((value) => { + return permutations(otherConfigs, { ...path, [config]: value }); + }); +} +const tests = new Set(); +function fileExtensionFormat(extension, packageType) { + if (extension === '.js') { + return packageType === 'module' ? 
'module' : 'commonjs'; + } else if (extension === '.mjs') { + return 'module'; + } else if (extension === '.cjs') { + return 'commonjs'; + } + throw new Error('unknown format ' + extension); +} +for (const permutation of permutations({ + entry: ['worker', 'parent', 'dep'], + preloads: [[], ['parent'], ['dep']], + onError: ['log', 'exit'], + parentExtension: ['.js', '.mjs', '.cjs'], + parentIntegrity: ['match', 'invalid', 'missing'], + depExtension: ['.js', '.mjs', '.cjs'], + depIntegrity: ['match', 'invalid', 'missing'], + packageType: ['no-package-json', 'module', 'commonjs'], + packageIntegrity: ['match', 'invalid', 'missing'], +})) { + let shouldSucceed = true; + const parentPath = `./parent${permutation.parentExtension}`; + const effectivePackageType = + permutation.packageType === 'module' ? 'module' : 'commonjs'; + const parentFormat = fileExtensionFormat( + permutation.parentExtension, + effectivePackageType + ); + const depFormat = fileExtensionFormat( + permutation.depExtension, + effectivePackageType + ); + // non-sensical attempt to require ESM + if (depFormat === 'module' && parentFormat === 'commonjs') { + continue; + } + const depPath = `./dep${permutation.depExtension}`; + const workerSpawnerPath = './worker-spawner.cjs'; + const entryPath = { + dep: depPath, + parent: parentPath, + worker: workerSpawnerPath, + }[permutation.entry]; + const packageJSON = { + main: entryPath, + type: permutation.packageType, + }; + if (permutation.packageType === 'no-field') { + delete packageJSON.type; + } + const resources = { + [depPath]: { + body: '', + integrities: hash('sha256', ''), + }, + }; + if (permutation.depIntegrity === 'invalid') { + resources[depPath].body += '\n// INVALID INTEGRITY'; + shouldSucceed = false; + } else if (permutation.depIntegrity === 'missing') { + resources[depPath].integrities = null; + shouldSucceed = false; + } else if (permutation.depIntegrity === 'match') { + } else { + throw new Error('unreachable'); + } + if (parentFormat !== 'commonjs') { + permutation.preloads = permutation.preloads.filter((_) => _ !== 'parent'); + } + const hasParent = + permutation.entry !== 'dep' || permutation.preloads.includes('parent'); + if (hasParent) { + resources[parentPath] = { + body: parentBody[parentFormat], + integrities: hash('sha256', parentBody[parentFormat]), + }; + if (permutation.parentIntegrity === 'invalid') { + resources[parentPath].body += '\n// INVALID INTEGRITY'; + shouldSucceed = false; + } else if (permutation.parentIntegrity === 'missing') { + resources[parentPath].integrities = null; + shouldSucceed = false; + } else if (permutation.parentIntegrity === 'match') { + } else { + throw new Error('unreachable'); + } + } + if (permutation.entry === 'worker') { + resources[workerSpawnerPath] = { + body: workerSpawningBody, + integrities: hash('sha256', workerSpawningBody), + }; + } + if (permutation.packageType !== 'no-package-json') { + let packageBody = JSON.stringify(packageJSON, null, 2); + let packageIntegrities = hash('sha256', packageBody); + if ( + permutation.parentExtension !== '.js' || + permutation.depExtension !== '.js' + ) { + // NO PACKAGE LOOKUP + continue; + } + if (permutation.packageIntegrity === 'invalid') { + packageJSON['//'] = 'INVALID INTEGRITY'; + packageBody = JSON.stringify(packageJSON, null, 2); + shouldSucceed = false; + } else if (permutation.packageIntegrity === 'missing') { + packageIntegrities = []; + shouldSucceed = false; + } else if (permutation.packageIntegrity === 'match') { + } else { + throw new Error('unreachable'); + } 
+ resources['./package.json'] = { + body: packageBody, + integrities: packageIntegrities, + }; + } + const willDeletePolicy = permutation.entry === 'worker'; + if (permutation.onError === 'log') { + shouldSucceed = true; + } + tests.add( + JSON.stringify({ + // hasParent, + // original: permutation, + onError: permutation.onError, + shouldSucceed, + entryPath, + willDeletePolicy, + preloads: permutation.preloads + .map((_) => { + return { + '': '', + 'parent': parentFormat === 'commonjs' ? parentPath : '', + 'dep': depFormat === 'commonjs' ? depPath : '', + }[_]; + }) + .filter(Boolean), + parentPath, + depPath, + resources, + }) + ); +} +debug(`spawning ${tests.size} policy integrity permutations`); +debug( + 'use NODE_DEBUG=test:policy-integrity:NUMBER to log a specific permutation' +); +for (const config of tests) { + const parsed = JSON.parse(config); + tests.delete(config); + queueSpawn(parsed); +} diff --git a/test/pummel/test-regress-GH-814.js b/test/pummel/test-regress-GH-814.js deleted file mode 100644 index 323163225738b4..00000000000000 --- a/test/pummel/test-regress-GH-814.js +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// Refs: https://github.com/nodejs/node-v0.x-archive/issues/814 - -'use strict'; -// Flags: --expose_gc - -require('../common'); -const assert = require('assert'); - -const tmpdir = require('../common/tmpdir'); - -function newBuffer(size, value) { - const buffer = Buffer.allocUnsafe(size); - while (size--) { - buffer[size] = value; - } - buffer[buffer.length - 1] = 0x0a; - return buffer; -} - -const fs = require('fs'); - -tmpdir.refresh(); -const testFileName = require('path').join(tmpdir.path, 'GH-814_testFile.txt'); -const testFileFD = fs.openSync(testFileName, 'w'); -console.log(testFileName); - - -const kBufSize = 128 * 1024; -let PASS = true; -const neverWrittenBuffer = newBuffer(kBufSize, 0x2e); // 0x2e === '.' -const bufPool = []; - - -const tail = require('child_process').spawn('tail', ['-f', testFileName]); -tail.stdout.on('data', tailCB); - -function tailCB(data) { - PASS = !data.toString().includes('.'); -} - - -const timeToQuit = Date.now() + 8e3; // Test during no more than this seconds. 
-(function main() { - - if (PASS) { - fs.write(testFileFD, newBuffer(kBufSize, 0x61), 0, kBufSize, -1, cb); - global.gc(); - const nuBuf = Buffer.allocUnsafe(kBufSize); - neverWrittenBuffer.copy(nuBuf); - if (bufPool.push(nuBuf) > 100) { - bufPool.length = 0; - } - } else { - throw new Error("Buffer GC'ed test -> FAIL"); - } - - if (Date.now() < timeToQuit) { - process.nextTick(main); - } else { - tail.kill(); - console.log("Buffer GC'ed test -> PASS (OK)"); - } - -})(); - - -function cb(err, written) { - assert.ifError(err); -} diff --git a/test/pummel/test-regress-GH-814_2.js b/test/pummel/test-regress-GH-814_2.js deleted file mode 100644 index 6806a7fc45143c..00000000000000 --- a/test/pummel/test-regress-GH-814_2.js +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// Refs: https://github.com/nodejs/node-v0.x-archive/issues/814 - -'use strict'; -// Flags: --expose_gc - -require('../common'); -const assert = require('assert'); - -const fs = require('fs'); -const tmpdir = require('../common/tmpdir'); - -tmpdir.refresh(); -const testFileName = require('path').join(tmpdir.path, 'GH-814_test.txt'); -const testFD = fs.openSync(testFileName, 'w'); -console.error(`${testFileName}\n`); - - -const tailProc = require('child_process').spawn('tail', ['-f', testFileName]); -tailProc.stdout.on('data', tailCB); - -function tailCB(data) { - PASS = !data.toString().includes('.'); - - if (!PASS) { - console.error('[FAIL]\n DATA -> '); - console.error(data); - console.error('\n'); - throw new Error('Buffers GC test -> FAIL'); - } -} - - -let PASS = true; -const bufPool = []; -const kBufSize = 16 * 1024 * 1024; -const neverWrittenBuffer = newBuffer(kBufSize, 0x2e); // 0x2e === '.' - -const timeToQuit = Date.now() + 5e3; // Test should last no more than this. 
-writer(); - -function writer() { - - if (PASS) { - if (Date.now() > timeToQuit) { - setTimeout(function() { - process.kill(tailProc.pid); - console.error('\nBuffers GC test -> PASS (OK)\n'); - }, 555); - } else { - fs.write(testFD, newBuffer(kBufSize, 0x61), 0, kBufSize, -1, writerCB); - global.gc(); - global.gc(); - global.gc(); - global.gc(); - global.gc(); - global.gc(); - const nuBuf = Buffer.allocUnsafe(kBufSize); - neverWrittenBuffer.copy(nuBuf); - if (bufPool.push(nuBuf) > 100) { - bufPool.length = 0; - } - process.nextTick(writer); - } - } - -} - -function writerCB(err, written) { - assert.ifError(err); -} - - -// ******************* UTILITIES - - -function newBuffer(size, value) { - const buffer = Buffer.allocUnsafe(size); - while (size--) { - buffer[size] = value; - } - buffer[buffer.length - 1] = 0x0d; - buffer[buffer.length - 1] = 0x0a; - return buffer; -} diff --git a/test/pummel/test-vm-memleak.js b/test/pummel/test-vm-memleak.js index 94e4055a767096..33f2f3d75905a5 100644 --- a/test/pummel/test-vm-memleak.js +++ b/test/pummel/test-vm-memleak.js @@ -29,11 +29,6 @@ const vm = require('vm'); const start = Date.now(); let maxMem = 0; -const ok = process.execArgv.some(function(arg) { - return arg === '--max_old_space_size=32'; -}); -assert(ok, 'Run this test with --max_old_space_size=32.'); - const interval = setInterval(function() { try { vm.runInNewContext('throw 1;'); diff --git a/test/wasi/c/stat.c b/test/wasi/c/stat.c index e4fcafd713d855..877e5b68a130a2 100644 --- a/test/wasi/c/stat.c +++ b/test/wasi/c/stat.c @@ -11,6 +11,7 @@ #define SIZE 500 int main(void) { + struct timespec times[2]; struct stat st; int fd; int ret; @@ -33,6 +34,15 @@ int main(void) { assert(ret == 0); assert(st.st_size == SIZE); + times[0].tv_sec = 4; + times[0].tv_nsec = 0; + times[1].tv_sec = 9; + times[1].tv_nsec = 0; + assert(0 == futimens(fd, times)); + assert(0 == fstat(fd, &st)); + assert(4 == st.st_atime); + assert(9 == st.st_mtime); + ret = close(fd); assert(ret == 0); diff --git a/test/wasi/wasm/stat.wasm b/test/wasi/wasm/stat.wasm index 62093147938347..4a50c0282bb60a 100755 Binary files a/test/wasi/wasm/stat.wasm and b/test/wasi/wasm/stat.wasm differ diff --git a/tools/actions/start-ci.sh b/tools/actions/start-ci.sh new file mode 100755 index 00000000000000..8eb3dae3c5bdf0 --- /dev/null +++ b/tools/actions/start-ci.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +set -xe + +GITHUB_TOKEN=$1 +OWNER=$2 +REPOSITORY=$3 +API_URL=https://api.github.com +REQUEST_CI_LABEL='request-ci' +REQUEST_CI_FAILED_LABEL='request-ci-failed' +shift 3 + +function issueUrl() { + echo "$API_URL/repos/${OWNER}/${REPOSITORY}/issues/${1}" +} + +function labelsUrl() { + echo "$(issueUrl "${1}")/labels" +} + +function commentsUrl() { + echo "$(issueUrl "${1}")/comments" +} + +for pr in "$@"; do + curl -sL --request DELETE \ + --url "$(labelsUrl "$pr")"/"$REQUEST_CI_LABEL" \ + --header "authorization: Bearer ${GITHUB_TOKEN}" \ + --header 'content-type: application/json' + + ci_started=yes + rm -f output; + ncu-ci run "$pr" >output 2>&1 || ci_started=no + cat output + + if [ "$ci_started" == "no" ]; then + # Do we need to reset? + curl -sL --request PUT \ + --url "$(labelsUrl "$pr")" \ + --header "authorization: Bearer ${GITHUB_TOKEN}" \ + --header 'content-type: application/json' \ + --data '{"labels": ["'"${REQUEST_CI_FAILED_LABEL}"'"]}' + + jq -n --arg content "
<details><summary>Couldn't start CI</summary><pre>
$(cat output)
</pre></details>
" '{body: $content}' > output.json + + curl -sL --request POST \ + --url "$(commentsUrl "$pr")" \ + --header "authorization: Bearer ${GITHUB_TOKEN}" \ + --header 'content-type: application/json' \ + --data @output.json + + rm output.json; + fi +done; diff --git a/tools/checkimports.py b/tools/checkimports.py index 609a75f542748f..b94919e3cc47e4 100755 --- a/tools/checkimports.py +++ b/tools/checkimports.py @@ -5,7 +5,7 @@ import io import re import sys - +import itertools def do_exist(file_name, lines, imported): if not any(not re.match('using \w+::{0};'.format(imported), line) and @@ -41,5 +41,10 @@ def is_valid(file_name): return valid if __name__ == '__main__': - files = glob.iglob(sys.argv[1] if len(sys.argv) > 1 else 'src/*.cc') - sys.exit(0 if all(map(is_valid, files)) else 1) + if len(sys.argv) > 1: + files = [] + for pattern in sys.argv[1:]: + files = itertools.chain(files, glob.iglob(pattern)) + else: + files = glob.iglob('src/*.cc') + sys.exit(0 if all(list(map(is_valid, files))) else 1) diff --git a/tools/code_cache/mkcodecache.cc b/tools/code_cache/mkcodecache.cc index 2aa140fff73ae8..c314e07fd78b22 100644 --- a/tools/code_cache/mkcodecache.cc +++ b/tools/code_cache/mkcodecache.cc @@ -27,6 +27,7 @@ int main(int argc, char* argv[]) { #endif // _WIN32 v8::V8::SetFlagsFromString("--random_seed=42"); + v8::V8::SetFlagsFromString("--harmony-top-level-await"); if (argc < 2) { std::cerr << "Usage: " << argv[0] << " \n"; diff --git a/tools/doc/json.js b/tools/doc/json.js index bcba923d907fb2..739a4f9f4fd3de 100644 --- a/tools/doc/json.js +++ b/tools/doc/json.js @@ -436,7 +436,7 @@ const r = String.raw; const eventPrefix = '^Event: +'; const classPrefix = '^[Cc]lass: +'; const ctorPrefix = '^(?:[Cc]onstructor: +)?`?new +'; -const classMethodPrefix = '^Class Method: +'; +const classMethodPrefix = '^Static method: +'; const maybeClassPropertyPrefix = '(?:Class Property: +)?'; const maybeQuote = '[\'"]?'; diff --git a/tools/eslint-rules/crypto-check.js b/tools/eslint-rules/crypto-check.js index 93a0c1ec865f38..4704b550a360e9 100644 --- a/tools/eslint-rules/crypto-check.js +++ b/tools/eslint-rules/crypto-check.js @@ -120,3 +120,7 @@ module.exports = function(context) { 'Program:exit': () => reportIfMissingCheck() }; }; + +module.exports.meta = { + fixable: 'code' +}; diff --git a/tools/eslint-rules/eslint-check.js b/tools/eslint-rules/eslint-check.js index fcfd7f3f9000fe..0edf7d2899cbf9 100644 --- a/tools/eslint-rules/eslint-check.js +++ b/tools/eslint-rules/eslint-check.js @@ -58,3 +58,7 @@ module.exports = function(context) { 'Program:exit': () => reportIfMissing(context) }; }; + +module.exports.meta = { + fixable: 'code' +}; diff --git a/tools/eslint-rules/inspector-check.js b/tools/eslint-rules/inspector-check.js index a56bb9be4c4ed4..bb5dc27bff5cef 100644 --- a/tools/eslint-rules/inspector-check.js +++ b/tools/eslint-rules/inspector-check.js @@ -59,3 +59,7 @@ module.exports = function(context) { 'Program:exit': () => reportIfMissing(context) }; }; + +module.exports.meta = { + fixable: 'code' +}; diff --git a/tools/eslint-rules/lowercase-name-for-primitive.js b/tools/eslint-rules/lowercase-name-for-primitive.js index cfe17c06c12535..274dcd1c9dce6e 100644 --- a/tools/eslint-rules/lowercase-name-for-primitive.js +++ b/tools/eslint-rules/lowercase-name-for-primitive.js @@ -53,3 +53,7 @@ module.exports = function(context) { [astSelector]: (node) => checkNamesArgument(node) }; }; + +module.exports.meta = { + fixable: 'code' +}; diff --git a/tools/eslint-rules/non-ascii-character.js 
b/tools/eslint-rules/non-ascii-character.js index e67aac7cd91e82..6588125d33d201 100644 --- a/tools/eslint-rules/non-ascii-character.js +++ b/tools/eslint-rules/non-ascii-character.js @@ -59,3 +59,7 @@ module.exports = (context) => { Program: (node) => reportIfError(node, context.getSourceCode()) }; }; + +module.exports.meta = { + fixable: 'code' +}; diff --git a/tools/eslint-rules/prefer-assert-iferror.js b/tools/eslint-rules/prefer-assert-iferror.js index 2129600e2d4eea..63430d70af93e4 100644 --- a/tools/eslint-rules/prefer-assert-iferror.js +++ b/tools/eslint-rules/prefer-assert-iferror.js @@ -8,6 +8,9 @@ const utils = require('./rules-utils.js'); module.exports = { + meta: { + fixable: 'code' + }, create(context) { const sourceCode = context.getSourceCode(); let assertImported = false; diff --git a/tools/eslint-rules/prefer-assert-methods.js b/tools/eslint-rules/prefer-assert-methods.js index 2917d40de40810..400a5d54759c86 100644 --- a/tools/eslint-rules/prefer-assert-methods.js +++ b/tools/eslint-rules/prefer-assert-methods.js @@ -42,3 +42,7 @@ module.exports = function(context) { } }; }; + +module.exports.meta = { + fixable: 'code' +}; diff --git a/tools/gyp_node.py b/tools/gyp_node.py index 333c89f5ae9f5a..8c7493f3292e83 100755 --- a/tools/gyp_node.py +++ b/tools/gyp_node.py @@ -45,18 +45,6 @@ def run_gyp(args): args.append('-Dcomponent=static_library') args.append('-Dlibrary=static_library') - # Don't compile with -B and -fuse-ld=, we don't bundle ld.gold. Can't be - # set in common.gypi due to how deps/v8/build/toolchain.gypi uses them. - args.append('-Dlinux_use_bundled_binutils=0') - args.append('-Dlinux_use_bundled_gold=0') - args.append('-Dlinux_use_gold_flags=0') - - # Set the current program to this module. This is done because gyp - # will use the program path in targets it generates. If this script was called - # by another script the program name will not be gyp_node.py but whatever - # the name of the script that called it is, leading to incorrect commands - # in generated targets (for example cmd_regen_makefile). - sys.argv[0] = os.path.abspath(__file__) rc = gyp.main(args) if rc != 0: print('Error running GYP') diff --git a/tools/node_modules/eslint/README.md b/tools/node_modules/eslint/README.md index c8d725426874a0..998a961bbb1af3 100644 --- a/tools/node_modules/eslint/README.md +++ b/tools/node_modules/eslint/README.md @@ -121,7 +121,7 @@ Yes, ESLint natively supports parsing JSX syntax (this must be enabled in [confi ### What ECMAScript versions does ESLint support? -ESLint has full support for ECMAScript 3, 5 (default), 2015, 2016, 2017, 2018, and 2019. You can set your desired ECMAScript syntax (and other settings, like global variables or your target environments) through [configuration](https://eslint.org/docs/user-guide/configuring). +ESLint has full support for ECMAScript 3, 5 (default), 2015, 2016, 2017, 2018, 2019, and 2020. You can set your desired ECMAScript syntax (and other settings, like global variables or your target environments) through [configuration](https://eslint.org/docs/user-guide/configuring). ### What about experimental features? @@ -251,7 +251,7 @@ The following companies, organizations, and individuals support ESLint's ongoing

Gold Sponsors

Shopify Salesforce Airbnb Microsoft FOSS Fund Sponsorships

Silver Sponsors

Liftoff AMP Project

Bronze Sponsors

-Bruce EduBirdie CasinoTop.com Casino Topp Writers Per Hour Anagram Solver vpn netflix Kasinot.fi Pelisivut Nettikasinot.org BonusFinder Deutschland Bugsnag Stability Monitoring Mixpanel VPS Server Free Icons by Icons8 Discord ThemeIsle TekHattan Marfeel Fire Stick Tricks
+Veikkaajat.com Nettikasinot.media My True Media Norgekasino Japanesecasino Bruce EduBirdie CasinoTop.com Casino Topp Writers Per Hour Anagram Solver Kasinot.fi Pelisivut Nettikasinot.org BonusFinder Deutschland Bugsnag Stability Monitoring Mixpanel VPS Server Icons8: free icons, photos, illustrations, and music Discord ThemeIsle Marfeel Fire Stick Tricks

## Technology Sponsors diff --git a/tools/node_modules/eslint/lib/cli-engine/formatters/checkstyle.js b/tools/node_modules/eslint/lib/cli-engine/formatters/checkstyle.js index ba4d1b5b3ec532..f19b6fc0957e5d 100644 --- a/tools/node_modules/eslint/lib/cli-engine/formatters/checkstyle.js +++ b/tools/node_modules/eslint/lib/cli-engine/formatters/checkstyle.js @@ -42,8 +42,8 @@ module.exports = function(results) { messages.forEach(message => { output += [ - `` diff --git a/tools/node_modules/eslint/lib/rule-tester/rule-tester.js b/tools/node_modules/eslint/lib/rule-tester/rule-tester.js index 77df1def893ccc..d1fcbca5ba0622 100644 --- a/tools/node_modules/eslint/lib/rule-tester/rule-tester.js +++ b/tools/node_modules/eslint/lib/rule-tester/rule-tester.js @@ -852,6 +852,16 @@ class RuleTester { ); } + // Rules that produce fixes must have `meta.fixable` property. + if (result.output !== item.code) { + assert.ok( + hasOwnProperty(rule, "meta"), + "Fixable rules should export a `meta.fixable` property." + ); + + // Linter throws if a rule that produced a fix has `meta` but doesn't have `meta.fixable`. + } + assertASTDidntChange(result.beforeAST, result.afterAST); } diff --git a/tools/node_modules/eslint/lib/rules/no-duplicate-case.js b/tools/node_modules/eslint/lib/rules/no-duplicate-case.js index c8a0fa9da3c025..e2d9665e7f564d 100644 --- a/tools/node_modules/eslint/lib/rules/no-duplicate-case.js +++ b/tools/node_modules/eslint/lib/rules/no-duplicate-case.js @@ -6,6 +6,12 @@ "use strict"; +//------------------------------------------------------------------------------ +// Requirements +//------------------------------------------------------------------------------ + +const astUtils = require("./utils/ast-utils"); + //------------------------------------------------------------------------------ // Rule Definition //------------------------------------------------------------------------------ @@ -31,18 +37,31 @@ module.exports = { create(context) { const sourceCode = context.getSourceCode(); + /** + * Determines whether the two given nodes are considered to be equal. + * @param {ASTNode} a First node. + * @param {ASTNode} b Second node. + * @returns {boolean} `true` if the nodes are considered to be equal. + */ + function equal(a, b) { + if (a.type !== b.type) { + return false; + } + + return astUtils.equalTokens(a, b, sourceCode); + } return { SwitchStatement(node) { - const previousKeys = new Set(); + const previousTests = []; for (const switchCase of node.cases) { if (switchCase.test) { - const key = sourceCode.getText(switchCase.test); + const test = switchCase.test; - if (previousKeys.has(key)) { + if (previousTests.some(previousTest => equal(previousTest, test))) { context.report({ node: switchCase, messageId: "unexpected" }); } else { - previousKeys.add(key); + previousTests.push(test); } } } diff --git a/tools/node_modules/eslint/messages/extend-config-missing.txt b/tools/node_modules/eslint/messages/extend-config-missing.txt index f7c5f71ebe3256..4defd7ac4d159d 100644 --- a/tools/node_modules/eslint/messages/extend-config-missing.txt +++ b/tools/node_modules/eslint/messages/extend-config-missing.txt @@ -2,4 +2,4 @@ ESLint couldn't find the config "<%- configName %>" to extend from. Please check The config "<%- configName %>" was referenced from the config file in "<%- importerName %>". -If you still have problems, please stop by https://eslint.org/chat to chat with the team. +If you still have problems, please stop by https://eslint.org/chat/help to chat with the team. 
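For context on the `module.exports.meta = { fixable: 'code' }` additions to the custom eslint-rules above and the new RuleTester assertion: ESLint expects any rule whose report supplies a `fix` function to declare `meta.fixable`. The following is only a minimal sketch of such a rule, with a hypothetical foo-to-bar rename that is not part of this patch:

'use strict';

// Hypothetical fixable rule: replaces the identifier `foo` with `bar`.
// Declaring `meta.fixable: 'code'` is what the updated RuleTester asserts on
// whenever a test case produces `output` different from its `code`.
module.exports = {
  meta: {
    fixable: 'code'
  },
  create(context) {
    return {
      Identifier(node) {
        if (node.name === 'foo') {
          context.report({
            node,
            message: 'Use `bar` instead of `foo`.',
            fix: (fixer) => fixer.replaceText(node, 'bar')
          });
        }
      }
    };
  }
};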
diff --git a/tools/node_modules/eslint/messages/no-config-found.txt b/tools/node_modules/eslint/messages/no-config-found.txt index f1f7beb63b19a0..b46a7e5a7a6f3f 100644 --- a/tools/node_modules/eslint/messages/no-config-found.txt +++ b/tools/node_modules/eslint/messages/no-config-found.txt @@ -4,4 +4,4 @@ ESLint couldn't find a configuration file. To set up a configuration file for th ESLint looked for configuration files in <%= directoryPath %> and its ancestors. If it found none, it then looked in your home directory. -If you think you already have a configuration file or if you need more help, please stop by the ESLint chat room: https://eslint.org/chat +If you think you already have a configuration file or if you need more help, please stop by the ESLint chat room: https://eslint.org/chat/help diff --git a/tools/node_modules/eslint/messages/plugin-conflict.txt b/tools/node_modules/eslint/messages/plugin-conflict.txt index f8b60631c58ea1..3ab4b340ef2dab 100644 --- a/tools/node_modules/eslint/messages/plugin-conflict.txt +++ b/tools/node_modules/eslint/messages/plugin-conflict.txt @@ -4,4 +4,4 @@ ESLint couldn't determine the plugin "<%- pluginId %>" uniquely. Please remove the "plugins" setting from either config or remove either plugin installation. -If you still can't figure out the problem, please stop by https://eslint.org/chat to chat with the team. +If you still can't figure out the problem, please stop by https://eslint.org/chat/help to chat with the team. diff --git a/tools/node_modules/eslint/messages/plugin-missing.txt b/tools/node_modules/eslint/messages/plugin-missing.txt index 3d376733085667..aa25f59ac440ba 100644 --- a/tools/node_modules/eslint/messages/plugin-missing.txt +++ b/tools/node_modules/eslint/messages/plugin-missing.txt @@ -8,4 +8,4 @@ It's likely that the plugin isn't installed correctly. Try reinstalling by runni The plugin "<%- pluginName %>" was referenced from the config file in "<%- importerName %>". -If you still can't figure out the problem, please stop by https://eslint.org/chat to chat with the team. +If you still can't figure out the problem, please stop by https://eslint.org/chat/help to chat with the team. diff --git a/tools/node_modules/eslint/messages/whitespace-found.txt b/tools/node_modules/eslint/messages/whitespace-found.txt index 7d72149a8fd4fb..3eed1af58665a3 100644 --- a/tools/node_modules/eslint/messages/whitespace-found.txt +++ b/tools/node_modules/eslint/messages/whitespace-found.txt @@ -1,3 +1,3 @@ ESLint couldn't find the plugin "<%- pluginName %>". because there is whitespace in the name. Please check your configuration and remove all whitespace from the plugin name. -If you still can't figure out the problem, please stop by https://eslint.org/chat to chat with the team. +If you still can't figure out the problem, please stop by https://eslint.org/chat/help to chat with the team. 
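As a note on the `no-duplicate-case` change a few hunks up: the rule now compares `case` tests token-by-token via `astUtils.equalTokens` instead of comparing raw source text, so duplicates that differ only in whitespace or comments are still reported. A small illustrative lint target (not part of the patch; the identifiers are made up):

'use strict';
const a = 1;
const value = 2;
switch (value) {
  case a + 1:
    break;
  case a +   /* same tokens */ 1: // reported by the updated rule; the old
    break;                        // text comparison would have missed it
}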
diff --git a/tools/node_modules/eslint/package.json b/tools/node_modules/eslint/package.json index 193a95c950bdb7..a0b031f6e9baf1 100644 --- a/tools/node_modules/eslint/package.json +++ b/tools/node_modules/eslint/package.json @@ -153,5 +153,5 @@ "test:cli": "mocha", "webpack": "node Makefile.js webpack" }, - "version": "7.5.0" + "version": "7.6.0" } \ No newline at end of file diff --git a/tools/v8_gypfiles/toolchain.gypi b/tools/v8_gypfiles/toolchain.gypi index 196196a4e2d65b..d4bad70d45025e 100644 --- a/tools/v8_gypfiles/toolchain.gypi +++ b/tools/v8_gypfiles/toolchain.gypi @@ -99,34 +99,6 @@ ['OS=="linux" and host_arch=="ia32"', { 'binutils_dir%': 'third_party/binutils/Linux_ia32/Release/bin', }], - - # linux_use_bundled_gold: whether to use the gold linker binary checked - # into third_party/binutils. Force this off via GYP_DEFINES when you - # are using a custom toolchain and need to control -B in ldflags. - # Do not use 32-bit gold on 32-bit hosts as it runs out address space - # for component=static_library builds. - ['((OS=="linux" or OS=="android") and (target_arch=="x64" or target_arch=="arm" or (target_arch=="ia32" and host_arch=="x64"))) or (OS=="linux" and target_arch=="mipsel")', { - 'linux_use_bundled_gold%': 1, - }, { - 'linux_use_bundled_gold%': 0, - }], - # linux_use_bundled_binutils: whether to use the binary binutils - # checked into third_party/binutils. These are not multi-arch so cannot - # be used except on x86 and x86-64 (the only two architectures which - # are currently checke in). Force this off via GYP_DEFINES when you - # are using a custom toolchain and need to control -B in cflags. - ['OS=="linux" and (target_arch=="ia32" or target_arch=="x64")', { - 'linux_use_bundled_binutils%': 1, - }, { - 'linux_use_bundled_binutils%': 0, - }], - # linux_use_gold_flags: whether to use build flags that rely on gold. - # On by default for x64 Linux. - ['OS=="linux" and target_arch=="x64"', { - 'linux_use_gold_flags%': 1, - }, { - 'linux_use_gold_flags%': 0, - }], ], # Indicates if gcmole tools are downloaded by a hook. @@ -990,26 +962,6 @@ '-mx32', ], }], # v8_target_arch=="x32" - ['linux_use_gold_flags==1', { - # Newer gccs and clangs support -fuse-ld, use the flag to force gold - # selection. - # gcc -- http://gcc.gnu.org/onlinedocs/gcc-4.8.0/gcc/Optimize-Options.html - 'ldflags': [ '-fuse-ld=gold', ], - }], - ['linux_use_bundled_binutils==1', { - 'cflags': [ - '-B