From 817b44db54feacb5f14e733d3338560e3b1cf29f Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 13 Jan 2019 20:09:17 -0800 Subject: [PATCH 01/88] test: refactor pummel/test-keep-alive * Reduce concurrent and duration options by half so as to avoid interference with other tests. (Excessive TCP activity in this test resulted in throttling that caused subsequent tests to fail on my local setup.) * Use an OS-provided port rather than `common.PORT`. This possibly reduces side-effects on other tests (that may also be using `common.PORT`). * Add punctuation in comments. PR-URL: https://github.com/nodejs/node/pull/25485 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater --- test/pummel/test-keep-alive.js | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/test/pummel/test-keep-alive.js b/test/pummel/test-keep-alive.js index 0fec1ff877b89b..5d2f00170d45b0 100644 --- a/test/pummel/test-keep-alive.js +++ b/test/pummel/test-keep-alive.js @@ -21,7 +21,7 @@ 'use strict'; -// This test requires the program 'wrk' +// This test requires the program 'wrk'. 
const common = require('../common'); if (common.isWindows) common.skip('no `wrk` on windows'); @@ -47,9 +47,9 @@ let normalReqSec = 0; const runAb = (opts, callback) => { const args = [ - '-c', opts.concurrent || 100, + '-c', opts.concurrent || 50, '-t', opts.threads || 2, - '-d', opts.duration || '10s', + '-d', opts.duration || '5s', ]; if (!opts.keepalive) { @@ -58,7 +58,7 @@ const runAb = (opts, callback) => { } args.push(url.format({ hostname: '127.0.0.1', - port: common.PORT, protocol: 'http' })); + port: opts.port, protocol: 'http' })); const child = spawn('wrk', args); child.stderr.pipe(process.stderr); @@ -90,11 +90,12 @@ const runAb = (opts, callback) => { }); }; -server.listen(common.PORT, () => { - runAb({ keepalive: true }, (reqSec) => { +server.listen(0, () => { + const port = server.address().port; + runAb({ keepalive: true, port: port }, (reqSec) => { keepAliveReqSec = reqSec; - runAb({ keepalive: false }, (reqSec) => { + runAb({ keepalive: false, port: port }, (reqSec) => { normalReqSec = reqSec; server.close(); }); From 69c0841a5af498e282b11fc66c6daa3563bebe1c Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 13 Jan 2019 20:35:52 -0800 Subject: [PATCH 02/88] test: refactor pummel/test-net-connect-econnrefused * Reduce ROUNDS and ATTEMPTS_PER_ROUND by half to avoid spurious test failures as a result of side effects from other tests. (For my local setup, test-keep-alive seems to cause this test to fail with ETIMEDOUT and/or EADDRNOTAVAIL. It would seem to be a result of throttling. Reducing the pummel-iness of that test and this one seems to solve the problem.) * Apply capitalization and punctuation to comment. 
PR-URL: https://github.com/nodejs/node/pull/25485 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater --- test/pummel/test-net-connect-econnrefused.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/pummel/test-net-connect-econnrefused.js b/test/pummel/test-net-connect-econnrefused.js index 0f350bd572f177..39737f73bf0831 100644 --- a/test/pummel/test-net-connect-econnrefused.js +++ b/test/pummel/test-net-connect-econnrefused.js @@ -20,14 +20,14 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -// verify that connect reqs are properly cleaned up +// Verify that connect reqs are properly cleaned up. const common = require('../common'); const assert = require('assert'); const net = require('net'); -const ROUNDS = 10; -const ATTEMPTS_PER_ROUND = 100; +const ROUNDS = 5; +const ATTEMPTS_PER_ROUND = 50; let rounds = 1; let reqs = 0; From 307da2d3e75b970c2dadffc776738ca63e21abd8 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 13 Jan 2019 20:58:06 -0800 Subject: [PATCH 03/88] test: refactor pummel/test-net-many-clients * Use port 0 instead of `common.PORT`. * Reduce `concurrent` from 100 to 50 and `connections_per_client` from 5 to 3. This is to avoid side effects from other tests. Prior to this change, running this along with test-keep-alive would result in failures on my local setup, apparently due to network throttling. * Remove unnecessary `console.log()` and improve remaining `console.log()` to provide clearer information. 
PR-URL: https://github.com/nodejs/node/pull/25485 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater --- test/pummel/test-net-many-clients.js | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/test/pummel/test-net-many-clients.js b/test/pummel/test-net-many-clients.js index db7da1ae041341..4d114922a92ad8 100644 --- a/test/pummel/test-net-many-clients.js +++ b/test/pummel/test-net-many-clients.js @@ -20,14 +20,14 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const net = require('net'); // settings const bytes = 1024 * 40; -const concurrency = 100; -const connections_per_client = 5; +const concurrency = 50; +const connections_per_client = 3; // measured let total_connections = 0; @@ -35,15 +35,14 @@ let total_connections = 0; const body = 'C'.repeat(bytes); const server = net.createServer(function(c) { - console.log('connected'); total_connections++; - console.log('#'); + console.log('connected', total_connections); c.write(body); c.end(); }); -function runClient(callback) { - const client = net.createConnection(common.PORT); +function runClient(port, callback) { + const client = net.createConnection(port); client.connections = 0; @@ -79,17 +78,17 @@ function runClient(callback) { assert.ok(!client.fd); if (this.connections < connections_per_client) { - this.connect(common.PORT); + this.connect(port); } else { callback(); } }); } -server.listen(common.PORT, function() { +server.listen(0, function() { let finished_clients = 0; for (let i = 0; i < concurrency; i++) { - runClient(function() { + runClient(server.address().port, function() { if (++finished_clients === concurrency) server.close(); }); } From 6347940e9ff09d242d144745dab7aa295b9c4f78 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 13 Jan 2019 21:01:43 -0800 Subject: [PATCH 04/88] test: refactor pummel/test-net-pingpong * Use port 
0 instead of `common.PORT`. * Use `//` for comments, capitalize comments, and add punctuation. PR-URL: https://github.com/nodejs/node/pull/25485 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater --- test/pummel/test-net-pingpong.js | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/test/pummel/test-net-pingpong.js b/test/pummel/test-net-pingpong.js index 29ffd0bdf7261b..b9507d519d8e6d 100644 --- a/test/pummel/test-net-pingpong.js +++ b/test/pummel/test-net-pingpong.js @@ -26,7 +26,7 @@ const net = require('net'); let tests_run = 0; -function pingPongTest(port, host, on_complete) { +function pingPongTest(host, on_complete) { const N = 1000; let count = 0; let sent_final_ping = false; @@ -69,8 +69,8 @@ function pingPongTest(port, host, on_complete) { }); }); - server.listen(port, host, function() { - const client = net.createConnection(port, host); + server.listen(0, host, function() { + const client = net.createConnection(server.address().port, host); client.setEncoding('utf8'); @@ -110,12 +110,12 @@ function pingPongTest(port, host, on_complete) { }); } -/* All are run at once, so run on different ports */ -pingPongTest(common.PORT, 'localhost'); -pingPongTest(common.PORT + 1, null); +// All are run at once and will run on different ports. +pingPongTest('localhost'); +pingPongTest(null); -// This IPv6 isn't working on Solaris -if (!common.isSunOS) pingPongTest(common.PORT + 2, '::1'); +// This IPv6 isn't working on Solaris. +if (!common.isSunOS) pingPongTest('::1'); process.on('exit', function() { assert.strictEqual(tests_run, common.isSunOS ? 
2 : 3); From 7ffa8ec7562af7b7a7f26132bda5a7bcf3c6adfa Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Sat, 12 Jan 2019 21:26:12 +0100 Subject: [PATCH 05/88] src: reset `StopTracingAgent()` before platform teardown This makes sure that `StopTracingAgent()` is always called before tearing down the `tracing::Agent`, since previously its destructor might have tried to access the agent, which would be destroyed by the (earlier) `Dispose()` call. PR-URL: https://github.com/nodejs/node/pull/25472 Reviewed-By: Daniel Bevenius Reviewed-By: Minwoo Jung Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Anatoli Papirovski Reviewed-By: Joyee Cheung --- src/node.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/node.cc b/src/node.cc index 0f668472442273..b3e94f51cec29f 100644 --- a/src/node.cc +++ b/src/node.cc @@ -240,6 +240,7 @@ static struct { } void Dispose() { + StopTracingAgent(); platform_->Shutdown(); delete platform_; platform_ = nullptr; @@ -579,7 +580,6 @@ static void WaitForInspectorDisconnect(Environment* env) { void Exit(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); WaitForInspectorDisconnect(env); - v8_platform.StopTracingAgent(); int code = args[0]->Int32Value(env->context()).FromMaybe(0); env->Exit(code); } @@ -1468,7 +1468,6 @@ int Start(int argc, char** argv) { per_process::v8_initialized = true; const int exit_code = Start(uv_default_loop(), args, exec_args); - v8_platform.StopTracingAgent(); per_process::v8_initialized = false; V8::Dispose(); From 0897504adc1acf6457c6bd415b5022bcc6a9a4dc Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Sat, 12 Jan 2019 21:28:48 +0100 Subject: [PATCH 06/88] src: call `Environment::Exit()` for fatal exceptions Call `Environment::Exit()` rather than the process-wide `exit()` function, since JS exceptions generally only affect the current JS engine instance. 
PR-URL: https://github.com/nodejs/node/pull/25472 Reviewed-By: Daniel Bevenius Reviewed-By: Minwoo Jung Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Anatoli Papirovski Reviewed-By: Joyee Cheung --- src/node_errors.cc | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/node_errors.cc b/src/node_errors.cc index 2a86c7402b5a64..9b55a0b92187ac 100644 --- a/src/node_errors.cc +++ b/src/node_errors.cc @@ -324,7 +324,7 @@ TryCatchScope::~TryCatchScope() { if (HasCaught() && !HasTerminated() && mode_ == CatchMode::kFatal) { HandleScope scope(env_->isolate()); ReportException(env_, Exception(), Message()); - exit(7); + env_->Exit(7); } } @@ -711,7 +711,7 @@ void FatalException(Isolate* isolate, // Failed before the process._fatalException function was added! // this is probably pretty bad. Nothing to do but report and exit. ReportException(env, error, message); - exit(6); + env->Exit(6); } else { errors::TryCatchScope fatal_try_catch(env); @@ -727,7 +727,7 @@ void FatalException(Isolate* isolate, if (fatal_try_catch.HasCaught()) { // The fatal exception function threw, so we must exit ReportException(env, fatal_try_catch); - exit(7); + env->Exit(7); } else if (caught.ToLocalChecked()->IsFalse()) { ReportException(env, error, message); @@ -738,9 +738,9 @@ void FatalException(Isolate* isolate, Local code; if (!process_object->Get(env->context(), exit_code).ToLocal(&code) || !code->IsInt32()) { - exit(1); + env->Exit(1); } - exit(code.As()->Value()); + env->Exit(code.As()->Value()); } } } From 47d040dd779a2b4ecb0df517f1658feef2497947 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 9 Jan 2019 11:54:08 +0100 Subject: [PATCH 07/88] build: introduce --openssl-is-fips flag This commit introduces a new configuration flag named --openssl-is-fips which is intended to be used when linking against an OpenSSL library that is FIPS compatible. 
The motivation for this is that Red Hat Enterprise Linux 8 (RHEL8) comes with OpenSSL 1.1.1 and includes FIPS support, and we would like to be able to dynamically link against this version and also have FIPS features enabled in node, like would be done when statically linking and using the --openssl-fips flag. The suggestion here is to introduce a new flag: $ ./configure --help ... --openssl-is-fips specifies that the shared OpenSSL version is FIPS compatible This flag could be used in combination with the shared-openssl flag: $ ./configure --shared-openssl ---openssl-is-fips This will enable FIPS support in node and the runtime flags will be availalbe to enable FIPS (--enable-fips, --force-fips). PR-URL: https://github.com/nodejs/node/pull/25412 Reviewed-By: Sam Roberts Reviewed-By: Anna Henningsen --- configure.py | 6 ++++++ node.gypi | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/configure.py b/configure.py index 3ac700c86c3482..232806170a0c3b 100755 --- a/configure.py +++ b/configure.py @@ -173,6 +173,11 @@ dest='openssl_fips', help='Build OpenSSL using FIPS canister .o file in supplied folder') +parser.add_option('--openssl-is-fips', + action='store_true', + dest='openssl_is_fips', + help='specifies that the OpenSSL library is FIPS compatible') + parser.add_option('--openssl-use-def-ca-store', action='store_true', dest='use_openssl_ca_store', @@ -1190,6 +1195,7 @@ def configure_openssl(o): variables = o['variables'] variables['node_use_openssl'] = b(not options.without_ssl) variables['node_shared_openssl'] = b(options.shared_openssl) + variables['openssl_is_fips'] = b(options.openssl_is_fips) variables['openssl_fips'] = '' if options.openssl_no_asm: diff --git a/node.gypi b/node.gypi index 13886faf80985f..689138c15b5705 100644 --- a/node.gypi +++ b/node.gypi @@ -319,7 +319,7 @@ [ 'node_use_openssl=="true"', { 'defines': [ 'HAVE_OPENSSL=1' ], 'conditions': [ - ['openssl_fips != ""', { + ['openssl_fips != "" or openssl_is_fips=="true"', { 
'defines': [ 'NODE_FIPS_MODE' ], }], [ 'node_shared_openssl=="false"', { From 03e05cb4fbb4cee1670ce9264cb910b2d9bce1be Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 9 Jan 2019 11:43:41 +0100 Subject: [PATCH 08/88] src: fix FIPS section in Sign::SignFinal Currently, while FIPS is not supported yet for this release there might be an option to dynamically link against a FIPS compatible OpenSSL version. This commit fixes the compiler errors. PR-URL: https://github.com/nodejs/node/pull/25412 Reviewed-By: Sam Roberts Reviewed-By: Anna Henningsen --- src/node_crypto.cc | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/node_crypto.cc b/src/node_crypto.cc index 71eb3880100ea0..f2b118978d2d1f 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -4478,9 +4478,14 @@ Sign::SignResult Sign::SignFinal( #ifdef NODE_FIPS_MODE /* Validate DSA2 parameters from FIPS 186-4 */ - if (FIPS_mode() && EVP_PKEY_DSA == pkey->type) { - size_t L = BN_num_bits(pkey->pkey.dsa->p); - size_t N = BN_num_bits(pkey->pkey.dsa->q); + if (FIPS_mode() && EVP_PKEY_DSA == EVP_PKEY_base_id(pkey.get())) { + DSA* dsa = EVP_PKEY_get0_DSA(pkey.get()); + const BIGNUM* p; + DSA_get0_pqg(dsa, &p, nullptr, nullptr); + size_t L = BN_num_bits(p); + const BIGNUM* q; + DSA_get0_pqg(dsa, nullptr, &q, nullptr); + size_t N = BN_num_bits(q); bool result = false; if (L == 1024 && N == 160) @@ -4493,7 +4498,7 @@ Sign::SignResult Sign::SignFinal( result = true; if (!result) { - return kSignPrivateKey; + return SignResult(kSignPrivateKey); } } #endif // NODE_FIPS_MODE From 26c5bd8a5cb5e7c13cde729d11a5b03c5ef23987 Mon Sep 17 00:00:00 2001 From: Sam Roberts Date: Mon, 14 Jan 2019 12:11:49 -0800 Subject: [PATCH 09/88] doc: add metadata about ecdh curve options - DEFAULT_ECDH_CURVE default changed to 'auto' for 10.0.0 - ecdhCurve parameter allowed multiple values and 'auto' from 9.0.0 PR-URL: https://github.com/nodejs/node/pull/25502 Reviewed-By: James M Snell Reviewed-By: Luigi 
Pinca Reviewed-By: Ben Noordhuis Reviewed-By: Colin Ihrig Reviewed-By: Ruben Bridgewater --- doc/api/tls.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/api/tls.md b/doc/api/tls.md index 907229951e3302..c7bd89f50101af 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -1183,6 +1183,10 @@ changes: - version: v9.3.0 pr-url: https://github.com/nodejs/node/pull/14903 description: The `options` parameter can now include `clientCertEngine`. + - version: v9.0.0 + pr-url: https://github.com/nodejs/node/pull/15206 + description: The `ecdhCurve` option can now be multiple `':'` separated + curve names or `'auto'`. - version: v7.3.0 pr-url: https://github.com/nodejs/node/pull/10294 description: If the `key` option is an array, individual entries do not @@ -1409,6 +1413,10 @@ console.log(tls.getCiphers()); // ['AES128-SHA', 'AES256-SHA', ...] ## tls.DEFAULT_ECDH_CURVE The default curve name to use for ECDH key agreement in a tls server. The From ab861433c9da03271ac6ed3a42bb027e8a13d53c Mon Sep 17 00:00:00 2001 From: Brian White Date: Wed, 9 Jan 2019 17:59:17 -0500 Subject: [PATCH 10/88] test: fix test-repl timeout and tmpdir refresh PR-URL: https://github.com/nodejs/node/pull/25425 Reviewed-By: Ruben Bridgewater Reviewed-By: Rich Trott Reviewed-By: Colin Ihrig Reviewed-By: Anto Aravinth --- test/parallel/test-repl.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/parallel/test-repl.js b/test/parallel/test-repl.js index 89370b0f7ad464..14ada1fe601cca 100644 --- a/test/parallel/test-repl.js +++ b/test/parallel/test-repl.js @@ -22,6 +22,7 @@ 'use strict'; const common = require('../common'); const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); const assert = require('assert'); const net = require('net'); const repl = require('repl'); @@ -823,6 +824,8 @@ function startUnixRepl() { resolveReplServer(replServer); })); + tmpdir.refresh(); + server.listen(common.PIPE, common.mustCall(() => { const 
client = net.createConnection(common.PIPE); @@ -852,7 +855,7 @@ function event(ee, expected) { const data = inspect(expected, { compact: false }); const msg = `The REPL did not reply as expected for:\n\n${data}`; reject(new Error(msg)); - }, 500); + }, common.platformTimeout(500)); ee.once('data', common.mustCall((...args) => { clearTimeout(timeout); resolve(...args); From 07f1bb001c51c7106f1868820a3c931188e84405 Mon Sep 17 00:00:00 2001 From: cjihrig Date: Tue, 15 Jan 2019 15:39:43 -0500 Subject: [PATCH 11/88] process: allow reading umask in workers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://github.com/nodejs/node/issues/25448 PR-URL: https://github.com/nodejs/node/pull/25526 Reviewed-By: Ruben Bridgewater Reviewed-By: Michaël Zasso Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Joyee Cheung --- doc/api/errors.md | 5 +++++ doc/api/process.md | 3 ++- lib/internal/bootstrap/node.js | 4 ++++ lib/internal/errors.js | 2 ++ lib/internal/process/worker_thread_only.js | 21 ++++++++++++++++++++- src/node_process_methods.cc | 7 ++++++- test/common/index.js | 9 ++++----- test/parallel/test-fs-write-file-sync.js | 2 +- test/parallel/test-process-umask-mask.js | 2 +- test/parallel/test-process-umask.js | 11 +++++++++-- 10 files changed, 54 insertions(+), 12 deletions(-) diff --git a/doc/api/errors.md b/doc/api/errors.md index 595d18267255ad..54067182b25f49 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1880,6 +1880,11 @@ All attempts at serializing an uncaught exception from a worker thread failed. The pathname used for the main script of a worker has an unknown file extension. + +### ERR_WORKER_UNSUPPORTED_OPERATION + +The requested functionality is not supported in worker threads. 
+ ### ERR_ZLIB_INITIALIZATION_FAILED diff --git a/doc/api/process.md b/doc/api/process.md index 28ed9734f55144..0ba97f851cce9f 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -2007,7 +2007,8 @@ console.log( ); ``` -This feature is not available in [`Worker`][] threads. +[`Worker`][] threads are able to read the umask, however attempting to set the +umask will result in a thrown exception. ## process.uptime() + +Use the specified file as a security policy. + ### `--experimental-repl-await` + + +> Stability: 1 - Experimental + + + +Node.js contains experimental support for creating policies on loading code. + +Policies are a security feature intended to allow guarantees +about what code Node.js is able to load. The use of policies assumes +safe practices for the policy files such as ensuring that policy +files cannot be overwritten by the Node.js application by using +file permissions. + +A best practice would be to ensure that the policy manifest is read only for +the running Node.js application, and that the file cannot be changed +by the running Node.js application in any way. A typical setup would be to +create the policy file as a different user id than the one running Node.js +and granting read permissions to the user id running Node.js. + +## Enabling + + + +The `--experimental-policy` flag can be used to enable features for policies +when loading modules. + +Once this has been set, all modules must conform to a policy manifest file +passed to the flag: + +```sh +node --experimental-policy=policy.json app.js +``` + +The policy manifest will be used to enforce constraints on code loaded by +Node.js. + +## Features + +### Error Behavior + +When a policy check fails, Node.js by default will throw an error. +It is possible to change the error behavior to one of a few possibilities +by defining an "onerror" field in a policy manifest. The following values are +available to change the behavior: + +* `"exit"` - will exit the process immediately. 
+ No cleanup code will be allowed to run. +* `"log"` - will log the error at the site of the failure. +* `"throw"` (default) - will throw a JS error at the site of the failure. + +```json +{ + "onerror": "log", + "resources": { + "./app/checked.js": { + "integrity": "sha384-SggXRQHwCG8g+DktYYzxkXRIkTiEYWBHqev0xnpCxYlqMBufKZHAHQM3/boDaI/0" + } + } +} +``` + +### Integrity Checks + +Policy files must use integrity checks with Subresource Integrity strings +compatible with the browser +[integrity attribute](https://www.w3.org/TR/SRI/#the-integrity-attribute) +associated with absolute URLs. + +When using `require()` all resources involved in loading are checked for +integrity if a policy manifest has been specified. If a resource does not match +the integrity listed in the manifest, an error will be thrown. + +An example policy file that would allow loading a file `checked.js`: + +```json +{ + "resources": { + "./app/checked.js": { + "integrity": "sha384-SggXRQHwCG8g+DktYYzxkXRIkTiEYWBHqev0xnpCxYlqMBufKZHAHQM3/boDaI/0" + } + } +} +``` + +Each resource listed in the policy manifest can be of one the following +formats to determine its location: + +1. A [relative url string][] to a resource from the manifest such as `./resource.js`, `../resource.js`, or `/resource.js`. +2. A complete url string to a resource such as `file:///resource.js`. + +When loading resources the entire URL must match including search parameters +and hash fragment. `./a.js?b` will not be used when attempting to load +`./a.js` and vice versa. + +In order to generate integrity strings, a script such as +`printf "sha384-$(cat checked.js | openssl dgst -sha384 -binary | base64)"` +can be used. 
+ + +[relative url string]: https://url.spec.whatwg.org/#relative-url-with-fragment-string diff --git a/doc/node.1 b/doc/node.1 index 09c91f3a41adb1..c5ec38bb34c35d 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -86,6 +86,9 @@ Requires Node.js to be built with .It Fl -experimental-modules Enable experimental ES module support and caching modules. . +.It Fl -experimental-policy +Use the specified file as a security policy. +. .It Fl -experimental-repl-await Enable experimental top-level .Sy await diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js index 1ad2dce622215c..d8313210fd5248 100644 --- a/lib/internal/bootstrap/node.js +++ b/lib/internal/bootstrap/node.js @@ -171,6 +171,28 @@ function startup() { mainThreadSetup.setupChildProcessIpcChannel(); } + // TODO(joyeecheung): move this down further to get better snapshotting + if (getOptionValue('[has_experimental_policy]')) { + process.emitWarning('Policies are experimental.', + 'ExperimentalWarning'); + const experimentalPolicy = getOptionValue('--experimental-policy'); + const { pathToFileURL, URL } = NativeModule.require('url'); + // URL here as it is slightly different parsing + // no bare specifiers for now + let manifestURL; + if (NativeModule.require('path').isAbsolute(experimentalPolicy)) { + manifestURL = new URL(`file:///${experimentalPolicy}`); + } else { + const cwdURL = pathToFileURL(process.cwd()); + cwdURL.pathname += '/'; + manifestURL = new URL(experimentalPolicy, cwdURL); + } + const fs = NativeModule.require('fs'); + const src = fs.readFileSync(manifestURL, 'utf8'); + NativeModule.require('internal/process/policy') + .setup(src, manifestURL.href); + } + const browserGlobals = !process._noBrowserGlobals; if (browserGlobals) { setupGlobalTimeouts(); diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 8cc9bbcd2d74db..12b01ffb9b64f3 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -818,6 +818,28 @@ E('ERR_IPC_CHANNEL_CLOSED', 'Channel closed', 
Error); E('ERR_IPC_DISCONNECTED', 'IPC channel is already disconnected', Error); E('ERR_IPC_ONE_PIPE', 'Child process can have only one IPC pipe', Error); E('ERR_IPC_SYNC_FORK', 'IPC cannot be used with synchronous forks', Error); +E('ERR_MANIFEST_ASSERT_INTEGRITY', + (moduleURL, realIntegrities) => { + let msg = `The content of "${ + moduleURL + }" does not match the expected integrity.`; + if (realIntegrities.size) { + const sri = [...realIntegrities.entries()].map(([alg, dgs]) => { + return `${alg}-${dgs}`; + }).join(' '); + msg += ` Integrities found are: ${sri}`; + } else { + msg += ' The resource was not found in the policy.'; + } + return msg; + }, Error); +E('ERR_MANIFEST_INTEGRITY_MISMATCH', + 'Manifest resource %s has multiple entries but integrity lists do not match', + SyntaxError); +E('ERR_MANIFEST_TDZ', 'Manifest initialization has not yet run', Error); +E('ERR_MANIFEST_UNKNOWN_ONERROR', + 'Manifest specified unknown error behavior "%s".', + SyntaxError); E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error); E('ERR_MISSING_ARGS', (...args) => { @@ -889,6 +911,9 @@ E('ERR_SOCKET_BUFFER_SIZE', E('ERR_SOCKET_CANNOT_SEND', 'Unable to send data', Error); E('ERR_SOCKET_CLOSED', 'Socket is closed', Error); E('ERR_SOCKET_DGRAM_NOT_RUNNING', 'Not running', Error); +E('ERR_SRI_PARSE', + 'Subresource Integrity string %s had an unexpected at %d', + SyntaxError); E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error); E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error); E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index 89e4c56e2e790c..bc1ddbcf241816 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -22,8 +22,8 @@ 'use strict'; const { NativeModule } = require('internal/bootstrap/loaders'); -const util = require('util'); const { pathToFileURL } = 
require('internal/url'); +const util = require('util'); const vm = require('vm'); const assert = require('assert').ok; const fs = require('fs'); @@ -45,6 +45,9 @@ const { getOptionValue } = require('internal/options'); const preserveSymlinks = getOptionValue('--preserve-symlinks'); const preserveSymlinksMain = getOptionValue('--preserve-symlinks-main'); const experimentalModules = getOptionValue('--experimental-modules'); +const manifest = getOptionValue('[has_experimental_policy]') ? + require('internal/process/policy').manifest : + null; const { ERR_INVALID_ARG_VALUE, @@ -164,6 +167,11 @@ function readPackage(requestPath) { return false; } + if (manifest) { + const jsonURL = pathToFileURL(jsonPath); + manifest.assertIntegrity(jsonURL, json); + } + try { return packageMainCache[requestPath] = JSON.parse(json).main; } catch (e) { @@ -675,6 +683,10 @@ function normalizeReferrerURL(referrer) { // the file. // Returns exception, if any. Module.prototype._compile = function(content, filename) { + if (manifest) { + const moduleURL = pathToFileURL(filename); + manifest.assertIntegrity(moduleURL, content); + } content = stripShebang(content); @@ -714,11 +726,14 @@ Module.prototype._compile = function(content, filename) { var depth = requireDepth; if (depth === 0) stat.cache = new Map(); var result; + var exports = this.exports; + var thisValue = exports; + var module = this; if (inspectorWrapper) { - result = inspectorWrapper(compiledWrapper, this.exports, this.exports, - require, this, filename, dirname); + result = inspectorWrapper(compiledWrapper, thisValue, exports, + require, module, filename, dirname); } else { - result = compiledWrapper.call(this.exports, this.exports, require, this, + result = compiledWrapper.call(thisValue, exports, require, module, filename, dirname); } if (depth === 0) stat.cache = null; @@ -735,7 +750,13 @@ Module._extensions['.js'] = function(module, filename) { // Native extension for .json Module._extensions['.json'] = function(module, 
filename) { - var content = fs.readFileSync(filename, 'utf8'); + const content = fs.readFileSync(filename, 'utf8'); + + if (manifest) { + const moduleURL = pathToFileURL(filename); + manifest.assertIntegrity(moduleURL, content); + } + try { module.exports = JSON.parse(stripBOM(content)); } catch (err) { @@ -747,6 +768,12 @@ Module._extensions['.json'] = function(module, filename) { // Native extension for .node Module._extensions['.node'] = function(module, filename) { + if (manifest) { + const content = fs.readFileSync(filename); + const moduleURL = pathToFileURL(filename); + manifest.assertIntegrity(moduleURL, content); + } + // be aware this doesn't use `content` return process.dlopen(module, path.toNamespacedPath(filename)); }; diff --git a/lib/internal/policy/manifest.js b/lib/internal/policy/manifest.js new file mode 100644 index 00000000000000..272abf2457ddc3 --- /dev/null +++ b/lib/internal/policy/manifest.js @@ -0,0 +1,130 @@ +'use strict'; +const { + ERR_MANIFEST_ASSERT_INTEGRITY, + ERR_MANIFEST_INTEGRITY_MISMATCH, + ERR_MANIFEST_UNKNOWN_ONERROR, +} = require('internal/errors').codes; +const debug = require('util').debuglog('policy'); +const SRI = require('internal/policy/sri'); +const { SafeWeakMap } = require('internal/safe_globals'); +const crypto = require('crypto'); +const { Buffer } = require('buffer'); +const { URL } = require('url'); +const { createHash, timingSafeEqual } = crypto; +const HashUpdate = Function.call.bind(crypto.Hash.prototype.update); +const HashDigest = Function.call.bind(crypto.Hash.prototype.digest); +const BufferEquals = Function.call.bind(Buffer.prototype.equals); +const BufferToString = Function.call.bind(Buffer.prototype.toString); +const RegExpTest = Function.call.bind(RegExp.prototype.test); +const { entries } = Object; +const kIntegrities = new SafeWeakMap(); +const kReactions = new SafeWeakMap(); +const kRelativeURLStringPattern = /^\.{0,2}\//; +const { shouldAbortOnUncaughtException } = internalBinding('config'); +const 
{ abort, exit, _rawDebug } = process; +function REACTION_THROW(error) { + throw error; +} +function REACTION_EXIT(error) { + REACTION_LOG(error); + if (shouldAbortOnUncaughtException) { + abort(); + } + exit(1); +} +function REACTION_LOG(error) { + _rawDebug(error.stack); +} +class Manifest { + constructor(obj, manifestURL) { + const integrities = { + __proto__: null, + }; + const reactions = { + __proto__: null, + integrity: REACTION_THROW, + }; + if (obj.onerror) { + const behavior = obj.onerror; + if (behavior === 'throw') { + } else if (behavior === 'exit') { + reactions.integrity = REACTION_EXIT; + } else if (behavior === 'log') { + reactions.integrity = REACTION_LOG; + } else { + throw new ERR_MANIFEST_UNKNOWN_ONERROR(behavior); + } + } + kReactions.set(this, Object.freeze(reactions)); + const manifestEntries = entries(obj.resources); + for (var i = 0; i < manifestEntries.length; i++) { + let url = manifestEntries[i][0]; + const integrity = manifestEntries[i][1].integrity; + if (integrity != null) { + debug(`Manifest contains integrity for url ${url}`); + if (RegExpTest(kRelativeURLStringPattern, url)) { + url = new URL(url, manifestURL).href; + } + const sri = Object.freeze(SRI.parse(integrity)); + if (url in integrities) { + const old = integrities[url]; + let mismatch = false; + if (old.length !== sri.length) { + mismatch = true; + } else { + compare: + for (var sriI = 0; sriI < sri.length; sriI++) { + for (var oldI = 0; oldI < old.length; oldI++) { + if (sri[sriI].algorithm === old[oldI].algorithm && + BufferEquals(sri[sriI].value, old[oldI].value) && + sri[sriI].options === old[oldI].options) { + continue compare; + } + } + mismatch = true; + break compare; + } + } + if (mismatch) { + throw new ERR_MANIFEST_INTEGRITY_MISMATCH(url); + } + } + integrities[url] = sri; + } + } + Object.freeze(integrities); + kIntegrities.set(this, integrities); + Object.freeze(this); + } + assertIntegrity(url, content) { + debug(`Checking integrity of ${url}`); + const 
integrities = kIntegrities.get(this); + const realIntegrities = new Map(); + if (integrities && url in integrities) { + const integrityEntries = integrities[url]; + // Avoid clobbered Symbol.iterator + for (var i = 0; i < integrityEntries.length; i++) { + const { + algorithm, + value: expected + } = integrityEntries[i]; + const hash = createHash(algorithm); + HashUpdate(hash, content); + const digest = HashDigest(hash); + if (digest.length === expected.length && + timingSafeEqual(digest, expected)) { + return true; + } + realIntegrities.set(algorithm, BufferToString(digest, 'base64')); + } + } + const error = new ERR_MANIFEST_ASSERT_INTEGRITY(url, realIntegrities); + kReactions.get(this).integrity(error); + } +} +// Lock everything down to avoid problems even if reference is leaked somehow +Object.setPrototypeOf(Manifest, null); +Object.setPrototypeOf(Manifest.prototype, null); +Object.freeze(Manifest); +Object.freeze(Manifest.prototype); +module.exports = Object.freeze({ Manifest }); diff --git a/lib/internal/policy/sri.js b/lib/internal/policy/sri.js new file mode 100644 index 00000000000000..fff4e066b17451 --- /dev/null +++ b/lib/internal/policy/sri.js @@ -0,0 +1,68 @@ +'use strict'; +// Value of https://w3c.github.io/webappsec-subresource-integrity/#the-integrity-attribute + +// Returns [{algorithm, value (in base64 string), options,}] +const { + ERR_SRI_PARSE +} = require('internal/errors').codes; +const kWSP = '[\\x20\\x09]'; +const kVCHAR = '[\\x21-\\x7E]'; +const kHASH_ALGO = 'sha256|sha384|sha512'; +// Base64 +const kHASH_VALUE = '[A-Za-z0-9+/]+[=]{0,2}'; +const kHASH_EXPRESSION = `(${kHASH_ALGO})-(${kHASH_VALUE})`; +const kOPTION_EXPRESSION = `(${kVCHAR}*)`; +const kHASH_WITH_OPTIONS = `${kHASH_EXPRESSION}(?:[?](${kOPTION_EXPRESSION}))?`; +const kSRIPattern = new RegExp(`(${kWSP}*)(?:${kHASH_WITH_OPTIONS})`, 'g'); +const { freeze } = Object; +Object.seal(kSRIPattern); +const kAllWSP = new RegExp(`^${kWSP}*$`); +Object.seal(kAllWSP); +const RegExpExec = 
Function.call.bind(RegExp.prototype.exec); +const RegExpTest = Function.call.bind(RegExp.prototype.test); +const StringSlice = Function.call.bind(String.prototype.slice); +const { + Buffer: { + from: BufferFrom + } +} = require('buffer'); +const { defineProperty } = Object; +const parse = (str) => { + kSRIPattern.lastIndex = 0; + let prevIndex = 0; + let match = RegExpExec(kSRIPattern, str); + const entries = []; + while (match) { + if (match.index !== prevIndex) { + throw new ERR_SRI_PARSE(str, prevIndex); + } + if (entries.length > 0) { + if (match[1] === '') { + throw new ERR_SRI_PARSE(str, prevIndex); + } + } + // Avoid setters being fired + defineProperty(entries, entries.length, { + enumerable: true, + configurable: true, + value: freeze({ + __proto__: null, + algorithm: match[2], + value: BufferFrom(match[3], 'base64'), + options: match[4] === undefined ? null : match[4], + }) + }); + prevIndex = prevIndex + match[0].length; + match = RegExpExec(kSRIPattern, str); + } + if (prevIndex !== str.length) { + if (!RegExpTest(kAllWSP, StringSlice(str, prevIndex))) { + throw new ERR_SRI_PARSE(str, prevIndex); + } + } + return entries; +}; + +module.exports = { + parse, +}; diff --git a/lib/internal/process/policy.js b/lib/internal/process/policy.js new file mode 100644 index 00000000000000..f5ca4eeb07a3e0 --- /dev/null +++ b/lib/internal/process/policy.js @@ -0,0 +1,33 @@ +'use strict'; + +const { + ERR_MANIFEST_TDZ, +} = require('internal/errors').codes; +const { Manifest } = require('internal/policy/manifest'); +let manifest; +module.exports = Object.freeze({ + __proto__: null, + setup(src, url) { + if (src === null) { + manifest = null; + return; + } + const json = JSON.parse(src, (_, o) => { + if (o && typeof o === 'object') { + Reflect.setPrototypeOf(o, null); + Object.freeze(o); + } + return o; + }); + manifest = new Manifest(json, url); + }, + get manifest() { + if (typeof manifest === 'undefined') { + throw new ERR_MANIFEST_TDZ(); + } + return manifest; + }, 
+ assertIntegrity(moduleURL, content) { + this.manifest.matchesIntegrity(moduleURL, content); + } +}); diff --git a/lib/internal/safe_globals.js b/lib/internal/safe_globals.js index 31de4137f0ad53..109409d535495d 100644 --- a/lib/internal/safe_globals.js +++ b/lib/internal/safe_globals.js @@ -20,5 +20,6 @@ const makeSafe = (unsafe, safe) => { }; exports.SafeMap = makeSafe(Map, class SafeMap extends Map {}); +exports.SafeWeakMap = makeSafe(WeakMap, class SafeWeakMap extends WeakMap {}); exports.SafeSet = makeSafe(Set, class SafeSet extends Set {}); exports.SafePromise = makeSafe(Promise, class SafePromise extends Promise {}); diff --git a/node.gyp b/node.gyp index 17e4b27dd6e09a..d5ca93b4b7ef90 100644 --- a/node.gyp +++ b/node.gyp @@ -141,6 +141,8 @@ 'lib/internal/safe_globals.js', 'lib/internal/net.js', 'lib/internal/options.js', + 'lib/internal/policy/sri.js', + 'lib/internal/policy/manifest.js', 'lib/internal/print_help.js', 'lib/internal/priority_queue.js', 'lib/internal/process/esm_loader.js', @@ -148,6 +150,7 @@ 'lib/internal/process/main_thread_only.js', 'lib/internal/process/next_tick.js', 'lib/internal/process/per_thread.js', + 'lib/internal/process/policy.js', 'lib/internal/process/promises.js', 'lib/internal/process/stdio.js', 'lib/internal/process/warning.js', diff --git a/src/node_options.cc b/src/node_options.cc index f5011fbe4bcb66..dcbeac97f689a8 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -102,6 +102,15 @@ EnvironmentOptionsParser::EnvironmentOptionsParser() { "experimental ES Module support and caching modules", &EnvironmentOptions::experimental_modules, kAllowedInEnvironment); + AddOption("[has_experimental_policy]", + "", + &EnvironmentOptions::has_experimental_policy); + AddOption("--experimental-policy", + "use the specified file as a " + "security policy", + &EnvironmentOptions::experimental_policy, + kAllowedInEnvironment); + Implies("--experimental-policy", "[has_experimental_policy]"); 
AddOption("--experimental-repl-await", "experimental await keyword support in REPL", &EnvironmentOptions::experimental_repl_await, diff --git a/src/node_options.h b/src/node_options.h index aae3743306d50c..47e300e8916ddc 100644 --- a/src/node_options.h +++ b/src/node_options.h @@ -94,6 +94,8 @@ class EnvironmentOptions : public Options { public: bool abort_on_uncaught_exception = false; bool experimental_modules = false; + std::string experimental_policy; + bool has_experimental_policy; bool experimental_repl_await = false; bool experimental_vm_modules = false; bool expose_internals = false; diff --git a/test/parallel/test-policy-integrity.js b/test/parallel/test-policy-integrity.js new file mode 100644 index 00000000000000..5c1ea4fc4eed64 --- /dev/null +++ b/test/parallel/test-policy-integrity.js @@ -0,0 +1,297 @@ +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const tmpdir = require('../common/tmpdir'); +const assert = require('assert'); +const { spawnSync } = require('child_process'); +const crypto = require('crypto'); +const fs = require('fs'); +const path = require('path'); +const { pathToFileURL } = require('url'); + +tmpdir.refresh(); + +function hash(algo, body) { + const h = crypto.createHash(algo); + h.update(body); + return h.digest('base64'); +} + +const policyFilepath = path.join(tmpdir.path, 'policy'); + +const packageFilepath = path.join(tmpdir.path, 'package.json'); +const packageURL = pathToFileURL(packageFilepath); +const packageBody = '{"main": "dep.js"}'; +const policyToPackageRelativeURLString = `./${ + path.relative(path.dirname(policyFilepath), packageFilepath) +}`; + +const parentFilepath = path.join(tmpdir.path, 'parent.js'); +const parentURL = pathToFileURL(parentFilepath); +const parentBody = 'require(\'./dep.js\')'; + +const depFilepath = path.join(tmpdir.path, 'dep.js'); +const depURL = pathToFileURL(depFilepath); +const depBody = ''; +const policyToDepRelativeURLString 
= `./${ + path.relative(path.dirname(policyFilepath), depFilepath) +}`; + +fs.writeFileSync(parentFilepath, parentBody); +fs.writeFileSync(depFilepath, depBody); + +const tmpdirURL = pathToFileURL(tmpdir.path); +if (!tmpdirURL.pathname.endsWith('/')) { + tmpdirURL.pathname += '/'; +} +function test({ + shouldFail = false, + entry, + onerror, + resources = {} +}) { + const manifest = { + onerror, + resources: {} + }; + for (const [url, { body, match }] of Object.entries(resources)) { + manifest.resources[url] = { + integrity: `sha256-${hash('sha256', match ? body : body + '\n')}` + }; + fs.writeFileSync(new URL(url, tmpdirURL.href), body); + } + fs.writeFileSync(policyFilepath, JSON.stringify(manifest, null, 2)); + const { status } = spawnSync(process.execPath, [ + '--experimental-policy', policyFilepath, entry + ]); + if (shouldFail) { + assert.notStrictEqual(status, 0); + } else { + assert.strictEqual(status, 0); + } +} + +const { status } = spawnSync(process.execPath, [ + '--experimental-policy', policyFilepath, + '--experimental-policy', policyFilepath +], { + stdio: 'pipe' +}); +assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); + +test({ + shouldFail: true, + entry: parentFilepath, + resources: { + } +}); +test({ + shouldFail: false, + entry: parentFilepath, + onerror: 'log', +}); +test({ + shouldFail: true, + entry: parentFilepath, + onerror: 'exit', +}); +test({ + shouldFail: true, + entry: parentFilepath, + onerror: 'throw', +}); +test({ + shouldFail: true, + entry: parentFilepath, + onerror: 'unknown-onerror-value', +}); +test({ + shouldFail: true, + entry: path.dirname(packageFilepath), + resources: { + } +}); +test({ + shouldFail: true, + entry: path.dirname(packageFilepath), + resources: { + [depURL]: { + body: depBody, + match: true, + } + } +}); +test({ + shouldFail: false, + entry: path.dirname(packageFilepath), + onerror: 'log', + resources: { + [packageURL]: { + body: packageBody, + match: false, + }, + [depURL]: { + body: 
depBody, + match: true, + } + } +}); +test({ + shouldFail: true, + entry: path.dirname(packageFilepath), + resources: { + [packageURL]: { + body: packageBody, + match: false, + }, + [depURL]: { + body: depBody, + match: true, + } + } +}); +test({ + shouldFail: true, + entry: path.dirname(packageFilepath), + resources: { + [packageURL]: { + body: packageBody, + match: true, + }, + [depURL]: { + body: depBody, + match: false, + } + } +}); +test({ + shouldFail: false, + entry: path.dirname(packageFilepath), + resources: { + [packageURL]: { + body: packageBody, + match: true, + }, + [depURL]: { + body: depBody, + match: true, + } + } +}); +test({ + shouldFail: false, + entry: parentFilepath, + resources: { + [parentURL]: { + body: parentBody, + match: true, + }, + [depURL]: { + body: depBody, + match: true, + } + } +}); +test({ + shouldFail: true, + entry: parentFilepath, + resources: { + [parentURL]: { + body: parentBody, + match: false, + }, + [depURL]: { + body: depBody, + match: true, + } + } +}); +test({ + shouldFail: true, + entry: parentFilepath, + resources: { + [parentURL]: { + body: parentBody, + match: true, + }, + [depURL]: { + body: depBody, + match: false, + } + } +}); +test({ + shouldFail: true, + entry: parentFilepath, + resources: { + [parentURL]: { + body: parentBody, + match: true, + } + } +}); +test({ + shouldFail: false, + entry: depFilepath, + resources: { + [depURL]: { + body: depBody, + match: true, + } + } +}); +test({ + shouldFail: false, + entry: depFilepath, + resources: { + [policyToDepRelativeURLString]: { + body: depBody, + match: true, + } + } +}); +test({ + shouldFail: true, + entry: depFilepath, + resources: { + [policyToDepRelativeURLString]: { + body: depBody, + match: false, + } + } +}); +test({ + shouldFail: false, + entry: depFilepath, + resources: { + [policyToDepRelativeURLString]: { + body: depBody, + match: true, + }, + [depURL]: { + body: depBody, + match: true, + } + } +}); +test({ + shouldFail: true, + entry: depFilepath, + 
resources: { + [policyToPackageRelativeURLString]: { + body: packageBody, + match: true, + }, + [packageURL]: { + body: packageBody, + match: true, + }, + [depURL]: { + body: depBody, + match: false, + } + } +}); From 219b1b8ce1a5e1d312dd11895213fe3c7d67437c Mon Sep 17 00:00:00 2001 From: Yael Hermon Date: Fri, 4 Jan 2019 20:02:25 +0200 Subject: [PATCH 56/88] worker: enable passing command line flags This PR adds the ability to provide Workers with their own execArgv flags in replacement of the main thread's execArgv. Only per-Isolate/per-Environment options are allowed. Per-Process options and V8 flags are not allowed. Passing an empty execArgv array will reset per-Isolate and per-Environment options of the Worker to their defaults. If execArgv option is not passed, the Worker will get the same flags as the main thread. Usage example: ``` const worker = new Worker(__filename, { execArgv: ['--trace-warnings'], }); ``` PR-URL: https://github.com/nodejs/node/pull/25467 Reviewed-By: Anna Henningsen Reviewed-By: Benjamin Gruenbaum Reviewed-By: Joyee Cheung --- doc/api/errors.md | 6 ++ doc/api/worker_threads.md | 9 ++- lib/internal/errors.js | 3 + lib/internal/worker.js | 13 +++- src/env-inl.h | 5 ++ src/env.h | 1 + src/node_worker.cc | 67 ++++++++++++++++++- src/node_worker.h | 5 +- test/parallel/test-internal-errors.js | 10 +++ test/parallel/test-worker-execargv-invalid.js | 35 ++++++++++ test/parallel/test-worker-execargv.js | 22 ++++++ 11 files changed, 167 insertions(+), 9 deletions(-) create mode 100644 test/parallel/test-worker-execargv-invalid.js create mode 100644 test/parallel/test-worker-execargv.js diff --git a/doc/api/errors.md b/doc/api/errors.md index aa67893fc8f431..4a8222715143d4 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1903,6 +1903,12 @@ The fulfilled value of a linking promise is not a `vm.SourceTextModule` object. The current module's status does not allow for this operation. 
The specific meaning of the error depends on the specific function. + +### ERR_WORKER_INVALID_EXEC_ARGV + +The `execArgv` option passed to the `Worker` constructor contains +invalid flags. + ### ERR_WORKER_PATH diff --git a/doc/api/worker_threads.md b/doc/api/worker_threads.md index 78f35412c32a65..05cafa9cb178e7 100644 --- a/doc/api/worker_threads.md +++ b/doc/api/worker_threads.md @@ -316,13 +316,16 @@ if (isMainThread) { occur as described in the [HTML structured clone algorithm][], and an error will be thrown if the object cannot be cloned (e.g. because it contains `function`s). - * stdin {boolean} If this is set to `true`, then `worker.stdin` will + * `stdin` {boolean} If this is set to `true`, then `worker.stdin` will provide a writable stream whose contents will appear as `process.stdin` inside the Worker. By default, no data is provided. - * stdout {boolean} If this is set to `true`, then `worker.stdout` will + * `stdout` {boolean} If this is set to `true`, then `worker.stdout` will not automatically be piped through to `process.stdout` in the parent. - * stderr {boolean} If this is set to `true`, then `worker.stderr` will + * `stderr` {boolean} If this is set to `true`, then `worker.stderr` will not automatically be piped through to `process.stderr` in the parent. + * `execArgv` {string[]} List of node CLI options passed to the worker. + V8 options (such as `--max-old-space-size`) and options that affect the + process (such as `--title`) are not supported. ### Event: 'error' + +Location at which the report will be generated. + +### `--diagnostic-report-filename=filename` + + +Name of the file to which the report will be written. + +### `--diagnostic-report-on-fatalerror` + + +Enables the report to be triggered on fatal errors (internal errors within +the Node.js runtime such as out of memory) that lead to termination of the +application, if `--experimental-report` is enabled. 
Useful to inspect various +diagnostic data elements such as heap, stack, event loop state, resource +consumption etc. to reason about the fatal error. + +### `--diagnostic-report-on-signal` + + +Enables report to be generated upon receiving the specified (or predefined) +signal to the running Node.js process, if `--experimental-report` is enabled. +The signal to trigger the report is specified through `--diagnostic-report-signal`. + +### `--diagnostic-report-signal=signal` + + +Sets or resets the signal for report generation (not supported on Windows). +Default signal is `SIGUSR2`. + +### `--diagnostic-report-uncaught-exception` + + +Enables report to be generated on un-caught exceptions, if +`--experimental-report` is enabled. Useful when inspecting JavaScript stack in +conjunction with native stack and other runtime environment data. + +### `--diagnostic-report-verbose` + + +Flag that enables additional information to be printed during report generation. + ### `--enable-fips` + +Enable experimental diagnostic report feature. + ### `--experimental-vm-modules` + +* `err` {Object} +* Returns: {Object} Returns the diagnostics report as an `Object`. + +Generates a JSON-formatted diagnostic report summary of the running process. +The report includes JavaScript and native stack traces, heap statistics, +platform information, resource usage etc. + +```js +const data = process.report.getReport(); +console.log(data); +``` + +Additional documentation on diagnostic report is available +at [report documentation][]. + +### process.report.setDiagnosticReportOptions([options]); + + +Set the runtime configuration of diagnostic report data capture. Upon invocation +of this function, the runtime is reconfigured to generate report based on +the new input. + +* `options` {Object} + * `events` {string[]} + * `signal`: generate a report in response to a signal raised on the process. + * `exception`: generate a report on unhandled exceptions. 
+ * `fatalerror`: generate a report on internal fault + (such as out of memory errors or native assertions). + * `signal` {string} sets or resets the signal for report generation + (not supported on Windows). **Default:** `'SIGUSR2'`. + * `filename` {string} name of the file to which the report will be written. + * `path` {string} directory at which the report will be generated. + **Default:** the current working directory of the Node.js process. + * `verbose` {boolean} flag that controls additional verbose information on + report generation. **Default:** `false`. + +```js +// Trigger a report upon uncaught exceptions or fatal errors. +process.report.setDiagnosticReportOptions( + { events: ['exception', 'fatalerror'] }); + +// Change the default path and filename of the report. +process.report.setDiagnosticReportOptions( + { filename: 'foo.json', path: '/home' }); + +// Produce the report onto stdout, when generated. Special meaning is attached +// to `stdout` and `stderr`. Usage of these will result in report being written +// to the associated standard streams. URLs are not supported. +process.report.setDiagnosticReportOptions( + { filename: 'stdout' }); + +// Enable verbose option on report generation. +process.report.setDiagnosticReportOptions( + { verbose: true }); + +``` + +Signal based report generation is not supported on Windows. + +Additional documentation on diagnostic report is available +at [report documentation][]. + +### process.report.triggerReport([filename][, err]) + + +* `filename` {string} The file to write into. The `filename` should be +a relative path, that will be appended to the directory specified by +`process.report.setDiagnosticReportOptions`, or current working directory +of the Node.js process, if unspecified. +* `err` {Object} A custom object which will be used for reporting +JavaScript stack. + +* Returns: {string} Returns the filename of the generated report. 
+ +If both `filename` and `err` object are passed to `triggerReport()` the +`err` object must be the second parameter. + +Triggers and produces the report (a JSON-formatted file with the internal +state of Node.js runtime) synchronously, and writes into a file. + +```js +process.report.triggerReport(); +``` + +When a report is triggered, start and end messages are issued to stderr and the +filename of the report is returned to the caller. The default filename includes +the date, time, PID and a sequence number. Alternatively, a filename and error +object can be specified as parameters on the `triggerReport()` call. + +Additional documentation on diagnostic report is available +at [report documentation][]. + ## process.send(message[, sendHandle[, options]][, callback]) Use the specified file as a security policy. diff --git a/doc/api/policy.md b/doc/api/policy.md index ee8109efd651f0..dec8db886ff8e6 100644 --- a/doc/api/policy.md +++ b/doc/api/policy.md @@ -1,6 +1,6 @@ # Policies - + > Stability: 1 - Experimental From 2b1858298a6c8f239eb0c4efe568c2332d25a3be Mon Sep 17 00:00:00 2001 From: Gabriel Schulhof Date: Thu, 17 Jan 2019 22:09:48 -0800 Subject: [PATCH 64/88] n-api: mark thread-safe function as stable Fixes: https://github.com/nodejs/node/issues/24249 PR-URL: https://github.com/nodejs/node/pull/25556 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Michael Dawson --- doc/api/n-api.md | 22 +++++++++---------- src/js_native_api.h | 2 +- src/node_api.h | 4 ++-- src/node_api_types.h | 12 +++++----- src/node_version.h | 2 +- test/js-native-api/test_general/test.js | 4 ++-- .../test_threadsafe_function/binding.c | 1 - 7 files changed, 23 insertions(+), 24 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index fa30b8290f325d..2b061167f6cf4c 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -282,7 +282,7 @@ This is an opaque pointer that is used to represent a JavaScript value. 
### napi_threadsafe_function -> Stability: 1 - Experimental +> Stability: 2 - Stable This is an opaque pointer that represents a JavaScript function which can be called asynchronously from multiple threads via @@ -290,7 +290,7 @@ called asynchronously from multiple threads via ### napi_threadsafe_function_release_mode -> Stability: 1 - Experimental +> Stability: 2 - Stable A value to be given to `napi_release_threadsafe_function()` to indicate whether the thread-safe function is to be closed immediately (`napi_tsfn_abort`) or @@ -305,7 +305,7 @@ typedef enum { ### napi_threadsafe_function_call_mode -> Stability: 1 - Experimental +> Stability: 2 - Stable A value to be given to `napi_call_threadsafe_function()` to indicate whether the call should block whenever the queue associated with the thread-safe @@ -400,7 +400,7 @@ typedef void (*napi_async_complete_callback)(napi_env env, #### napi_threadsafe_function_call_js -> Stability: 1 - Experimental +> Stability: 2 - Stable Function pointer used with asynchronous thread-safe function calls. The callback will be called on the main thread. Its purpose is to use a data item arriving @@ -4526,7 +4526,7 @@ prevent the event loop from exiting. The APIs `napi_ref_threadsafe_function` and ### napi_create_threadsafe_function -> Stability: 1 - Experimental +> Stability: 2 - Stable Location at which the report will be generated. ### `--diagnostic-report-filename=filename` Name of the file to which the report will be written. ### `--diagnostic-report-on-fatalerror` Enables the report to be triggered on fatal errors (internal errors within @@ -102,7 +102,7 @@ consumption etc. to reason about the fatal error. ### `--diagnostic-report-on-signal` Enables report to be generated upon receiving the specified (or predefined) @@ -111,7 +111,7 @@ The signal to trigger the report is specified through `--diagnostic-report-signa ### `--diagnostic-report-signal=signal` Sets or resets the signal for report generation (not supported on Windows). 
@@ -119,7 +119,7 @@ Default signal is `SIGUSR2`. ### `--diagnostic-report-uncaught-exception` Enables report to be generated on un-caught exceptions, if @@ -128,7 +128,7 @@ conjunction with native stack and other runtime environment data. ### `--diagnostic-report-verbose` Flag that enables additional information to be printed during report generation. @@ -150,7 +150,7 @@ Enable experimental ES module support and caching modules. ### `--experimental-policy` Use the specified file as a security policy. @@ -164,7 +164,7 @@ Enable experimental top-level `await` keyword support in REPL. ### `--experimental-report` Enable experimental diagnostic report feature. diff --git a/doc/api/policy.md b/doc/api/policy.md index dec8db886ff8e6..fce290c049cd27 100644 --- a/doc/api/policy.md +++ b/doc/api/policy.md @@ -1,6 +1,6 @@ # Policies - + > Stability: 1 - Experimental diff --git a/doc/api/process.md b/doc/api/process.md index c8c298c1c6b133..60d3d1af1b66b0 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -1662,7 +1662,7 @@ relied upon to exist. ### process.report.getReport([err]) * `err` {Object} @@ -1682,7 +1682,7 @@ at [report documentation][]. ### process.report.setDiagnosticReportOptions([options]); Set the runtime configuration of diagnostic report data capture. Upon invocation @@ -1731,7 +1731,7 @@ at [report documentation][]. ### process.report.triggerReport([filename][, err]) * `filename` {string} The file to write into. The `filename` should be diff --git a/doc/api/tls.md b/doc/api/tls.md index 7819475ffeb745..c751186f828cf9 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -1023,7 +1023,7 @@ being issued by trusted CA (`options.ca`).