From 9788e96836243e2439867b81c6d6bf522115a0f5 Mon Sep 17 00:00:00 2001 From: Rami Moshe Date: Sat, 23 Sep 2017 01:40:37 -0400 Subject: [PATCH 01/28] querystring: convert to using internal/errors PR-URL: https://github.com/nodejs/node/pull/15565 Reviewed-By: Refael Ackermann Reviewed-By: James M Snell Reviewed-By: Joyee Cheung Reviewed-By: Gireesh Punathil --- doc/api/errors.md | 5 +++++ lib/internal/errors.js | 2 ++ lib/querystring.js | 12 +++++++----- test/parallel/test-querystring-escape.js | 13 ++++++++++--- test/parallel/test-querystring.js | 13 +++++++++---- 5 files changed, 33 insertions(+), 12 deletions(-) diff --git a/doc/api/errors.md b/doc/api/errors.md index fcc39ab2ac8..498fc856ad5 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1116,6 +1116,11 @@ API] [`URLSearchParams` constructor][`new URLSearchParams(iterable)`] does not represent a `[name, value]` tuple – that is, if an element is not iterable, or does not consist of exactly two elements. + +### ERR_INVALID_URI + +Used when an invalid URI is passed. + ### ERR_INVALID_URL diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 9bf0951eece..fc6b11a40e9 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -126,6 +126,7 @@ module.exports = exports = { Error: makeNodeError(Error), TypeError: makeNodeError(TypeError), RangeError: makeNodeError(RangeError), + URIError: makeNodeError(URIError), AssertionError, E // This is exported only to facilitate testing. 
}; @@ -287,6 +288,7 @@ E('ERR_INVALID_SYNC_FORK_INPUT', 'Asynchronous forks do not support Buffer, Uint8Array or string input: %s'); E('ERR_INVALID_THIS', 'Value of "this" must be of type %s'); E('ERR_INVALID_TUPLE', '%s must be an iterable %s tuple'); +E('ERR_INVALID_URI', 'URI malformed'); E('ERR_INVALID_URL', 'Invalid URL: %s'); E('ERR_INVALID_URL_SCHEME', (expected) => `The URL must be ${oneOf(expected, 'scheme')}`); diff --git a/lib/querystring.js b/lib/querystring.js index 936b7463c9e..ec6ad51a891 100644 --- a/lib/querystring.js +++ b/lib/querystring.js @@ -24,6 +24,7 @@ 'use strict'; const { Buffer } = require('buffer'); +const errors = require('internal/errors'); const { hexTable, isHexTable @@ -174,11 +175,12 @@ function qsEscape(str) { } // Surrogate pair ++i; - var c2; - if (i < str.length) - c2 = str.charCodeAt(i) & 0x3FF; - else - throw new URIError('URI malformed'); + + if (i >= str.length) + throw new errors.URIError('ERR_INVALID_URI'); + + var c2 = str.charCodeAt(i) & 0x3FF; + lastPos = i + 1; c = 0x10000 + (((c & 0x3FF) << 10) | c2); out += hexTable[0xF0 | (c >> 18)] + diff --git a/test/parallel/test-querystring-escape.js b/test/parallel/test-querystring-escape.js index fd20a8424fe..18bece1ab13 100644 --- a/test/parallel/test-querystring-escape.js +++ b/test/parallel/test-querystring-escape.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const qs = require('querystring'); @@ -12,8 +12,15 @@ assert.deepStrictEqual(qs.escape('Ŋōđĕ'), '%C5%8A%C5%8D%C4%91%C4%95'); assert.deepStrictEqual(qs.escape('testŊōđĕ'), 'test%C5%8A%C5%8D%C4%91%C4%95'); assert.deepStrictEqual(qs.escape(`${String.fromCharCode(0xD800 + 1)}test`), '%F0%90%91%B4est'); -assert.throws(() => qs.escape(String.fromCharCode(0xD800 + 1)), - /^URIError: URI malformed$/); + +common.expectsError( + () => qs.escape(String.fromCharCode(0xD800 + 1)), + { + code: 'ERR_INVALID_URI', + type: URIError, + message: 'URI 
malformed' + } +); // using toString for objects assert.strictEqual( diff --git a/test/parallel/test-querystring.js b/test/parallel/test-querystring.js index ed0138e0793..400431fd2cb 100644 --- a/test/parallel/test-querystring.js +++ b/test/parallel/test-querystring.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const inspect = require('util').inspect; @@ -271,9 +271,14 @@ qsWeirdObjects.forEach(function(testCase) { }); // invalid surrogate pair throws URIError -assert.throws(function() { - qs.stringify({ foo: '\udc00' }); -}, /^URIError: URI malformed$/); +common.expectsError( + () => qs.stringify({ foo: '\udc00' }), + { + code: 'ERR_INVALID_URI', + type: URIError, + message: 'URI malformed' + } +); // coerce numbers to string assert.strictEqual('foo=0', qs.stringify({ foo: 0 })); From 4eaf1fc0f8e93e54e43bc7d9fed4436cd878a28f Mon Sep 17 00:00:00 2001 From: nhoel Date: Fri, 6 Oct 2017 11:20:18 -0700 Subject: [PATCH 02/28] test: include values in assertion messages MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Specifically for the test-crypto-hash.js test file. 
PR-URL: https://github.com/nodejs/node/pull/15996 Reviewed-By: James M Snell Reviewed-By: Tobias Nießen Reviewed-By: Joyee Cheung Reviewed-By: Gireesh Punathil --- test/parallel/test-crypto-hash.js | 32 ++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/test/parallel/test-crypto-hash.js b/test/parallel/test-crypto-hash.js index 82a76131fb9..f55a6768900 100644 --- a/test/parallel/test-crypto-hash.js +++ b/test/parallel/test-crypto-hash.js @@ -9,6 +9,9 @@ const fs = require('fs'); const fixtures = require('../common/fixtures'); +let cryptoType; +let digest; + // Test hashing const a1 = crypto.createHash('sha1').update('Test123').digest('hex'); const a2 = crypto.createHash('sha256').update('Test123').digest('base64'); @@ -37,16 +40,29 @@ a8.end(); a8 = a8.read(); if (!common.hasFipsCrypto) { - const a0 = crypto.createHash('md5').update('Test123').digest('latin1'); + cryptoType = 'md5'; + digest = 'latin1'; + const a0 = crypto.createHash(cryptoType).update('Test123').digest(digest); assert.strictEqual( a0, 'h\u00ea\u00cb\u0097\u00d8o\fF!\u00fa+\u000e\u0017\u00ca\u00bd\u008c', - 'Test MD5 as latin1' + `${cryptoType} with ${digest} digest failed to evaluate to expected hash` ); } -assert.strictEqual(a1, '8308651804facb7b9af8ffc53a33a22d6a1c8ac2', 'Test SHA1'); -assert.strictEqual(a2, '2bX1jws4GYKTlxhloUB09Z66PoJZW+y+hq5R8dnx9l4=', - 'Test SHA256 as base64'); +cryptoType = 'md5'; +digest = 'hex'; +assert.strictEqual( + a1, + '8308651804facb7b9af8ffc53a33a22d6a1c8ac2', + `${cryptoType} with ${digest} digest failed to evaluate to expected hash`); +cryptoType = 'sha256'; +digest = 'base64'; +assert.strictEqual( + a2, + '2bX1jws4GYKTlxhloUB09Z66PoJZW+y+hq5R8dnx9l4=', + `${cryptoType} with ${digest} digest failed to evaluate to expected hash`); +cryptoType = 'sha512'; +digest = 'latin1'; assert.deepStrictEqual( a3, Buffer.from( @@ -56,11 +72,13 @@ assert.deepStrictEqual( '\u00d7\u00d6\u00a2\u00a8\u0085\u00e3<\u0083\u009c\u0093' 
+ '\u00c2\u0006\u00da0\u00a1\u00879(G\u00ed\'', 'latin1'), - 'Test SHA512 as assumed buffer'); + `${cryptoType} with ${digest} digest failed to evaluate to expected hash`); +cryptoType = 'sha1'; +digest = 'hex'; assert.deepStrictEqual( a4, Buffer.from('8308651804facb7b9af8ffc53a33a22d6a1c8ac2', 'hex'), - 'Test SHA1' + `${cryptoType} with ${digest} digest failed to evaluate to expected hash` ); // stream interface should produce the same result. From 737239a053bb8b1f368625cceb72c3c467335481 Mon Sep 17 00:00:00 2001 From: Jon Moss Date: Wed, 25 Oct 2017 20:55:38 -0400 Subject: [PATCH 03/28] doc: fix CHANGELOG_V8 indentation Should not be nested under the bullet point. Headings need to be at the root level. PR-URL: https://github.com/nodejs/node/pull/16507 Refs: https://github.com/nodejs/nodejs.org/pull/1425 Reviewed-By: Anatoli Papirovski Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: Yuta Hiroto Reviewed-By: Gireesh Punathil --- doc/changelogs/CHANGELOG_V8.md | 46 +++++++++++++++++----------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/doc/changelogs/CHANGELOG_V8.md b/doc/changelogs/CHANGELOG_V8.md index e7efc8df517..f219c1ee7bd 100644 --- a/doc/changelogs/CHANGELOG_V8.md +++ b/doc/changelogs/CHANGELOG_V8.md @@ -35,29 +35,29 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) - - ## 2017-10-25, Version 8.8.1 (Current), @cjihrig - - ### Notable Changes - - * **net**: - - Fix timeout with null handle issue. 
This is a regression in Node 8.8.0 [#16489](https://github.com/nodejs/node/pull/16489) - - ### Commits - - * [[`db8c92fb42`](https://github.com/nodejs/node/commit/db8c92fb42)] - **doc**: fix spelling in v8.8.0 changelog (Myles Borins) [#16477](https://github.com/nodejs/node/pull/16477) - * [[`c8396b8370`](https://github.com/nodejs/node/commit/c8396b8370)] - **doc**: remove loader hooks from unsupported features (Teppei Sato) [#16465](https://github.com/nodejs/node/pull/16465) - * [[`2b0bb57055`](https://github.com/nodejs/node/commit/2b0bb57055)] - **doc**: fix wrong URL (Jon Moss) [#16470](https://github.com/nodejs/node/pull/16470) - * [[`9ffc32974e`](https://github.com/nodejs/node/commit/9ffc32974e)] - **doc**: fix typo in changelog for 8.8.0 (Alec Perkins) [#16462](https://github.com/nodejs/node/pull/16462) - * [[`7facaa5031`](https://github.com/nodejs/node/commit/7facaa5031)] - **doc**: fix missing newline character (Daijiro Wachi) [#16447](https://github.com/nodejs/node/pull/16447) - * [[`16eb7d3a5f`](https://github.com/nodejs/node/commit/16eb7d3a5f)] - **doc**: fix doc styles (Daijiro Wachi) [#16385](https://github.com/nodejs/node/pull/16385) - * [[`99fdc1d04f`](https://github.com/nodejs/node/commit/99fdc1d04f)] - **doc**: add recommendations for first timers (Refael Ackermann) [#16350](https://github.com/nodejs/node/pull/16350) - * [[`6fbef7f350`](https://github.com/nodejs/node/commit/6fbef7f350)] - **doc**: fix typo in zlib.md (Luigi Pinca) [#16480](https://github.com/nodejs/node/pull/16480) - * [[`655e017e40`](https://github.com/nodejs/node/commit/655e017e40)] - **net**: fix timeout with null handle (Anatoli Papirovski) [#16489](https://github.com/nodejs/node/pull/16489) - * [[`7fad10cc7e`](https://github.com/nodejs/node/commit/7fad10cc7e)] - **test**: make test-v8-serdes work without stdin (Anna Henningsen) - * 
[[`12dc06e3e1`](https://github.com/nodejs/node/commit/12dc06e3e1)] - **test**: call toLowerCase on the resolved module (Daniel Bevenius) [#16486](https://github.com/nodejs/node/pull/16486) - * [[`10894c3835`](https://github.com/nodejs/node/commit/10894c3835)] - **test**: allow for different nsswitch.conf settings (Daniel Bevenius) [#16378](https://github.com/nodejs/node/pull/16378) - * [[`2a53165aa0`](https://github.com/nodejs/node/commit/2a53165aa0)] - **test**: add missing assertion (cjihrig) [#15519](https://github.com/nodejs/node/pull/15519) + +## 2017-10-25, Version 8.8.1 (Current), @cjihrig + +### Notable Changes + +* **net**: + - Fix timeout with null handle issue. This is a regression in Node 8.8.0 [#16489](https://github.com/nodejs/node/pull/16489) + +### Commits + +* [[`db8c92fb42`](https://github.com/nodejs/node/commit/db8c92fb42)] - **doc**: fix spelling in v8.8.0 changelog (Myles Borins) [#16477](https://github.com/nodejs/node/pull/16477) +* [[`c8396b8370`](https://github.com/nodejs/node/commit/c8396b8370)] - **doc**: remove loader hooks from unsupported features (Teppei Sato) [#16465](https://github.com/nodejs/node/pull/16465) +* [[`2b0bb57055`](https://github.com/nodejs/node/commit/2b0bb57055)] - **doc**: fix wrong URL (Jon Moss) [#16470](https://github.com/nodejs/node/pull/16470) +* [[`9ffc32974e`](https://github.com/nodejs/node/commit/9ffc32974e)] - **doc**: fix typo in changelog for 8.8.0 (Alec Perkins) [#16462](https://github.com/nodejs/node/pull/16462) +* [[`7facaa5031`](https://github.com/nodejs/node/commit/7facaa5031)] - **doc**: fix missing newline character (Daijiro Wachi) [#16447](https://github.com/nodejs/node/pull/16447) +* [[`16eb7d3a5f`](https://github.com/nodejs/node/commit/16eb7d3a5f)] - **doc**: fix doc styles (Daijiro Wachi) [#16385](https://github.com/nodejs/node/pull/16385) +* 
[[`99fdc1d04f`](https://github.com/nodejs/node/commit/99fdc1d04f)] - **doc**: add recommendations for first timers (Refael Ackermann) [#16350](https://github.com/nodejs/node/pull/16350) +* [[`6fbef7f350`](https://github.com/nodejs/node/commit/6fbef7f350)] - **doc**: fix typo in zlib.md (Luigi Pinca) [#16480](https://github.com/nodejs/node/pull/16480) +* [[`655e017e40`](https://github.com/nodejs/node/commit/655e017e40)] - **net**: fix timeout with null handle (Anatoli Papirovski) [#16489](https://github.com/nodejs/node/pull/16489) +* [[`7fad10cc7e`](https://github.com/nodejs/node/commit/7fad10cc7e)] - **test**: make test-v8-serdes work without stdin (Anna Henningsen) +* [[`12dc06e3e1`](https://github.com/nodejs/node/commit/12dc06e3e1)] - **test**: call toLowerCase on the resolved module (Daniel Bevenius) [#16486](https://github.com/nodejs/node/pull/16486) +* [[`10894c3835`](https://github.com/nodejs/node/commit/10894c3835)] - **test**: allow for different nsswitch.conf settings (Daniel Bevenius) [#16378](https://github.com/nodejs/node/pull/16378) +* [[`2a53165aa0`](https://github.com/nodejs/node/commit/2a53165aa0)] - **test**: add missing assertion (cjihrig) [#15519](https://github.com/nodejs/node/pull/15519) ## 2017-10-24, Version 8.8.0 (Current), @MylesBorins From 6a7210edcf4621f65c73e442ff7d0cb9ebe43bd3 Mon Sep 17 00:00:00 2001 From: Jon Moss Date: Wed, 25 Oct 2017 20:57:30 -0400 Subject: [PATCH 04/28] doc: add note to releases.md PR-URL: https://github.com/nodejs/node/pull/16507 Refs: https://github.com/nodejs/nodejs.org/pull/1425 Reviewed-By: Anatoli Papirovski Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: Yuta Hiroto Reviewed-By: Gireesh Punathil --- doc/releases.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/releases.md b/doc/releases.md index ddca73eaeee..faca396768e 100644 --- a/doc/releases.md +++ 
b/doc/releases.md @@ -134,6 +134,9 @@ The new entry should take the following form: The release type should be either Current, LTS, or Maintenance, depending on the type of release being produced. +Be sure that the `` tag, as well as the two headings, are not +indented at all. + At the top of each `CHANGELOG_*.md` file, and in the root `CHANGELOG.md` file, there is a table indexing all releases in each major release line. A link to the new release needs to be added to each. Follow the existing examples and be From 2e8c04b1bb3fd9e735b26ff0de2f18d8e9c7099c Mon Sep 17 00:00:00 2001 From: Tom Boutell Date: Fri, 6 Oct 2017 11:30:48 -0700 Subject: [PATCH 05/28] src: use maybe versions of methods MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/16005 Fixes: https://github.com/nodejs/node/issues/15864 Reviewed-By: Ben Noordhuis Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Timothy Gu Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig --- src/inspector_agent.cc | 9 +++++---- src/inspector_js_api.cc | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/inspector_agent.cc b/src/inspector_agent.cc index 4828fa3071c..beaebde1c3e 100644 --- a/src/inspector_agent.cc +++ b/src/inspector_agent.cc @@ -496,11 +496,11 @@ bool Agent::StartIoThread(bool wait_for_connect) { v8::Isolate* isolate = parent_env_->isolate(); HandleScope handle_scope(isolate); + auto context = parent_env_->context(); // Enable tracking of async stack traces if (!enable_async_hook_function_.IsEmpty()) { Local enable_fn = enable_async_hook_function_.Get(isolate); - auto context = parent_env_->context(); auto result = enable_fn->Call(context, Undefined(isolate), 0, nullptr); if (result.IsEmpty()) { FatalError( @@ -512,14 +512,15 @@ bool Agent::StartIoThread(bool wait_for_connect) { // Send message to enable debug in workers Local process_object = 
parent_env_->process_object(); Local emit_fn = - process_object->Get(FIXED_ONE_BYTE_STRING(isolate, "emit")); + process_object->Get(context, FIXED_ONE_BYTE_STRING(isolate, "emit")) + .ToLocalChecked(); // In case the thread started early during the startup if (!emit_fn->IsFunction()) return true; Local message = Object::New(isolate); - message->Set(FIXED_ONE_BYTE_STRING(isolate, "cmd"), - FIXED_ONE_BYTE_STRING(isolate, "NODE_DEBUG_ENABLED")); + message->Set(context, FIXED_ONE_BYTE_STRING(isolate, "cmd"), + FIXED_ONE_BYTE_STRING(isolate, "NODE_DEBUG_ENABLED")).FromJust(); Local argv[] = { FIXED_ONE_BYTE_STRING(isolate, "internalMessage"), message diff --git a/src/inspector_js_api.cc b/src/inspector_js_api.cc index 110f1363aa5..8bd682351a0 100644 --- a/src/inspector_js_api.cc +++ b/src/inspector_js_api.cc @@ -284,7 +284,7 @@ void Open(const FunctionCallbackInfo& args) { } if (args.Length() > 2 && args[2]->IsBoolean()) { - wait_for_connect = args[2]->BooleanValue(); + wait_for_connect = args[2]->BooleanValue(env->context()).FromJust(); } agent->StartIoThread(wait_for_connect); From 65b8d2133d89aecd9b044b4bf5f4ac5c5c39ebf2 Mon Sep 17 00:00:00 2001 From: Cheyenne Arrowsmith Date: Fri, 6 Oct 2017 11:00:54 -0700 Subject: [PATCH 06/28] test: show values instead of assertion message PR-URL: https://github.com/nodejs/node/pull/15979 Reviewed-By: Luigi Pinca Reviewed-By: Gireesh Punathil --- test/internet/test-dgram-multicast-multi-process.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/internet/test-dgram-multicast-multi-process.js b/test/internet/test-dgram-multicast-multi-process.js index e1017e2a26e..f8797bc8f3b 100644 --- a/test/internet/test-dgram-multicast-multi-process.js +++ b/test/internet/test-dgram-multicast-multi-process.js @@ -111,8 +111,7 @@ function launchChildProcess() { console.error('[PARENT] %d received %d matching messages.', worker.pid, count); - assert.strictEqual(count, messages.length, - 'A worker received an invalid 
multicast message'); + assert.strictEqual(count, messages.length); }); clearTimeout(timer); From eefee3e9a612696f5a0a25fa86bf91552e4bc616 Mon Sep 17 00:00:00 2001 From: Hadis-Fard Date: Fri, 6 Oct 2017 12:06:04 -0700 Subject: [PATCH 07/28] test: imporove assert messages Improve assert messages in test-cluster-worker-disconnect.js. PR-URL: https://github.com/nodejs/node/pull/16021 Reviewed-By: Anatoli Papirovski Reviewed-By: Rich Trott --- test/parallel/test-cluster-worker-disconnect.js | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/test/parallel/test-cluster-worker-disconnect.js b/test/parallel/test-cluster-worker-disconnect.js index 1c5a7fc3ae5..71c151410cd 100644 --- a/test/parallel/test-cluster-worker-disconnect.js +++ b/test/parallel/test-cluster-worker-disconnect.js @@ -58,7 +58,7 @@ if (cluster.isWorker) { // Disconnect worker when it is ready worker.once('listening', common.mustCall(() => { const w = worker.disconnect(); - assert.strictEqual(worker, w, 'did not return a reference'); + assert.strictEqual(worker, w, `${worker.id} did not return a reference`); })); // Check cluster events @@ -97,10 +97,8 @@ if (cluster.isWorker) { assert.ok(c.emitExit, 'Exit event did not emit'); // flags - assert.strictEqual(w.state, 'disconnected', - 'The state property was not set'); - assert.strictEqual(w.voluntaryMode, true, - 'Voluntary exit mode was not set'); + assert.strictEqual(w.state, 'disconnected'); + assert.strictEqual(w.voluntaryMode, true); // is process alive assert.ok(w.died, 'The worker did not die'); From 64aded33ef310ca87e5566239ab2dbd12b1899b9 Mon Sep 17 00:00:00 2001 From: Scott J Beck Date: Fri, 6 Oct 2017 09:53:49 -0700 Subject: [PATCH 08/28] test: use fixtures module PR-URL: https://github.com/nodejs/node/pull/15843 Reviewed-By: Colin Ihrig Reviewed-By: Gireesh Punathil --- test/parallel/test-https-localaddress-bind-error.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git 
a/test/parallel/test-https-localaddress-bind-error.js b/test/parallel/test-https-localaddress-bind-error.js index a22db1e9e61..57e4dd054d7 100644 --- a/test/parallel/test-https-localaddress-bind-error.js +++ b/test/parallel/test-https-localaddress-bind-error.js @@ -25,12 +25,13 @@ if (!common.hasCrypto) common.skip('missing crypto'); const assert = require('assert'); -const fs = require('fs'); const https = require('https'); +const fixtures = require('../common/fixtures'); + const options = { - key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), - cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem') }; const invalidLocalAddress = '1.2.3.4'; From 50fe1a840915a22517ffd2c480558409dbfd6677 Mon Sep 17 00:00:00 2001 From: Sarah Meyer Date: Fri, 6 Oct 2017 12:58:55 -0700 Subject: [PATCH 09/28] tools, benchmark: test util benchmark Create a minimal test for the util benchmark files. PR-URL: https://github.com/nodejs/node/pull/16050 Reviewed-By: Joyee Cheung Reviewed-By: Anatoli Papirovski Reviewed-By: Gireesh Punathil --- benchmark/util/format.js | 3 +++ benchmark/util/inspect-array.js | 3 +++ benchmark/util/type-check.js | 7 +++++-- test/parallel/test-benchmark-util.js | 14 ++++++++++++++ 4 files changed, 25 insertions(+), 2 deletions(-) create mode 100644 test/parallel/test-benchmark-util.js diff --git a/benchmark/util/format.js b/benchmark/util/format.js index 6f171318ee2..5f9c4c3b594 100644 --- a/benchmark/util/format.js +++ b/benchmark/util/format.js @@ -21,6 +21,9 @@ const bench = common.createBenchmark(main, { }); function main({ n, type }) { + // For testing, if supplied with an empty type, default to string. 
+ type = type || 'string'; + const [first, second] = inputs[type]; bench.start(); diff --git a/benchmark/util/inspect-array.js b/benchmark/util/inspect-array.js index 751e2c3c2de..74332d18579 100644 --- a/benchmark/util/inspect-array.js +++ b/benchmark/util/inspect-array.js @@ -18,6 +18,9 @@ function main({ n, len, type }) { var arr = Array(len); var i, opts; + // For testing, if supplied with an empty type, default to denseArray. + type = type || 'denseArray'; + switch (type) { case 'denseArray_showHidden': opts = { showHidden: true }; diff --git a/benchmark/util/type-check.js b/benchmark/util/type-check.js index 1d9a4f30ef0..ee8dd7e4ece 100644 --- a/benchmark/util/type-check.js +++ b/benchmark/util/type-check.js @@ -29,16 +29,19 @@ const bench = common.createBenchmark(main, { type: Object.keys(args), version: ['native', 'js'], argument: ['true', 'false-primitive', 'false-object'], - millions: ['5'] + n: [5e6] }, { flags: ['--expose-internals'] }); function main(conf) { + // For testing, if supplied with an empty type, default to ArrayBufferView. 
+ conf.type = conf.type || 'ArrayBufferView'; + const util = process.binding('util'); const types = require('internal/util/types'); - const n = (+conf.millions * 1e6) | 0; + const n = (+conf.n) | 0; const func = { native: util, js: types }[conf.version][`is${conf.type}`]; const arg = args[conf.type][conf.argument]; diff --git a/test/parallel/test-benchmark-util.js b/test/parallel/test-benchmark-util.js new file mode 100644 index 00000000000..25140f8fe7e --- /dev/null +++ b/test/parallel/test-benchmark-util.js @@ -0,0 +1,14 @@ +'use strict'; + +require('../common'); + +const runBenchmark = require('../common/benchmark'); + +runBenchmark('util', + ['argument=false', + 'input=', + 'method=Array', + 'n=1', + 'option=none', + 'type=', + 'version=native']); From 6ff2909ef6dac12ca8975f0c888b3daa7b5f4ea0 Mon Sep 17 00:00:00 2001 From: Raphael Rheault Date: Fri, 6 Oct 2017 13:11:15 -0400 Subject: [PATCH 10/28] test: use fixtures module in test-fs-realpath.js MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/15904 Reviewed-By: Daijiro Wachi Reviewed-By: Tobias Nießen --- test/parallel/test-fs-realpath.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/test/parallel/test-fs-realpath.js b/test/parallel/test-fs-realpath.js index a9ec65168d0..3af0092f617 100644 --- a/test/parallel/test-fs-realpath.js +++ b/test/parallel/test-fs-realpath.js @@ -21,6 +21,8 @@ 'use strict'; const common = require('../common'); +const fixtures = require('../common/fixtures'); + const assert = require('assert'); const fs = require('fs'); const path = require('path'); @@ -133,7 +135,7 @@ function test_simple_absolute_symlink(callback) { console.log('using type=%s', type); const entry = `${tmpAbsDir}/symlink`; - const expected = `${common.fixturesDir}/nested-index/one`; + const expected = fixtures.path('nested-index', 'one'); [ [entry, expected] ].forEach(function(t) { @@ -156,7 +158,7 
@@ function test_deep_relative_file_symlink(callback) { return runNextTest(); } - const expected = path.join(common.fixturesDir, 'cycles', 'root.js'); + const expected = fixtures.path('cycles', 'root.js'); const linkData1 = path .relative(path.join(targetsAbsDir, 'nested-index', 'one'), expected); @@ -185,7 +187,7 @@ function test_deep_relative_dir_symlink(callback) { common.printSkipMessage('symlink test (no privs)'); return runNextTest(); } - const expected = path.join(common.fixturesDir, 'cycles', 'folder'); + const expected = fixtures.path('cycles', 'folder'); const path1b = path.join(targetsAbsDir, 'nested-index', 'one'); const linkPath1b = path.join(path1b, 'symlink1-dir'); const linkData1b = path.relative(path1b, expected); From 4ed152ea8a0735d5de5b97db2ebb66fdfc799d0d Mon Sep 17 00:00:00 2001 From: Nigel Kibodeaux Date: Wed, 25 Oct 2017 13:35:53 -0800 Subject: [PATCH 11/28] test: increase fs.exists coverage PR-URL: https://github.com/nodejs/node/pull/15963 Reviewed-By: Anatoli Papirovski Reviewed-By: Colin Ihrig Reviewed-By: Gireesh Punathil Reviewed-By: Ruben Bridgewater Reviewed-By: Michael Dawson --- test/parallel/test-fs-exists.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/parallel/test-fs-exists.js b/test/parallel/test-fs-exists.js index b19aa387741..331c8c04e38 100644 --- a/test/parallel/test-fs-exists.js +++ b/test/parallel/test-fs-exists.js @@ -30,6 +30,8 @@ fs.exists(f, common.mustCall(function(y) { assert.strictEqual(y, true); })); +assert.doesNotThrow(() => fs.exists(f)); + fs.exists(`${f}-NO`, common.mustCall(function(y) { assert.strictEqual(y, false); })); From 51637574daa06f5f378d72c83336008f5ec0050d Mon Sep 17 00:00:00 2001 From: Vladimir Ilic Date: Mon, 9 Oct 2017 22:02:29 -0700 Subject: [PATCH 12/28] test: add details in assertions in test-vm-context PR-URL: https://github.com/nodejs/node/pull/16116 Reviewed-By: James M Snell Reviewed-By: Franziska Hinkelmann Reviewed-By: Gireesh Punathil Reviewed-By: Joyee 
Cheung --- test/parallel/test-vm-context.js | 34 ++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/test/parallel/test-vm-context.js b/test/parallel/test-vm-context.js index 3c976648579..8ef89cba107 100644 --- a/test/parallel/test-vm-context.js +++ b/test/parallel/test-vm-context.js @@ -56,10 +56,10 @@ try { } catch (e) { gh1140Exception = e; assert.ok(/expected-filename/.test(e.stack), - 'expected appearance of filename in Error stack'); + `expected appearance of filename in Error stack: ${e.stack}`); } -assert.ok(gh1140Exception, - 'expected exception from runInContext signature test'); +// This is outside of catch block to confirm catch block ran. +assert.strictEqual(gh1140Exception.toString(), 'Error'); // GH-558, non-context argument segfaults / raises assertion const nonContextualSandboxErrorMsg = @@ -90,18 +90,22 @@ Object.defineProperty(ctx, 'b', { configurable: false }); ctx = vm.createContext(ctx); assert.strictEqual(script.runInContext(ctx), false); -// Error on the first line of a module should -// have the correct line and column number -assert.throws(() => { - vm.runInContext(' throw new Error()', context, { - filename: 'expected-filename.js', - lineOffset: 32, - columnOffset: 123 - }); -}, (err) => { - return /^ \^/m.test(err.stack) && - /expected-filename\.js:33:131/.test(err.stack); -}, 'Expected appearance of proper offset in Error stack'); +// Error on the first line of a module should have the correct line and column +// number. 
+{ + let stack = null; + assert.throws(() => { + vm.runInContext(' throw new Error()', context, { + filename: 'expected-filename.js', + lineOffset: 32, + columnOffset: 123 + }); + }, (err) => { + stack = err.stack; + return /^ \^/m.test(stack) && + /expected-filename\.js:33:131/.test(stack); + }, `stack not formatted as expected: ${stack}`); +} // https://github.com/nodejs/node/issues/6158 ctx = new Proxy({}, {}); From 1906df1b29f6402cd869b6c2b12a402c2eebc881 Mon Sep 17 00:00:00 2001 From: Iryna Yaremtso Date: Fri, 6 Oct 2017 10:00:59 -0700 Subject: [PATCH 13/28] test: use fixtures module Replace usage of common.fixturesDir with fixtures module in test-https-agent-disable-session-reuse.js. PR-URL: https://github.com/nodejs/node/pull/15901 Reviewed-By: Colin Ihrig Reviewed-By: Joyee Cheung --- test/parallel/test-https-agent-disable-session-reuse.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/test/parallel/test-https-agent-disable-session-reuse.js b/test/parallel/test-https-agent-disable-session-reuse.js index 4f92547e999..b3d0327f59b 100644 --- a/test/parallel/test-https-agent-disable-session-reuse.js +++ b/test/parallel/test-https-agent-disable-session-reuse.js @@ -1,17 +1,19 @@ 'use strict'; const common = require('../common'); + if (!common.hasCrypto) common.skip('missing crypto'); +const fixtures = require('../common/fixtures'); + const assert = require('assert'); const https = require('https'); -const fs = require('fs'); const TOTAL_REQS = 2; const options = { - key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`), - cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`) + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem') }; const clientSessions = []; From a70ef362ef8de8505ac9f140a4e803af9d706b77 Mon Sep 17 00:00:00 2001 From: Casie Lynch Date: Fri, 6 Oct 2017 09:46:22 -0700 Subject: [PATCH 14/28] test: replace fixturesDir in test-tls-connect Replace 
common.fixturesDir with fixtures module in test-tls-connect. PR-URL: https://github.com/nodejs/node/pull/15849 Reviewed-By: Gireesh Punathil Reviewed-By: Joyee Cheung --- test/parallel/test-tls-connect.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/parallel/test-tls-connect.js b/test/parallel/test-tls-connect.js index 374f22d10e5..2f1fb32323b 100644 --- a/test/parallel/test-tls-connect.js +++ b/test/parallel/test-tls-connect.js @@ -25,16 +25,16 @@ const common = require('../common'); if (!common.hasCrypto) common.skip('missing crypto'); +const fixtures = require('../common/fixtures'); + const assert = require('assert'); -const fs = require('fs'); -const path = require('path'); const tls = require('tls'); // https://github.com/joyent/node/issues/1218 // uncatchable exception on TLS connection error { - const cert = fs.readFileSync(path.join(common.fixturesDir, 'test_cert.pem')); - const key = fs.readFileSync(path.join(common.fixturesDir, 'test_key.pem')); + const cert = fixtures.readSync('test_cert.pem'); + const key = fixtures.readSync('test_key.pem'); const options = { cert: cert, key: key, port: common.PORT }; const conn = tls.connect(options, common.mustNotCall()); @@ -47,8 +47,8 @@ const tls = require('tls'); // SSL_accept/SSL_connect error handling { - const cert = fs.readFileSync(path.join(common.fixturesDir, 'test_cert.pem')); - const key = fs.readFileSync(path.join(common.fixturesDir, 'test_key.pem')); + const cert = fixtures.readSync('test_cert.pem'); + const key = fixtures.readSync('test_key.pem'); const conn = tls.connect({ cert: cert, From 287545997288b038c0e8a84f44d338e4d1acc384 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Wed, 18 Oct 2017 13:58:46 +0800 Subject: [PATCH 15/28] build: run linter before running tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/16284 Fixes: 
https://github.com/node/issues/16283 Reviewed-By: Rich Trott Reviewed-By: Gibson Fahnestock Reviewed-By: Refael Ackermann Reviewed-By: Luigi Pinca Reviewed-By: Michael Dawson Reviewed-By: Ruben Bridgewater Reviewed-By: Anatoli Papirovski Reviewed-By: Tobias Nießen Reviewed-By: Franziska Hinkelmann Reviewed-By: Richard Lau --- Makefile | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 4e974903a93..f051b3aaf1c 100644 --- a/Makefile +++ b/Makefile @@ -213,15 +213,26 @@ test: all $(MAKE) build-addons $(MAKE) build-addons-napi $(MAKE) doc-only + $(MAKE) lint $(MAKE) cctest $(PYTHON) tools/test.py --mode=release -J \ $(CI_ASYNC_HOOKS) \ $(CI_JS_SUITES) \ $(CI_NATIVE_SUITES) \ doctool known_issues - $(MAKE) lint endif +# For a quick test, does not run linter or build doc +test-only: all + $(MAKE) build-addons + $(MAKE) build-addons-napi + $(MAKE) cctest + $(PYTHON) tools/test.py --mode=release -J \ + $(CI_ASYNC_HOOKS) \ + $(CI_JS_SUITES) \ + $(CI_NATIVE_SUITES) \ + known_issues + test-cov: all $(MAKE) build-addons $(MAKE) build-addons-napi @@ -1107,6 +1118,7 @@ endif test-gc \ test-gc-clean \ test-hash-seed \ + test-only \ test-v8 \ test-v8-all \ test-v8-benchmarks \ From 201ecef767d0e4b8ea07e650646bd6811156dc12 Mon Sep 17 00:00:00 2001 From: Matthew Cantelon Date: Fri, 6 Oct 2017 17:26:16 +0000 Subject: [PATCH 16/28] test: include actual value in assertion message PR-URL: https://github.com/nodejs/node/pull/15935 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Joyee Cheung Reviewed-By: Gireesh Punathil --- test/addons-napi/test_dataview/test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/addons-napi/test_dataview/test.js b/test/addons-napi/test_dataview/test.js index 87cf8159ab8..711ab01ddb3 100644 --- a/test/addons-napi/test_dataview/test.js +++ b/test/addons-napi/test_dataview/test.js @@ -11,4 +11,4 @@ const template = 
Reflect.construct(DataView, [buffer]); const theDataview = test_dataview.CreateDataView(template); assert.ok(theDataview instanceof DataView, - 'The new variable should be of type Dataview'); + `Expect ${theDataview} to be a DataView`); From a4e46df87fc92e9c0da7a011486e40a34ef13307 Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Thu, 26 Oct 2017 12:49:30 +0200 Subject: [PATCH 17/28] src: use uv_stream_t, not uv_stream_s PR-URL: https://github.com/nodejs/node/pull/16512 Refs: https://github.com/libuv/libuv/issues/1607 Reviewed-By: Daniel Bevenius Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Santiago Gimeno Reviewed-By: Colin Ihrig --- src/inspector_socket.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/inspector_socket.cc b/src/inspector_socket.cc index 2d5054bb802..092138a7fa6 100644 --- a/src/inspector_socket.cc +++ b/src/inspector_socket.cc @@ -513,7 +513,7 @@ static int message_complete_cb(http_parser* parser) { return 0; } -static void data_received_cb(uv_stream_s* tcp, ssize_t nread, +static void data_received_cb(uv_stream_t* tcp, ssize_t nread, const uv_buf_t* buf) { #if DUMP_READS if (nread >= 0) { From 7c3d6ccbf2101580d7704f8c37de09ef109ba841 Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Wed, 25 Oct 2017 10:02:54 +0200 Subject: [PATCH 18/28] src: remove superfluous HandleScope MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Accessors implicitly run inside a HandleScope, UDPWrap::GetFD() doesn't need to create one explicitly. 
PR-URL: https://github.com/nodejs/node/pull/16482 Reviewed-By: Franziska Hinkelmann Reviewed-By: Colin Ihrig Reviewed-By: Tobias Nießen Reviewed-By: Anna Henningsen --- src/udp_wrap.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/udp_wrap.cc b/src/udp_wrap.cc index 98399155a84..4a37aadc662 100644 --- a/src/udp_wrap.cc +++ b/src/udp_wrap.cc @@ -169,7 +169,6 @@ void UDPWrap::New(const FunctionCallbackInfo& args) { void UDPWrap::GetFD(Local, const PropertyCallbackInfo& args) { int fd = UV_EBADF; #if !defined(_WIN32) - HandleScope scope(args.GetIsolate()); UDPWrap* wrap = Unwrap(args.Holder()); if (wrap != nullptr) uv_fileno(reinterpret_cast(&wrap->handle_), &fd); From de61f97c3dbc1a07911349f8360f94be127ca28a Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Wed, 25 Oct 2017 10:02:54 +0200 Subject: [PATCH 19/28] src: move handle properties to prototype MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Reduce the size of wrap objects by moving a couple of accessors from the instance template to the prototype template. They occupied one slot per instance instead of one slot per class. This commit fixes some instances of unwrapping twice since that code had to be updated anyway to use `args.This()` instead of `args.Holder()`. 
PR-URL: https://github.com/nodejs/node/pull/16482 Reviewed-By: Franziska Hinkelmann Reviewed-By: Colin Ihrig Reviewed-By: Tobias Nießen Reviewed-By: Anna Henningsen --- src/stream_base-inl.h | 58 ++++++++++++++++++++----------------------- src/udp_wrap.cc | 14 +++++------ 2 files changed, 34 insertions(+), 38 deletions(-) diff --git a/src/stream_base-inl.h b/src/stream_base-inl.h index 25293d2d065..562af2a5332 100644 --- a/src/stream_base-inl.h +++ b/src/stream_base-inl.h @@ -33,26 +33,26 @@ void StreamBase::AddMethods(Environment* env, enum PropertyAttribute attributes = static_cast(v8::ReadOnly | v8::DontDelete); - t->InstanceTemplate()->SetAccessor(env->fd_string(), - GetFD, - nullptr, - env->as_external(), - v8::DEFAULT, - attributes); - - t->InstanceTemplate()->SetAccessor(env->external_stream_string(), - GetExternal, - nullptr, - env->as_external(), - v8::DEFAULT, - attributes); - - t->InstanceTemplate()->SetAccessor(env->bytes_read_string(), - GetBytesRead, - nullptr, - env->as_external(), - v8::DEFAULT, - attributes); + t->PrototypeTemplate()->SetAccessor(env->fd_string(), + GetFD, + nullptr, + env->as_external(), + v8::DEFAULT, + attributes); + + t->PrototypeTemplate()->SetAccessor(env->external_stream_string(), + GetExternal, + nullptr, + env->as_external(), + v8::DEFAULT, + attributes); + + t->PrototypeTemplate()->SetAccessor(env->bytes_read_string(), + GetBytesRead, + nullptr, + env->as_external(), + v8::DEFAULT, + attributes); env->SetProtoMethod(t, "readStart", JSMethod); env->SetProtoMethod(t, "readStop", JSMethod); @@ -81,11 +81,10 @@ void StreamBase::AddMethods(Environment* env, template void StreamBase::GetFD(Local key, const PropertyCallbackInfo& args) { - Base* handle = Unwrap(args.Holder()); - // Mimic implementation of StreamBase::GetFD() and UDPWrap::GetFD(). 
+ Base* handle; ASSIGN_OR_RETURN_UNWRAP(&handle, - args.Holder(), + args.This(), args.GetReturnValue().Set(UV_EINVAL)); StreamBase* wrap = static_cast(handle); @@ -99,11 +98,10 @@ void StreamBase::GetFD(Local key, template void StreamBase::GetBytesRead(Local key, const PropertyCallbackInfo& args) { - Base* handle = Unwrap(args.Holder()); - // The handle instance hasn't been set. So no bytes could have been read. + Base* handle; ASSIGN_OR_RETURN_UNWRAP(&handle, - args.Holder(), + args.This(), args.GetReturnValue().Set(0)); StreamBase* wrap = static_cast(handle); @@ -115,9 +113,8 @@ void StreamBase::GetBytesRead(Local key, template void StreamBase::GetExternal(Local key, const PropertyCallbackInfo& args) { - Base* handle = Unwrap(args.Holder()); - - ASSIGN_OR_RETURN_UNWRAP(&handle, args.Holder()); + Base* handle; + ASSIGN_OR_RETURN_UNWRAP(&handle, args.This()); StreamBase* wrap = static_cast(handle); Local ext = External::New(args.GetIsolate(), wrap); @@ -128,8 +125,7 @@ void StreamBase::GetExternal(Local key, template & args)> void StreamBase::JSMethod(const FunctionCallbackInfo& args) { - Base* handle = Unwrap(args.Holder()); - + Base* handle; ASSIGN_OR_RETURN_UNWRAP(&handle, args.Holder()); StreamBase* wrap = static_cast(handle); diff --git a/src/udp_wrap.cc b/src/udp_wrap.cc index 4a37aadc662..b8c53534052 100644 --- a/src/udp_wrap.cc +++ b/src/udp_wrap.cc @@ -113,12 +113,12 @@ void UDPWrap::Initialize(Local target, enum PropertyAttribute attributes = static_cast(v8::ReadOnly | v8::DontDelete); - t->InstanceTemplate()->SetAccessor(env->fd_string(), - UDPWrap::GetFD, - nullptr, - env->as_external(), - v8::DEFAULT, - attributes); + t->PrototypeTemplate()->SetAccessor(env->fd_string(), + UDPWrap::GetFD, + nullptr, + env->as_external(), + v8::DEFAULT, + attributes); env->SetProtoMethod(t, "bind", Bind); env->SetProtoMethod(t, "send", Send); @@ -169,7 +169,7 @@ void UDPWrap::New(const FunctionCallbackInfo& args) { void UDPWrap::GetFD(Local, const PropertyCallbackInfo& 
args) { int fd = UV_EBADF; #if !defined(_WIN32) - UDPWrap* wrap = Unwrap(args.Holder()); + UDPWrap* wrap = Unwrap(args.This()); if (wrap != nullptr) uv_fileno(reinterpret_cast(&wrap->handle_), &fd); #endif From 88fb359c578b5d1a58885f3afcf6e542a9f07843 Mon Sep 17 00:00:00 2001 From: Ben Halverson Date: Sat, 26 Aug 2017 00:53:27 -0700 Subject: [PATCH 20/28] stream: migrate _stream_readable use error codes PR-URL: https://github.com/nodejs/node/pull/15042 Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson Reviewed-By: Refael Ackermann Reviewed-By: Gireesh Punathil Reviewed-By: Joyee Cheung --- doc/api/errors.md | 21 +++++++++++++++++++ lib/_stream_readable.js | 8 ++++--- lib/internal/errors.js | 3 +++ ...tream-readable-with-unimplemented-_read.js | 8 +++++-- .../parallel/test-stream-unshift-read-race.js | 12 +++++++++-- 5 files changed, 45 insertions(+), 7 deletions(-) diff --git a/doc/api/errors.md b/doc/api/errors.md index 498fc856ad5..77da5583719 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1317,6 +1317,24 @@ Node.js does not allow `stdout` or `stderr` Streams to be closed by user code. Used when an attempt is made to close the `process.stdout` stream. By design, Node.js does not allow `stdout` or `stderr` Streams to be closed by user code. + +### ERR_STREAM_PUSH_AFTER_EOF + +Used when an attempt is made to call [`stream.push()`][] after a `null`(EOF) +has been pushed to the stream. + + +### ERR_STREAM_READ_NOT_IMPLEMENTED + +Used when an attempt is made to use a readable stream that has not implemented +[`readable._read()`][]. + + +### ERR_STREAM_UNSHIFT_AFTER_END_EVENT + +Used when an attempt is made to call [`stream.unshift()`][] after the +`end` event has been emitted. + ### ERR_STREAM_WRAP @@ -1462,7 +1480,10 @@ closed. 
[`ERR_INVALID_ARG_TYPE`]: #ERR_INVALID_ARG_TYPE [`hash.digest()`]: crypto.html#crypto_hash_digest_encoding [`hash.update()`]: crypto.html#crypto_hash_update_data_inputencoding +[`readable._read()`]: stream.html#stream_readable_read_size_1 [`sign.sign()`]: crypto.html#crypto_sign_sign_privatekey_outputformat +[`stream.push()`]: stream.html#stream_readable_push_chunk_encoding +[`stream.unshift()`]: stream.html#stream_readable_unshift_chunk [`subprocess.kill()`]: child_process.html#child_process_subprocess_kill_signal [`subprocess.send()`]: child_process.html#child_process_subprocess_send_message_sendhandle_options_callback [`fs.readFileSync`]: fs.html#fs_fs_readfilesync_path_options diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 78732ed185a..deea864647b 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -31,6 +31,7 @@ const util = require('util'); const debug = util.debuglog('stream'); const BufferList = require('internal/streams/BufferList'); const destroyImpl = require('internal/streams/destroy'); +const errors = require('internal/errors'); var StringDecoder; util.inherits(Readable, Stream); @@ -233,11 +234,12 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { if (addToFront) { if (state.endEmitted) - stream.emit('error', new Error('stream.unshift() after end event')); + stream.emit('error', + new errors.Error('ERR_STREAM_UNSHIFT_AFTER_END_EVENT')); else addChunk(stream, state, chunk, true); } else if (state.ended) { - stream.emit('error', new Error('stream.push() after EOF')); + stream.emit('error', new errors.Error('ERR_STREAM_PUSH_AFTER_EOF')); } else { state.reading = false; if (state.decoder && !encoding) { @@ -548,7 +550,7 @@ function maybeReadMore_(stream, state) { // for virtual (non-string, non-buffer) streams, "length" is somewhat // arbitrary, and perhaps not very meaningful. 
Readable.prototype._read = function(n) { - this.emit('error', new Error('_read() is not implemented')); + this.emit('error', new errors.Error('ERR_STREAM_READ_NOT_IMPLEMENTED')); }; Readable.prototype.pipe = function(dest, pipeOpts) { diff --git a/lib/internal/errors.js b/lib/internal/errors.js index fc6b11a40e9..a01d9b30cd2 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -325,6 +325,9 @@ E('ERR_SOCKET_CLOSED', 'Socket is closed'); E('ERR_SOCKET_DGRAM_NOT_RUNNING', 'Not running'); E('ERR_STDERR_CLOSE', 'process.stderr cannot be closed'); E('ERR_STDOUT_CLOSE', 'process.stdout cannot be closed'); +E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +E('ERR_STREAM_READ_NOT_IMPLEMENTED', '_read() is not implemented'); +E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); E('ERR_STREAM_WRAP', 'Stream has StringDecoder set or is in objectMode'); E('ERR_TLS_CERT_ALTNAME_INVALID', 'Hostname/IP does not match certificate\'s altnames: %s'); diff --git a/test/parallel/test-stream-readable-with-unimplemented-_read.js b/test/parallel/test-stream-readable-with-unimplemented-_read.js index ce325c0823e..b644bd40674 100644 --- a/test/parallel/test-stream-readable-with-unimplemented-_read.js +++ b/test/parallel/test-stream-readable-with-unimplemented-_read.js @@ -1,8 +1,12 @@ 'use strict'; -require('../common'); +const common = require('../common'); const stream = require('stream'); const assert = require('assert'); const readable = new stream.Readable(); -assert.throws(() => readable.read(), /not implemented/); +assert.throws(() => readable.read(), common.expectsError({ + code: 'ERR_STREAM_READ_NOT_IMPLEMENTED', + type: Error, + message: '_read() is not implemented' +})); diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js index e04de5392aa..a67147506e4 100644 --- a/test/parallel/test-stream-unshift-read-race.js +++ b/test/parallel/test-stream-unshift-read-race.js @@ -70,7 
+70,11 @@ r._read = function(n) { function pushError() { assert.throws(function() { r.push(Buffer.allocUnsafe(1)); - }, /^Error: stream\.push\(\) after EOF$/); + }, common.expectsError({ + code: 'ERR_STREAM_PUSH_AFTER_EOF', + type: Error, + message: 'stream.push() after EOF' + })); } @@ -84,7 +88,11 @@ w._write = function(chunk, encoding, cb) { r.on('end', common.mustCall(function() { assert.throws(function() { r.unshift(Buffer.allocUnsafe(1)); - }, /^Error: stream\.unshift\(\) after end event$/); + }, common.expectsError({ + code: 'ERR_STREAM_UNSHIFT_AFTER_END_EVENT', + type: Error, + message: 'stream.unshift() after end event' + })); w.end(); })); From c08750275882f1472f7ff2a98916f857a0857dc6 Mon Sep 17 00:00:00 2001 From: Ali Ijaz Sheikh Date: Wed, 18 Oct 2017 15:19:43 -0700 Subject: [PATCH 21/28] deps: V8: backport b1cd96e from upstream Original commit message: [inspector] added V8InspectorClient::maxAsyncCallStackDepthChanged R=dgozman@chromium.org Bug: none Cq-Include-Trybots: master.tryserver.blink:linux_trusty_blink_rel;master.tryserver.chromium.linux:linux_chromium_rel_ng Change-Id: I0fa10978266feb3c3907ce1f3386ae7a34a33582 Reviewed-on: https://chromium-review.googlesource.com/726490 Reviewed-by: Dmitry Gozman Commit-Queue: Aleksey Kozyatinskiy Cr-Commit-Position: refs/heads/master@{#48705} PR-URL: https://github.com/nodejs/node/pull/16308 Refs: https://github.com/v8/v8/commit/b1cd96ec4b836348bcffba357cd29076ab3ae48b Reviewed-By: Eugene Ostroukhov Reviewed-By: Refael Ackermann Reviewed-By: James M Snell Reviewed-By: Ben Noordhuis Reviewed-By: Timothy Gu --- common.gypi | 2 +- deps/v8/include/v8-inspector.h | 2 ++ deps/v8/src/inspector/v8-debugger.cc | 2 ++ ...x-async-call-stack-depth-changed-expected.txt | 5 +++++ .../max-async-call-stack-depth-changed.js | 16 ++++++++++++++++ deps/v8/test/inspector/inspector-test.cc | 15 +++++++++++++++ deps/v8/test/inspector/isolate-data.cc | 9 +++++++++ deps/v8/test/inspector/isolate-data.h | 3 +++ 8 files 
changed, 53 insertions(+), 1 deletion(-) create mode 100644 deps/v8/test/inspector/debugger/max-async-call-stack-depth-changed-expected.txt create mode 100644 deps/v8/test/inspector/debugger/max-async-call-stack-depth-changed.js diff --git a/common.gypi b/common.gypi index 404fd4615e2..c7c367ec83b 100644 --- a/common.gypi +++ b/common.gypi @@ -27,7 +27,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.7', + 'v8_embedder_string': '-node.8', # Enable disassembler for `--print-code` v8 options 'v8_enable_disassembler': 1, diff --git a/deps/v8/include/v8-inspector.h b/deps/v8/include/v8-inspector.h index 43bf3b4f60b..d0bb9b47fe4 100644 --- a/deps/v8/include/v8-inspector.h +++ b/deps/v8/include/v8-inspector.h @@ -211,6 +211,8 @@ class V8_EXPORT V8InspectorClient { // TODO(dgozman): this was added to support service worker shadow page. We // should not connect at all. virtual bool canExecuteScripts(int contextGroupId) { return true; } + + virtual void maxAsyncCallStackDepthChanged(int depth) {} }; class V8_EXPORT V8Inspector { diff --git a/deps/v8/src/inspector/v8-debugger.cc b/deps/v8/src/inspector/v8-debugger.cc index acd778b2b69..bc5437c6d42 100644 --- a/deps/v8/src/inspector/v8-debugger.cc +++ b/deps/v8/src/inspector/v8-debugger.cc @@ -726,6 +726,8 @@ void V8Debugger::setAsyncCallStackDepth(V8DebuggerAgentImpl* agent, int depth) { if (m_maxAsyncCallStackDepth == maxAsyncCallStackDepth) return; // TODO(dgozman): ideally, this should be per context group. 
m_maxAsyncCallStackDepth = maxAsyncCallStackDepth; + m_inspector->client()->maxAsyncCallStackDepthChanged( + m_maxAsyncCallStackDepth); if (!maxAsyncCallStackDepth) allAsyncTasksCanceled(); } diff --git a/deps/v8/test/inspector/debugger/max-async-call-stack-depth-changed-expected.txt b/deps/v8/test/inspector/debugger/max-async-call-stack-depth-changed-expected.txt new file mode 100644 index 00000000000..0f94b592e02 --- /dev/null +++ b/deps/v8/test/inspector/debugger/max-async-call-stack-depth-changed-expected.txt @@ -0,0 +1,5 @@ +Tests for max async call stack depth changed. +maxAsyncCallStackDepthChanged: 8 +maxAsyncCallStackDepthChanged: 0 +maxAsyncCallStackDepthChanged: 8 +maxAsyncCallStackDepthChanged: 0 diff --git a/deps/v8/test/inspector/debugger/max-async-call-stack-depth-changed.js b/deps/v8/test/inspector/debugger/max-async-call-stack-depth-changed.js new file mode 100644 index 00000000000..2d585398108 --- /dev/null +++ b/deps/v8/test/inspector/debugger/max-async-call-stack-depth-changed.js @@ -0,0 +1,16 @@ +// Copyright 2017 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +let {session, contextGroup, Protocol} = + InspectorTest.start('Tests for max async call stack depth changed.'); + +(async function test(){ + utils.setLogMaxAsyncCallStackDepthChanged(true); + await Protocol.Debugger.enable(); + await Protocol.Debugger.setAsyncCallStackDepth({maxDepth: 8}); + await Protocol.Debugger.setAsyncCallStackDepth({maxDepth: 0}); + await Protocol.Debugger.setAsyncCallStackDepth({maxDepth: 8}); + await Protocol.Debugger.disable(); + InspectorTest.completeTest(); +})(); diff --git a/deps/v8/test/inspector/inspector-test.cc b/deps/v8/test/inspector/inspector-test.cc index dcaf5823ef7..de89271fbfc 100644 --- a/deps/v8/test/inspector/inspector-test.cc +++ b/deps/v8/test/inspector/inspector-test.cc @@ -299,6 +299,10 @@ class UtilsExtension : public IsolateData::SetupGlobalTask { utils->Set(ToV8String(isolate, "setLogConsoleApiMessageCalls"), v8::FunctionTemplate::New( isolate, &UtilsExtension::SetLogConsoleApiMessageCalls)); + utils->Set( + ToV8String(isolate, "setLogMaxAsyncCallStackDepthChanged"), + v8::FunctionTemplate::New( + isolate, &UtilsExtension::SetLogMaxAsyncCallStackDepthChanged)); utils->Set(ToV8String(isolate, "createContextGroup"), v8::FunctionTemplate::New(isolate, &UtilsExtension::CreateContextGroup)); @@ -486,6 +490,17 @@ class UtilsExtension : public IsolateData::SetupGlobalTask { args[0].As()->Value()); } + static void SetLogMaxAsyncCallStackDepthChanged( + const v8::FunctionCallbackInfo& args) { + if (args.Length() != 1 || !args[0]->IsBoolean()) { + fprintf(stderr, + "Internal error: setLogMaxAsyncCallStackDepthChanged(bool)."); + Exit(); + } + backend_runner_->data()->SetLogMaxAsyncCallStackDepthChanged( + args[0].As()->Value()); + } + static void CreateContextGroup( const v8::FunctionCallbackInfo& args) { if (args.Length() != 0) { diff --git a/deps/v8/test/inspector/isolate-data.cc b/deps/v8/test/inspector/isolate-data.cc index 82a5cc04834..4c1d81670f3 100644 --- a/deps/v8/test/inspector/isolate-data.cc +++ 
b/deps/v8/test/inspector/isolate-data.cc @@ -370,6 +370,10 @@ void IsolateData::SetLogConsoleApiMessageCalls(bool log) { log_console_api_message_calls_ = log; } +void IsolateData::SetLogMaxAsyncCallStackDepthChanged(bool log) { + log_max_async_call_stack_depth_changed_ = log; +} + v8::MaybeLocal IsolateData::memoryInfo(v8::Isolate* isolate, v8::Local) { if (memory_info_.IsEmpty()) return v8::MaybeLocal(); @@ -396,3 +400,8 @@ void IsolateData::consoleAPIMessage(int contextGroupId, Print(isolate_, stack->toString()->string()); fprintf(stdout, "\n"); } + +void IsolateData::maxAsyncCallStackDepthChanged(int depth) { + if (!log_max_async_call_stack_depth_changed_) return; + fprintf(stdout, "maxAsyncCallStackDepthChanged: %d\n", depth); +} diff --git a/deps/v8/test/inspector/isolate-data.h b/deps/v8/test/inspector/isolate-data.h index b8f3b9e76c9..455b44b49bb 100644 --- a/deps/v8/test/inspector/isolate-data.h +++ b/deps/v8/test/inspector/isolate-data.h @@ -64,6 +64,7 @@ class IsolateData : public v8_inspector::V8InspectorClient { void SetCurrentTimeMS(double time); void SetMemoryInfo(v8::Local memory_info); void SetLogConsoleApiMessageCalls(bool log); + void SetLogMaxAsyncCallStackDepthChanged(bool log); void SetMaxAsyncTaskStacksForTest(int limit); void DumpAsyncTaskStacksStateForTest(); void FireContextCreated(v8::Local context, int context_group_id); @@ -106,6 +107,7 @@ class IsolateData : public v8_inspector::V8InspectorClient { unsigned lineNumber, unsigned columnNumber, v8_inspector::V8StackTrace*) override; bool isInspectableHeapObject(v8::Local) override; + void maxAsyncCallStackDepthChanged(int depth) override; TaskRunner* task_runner_; SetupGlobalTasks setup_global_tasks_; @@ -123,6 +125,7 @@ class IsolateData : public v8_inspector::V8InspectorClient { bool current_time_set_ = false; double current_time_ = 0.0; bool log_console_api_message_calls_ = false; + bool log_max_async_call_stack_depth_changed_ = false; v8::Global not_inspectable_private_; 
DISALLOW_COPY_AND_ASSIGN(IsolateData); From 5886e204f04013879c00aca5ab653c16bef5befc Mon Sep 17 00:00:00 2001 From: Ali Ijaz Sheikh Date: Wed, 18 Oct 2017 16:00:03 -0700 Subject: [PATCH 22/28] inspector: track async stacks when necessary With this change, we do async stack tracking only when explicitly requested by the inspector client. This avoids unnecessary overhead for clients that might not be interested in async stack traces. PR-URL: https://github.com/nodejs/node/pull/16308 Fixes: https://github.com/nodejs/node/issues/16180 Reviewed-By: Eugene Ostroukhov Reviewed-By: Refael Ackermann Reviewed-By: James M Snell Reviewed-By: Ben Noordhuis Reviewed-By: Timothy Gu --- lib/internal/inspector_async_hook.js | 6 -- src/inspector_agent.cc | 96 +++++++++++-------- src/inspector_agent.h | 7 ++ test/sequential/sequential.status | 1 + .../test-inspector-async-call-stack.js | 79 +++++++++++++++ ...pector-async-hook-teardown-at-debug-end.js | 22 ----- 6 files changed, 142 insertions(+), 69 deletions(-) create mode 100644 test/sequential/test-inspector-async-call-stack.js delete mode 100644 test/sequential/test-inspector-async-hook-teardown-at-debug-end.js diff --git a/lib/internal/inspector_async_hook.js b/lib/internal/inspector_async_hook.js index e32a026cd69..4ccf8626977 100644 --- a/lib/internal/inspector_async_hook.js +++ b/lib/internal/inspector_async_hook.js @@ -55,10 +55,4 @@ function disable() { exports.setup = function() { inspector.registerAsyncHook(enable, disable); - - if (inspector.isEnabled()) { - // If the inspector was already enabled via --inspect or --inspect-brk, - // the we need to enable the async hook immediately at startup. 
- enable(); - } }; diff --git a/src/inspector_agent.cc b/src/inspector_agent.cc index beaebde1c3e..efc0ffc15c5 100644 --- a/src/inspector_agent.cc +++ b/src/inspector_agent.cc @@ -319,6 +319,14 @@ class NodeInspectorClient : public V8InspectorClient { return uv_hrtime() * 1.0 / NANOS_PER_MSEC; } + void maxAsyncCallStackDepthChanged(int depth) override { + if (depth == 0) { + env_->inspector_agent()->DisableAsyncHook(); + } else { + env_->inspector_agent()->EnableAsyncHook(); + } + } + void contextCreated(Local context, const std::string& name) { std::unique_ptr name_buffer = Utf8ToStringView(name); v8_inspector::V8ContextInfo info(context, CONTEXT_GROUP_ID, @@ -449,7 +457,9 @@ Agent::Agent(Environment* env) : parent_env_(env), client_(nullptr), platform_(nullptr), enabled_(false), - next_context_number_(1) {} + next_context_number_(1), + pending_enable_async_hook_(false), + pending_disable_async_hook_(false) {} // Destructor needs to be defined here in implementation file as the header // does not have full definition of some classes. 
@@ -498,17 +508,6 @@ bool Agent::StartIoThread(bool wait_for_connect) { HandleScope handle_scope(isolate); auto context = parent_env_->context(); - // Enable tracking of async stack traces - if (!enable_async_hook_function_.IsEmpty()) { - Local enable_fn = enable_async_hook_function_.Get(isolate); - auto result = enable_fn->Call(context, Undefined(isolate), 0, nullptr); - if (result.IsEmpty()) { - FatalError( - "node::InspectorAgent::StartIoThread", - "Cannot enable Inspector's AsyncHook, please report this."); - } - } - // Send message to enable debug in workers Local process_object = parent_env_->process_object(); Local emit_fn = @@ -537,38 +536,9 @@ void Agent::Stop() { io_.reset(); enabled_ = false; } - - v8::Isolate* isolate = parent_env_->isolate(); - HandleScope handle_scope(isolate); - - // Disable tracking of async stack traces - if (!disable_async_hook_function_.IsEmpty()) { - Local disable_fn = disable_async_hook_function_.Get(isolate); - auto result = disable_fn->Call(parent_env_->context(), - Undefined(parent_env_->isolate()), 0, nullptr); - if (result.IsEmpty()) { - FatalError( - "node::InspectorAgent::Stop", - "Cannot disable Inspector's AsyncHook, please report this."); - } - } } void Agent::Connect(InspectorSessionDelegate* delegate) { - if (!enabled_) { - // Enable tracking of async stack traces - v8::Isolate* isolate = parent_env_->isolate(); - HandleScope handle_scope(isolate); - auto context = parent_env_->context(); - Local enable_fn = enable_async_hook_function_.Get(isolate); - auto result = enable_fn->Call(context, Undefined(isolate), 0, nullptr); - if (result.IsEmpty()) { - FatalError( - "node::InspectorAgent::Connect", - "Cannot enable Inspector's AsyncHook, please report this."); - } - } - enabled_ = true; client_->connectFrontend(delegate); } @@ -626,6 +596,50 @@ void Agent::RegisterAsyncHook(Isolate* isolate, v8::Local disable_function) { enable_async_hook_function_.Reset(isolate, enable_function); 
disable_async_hook_function_.Reset(isolate, disable_function); + if (pending_enable_async_hook_) { + CHECK(!pending_disable_async_hook_); + pending_enable_async_hook_ = false; + EnableAsyncHook(); + } else if (pending_disable_async_hook_) { + CHECK(!pending_enable_async_hook_); + pending_disable_async_hook_ = false; + DisableAsyncHook(); + } +} + +void Agent::EnableAsyncHook() { + if (!enable_async_hook_function_.IsEmpty()) { + Isolate* isolate = parent_env_->isolate(); + ToggleAsyncHook(isolate, enable_async_hook_function_.Get(isolate)); + } else if (pending_disable_async_hook_) { + CHECK(!pending_enable_async_hook_); + pending_disable_async_hook_ = false; + } else { + pending_enable_async_hook_ = true; + } +} + +void Agent::DisableAsyncHook() { + if (!disable_async_hook_function_.IsEmpty()) { + Isolate* isolate = parent_env_->isolate(); + ToggleAsyncHook(isolate, disable_async_hook_function_.Get(isolate)); + } else if (pending_enable_async_hook_) { + CHECK(!pending_disable_async_hook_); + pending_enable_async_hook_ = false; + } else { + pending_disable_async_hook_ = true; + } +} + +void Agent::ToggleAsyncHook(Isolate* isolate, Local fn) { + HandleScope handle_scope(isolate); + auto context = parent_env_->context(); + auto result = fn->Call(context, Undefined(isolate), 0, nullptr); + if (result.IsEmpty()) { + FatalError( + "node::inspector::Agent::ToggleAsyncHook", + "Cannot toggle Inspector's AsyncHook, please report this."); + } } void Agent::AsyncTaskScheduled(const StringView& task_name, void* task, diff --git a/src/inspector_agent.h b/src/inspector_agent.h index 9119e7263b2..24fae116f94 100644 --- a/src/inspector_agent.h +++ b/src/inspector_agent.h @@ -92,7 +92,12 @@ class Agent { DebugOptions& options() { return debug_options_; } void ContextCreated(v8::Local context); + void EnableAsyncHook(); + void DisableAsyncHook(); + private: + void ToggleAsyncHook(v8::Isolate* isolate, v8::Local fn); + node::Environment* parent_env_; std::unique_ptr client_; 
std::unique_ptr io_; @@ -102,6 +107,8 @@ class Agent { DebugOptions debug_options_; int next_context_number_; + bool pending_enable_async_hook_; + bool pending_disable_async_hook_; v8::Persistent enable_async_hook_function_; v8::Persistent disable_async_hook_function_; }; diff --git a/test/sequential/sequential.status b/test/sequential/sequential.status index 86095d16c30..797adfecd64 100644 --- a/test/sequential/sequential.status +++ b/test/sequential/sequential.status @@ -7,6 +7,7 @@ prefix sequential [true] # This section applies to all platforms [$system==win32] +test-inspector-async-call-stack : PASS, FLAKY test-inspector-bindings : PASS, FLAKY test-inspector-debug-end : PASS, FLAKY test-inspector-stop-profile-after-done: PASS, FLAKY diff --git a/test/sequential/test-inspector-async-call-stack.js b/test/sequential/test-inspector-async-call-stack.js new file mode 100644 index 00000000000..681697b8c43 --- /dev/null +++ b/test/sequential/test-inspector-async-call-stack.js @@ -0,0 +1,79 @@ +'use strict'; +const common = require('../common'); +common.skipIfInspectorDisabled(); +common.skipIf32Bits(); + +const assert = require('assert'); +const async_wrap = process.binding('async_wrap'); +const { kTotals } = async_wrap.constants; +const inspector = require('inspector'); + +const setDepth = 'Debugger.setAsyncCallStackDepth'; + +function verifyAsyncHookDisabled(message) { + assert.strictEqual(async_wrap.async_hook_fields[kTotals], 0); +} + +function verifyAsyncHookEnabled(message) { + assert.strictEqual(async_wrap.async_hook_fields[kTotals], 4); +} + +// By default inspector async hooks should not have been installed. 
+verifyAsyncHookDisabled('inspector async hook should be disabled at startup'); + +const session = new inspector.Session(); +verifyAsyncHookDisabled('creating a session should not enable async hooks'); + +session.connect(); +verifyAsyncHookDisabled('connecting a session should not enable async hooks'); + +session.post('Debugger.enable', () => { + verifyAsyncHookDisabled('enabling debugger should not enable async hooks'); + + session.post(setDepth, { invalid: 'message' }, () => { + verifyAsyncHookDisabled('invalid message should not enable async hooks'); + + session.post(setDepth, { maxDepth: 'five' }, () => { + verifyAsyncHookDisabled('invalid maxDepth (string) should not enable ' + + 'async hooks'); + + session.post(setDepth, { maxDepth: NaN }, () => { + verifyAsyncHookDisabled('invalid maxDepth (NaN) should not enable ' + + 'async hooks'); + + session.post(setDepth, { maxDepth: 10 }, () => { + verifyAsyncHookEnabled('valid message should enable async hooks'); + + session.post(setDepth, { maxDepth: 0 }, () => { + verifyAsyncHookDisabled('Setting maxDepth to 0 should disable ' + + 'async hooks'); + + runTestSet2(session); + }); + }); + }); + }); + }); +}); + +function runTestSet2(session) { + session.post(setDepth, { maxDepth: 32 }, () => { + verifyAsyncHookEnabled('valid message should enable async hooks'); + + session.post('Debugger.disable', () => { + verifyAsyncHookDisabled('Debugger.disable should disable async hooks'); + + session.post('Debugger.enable', () => { + verifyAsyncHookDisabled('Enabling debugger should not enable hooks'); + + session.post(setDepth, { maxDepth: 64 }, () => { + verifyAsyncHookEnabled('valid message should enable async hooks'); + + session.disconnect(); + verifyAsyncHookDisabled('Disconnecting session should disable ' + + 'async hooks'); + }); + }); + }); + }); +} diff --git a/test/sequential/test-inspector-async-hook-teardown-at-debug-end.js b/test/sequential/test-inspector-async-hook-teardown-at-debug-end.js deleted file mode 100644 
index c5577363fd6..00000000000 --- a/test/sequential/test-inspector-async-hook-teardown-at-debug-end.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict'; -const common = require('../common'); -common.skipIfInspectorDisabled(); -common.skipIf32Bits(); - -const spawn = require('child_process').spawn; - -const script = ` -const assert = require('assert'); -const async_wrap = process.binding('async_wrap'); -const { kTotals } = async_wrap.constants; - -assert.strictEqual(async_wrap.async_hook_fields[kTotals], 4); -process._debugEnd(); -assert.strictEqual(async_wrap.async_hook_fields[kTotals], 0); -`; - -const args = ['--inspect', '-e', script]; -const child = spawn(process.execPath, args, { stdio: 'inherit' }); -child.on('exit', (code, signal) => { - process.exit(code || signal); -}); From 83b8474e64dedc49c35660bbc9eb80d7e0073ba7 Mon Sep 17 00:00:00 2001 From: zhangzifa Date: Thu, 19 Oct 2017 17:06:34 +0800 Subject: [PATCH 23/28] test: update test-timers-block-eventloop.js When CPU is busy, the above sequential case fails occasionally, expand the timeout value to fix it. 
PR-URL: https://github.com/nodejs/node/pull/16314 Fixes: https://github.com/nodejs/node/issues/16310 Reviewed-By: Gireesh Punathil Reviewed-By: Gibson Fahnestock Reviewed-By: James M Snell Reviewed-By: Refael Ackermann --- test/sequential/test-timers-block-eventloop.js | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/test/sequential/test-timers-block-eventloop.js b/test/sequential/test-timers-block-eventloop.js index 210cf0d80a1..78ecc9e3174 100644 --- a/test/sequential/test-timers-block-eventloop.js +++ b/test/sequential/test-timers-block-eventloop.js @@ -2,21 +2,23 @@ const common = require('../common'); const fs = require('fs'); +const platformTimeout = common.platformTimeout; const t1 = setInterval(() => { - common.busyLoop(12); -}, 10); + common.busyLoop(platformTimeout(12)); +}, platformTimeout(10)); const t2 = setInterval(() => { - common.busyLoop(15); -}, 10); + common.busyLoop(platformTimeout(15)); +}, platformTimeout(10)); -const t3 = setTimeout(common.mustNotCall('eventloop blocked!'), 100); +const t3 = + setTimeout(common.mustNotCall('eventloop blocked!'), platformTimeout(200)); setTimeout(function() { - fs.stat('./nonexistent.txt', (err, stats) => { + fs.stat('/dev/nonexistent', (err, stats) => { clearInterval(t1); clearInterval(t2); clearTimeout(t3); }); -}, 50); +}, platformTimeout(50)); From 8a9be4175b17b574459bccb99d8b6e6be6db4070 Mon Sep 17 00:00:00 2001 From: Anatoli Papirovski Date: Sat, 28 Oct 2017 14:40:35 -0400 Subject: [PATCH 24/28] doc: http2.connect accepts net & tls options PR-URL: https://github.com/nodejs/node/pull/16576 Fixes: https://github.com/nodejs/node/issues/15405 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- doc/api/http2.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/api/http2.md b/doc/api/http2.md index 6e59e814747..fe78187068f 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ 
-1627,6 +1627,7 @@ added: v8.4.0 * `createConnection` {Function} An optional callback that receives the `URL` instance passed to `connect` and the `options` object, and returns any [`Duplex`][] stream that is to be used as the connection for this session. + * ...: Any [`net.connect()`][] or [`tls.connect()`][] options can be provided. * `listener` {Function} * Returns {Http2Session} @@ -2746,6 +2747,7 @@ if the stream is closed. [`http2.createServer()`]: #http2_http2_createserver_options_onrequesthandler [`http2stream.pushStream()`]: #http2_http2stream_pushstream_headers_options_callback [`net.Socket`]: net.html#net_class_net_socket +[`net.connect()`]: net.html#net_net_connect [`request.socket.getPeerCertificate()`]: tls.html#tls_tlssocket_getpeercertificate_detailed [`response.end()`]: #http2_response_end_data_encoding_callback [`response.setHeader()`]: #http2_response_setheader_name_value @@ -2755,5 +2757,6 @@ if the stream is closed. [`response.writeContinue()`]: #http2_response_writecontinue [`response.writeHead()`]: #http2_response_writehead_statuscode_statusmessage_headers [`tls.TLSSocket`]: tls.html#tls_class_tls_tlssocket +[`tls.connect()`]: tls.html#tls_tls_connect_options_callback [`tls.createServer()`]: tls.html#tls_tls_createserver_options_secureconnectionlistener [error code]: #error_codes From ca82e3088dfe204ab36baa492a3e399d46827453 Mon Sep 17 00:00:00 2001 From: Anatoli Papirovski Date: Wed, 25 Oct 2017 19:04:41 -0400 Subject: [PATCH 25/28] http2: fix several timeout related issues * correctly reset write timers: currently reset timers on both session & stream when write starts and when it ends. 
* prevent large writes from timing out: when writing a large chunk of data in http2, once the data is handed off to C++, the JS session & stream lose all track of the write and will timeout if the write doesn't complete within the timeout window. Fix this issue by tracking whether a write request is ongoing and also tracking how many chunks have been sent since the most recent write started. (Since each write call resets the timer.) PR-URL: https://github.com/nodejs/node/pull/16525 Reviewed-By: Matteo Collina Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- lib/internal/http2/core.js | 78 ++++++++++++++-- src/env.h | 1 + src/node_http2.cc | 24 +++++ src/node_http2.h | 4 + .../test-http2-timeout-large-write-file.js | 89 +++++++++++++++++++ .../test-http2-timeout-large-write.js | 84 +++++++++++++++++ 6 files changed, 274 insertions(+), 6 deletions(-) create mode 100644 test/sequential/test-http2-timeout-large-write-file.js create mode 100644 test/sequential/test-http2-timeout-large-write.js diff --git a/lib/internal/http2/core.js b/lib/internal/http2/core.js index 727ca517989..a667a2e7609 100644 --- a/lib/internal/http2/core.js +++ b/lib/internal/http2/core.js @@ -746,7 +746,8 @@ class Http2Session extends EventEmitter { shutdown: false, shuttingDown: false, pendingAck: 0, - maxPendingAck: Math.max(1, (options.maxPendingAck | 0) || 10) + maxPendingAck: Math.max(1, (options.maxPendingAck | 0) || 10), + writeQueueSize: 0 }; this[kType] = type; @@ -1080,6 +1081,22 @@ class Http2Session extends EventEmitter { } _onTimeout() { + // This checks whether a write is currently in progress and also whether + // that write is actually sending data across the wire. The kHandle + stored `chunksSentSinceLastWrite` is only updated when a timeout event + happens, meaning that if a write is ongoing it should never equal the + newly fetched, updated value.
+ if (this[kState].writeQueueSize > 0) { + const handle = this[kHandle]; + const chunksSentSinceLastWrite = handle !== undefined ? + handle.chunksSentSinceLastWrite : null; + if (chunksSentSinceLastWrite !== null && + chunksSentSinceLastWrite !== handle.updateChunksSent()) { + _unrefActive(this); + return; + } + } + + process.nextTick(emit, this, 'timeout'); + } } @@ -1199,8 +1216,27 @@ function createWriteReq(req, handle, data, encoding) { } } +function trackWriteState(stream, bytes) { + const session = stream[kSession]; + stream[kState].writeQueueSize += bytes; + session[kState].writeQueueSize += bytes; + session[kHandle].chunksSentSinceLastWrite = 0; +} + function afterDoStreamWrite(status, handle, req) { - _unrefActive(handle[kOwner]); + const session = handle[kOwner]; + _unrefActive(session); + + const state = session[kState]; + const { bytes } = req; + state.writeQueueSize -= bytes; + + const stream = state.streams.get(req.stream); + if (stream !== undefined) { + _unrefActive(stream); + stream[kState].writeQueueSize -= bytes; + } + if (typeof req.callback === 'function') req.callback(); this.handle = undefined; @@ -1312,7 +1348,8 @@ class Http2Stream extends Duplex { headersSent: false, headRequest: false, aborted: false, - closeHandler: onSessionClose.bind(this) + closeHandler: onSessionClose.bind(this), + writeQueueSize: 0 }; this.once('ready', streamOnceReady); @@ -1359,6 +1396,23 @@ class Http2Stream extends Duplex { } _onTimeout() { + // This checks whether a write is currently in progress and also whether + // that write is actually sending data across the wire. The kHandle + stored `chunksSentSinceLastWrite` is only updated when a timeout event + happens, meaning that if a write is ongoing it should never equal the + newly fetched, updated value. + if (this[kState].writeQueueSize > 0) { + const handle = this[kSession][kHandle]; + const chunksSentSinceLastWrite = handle !== undefined ?
+ handle.chunksSentSinceLastWrite : null; + if (chunksSentSinceLastWrite !== null && + chunksSentSinceLastWrite !== handle.updateChunksSent()) { + _unrefActive(this); + _unrefActive(this[kSession]); + return; + } + } + process.nextTick(emit, this, 'timeout'); } @@ -1396,10 +1450,11 @@ class Http2Stream extends Duplex { this.once('ready', this._write.bind(this, data, encoding, cb)); return; } - _unrefActive(this); if (!this[kState].headersSent) this[kProceed](); const session = this[kSession]; + _unrefActive(this); + _unrefActive(session); const handle = session[kHandle]; const req = new WriteWrap(); req.stream = this[kID]; @@ -1410,7 +1465,7 @@ class Http2Stream extends Duplex { const err = createWriteReq(req, handle, data, encoding); if (err) throw util._errnoException(err, 'write', req.error); - this._bytesDispatched += req.bytes; + trackWriteState(this, req.bytes); } _writev(data, cb) { @@ -1418,10 +1473,11 @@ class Http2Stream extends Duplex { this.once('ready', this._writev.bind(this, data, cb)); return; } - _unrefActive(this); if (!this[kState].headersSent) this[kProceed](); const session = this[kSession]; + _unrefActive(this); + _unrefActive(session); const handle = session[kHandle]; const req = new WriteWrap(); req.stream = this[kID]; @@ -1438,6 +1494,7 @@ class Http2Stream extends Duplex { const err = handle.writev(req, chunks); if (err) throw util._errnoException(err, 'write', req.error); + trackWriteState(this, req.bytes); } _read(nread) { @@ -1531,6 +1588,10 @@ class Http2Stream extends Duplex { return; } + const state = this[kState]; + session[kState].writeQueueSize -= state.writeQueueSize; + state.writeQueueSize = 0; + const server = session[kServer]; if (server !== undefined && err) { server.emit('streamError', err, this); @@ -1625,7 +1686,12 @@ function processRespondWithFD(fd, headers, offset = 0, length = -1, if (ret < 0) { err = new NghttpError(ret); process.nextTick(emit, this, 'error', err); + break; } + // exact length of the file doesn't 
matter here, since the + // stream is closing anyway — just use 1 to signify that + // a write does exist + trackWriteState(this, 1); } } diff --git a/src/env.h b/src/env.h index c4156a405f6..a5554337cb3 100644 --- a/src/env.h +++ b/src/env.h @@ -111,6 +111,7 @@ class ModuleWrap; V(callback_string, "callback") \ V(change_string, "change") \ V(channel_string, "channel") \ + V(chunks_sent_since_last_write_string, "chunksSentSinceLastWrite") \ V(constants_string, "constants") \ V(oncertcb_string, "oncertcb") \ V(onclose_string, "_onclose") \ diff --git a/src/node_http2.cc b/src/node_http2.cc index 4b29013636b..ec65e0052f0 100644 --- a/src/node_http2.cc +++ b/src/node_http2.cc @@ -603,6 +603,8 @@ void Http2Session::SubmitFile(const FunctionCallbackInfo& args) { return args.GetReturnValue().Set(NGHTTP2_ERR_INVALID_STREAM_ID); } + session->chunks_sent_since_last_write_ = 0; + Headers list(isolate, context, headers); args.GetReturnValue().Set(stream->SubmitFile(fd, *list, list.length(), @@ -757,6 +759,23 @@ void Http2Session::FlushData(const FunctionCallbackInfo& args) { stream->FlushDataChunks(); } +void Http2Session::UpdateChunksSent(const FunctionCallbackInfo& args) { + Http2Session* session; + Environment* env = Environment::GetCurrent(args); + Isolate* isolate = env->isolate(); + ASSIGN_OR_RETURN_UNWRAP(&session, args.Holder()); + + HandleScope scope(isolate); + + uint32_t length = session->chunks_sent_since_last_write_; + + session->object()->Set(env->context(), + env->chunks_sent_since_last_write_string(), + Integer::NewFromUnsigned(isolate, length)).FromJust(); + + args.GetReturnValue().Set(length); +} + void Http2Session::SubmitPushPromise(const FunctionCallbackInfo& args) { Http2Session* session; Environment* env = Environment::GetCurrent(args); @@ -811,6 +830,8 @@ int Http2Session::DoWrite(WriteWrap* req_wrap, } } + chunks_sent_since_last_write_ = 0; + nghttp2_stream_write_t* req = new nghttp2_stream_write_t; req->data = req_wrap; @@ -846,6 +867,7 @@ void 
Http2Session::Send(uv_buf_t* buf, size_t length) { this, AfterWrite); + chunks_sent_since_last_write_++; uv_buf_t actual = uv_buf_init(buf->base, length); if (stream_->DoWrite(write_req, &actual, 1, nullptr)) { write_req->Dispose(); @@ -1255,6 +1277,8 @@ void Initialize(Local target, Http2Session::DestroyStream); env->SetProtoMethod(session, "flushData", Http2Session::FlushData); + env->SetProtoMethod(session, "updateChunksSent", + Http2Session::UpdateChunksSent); StreamBase::AddMethods(env, session, StreamBase::kFlagHasWritev | StreamBase::kFlagNoShutdown); diff --git a/src/node_http2.h b/src/node_http2.h index e35c189ea68..1b24b97ff04 100644 --- a/src/node_http2.h +++ b/src/node_http2.h @@ -475,6 +475,7 @@ class Http2Session : public AsyncWrap, static void SubmitGoaway(const FunctionCallbackInfo& args); static void DestroyStream(const FunctionCallbackInfo& args); static void FlushData(const FunctionCallbackInfo& args); + static void UpdateChunksSent(const FunctionCallbackInfo& args); template static void GetSettings(const FunctionCallbackInfo& args); @@ -493,6 +494,9 @@ class Http2Session : public AsyncWrap, StreamResource::Callback prev_read_cb_; padding_strategy_type padding_strategy_ = PADDING_STRATEGY_NONE; + // use this to allow timeout tracking during long-lasting writes + uint32_t chunks_sent_since_last_write_ = 0; + char stream_buf_[kAllocBufferSize]; }; diff --git a/test/sequential/test-http2-timeout-large-write-file.js b/test/sequential/test-http2-timeout-large-write-file.js new file mode 100644 index 00000000000..f52523780dc --- /dev/null +++ b/test/sequential/test-http2-timeout-large-write-file.js @@ -0,0 +1,89 @@ +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +const assert = require('assert'); +const fixtures = require('../common/fixtures'); +const fs = require('fs'); +const http2 = require('http2'); +const path = require('path'); + +common.refreshTmpDir(); + +// This test assesses 
whether long-running writes can complete +// or timeout because the session or stream are not aware that the +// backing stream is still writing. +// To simulate a slow client, we write a really large chunk and +// then proceed through the following cycle: +// 1) Receive first 'data' event and record currently written size +// 2) Once we've read up to currently written size recorded above, +// we pause the stream and wait longer than the server timeout +// 3) Socket.prototype._onTimeout triggers and should confirm +// that the backing stream is still active and writing +// 4) Our timer fires, we resume the socket and start at 1) + +const writeSize = 3000000; +const minReadSize = 500000; +const serverTimeout = common.platformTimeout(500); +let offsetTimeout = common.platformTimeout(100); +let didReceiveData = false; + +const content = Buffer.alloc(writeSize, 0x44); +const filepath = path.join(common.tmpDir, 'http2-large-write.tmp'); +fs.writeFileSync(filepath, content, 'binary'); +const fd = fs.openSync(filepath, 'r'); + +const server = http2.createSecureServer({ + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem') +}); +server.on('stream', common.mustCall((stream) => { + stream.respondWithFD(fd, { + 'Content-Type': 'application/octet-stream', + 'Content-Length': content.length.toString(), + 'Vary': 'Accept-Encoding' + }); + stream.setTimeout(serverTimeout); + stream.on('timeout', () => { + assert.strictEqual(didReceiveData, false, 'Should not timeout'); + }); + stream.end(); +})); +server.setTimeout(serverTimeout); +server.on('timeout', () => { + assert.strictEqual(didReceiveData, false, 'Should not timeout'); +}); + +server.listen(0, common.mustCall(() => { + const client = http2.connect(`https://localhost:${server.address().port}`, + { rejectUnauthorized: false }); + + const req = client.request({ ':path': '/' }); + req.end(); + + const resume = () => req.resume(); + let receivedBufferLength = 0; + let firstReceivedAt; + 
req.on('data', common.mustCallAtLeast((buf) => { + if (receivedBufferLength === 0) { + didReceiveData = false; + firstReceivedAt = Date.now(); + } + receivedBufferLength += buf.length; + if (receivedBufferLength >= minReadSize && + receivedBufferLength < writeSize) { + didReceiveData = true; + receivedBufferLength = 0; + req.pause(); + setTimeout( + resume, + serverTimeout + offsetTimeout - (Date.now() - firstReceivedAt) + ); + offsetTimeout = 0; + } + }, 1)); + req.on('end', common.mustCall(() => { + client.destroy(); + server.close(); + })); +})); diff --git a/test/sequential/test-http2-timeout-large-write.js b/test/sequential/test-http2-timeout-large-write.js new file mode 100644 index 00000000000..f0a11b2e444 --- /dev/null +++ b/test/sequential/test-http2-timeout-large-write.js @@ -0,0 +1,84 @@ +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); +const assert = require('assert'); +const fixtures = require('../common/fixtures'); +const http2 = require('http2'); + +// This test assesses whether long-running writes can complete +// or timeout because the session or stream are not aware that the +// backing stream is still writing. 
+// To simulate a slow client, we write a really large chunk and +// then proceed through the following cycle: +// 1) Receive first 'data' event and record currently written size +// 2) Once we've read up to currently written size recorded above, +// we pause the stream and wait longer than the server timeout +// 3) Socket.prototype._onTimeout triggers and should confirm +// that the backing stream is still active and writing +// 4) Our timer fires, we resume the socket and start at 1) + +const writeSize = 3000000; +const minReadSize = 500000; +const serverTimeout = common.platformTimeout(500); +let offsetTimeout = common.platformTimeout(100); +let didReceiveData = false; + +const server = http2.createSecureServer({ + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem') +}); +server.on('stream', common.mustCall((stream) => { + const content = Buffer.alloc(writeSize, 0x44); + + stream.respond({ + 'Content-Type': 'application/octet-stream', + 'Content-Length': content.length.toString(), + 'Vary': 'Accept-Encoding' + }); + + stream.write(content); + stream.setTimeout(serverTimeout); + stream.on('timeout', () => { + assert.strictEqual(didReceiveData, false, 'Should not timeout'); + }); + stream.end(); +})); +server.setTimeout(serverTimeout); +server.on('timeout', () => { + assert.strictEqual(didReceiveData, false, 'Should not timeout'); +}); + +server.listen(0, common.mustCall(() => { + const client = http2.connect(`https://localhost:${server.address().port}`, + { rejectUnauthorized: false }); + + const req = client.request({ ':path': '/' }); + req.end(); + + const resume = () => req.resume(); + let receivedBufferLength = 0; + let firstReceivedAt; + req.on('data', common.mustCallAtLeast((buf) => { + if (receivedBufferLength === 0) { + didReceiveData = false; + firstReceivedAt = Date.now(); + } + receivedBufferLength += buf.length; + if (receivedBufferLength >= minReadSize && + receivedBufferLength < writeSize) { + didReceiveData = true; 
+ receivedBufferLength = 0; + req.pause(); + setTimeout( + resume, + serverTimeout + offsetTimeout - (Date.now() - firstReceivedAt) + ); + offsetTimeout = 0; + } + }, 1)); + req.on('end', common.mustCall(() => { + client.destroy(); + server.close(); + })); +})); From 980ebd2d35d9e001bcf24aee8a1f061ed2c7c70f Mon Sep 17 00:00:00 2001 From: Anatoli Papirovski Date: Sat, 28 Oct 2017 14:04:16 -0400 Subject: [PATCH 26/28] http2: simplify mapToHeaders, stricter validation No longer check whether key is a symbol as Object.keys does not return symbols. No longer convert key to string as it is always a string. Validate that only one value is passed for each pseudo-header. Extend illegal connection header message to include the name of the problematic header. Extend tests to cover this behaviour. PR-URL: https://github.com/nodejs/node/pull/16575 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- lib/internal/errors.js | 2 +- lib/internal/http2/util.js | 29 ++++++++++--------- ...st-http2-server-push-stream-errors-args.js | 2 +- test/parallel/test-http2-util-headers-list.js | 27 +++++++++++++---- 4 files changed, 38 insertions(+), 22 deletions(-) diff --git a/lib/internal/errors.js b/lib/internal/errors.js index a01d9b30cd2..5fe33dafaef 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -201,7 +201,7 @@ E('ERR_HTTP2_INFO_HEADERS_AFTER_RESPOND', E('ERR_HTTP2_INFO_STATUS_NOT_ALLOWED', 'Informational status codes cannot be used'); E('ERR_HTTP2_INVALID_CONNECTION_HEADERS', - 'HTTP/1 Connection specific headers are forbidden'); + 'HTTP/1 Connection specific headers are forbidden: "%s"'); E('ERR_HTTP2_INVALID_HEADER_VALUE', 'Value must not be undefined or null'); E('ERR_HTTP2_INVALID_INFO_STATUS', (code) => `Invalid informational status code: ${code}`); diff --git a/lib/internal/http2/util.js b/lib/internal/http2/util.js index 013642d40d1..4b6f8cc5c68 100644 --- a/lib/internal/http2/util.js +++ b/lib/internal/http2/util.js @@ 
-399,10 +399,10 @@ function mapToHeaders(map, for (var i = 0; i < keys.length; i++) { let key = keys[i]; let value = map[key]; - let val; - if (typeof key === 'symbol' || value === undefined || !key) + if (value === undefined || key === '') continue; - key = String(key).toLowerCase(); + key = key.toLowerCase(); + const isSingleValueHeader = kSingleValueHeaders.has(key); let isArray = Array.isArray(value); if (isArray) { switch (value.length) { @@ -413,34 +413,35 @@ function mapToHeaders(map, isArray = false; break; default: - if (kSingleValueHeaders.has(key)) + if (isSingleValueHeader) return new errors.Error('ERR_HTTP2_HEADER_SINGLE_VALUE', key); } + } else { + value = String(value); + } + if (isSingleValueHeader) { + if (singles.has(key)) + return new errors.Error('ERR_HTTP2_HEADER_SINGLE_VALUE', key); + singles.add(key); } if (key[0] === ':') { const err = assertValuePseudoHeader(key); if (err !== undefined) return err; - ret = `${key}\0${String(value)}\0${ret}`; + ret = `${key}\0${value}\0${ret}`; count++; } else { - if (kSingleValueHeaders.has(key)) { - if (singles.has(key)) - return new errors.Error('ERR_HTTP2_HEADER_SINGLE_VALUE', key); - singles.add(key); - } if (isIllegalConnectionSpecificHeader(key, value)) { - return new errors.Error('ERR_HTTP2_INVALID_CONNECTION_HEADERS'); + return new errors.Error('ERR_HTTP2_INVALID_CONNECTION_HEADERS', key); } if (isArray) { for (var k = 0; k < value.length; k++) { - val = String(value[k]); + const val = String(value[k]); ret += `${key}\0${val}\0`; } count += value.length; } else { - val = String(value); - ret += `${key}\0${val}\0`; + ret += `${key}\0${value}\0`; count++; } } diff --git a/test/parallel/test-http2-server-push-stream-errors-args.js b/test/parallel/test-http2-server-push-stream-errors-args.js index 5055c081b40..73fa064b439 100644 --- a/test/parallel/test-http2-server-push-stream-errors-args.js +++ b/test/parallel/test-http2-server-push-stream-errors-args.js @@ -31,7 +31,7 @@ server.on('stream', 
common.mustCall((stream, headers) => { () => stream.pushStream({ 'connection': 'test' }, {}, () => {}), { code: 'ERR_HTTP2_INVALID_CONNECTION_HEADERS', - message: 'HTTP/1 Connection specific headers are forbidden' + message: 'HTTP/1 Connection specific headers are forbidden: "connection"' } ); diff --git a/test/parallel/test-http2-util-headers-list.js b/test/parallel/test-http2-util-headers-list.js index f5e20253850..0bbe1972d07 100644 --- a/test/parallel/test-http2-util-headers-list.js +++ b/test/parallel/test-http2-util-headers-list.js @@ -158,7 +158,7 @@ const { // Arrays containing a single set-cookie value are handled correctly // (https://github.com/nodejs/node/issues/16452) const headers = { - 'set-cookie': 'foo=bar' + 'set-cookie': ['foo=bar'] }; assert.deepStrictEqual( mapToHeaders(headers), @@ -166,6 +166,20 @@ const { ); } +{ + // pseudo-headers are only allowed a single value + const headers = { + ':status': 200, + ':statuS': 204, + }; + + common.expectsError({ + code: 'ERR_HTTP2_HEADER_SINGLE_VALUE', + type: Error, + message: 'Header field ":status" must have only a single value' + })(mapToHeaders(headers)); +} + // The following are not allowed to have multiple values [ HTTP2_HEADER_STATUS, @@ -248,8 +262,6 @@ const { assert(!(mapToHeaders({ [name]: [1, 2, 3] }) instanceof Error), name); }); -const regex = - /^HTTP\/1 Connection specific headers are forbidden$/; [ HTTP2_HEADER_CONNECTION, HTTP2_HEADER_UPGRADE, @@ -269,18 +281,21 @@ const regex = ].forEach((name) => { common.expectsError({ code: 'ERR_HTTP2_INVALID_CONNECTION_HEADERS', - message: regex + message: 'HTTP/1 Connection specific headers are forbidden: ' + + `"${name.toLowerCase()}"` })(mapToHeaders({ [name]: 'abc' })); }); common.expectsError({ code: 'ERR_HTTP2_INVALID_CONNECTION_HEADERS', - message: regex + message: 'HTTP/1 Connection specific headers are forbidden: ' + + `"${HTTP2_HEADER_TE}"` })(mapToHeaders({ [HTTP2_HEADER_TE]: ['abc'] })); common.expectsError({ code: 
'ERR_HTTP2_INVALID_CONNECTION_HEADERS', - message: regex + message: 'HTTP/1 Connection specific headers are forbidden: ' + + `"${HTTP2_HEADER_TE}"` })(mapToHeaders({ [HTTP2_HEADER_TE]: ['abc', 'trailers'] })); assert(!(mapToHeaders({ te: 'trailers' }) instanceof Error)); From 1cdcab09f2a9cc010437129376983ac77c9afd7e Mon Sep 17 00:00:00 2001 From: James M Snell Date: Thu, 26 Oct 2017 09:08:38 -0700 Subject: [PATCH 27/28] doc: slightly relax 50 character rule MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Allow commit message first line to exceed 50 chars if necessary PR-URL: https://github.com/nodejs/node/pull/16523 Reviewed-By: Anna Henningsen Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Evan Lucas Reviewed-By: Anatoli Papirovski Reviewed-By: Refael Ackermann Reviewed-By: Gibson Fahnestock Reviewed-By: Joyee Cheung --- CONTRIBUTING.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1c6de8d2549..99278a843af 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -309,8 +309,8 @@ notes about [commit squashing](#commit-squashing)). A good commit message should describe what changed and why. 1. 
The first line should: - - contain a short description of the change - - be 50 characters or less + - contain a short description of the change (preferably 50 characters or less, + and no more than 72 characters) - be entirely in lowercase with the exception of proper nouns, acronyms, and the words that refer to code, like function/variable names - be prefixed with the name of the changed subsystem and start with an From 896eaf6820a0194fb2879c44c4dbcece29e616d3 Mon Sep 17 00:00:00 2001 From: James M Snell Date: Thu, 26 Oct 2017 17:09:30 -0700 Subject: [PATCH 28/28] zlib: finish migrating to internal/errors PR-URL: https://github.com/nodejs/node/pull/16540 Reviewed-By: Refael Ackermann Reviewed-By: Khaidi Chu Reviewed-By: Joyee Cheung Reviewed-By: Matteo Collina Reviewed-By: Luigi Pinca --- doc/api/errors.md | 6 ++++++ lib/internal/errors.js | 1 + lib/zlib.js | 17 +++++++++++++++-- src/node_zlib.cc | 24 +++++++++++------------- test/parallel/test-zlib-failed-init.js | 10 +++++++--- 5 files changed, 40 insertions(+), 18 deletions(-) diff --git a/doc/api/errors.md b/doc/api/errors.md index 77da5583719..82246fff702 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1474,6 +1474,11 @@ Used when a given value is out of the accepted range. Used when an attempt is made to use a `zlib` object after it has already been closed. + +### ERR_ZLIB_INITIALIZATION_FAILED + +Used when creation of a [`zlib`][] object fails due to incorrect configuration. + [`--force-fips`]: cli.html#cli_force_fips [`crypto.timingSafeEqual()`]: crypto.html#crypto_crypto_timingsafeequal_a_b [`dgram.createSocket()`]: dgram.html#dgram_dgram_createsocket_options_callback @@ -1515,3 +1520,4 @@ closed. 
[try-catch]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/try...catch [vm]: vm.html [WHATWG Supported Encodings]: util.html#util_whatwg_supported_encodings +[`zlib`]: zlib.html diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 5fe33dafaef..77cbe33099f 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -364,6 +364,7 @@ E('ERR_VALUE_OUT_OF_RANGE', (start, end, value) => { return `The value of "${start}" must be ${end}. Received "${value}"`; }); E('ERR_ZLIB_BINDING_CLOSED', 'zlib binding closed'); +E('ERR_ZLIB_INITIALIZATION_FAILED', 'Initialization failed'); function invalidArgType(name, expected, actual) { internalAssert(name, 'name is required'); diff --git a/lib/zlib.js b/lib/zlib.js index 059b9aac91e..751c1765598 100644 --- a/lib/zlib.js +++ b/lib/zlib.js @@ -161,6 +161,12 @@ function Zlib(opts, mode) { var memLevel = Z_DEFAULT_MEMLEVEL; var strategy = Z_DEFAULT_STRATEGY; var dictionary; + + if (typeof mode !== 'number') + throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'mode', 'number'); + if (mode < DEFLATE || mode > UNZIP) + throw new errors.RangeError('ERR_OUT_OF_RANGE', 'mode'); + if (opts) { chunkSize = opts.chunkSize; if (chunkSize !== undefined && chunkSize === chunkSize) { @@ -258,8 +264,15 @@ function Zlib(opts, mode) { this._hadError = false; this._writeState = new Uint32Array(2); - this._handle.init(windowBits, level, memLevel, strategy, this._writeState, - processCallback, dictionary); + if (!this._handle.init(windowBits, + level, + memLevel, + strategy, + this._writeState, + processCallback, + dictionary)) { + throw new errors.Error('ERR_ZLIB_INITIALIZATION_FAILED'); + } this._outBuffer = Buffer.allocUnsafe(chunkSize); this._outOffset = 0; diff --git a/src/node_zlib.cc b/src/node_zlib.cc index fdfd3142226..93583181608 100644 --- a/src/node_zlib.cc +++ b/src/node_zlib.cc @@ -418,16 +418,8 @@ class ZCtx : public AsyncWrap { static void New(const FunctionCallbackInfo& args) { 
Environment* env = Environment::GetCurrent(args); - - if (args.Length() < 1 || !args[0]->IsInt32()) { - return env->ThrowTypeError("Bad argument"); - } + CHECK(args[0]->IsInt32()); node_zlib_mode mode = static_cast(args[0]->Int32Value()); - - if (mode < DEFLATE || mode > UNZIP) { - return env->ThrowTypeError("Bad argument"); - } - new ZCtx(env, args.This(), mode); } @@ -476,9 +468,14 @@ class ZCtx : public AsyncWrap { memcpy(dictionary, dictionary_, dictionary_len); } - Init(ctx, level, windowBits, memLevel, strategy, write_result, - write_js_callback, dictionary, dictionary_len); + bool ret = Init(ctx, level, windowBits, memLevel, strategy, write_result, + write_js_callback, dictionary, dictionary_len); + if (!ret) goto end; + SetDictionary(ctx); + + end: + return args.GetReturnValue().Set(ret); } static void Params(const FunctionCallbackInfo& args) { @@ -495,7 +492,7 @@ class ZCtx : public AsyncWrap { SetDictionary(ctx); } - static void Init(ZCtx *ctx, int level, int windowBits, int memLevel, + static bool Init(ZCtx *ctx, int level, int windowBits, int memLevel, int strategy, uint32_t* write_result, Local write_js_callback, char* dictionary, size_t dictionary_len) { @@ -561,11 +558,12 @@ class ZCtx : public AsyncWrap { ctx->dictionary_ = nullptr; } ctx->mode_ = NONE; - ctx->env()->ThrowError("Init error"); + return false; } ctx->write_result_ = write_result; ctx->write_js_callback_.Reset(ctx->env()->isolate(), write_js_callback); + return true; } static void SetDictionary(ZCtx* ctx) { diff --git a/test/parallel/test-zlib-failed-init.js b/test/parallel/test-zlib-failed-init.js index 8597543429e..7dd53ea3702 100644 --- a/test/parallel/test-zlib-failed-init.js +++ b/test/parallel/test-zlib-failed-init.js @@ -11,9 +11,13 @@ const zlib = require('zlib'); // no such rejection which is the reason for the version check below // (http://zlib.net/ChangeLog.txt). 
if (!/^1\.2\.[0-8]$/.test(process.versions.zlib)) { - assert.throws(() => { - zlib.createDeflateRaw({ windowBits: 8 }); - }, /^Error: Init error$/); + common.expectsError( + () => zlib.createDeflateRaw({ windowBits: 8 }), + { + code: 'ERR_ZLIB_INITIALIZATION_FAILED', + type: Error, + message: 'Initialization failed' + }); } // Regression tests for bugs in the validation logic.