diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b9fe7ac42e..e2787fbc8ba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,7 +29,8 @@ release. -9.4.0
+9.5.0
+9.4.0
9.3.0
9.2.1
9.2.0
diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js index 55ebcc96ba2..76e02504b27 100644 --- a/benchmark/_http-benchmarkers.js +++ b/benchmark/_http-benchmarkers.js @@ -185,7 +185,7 @@ exports.run = function(options, callback) { port: exports.PORT, path: '/', connections: 100, - duration: 10, + duration: 5, benchmarker: exports.default_http_benchmarker }, options); if (!options.benchmarker) { diff --git a/benchmark/assert/deepequal-buffer.js b/benchmark/assert/deepequal-buffer.js index 0e7494544d3..9556a81ec3b 100644 --- a/benchmark/assert/deepequal-buffer.js +++ b/benchmark/assert/deepequal-buffer.js @@ -14,8 +14,6 @@ const bench = common.createBenchmark(main, { }); function main({ len, n, method }) { - var i; - const data = Buffer.allocUnsafe(len + 1); const actual = Buffer.alloc(len); const expected = Buffer.alloc(len); @@ -24,40 +22,13 @@ function main({ len, n, method }) { data.copy(expected); data.copy(expectedWrong); - switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. - case 'deepEqual': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.deepEqual(actual, expected); - } - bench.end(n); - break; - case 'deepStrictEqual': - bench.start(); - for (i = 0; i < n; ++i) { - assert.deepStrictEqual(actual, expected); - } - bench.end(n); - break; - case 'notDeepEqual': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.notDeepEqual(actual, expectedWrong); - } - bench.end(n); - break; - case 'notDeepStrictEqual': - bench.start(); - for (i = 0; i < n; ++i) { - assert.notDeepStrictEqual(actual, expectedWrong); - } - bench.end(n); - break; - default: - throw new Error('Unsupported method'); + // eslint-disable-next-line no-restricted-properties + const fn = method !== '' ? assert[method] : assert.deepEqual; + const value2 = method.includes('not') ? 
expectedWrong : expected; + + bench.start(); + for (var i = 0; i < n; ++i) { + fn(actual, value2); } + bench.end(n); } diff --git a/benchmark/assert/deepequal-map.js b/benchmark/assert/deepequal-map.js index 085274e8bfb..bdd3c5c6b8c 100644 --- a/benchmark/assert/deepequal-map.js +++ b/benchmark/assert/deepequal-map.js @@ -117,6 +117,6 @@ function main({ n, len, method }) { benchmark(assert.notDeepEqual, n, values, values2); break; default: - throw new Error('Unsupported method'); + throw new Error(`Unsupported method ${method}`); } } diff --git a/benchmark/assert/deepequal-object.js b/benchmark/assert/deepequal-object.js index 2c2549d5848..4c95006b3b8 100644 --- a/benchmark/assert/deepequal-object.js +++ b/benchmark/assert/deepequal-object.js @@ -28,47 +28,19 @@ function createObj(source, add = '') { function main({ size, n, method }) { // TODO: Fix this "hack". `n` should not be manipulated. n = n / size; - var i; const source = Array.apply(null, Array(size)); const actual = createObj(source); const expected = createObj(source); const expectedWrong = createObj(source, '4'); - switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. 
- case 'deepEqual': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.deepEqual(actual, expected); - } - bench.end(n); - break; - case 'deepStrictEqual': - bench.start(); - for (i = 0; i < n; ++i) { - assert.deepStrictEqual(actual, expected); - } - bench.end(n); - break; - case 'notDeepEqual': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.notDeepEqual(actual, expectedWrong); - } - bench.end(n); - break; - case 'notDeepStrictEqual': - bench.start(); - for (i = 0; i < n; ++i) { - assert.notDeepStrictEqual(actual, expectedWrong); - } - bench.end(n); - break; - default: - throw new Error('Unsupported method'); + // eslint-disable-next-line no-restricted-properties + const fn = method !== '' ? assert[method] : assert.deepEqual; + const value2 = method.includes('not') ? expectedWrong : expected; + + bench.start(); + for (var i = 0; i < n; ++i) { + fn(actual, value2); } + bench.end(n); } diff --git a/benchmark/assert/deepequal-prims-and-objs-big-array-set.js b/benchmark/assert/deepequal-prims-and-objs-big-array-set.js index 04802a76928..90dbf105936 100644 --- a/benchmark/assert/deepequal-prims-and-objs-big-array-set.js +++ b/benchmark/assert/deepequal-prims-and-objs-big-array-set.js @@ -30,12 +30,19 @@ const bench = common.createBenchmark(main, { ] }); +function run(fn, n, actual, expected) { + bench.start(); + for (var i = 0; i < n; ++i) { + fn(actual, expected); + } + bench.end(n); +} + function main({ n, len, primitive, method }) { const prim = primValues[primitive]; const actual = []; const expected = []; const expectedWrong = []; - var i; for (var x = 0; x < len; x++) { actual.push(prim); @@ -51,69 +58,37 @@ function main({ n, len, primitive, method }) { const expectedWrongSet = new Set(expectedWrong); switch (method) { + // Empty string falls through to next line as default, mostly for tests. 
case '': - // Empty string falls through to next line as default, mostly for tests. case 'deepEqual_Array': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.deepEqual(actual, expected); - } - bench.end(n); + // eslint-disable-next-line no-restricted-properties + run(assert.deepEqual, n, actual, expected); break; case 'deepStrictEqual_Array': - bench.start(); - for (i = 0; i < n; ++i) { - assert.deepStrictEqual(actual, expected); - } - bench.end(n); + run(assert.deepStrictEqual, n, actual, expected); break; case 'notDeepEqual_Array': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.notDeepEqual(actual, expectedWrong); - } - bench.end(n); + // eslint-disable-next-line no-restricted-properties + run(assert.notDeepEqual, n, actual, expectedWrong); break; case 'notDeepStrictEqual_Array': - bench.start(); - for (i = 0; i < n; ++i) { - assert.notDeepStrictEqual(actual, expectedWrong); - } - bench.end(n); + run(assert.notDeepStrictEqual, n, actual, expectedWrong); break; case 'deepEqual_Set': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.deepEqual(actualSet, expectedSet); - } - bench.end(n); + // eslint-disable-next-line no-restricted-properties + run(assert.deepEqual, n, actualSet, expectedSet); break; case 'deepStrictEqual_Set': - bench.start(); - for (i = 0; i < n; ++i) { - assert.deepStrictEqual(actualSet, expectedSet); - } - bench.end(n); + run(assert.deepStrictEqual, n, actualSet, expectedSet); break; case 'notDeepEqual_Set': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.notDeepEqual(actualSet, expectedWrongSet); - } - bench.end(n); + // eslint-disable-next-line no-restricted-properties + run(assert.notDeepEqual, n, actualSet, expectedWrongSet); break; case 'notDeepStrictEqual_Set': - bench.start(); - for (i = 0; i < n; ++i) 
{ - assert.notDeepStrictEqual(actualSet, expectedWrongSet); - } - bench.end(n); + run(assert.notDeepStrictEqual, n, actualSet, expectedWrongSet); break; default: - throw new Error('Unsupported method'); + throw new Error(`Unsupported method "${method}"`); } } diff --git a/benchmark/assert/deepequal-prims-and-objs-big-loop.js b/benchmark/assert/deepequal-prims-and-objs-big-loop.js index 09797dfaf2d..ec51201d518 100644 --- a/benchmark/assert/deepequal-prims-and-objs-big-loop.js +++ b/benchmark/assert/deepequal-prims-and-objs-big-loop.js @@ -29,43 +29,14 @@ function main({ n, primitive, method }) { const actual = prim; const expected = prim; const expectedWrong = 'b'; - var i; - // Creates new array to avoid loop invariant code motion - switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. - case 'deepEqual': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.deepEqual([actual], [expected]); - } - bench.end(n); - break; - case 'deepStrictEqual': - bench.start(); - for (i = 0; i < n; ++i) { - assert.deepStrictEqual([actual], [expected]); - } - bench.end(n); - break; - case 'notDeepEqual': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.notDeepEqual([actual], [expectedWrong]); - } - bench.end(n); - break; - case 'notDeepStrictEqual': - bench.start(); - for (i = 0; i < n; ++i) { - assert.notDeepStrictEqual([actual], [expectedWrong]); - } - bench.end(n); - break; - default: - throw new Error('Unsupported method'); + // eslint-disable-next-line no-restricted-properties + const fn = method !== '' ? assert[method] : assert.deepEqual; + const value2 = method.includes('not') ? 
expectedWrong : expected; + + bench.start(); + for (var i = 0; i < n; ++i) { + fn([actual], [value2]); } + bench.end(n); } diff --git a/benchmark/assert/deepequal-set.js b/benchmark/assert/deepequal-set.js index ebcf33cc6d5..e70ddf10e93 100644 --- a/benchmark/assert/deepequal-set.js +++ b/benchmark/assert/deepequal-set.js @@ -126,6 +126,6 @@ function main({ n, len, method }) { benchmark(assert.notDeepEqual, n, values, values2); break; default: - throw new Error('Unsupported method'); + throw new Error(`Unsupported method "${method}"`); } } diff --git a/benchmark/assert/deepequal-typedarrays.js b/benchmark/assert/deepequal-typedarrays.js index 01546801ff3..50e6e525b20 100644 --- a/benchmark/assert/deepequal-typedarrays.js +++ b/benchmark/assert/deepequal-typedarrays.js @@ -31,42 +31,14 @@ function main({ type, n, len, method }) { const expectedWrong = Buffer.alloc(len); const wrongIndex = Math.floor(len / 2); expectedWrong[wrongIndex] = 123; - var i; - switch (method) { - case '': - // Empty string falls through to next line as default, mostly for tests. - case 'deepEqual': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.deepEqual(actual, expected); - } - bench.end(n); - break; - case 'deepStrictEqual': - bench.start(); - for (i = 0; i < n; ++i) { - assert.deepStrictEqual(actual, expected); - } - bench.end(n); - break; - case 'notDeepEqual': - bench.start(); - for (i = 0; i < n; ++i) { - // eslint-disable-next-line no-restricted-properties - assert.notDeepEqual(actual, expectedWrong); - } - bench.end(n); - break; - case 'notDeepStrictEqual': - bench.start(); - for (i = 0; i < n; ++i) { - assert.notDeepStrictEqual(actual, expectedWrong); - } - bench.end(n); - break; - default: - throw new Error('Unsupported method'); + // eslint-disable-next-line no-restricted-properties + const fn = method !== '' ? assert[method] : assert.deepEqual; + const value2 = method.includes('not') ? 
expectedWrong : expected; + + bench.start(); + for (var i = 0; i < n; ++i) { + fn(actual, value2); } + bench.end(n); } diff --git a/benchmark/async_hooks/gc-tracking.js b/benchmark/async_hooks/gc-tracking.js index a569fb8fa92..d74b2bac463 100644 --- a/benchmark/async_hooks/gc-tracking.js +++ b/benchmark/async_hooks/gc-tracking.js @@ -22,22 +22,23 @@ function endAfterGC(n) { } function main({ n, method }) { + var i; switch (method) { case 'trackingEnabled': bench.start(); - for (let i = 0; i < n; i++) { + for (i = 0; i < n; i++) { new AsyncResource('foobar'); } endAfterGC(n); break; case 'trackingDisabled': bench.start(); - for (let i = 0; i < n; i++) { + for (i = 0; i < n; i++) { new AsyncResource('foobar', { requireManualDestroy: true }); } endAfterGC(n); break; default: - throw new Error('Unsupported method'); + throw new Error(`Unsupported method "${method}"`); } } diff --git a/benchmark/buffers/buffer-bytelength.js b/benchmark/buffers/buffer-bytelength.js index 0617b4feb3f..fa8852a233e 100644 --- a/benchmark/buffers/buffer-bytelength.js +++ b/benchmark/buffers/buffer-bytelength.js @@ -17,10 +17,9 @@ const chars = [ function main({ n, len, encoding }) { var strings = []; - var results; + var results = [ len * 16 ]; if (encoding === 'buffer') { strings = [ Buffer.alloc(len * 16, 'a') ]; - results = [ len * 16 ]; } else { for (const string of chars) { // Strings must be built differently, depending on encoding diff --git a/benchmark/buffers/buffer-compare-offset.js b/benchmark/buffers/buffer-compare-offset.js index 850fe11d3f4..551fcd2f0ce 100644 --- a/benchmark/buffers/buffer-compare-offset.js +++ b/benchmark/buffers/buffer-compare-offset.js @@ -8,26 +8,22 @@ const bench = common.createBenchmark(main, { }); function compareUsingSlice(b0, b1, len, iter) { - var i; - bench.start(); - for (i = 0; i < iter; i++) + for (var i = 0; i < iter; i++) Buffer.compare(b0.slice(1, len), b1.slice(1, len)); - bench.end(iter / 1e6); } function compareUsingOffset(b0, b1, len, 
iter) { - var i; - bench.start(); - for (i = 0; i < iter; i++) + for (var i = 0; i < iter; i++) b0.compare(b1, 1, len, 1, len); - bench.end(iter / 1e6); } function main({ millions, size, method }) { const iter = millions * 1e6; const fn = method === 'slice' ? compareUsingSlice : compareUsingOffset; + bench.start(); fn(Buffer.alloc(size, 'a'), Buffer.alloc(size, 'b'), size >> 1, iter); + bench.end(millions); } diff --git a/benchmark/buffers/buffer-creation.js b/benchmark/buffers/buffer-creation.js index 73e620955e9..a7b340131eb 100644 --- a/benchmark/buffers/buffer-creation.js +++ b/benchmark/buffers/buffer-creation.js @@ -16,51 +16,38 @@ const bench = common.createBenchmark(main, { }); function main({ len, n, type }) { + let fn, i; switch (type) { case '': case 'fast-alloc': - bench.start(); - for (let i = 0; i < n * 1024; i++) { - Buffer.alloc(len); - } - bench.end(n); + fn = Buffer.alloc; break; case 'fast-alloc-fill': bench.start(); - for (let i = 0; i < n * 1024; i++) { + for (i = 0; i < n * 1024; i++) { Buffer.alloc(len, 0); } bench.end(n); - break; + return; case 'fast-allocUnsafe': - bench.start(); - for (let i = 0; i < n * 1024; i++) { - Buffer.allocUnsafe(len); - } - bench.end(n); + fn = Buffer.allocUnsafe; break; case 'slow-allocUnsafe': - bench.start(); - for (let i = 0; i < n * 1024; i++) { - Buffer.allocUnsafeSlow(len); - } - bench.end(n); + fn = Buffer.allocUnsafeSlow; break; case 'slow': - bench.start(); - for (let i = 0; i < n * 1024; i++) { - SlowBuffer(len); - } - bench.end(n); + fn = SlowBuffer; break; case 'buffer()': - bench.start(); - for (let i = 0; i < n * 1024; i++) { - Buffer(len); - } - bench.end(n); + fn = Buffer; break; default: - assert.fail(null, null, 'Should not get here'); + assert.fail('Should not get here'); + } + + bench.start(); + for (i = 0; i < n * 1024; i++) { + fn(len); } + bench.end(n); } diff --git a/benchmark/buffers/buffer-hex.js b/benchmark/buffers/buffer-hex.js index 1bdef81139f..4d87313961a 100644 --- 
a/benchmark/buffers/buffer-hex.js +++ b/benchmark/buffers/buffer-hex.js @@ -9,15 +9,16 @@ const bench = common.createBenchmark(main, { function main({ len, n }) { const buf = Buffer.alloc(len); + var i; - for (let i = 0; i < buf.length; i++) + for (i = 0; i < buf.length; i++) buf[i] = i & 0xff; const hex = buf.toString('hex'); bench.start(); - for (let i = 0; i < n; i += 1) + for (i = 0; i < n; i += 1) Buffer.from(hex, 'hex'); bench.end(n); diff --git a/benchmark/buffers/buffer-iterate.js b/benchmark/buffers/buffer-iterate.js index 8531e1cae82..7a275b0bcb8 100644 --- a/benchmark/buffers/buffer-iterate.js +++ b/benchmark/buffers/buffer-iterate.js @@ -20,36 +20,30 @@ function main({ size, type, method, n }) { const clazz = type === 'fast' ? Buffer : SlowBuffer; const buffer = new clazz(size); buffer.fill(0); - methods[method || 'for'](buffer, n); -} - + const fn = methods[method || 'for']; -function benchFor(buffer, n) { bench.start(); + fn(buffer, n); + bench.end(n); +} +function benchFor(buffer, n) { for (var k = 0; k < n; k++) { for (var i = 0; i < buffer.length; i++) { assert(buffer[i] === 0); } } - - bench.end(n); } function benchForOf(buffer, n) { - bench.start(); - for (var k = 0; k < n; k++) { for (const b of buffer) { assert(b === 0); } } - bench.end(n); } function benchIterator(buffer, n) { - bench.start(); - for (var k = 0; k < n; k++) { const iter = buffer[Symbol.iterator](); var cur = iter.next(); @@ -60,6 +54,4 @@ function benchIterator(buffer, n) { } } - - bench.end(n); } diff --git a/benchmark/buffers/buffer-read-float.js b/benchmark/buffers/buffer-read-float.js index 5dda2486c67..afd9edf5578 100644 --- a/benchmark/buffers/buffer-read-float.js +++ b/benchmark/buffers/buffer-read-float.js @@ -9,12 +9,10 @@ const bench = common.createBenchmark(main, { millions: [1] }); -function main(conf) { - const noAssert = conf.noAssert === 'true'; - const len = +conf.millions * 1e6; +function main({ noAssert, millions, type, endian, value }) { + noAssert = noAssert 
=== 'true'; + type = type || 'Double'; const buff = Buffer.alloc(8); - const type = conf.type || 'Double'; - const endian = conf.endian; const fn = `read${type}${endian}`; const values = { Double: { @@ -32,15 +30,12 @@ function main(conf) { nan: NaN, }, }; - const value = values[type][conf.value]; - buff[`write${type}${endian}`](value, 0, noAssert); - const testFunction = new Function('buff', ` - for (var i = 0; i !== ${len}; i++) { - buff.${fn}(0, ${JSON.stringify(noAssert)}); - } - `); + buff[`write${type}${endian}`](values[type][value], 0, noAssert); + bench.start(); - testFunction(buff); - bench.end(len / 1e6); + for (var i = 0; i !== millions * 1e6; i++) { + buff[fn](0, noAssert); + } + bench.end(millions); } diff --git a/benchmark/buffers/buffer-read-with-byteLength.js b/benchmark/buffers/buffer-read-with-byteLength.js index 2a659c1bec5..9fe5e6f4bf4 100644 --- a/benchmark/buffers/buffer-read-with-byteLength.js +++ b/benchmark/buffers/buffer-read-with-byteLength.js @@ -16,21 +16,17 @@ const bench = common.createBenchmark(main, { byteLength: [1, 2, 4, 6] }); -function main(conf) { - const noAssert = conf.noAssert === 'true'; - const len = conf.millions * 1e6; - const clazz = conf.buf === 'fast' ? Buffer : require('buffer').SlowBuffer; +function main({ millions, noAssert, buf, type, byteLength }) { + noAssert = noAssert === 'true'; + type = type || 'UInt8'; + const clazz = buf === 'fast' ? 
Buffer : require('buffer').SlowBuffer; const buff = new clazz(8); - const type = conf.type || 'UInt8'; const fn = `read${type}`; buff.writeDoubleLE(0, 0, noAssert); - const testFunction = new Function('buff', ` - for (var i = 0; i !== ${len}; i++) { - buff.${fn}(0, ${conf.byteLength}, ${JSON.stringify(noAssert)}); - } - `); bench.start(); - testFunction(buff); - bench.end(len / 1e6); + for (var i = 0; i !== millions * 1e6; i++) { + buff[fn](0, byteLength, noAssert); + } + bench.end(millions); } diff --git a/benchmark/buffers/buffer-read.js b/benchmark/buffers/buffer-read.js index 41e842f3123..868f5cede8b 100644 --- a/benchmark/buffers/buffer-read.js +++ b/benchmark/buffers/buffer-read.js @@ -27,18 +27,14 @@ const bench = common.createBenchmark(main, { function main({ noAssert, millions, buf, type }) { noAssert = noAssert === 'true'; - const len = millions * 1e6; const clazz = buf === 'fast' ? Buffer : require('buffer').SlowBuffer; const buff = new clazz(8); const fn = `read${type || 'UInt8'}`; buff.writeDoubleLE(0, 0, noAssert); - const testFunction = new Function('buff', ` - for (var i = 0; i !== ${len}; i++) { - buff.${fn}(0, ${JSON.stringify(noAssert)}); - } - `); bench.start(); - testFunction(buff); - bench.end(len / 1e6); + for (var i = 0; i !== millions * 1e6; i++) { + buff[fn](0, noAssert); + } + bench.end(millions); } diff --git a/benchmark/buffers/buffer-write.js b/benchmark/buffers/buffer-write.js index ce2fbe3103c..823e95bf15d 100644 --- a/benchmark/buffers/buffer-write.js +++ b/benchmark/buffers/buffer-write.js @@ -46,36 +46,29 @@ const mod = { }; function main({ noAssert, millions, buf, type }) { - const len = millions * 1e6; const clazz = buf === 'fast' ? 
Buffer : require('buffer').SlowBuffer; const buff = new clazz(8); const fn = `write${type || 'UInt8'}`; if (/Int/.test(fn)) - benchInt(buff, fn, len, noAssert); + benchInt(buff, fn, millions, noAssert); else - benchFloat(buff, fn, len, noAssert); + benchFloat(buff, fn, millions, noAssert); } -function benchInt(buff, fn, len, noAssert) { +function benchInt(buff, fn, millions, noAssert) { const m = mod[fn]; - const testFunction = new Function('buff', ` - for (var i = 0; i !== ${len}; i++) { - buff.${fn}(i & ${m}, 0, ${noAssert}); - } - `); bench.start(); - testFunction(buff); - bench.end(len / 1e6); + for (var i = 0; i !== millions * 1e6; i++) { + buff[fn](i & m, 0, noAssert); + } + bench.end(millions); } -function benchFloat(buff, fn, len, noAssert) { - const testFunction = new Function('buff', ` - for (var i = 0; i !== ${len}; i++) { - buff.${fn}(i, 0, ${noAssert}); - } - `); +function benchFloat(buff, fn, millions, noAssert) { bench.start(); - testFunction(buff); - bench.end(len / 1e6); + for (var i = 0; i !== millions * 1e6; i++) { + buff[fn](i, 0, noAssert); + } + bench.end(millions); } diff --git a/benchmark/buffers/buffer_zero.js b/benchmark/buffers/buffer_zero.js index 06b68c313f1..1263732dce8 100644 --- a/benchmark/buffers/buffer_zero.js +++ b/benchmark/buffers/buffer_zero.js @@ -11,12 +11,9 @@ const zeroBuffer = Buffer.alloc(0); const zeroString = ''; function main({ n, type }) { - bench.start(); - - if (type === 'buffer') - for (let i = 0; i < n * 1024; i++) Buffer.from(zeroBuffer); - else if (type === 'string') - for (let i = 0; i < n * 1024; i++) Buffer.from(zeroString); + const data = type === 'buffer' ? 
zeroBuffer : zeroString; + bench.start(); + for (var i = 0; i < n * 1024; i++) Buffer.from(data); bench.end(n); } diff --git a/benchmark/crypto/aes-gcm-throughput.js b/benchmark/crypto/aes-gcm-throughput.js index 246455de78a..5c1e71e7280 100644 --- a/benchmark/crypto/aes-gcm-throughput.js +++ b/benchmark/crypto/aes-gcm-throughput.js @@ -8,13 +8,13 @@ const bench = common.createBenchmark(main, { len: [1024, 4 * 1024, 16 * 1024, 64 * 1024, 256 * 1024, 1024 * 1024] }); -function main(conf) { - const message = Buffer.alloc(conf.len, 'b'); - const key = crypto.randomBytes(keylen[conf.cipher]); +function main({ n, len, cipher }) { + const message = Buffer.alloc(len, 'b'); + const key = crypto.randomBytes(keylen[cipher]); const iv = crypto.randomBytes(12); const associate_data = Buffer.alloc(16, 'z'); bench.start(); - AEAD_Bench(conf.cipher, message, associate_data, key, iv, conf.n, conf.len); + AEAD_Bench(cipher, message, associate_data, key, iv, n, len); } function AEAD_Bench(cipher, message, associate_data, key, iv, n, len) { diff --git a/benchmark/crypto/cipher-stream.js b/benchmark/crypto/cipher-stream.js index ca36bd736d9..64f6ff7b729 100644 --- a/benchmark/crypto/cipher-stream.js +++ b/benchmark/crypto/cipher-stream.js @@ -9,8 +9,7 @@ const bench = common.createBenchmark(main, { api: ['legacy', 'stream'] }); -function main(conf) { - var api = conf.api; +function main({ api, cipher, type, len, writes }) { if (api === 'stream' && /^v0\.[0-8]\./.test(process.version)) { console.error('Crypto streams not available until v0.10'); // use the legacy, just so that we can compare them. 
@@ -33,25 +32,25 @@ function main(conf) { // alice_secret and bob_secret should be the same assert(alice_secret === bob_secret); - const alice_cipher = crypto.createCipher(conf.cipher, alice_secret); - const bob_cipher = crypto.createDecipher(conf.cipher, bob_secret); + const alice_cipher = crypto.createCipher(cipher, alice_secret); + const bob_cipher = crypto.createDecipher(cipher, bob_secret); var message; var encoding; - switch (conf.type) { + switch (type) { case 'asc': - message = 'a'.repeat(conf.len); + message = 'a'.repeat(len); encoding = 'ascii'; break; case 'utf': - message = 'ü'.repeat(conf.len / 2); + message = 'ü'.repeat(len / 2); encoding = 'utf8'; break; case 'buf': - message = Buffer.alloc(conf.len, 'b'); + message = Buffer.alloc(len, 'b'); break; default: - throw new Error(`unknown message type: ${conf.type}`); + throw new Error(`unknown message type: ${type}`); } const fn = api === 'stream' ? streamWrite : legacyWrite; @@ -59,7 +58,7 @@ function main(conf) { // write data as fast as possible to alice, and have bob decrypt. 
// use old API for comparison to v0.8 bench.start(); - fn(alice_cipher, bob_cipher, message, encoding, conf.writes); + fn(alice_cipher, bob_cipher, message, encoding, writes); } function streamWrite(alice, bob, message, encoding, writes) { diff --git a/benchmark/crypto/get-ciphers.js b/benchmark/crypto/get-ciphers.js index 3f5ad17ad38..d4c10a2427d 100644 --- a/benchmark/crypto/get-ciphers.js +++ b/benchmark/crypto/get-ciphers.js @@ -7,12 +7,10 @@ const bench = common.createBenchmark(main, { v: ['crypto', 'tls'] }); -function main(conf) { - const n = +conf.n; - const v = conf.v; +function main({ n, v }) { const method = require(v).getCiphers; var i = 0; - // first call to getChipers will dominate the results + // First call to getChipers will dominate the results if (n > 1) { for (; i < n; i++) method(); diff --git a/benchmark/crypto/hash-stream-creation.js b/benchmark/crypto/hash-stream-creation.js index 5ac5a4f70b5..faaa12a9e5d 100644 --- a/benchmark/crypto/hash-stream-creation.js +++ b/benchmark/crypto/hash-stream-creation.js @@ -13,8 +13,7 @@ const bench = common.createBenchmark(main, { api: ['legacy', 'stream'] }); -function main(conf) { - var api = conf.api; +function main({ api, type, len, out, writes, algo }) { if (api === 'stream' && /^v0\.[0-8]\./.test(process.version)) { console.error('Crypto streams not available until v0.10'); // use the legacy, just so that we can compare them. @@ -23,26 +22,26 @@ function main(conf) { var message; var encoding; - switch (conf.type) { + switch (type) { case 'asc': - message = 'a'.repeat(conf.len); + message = 'a'.repeat(len); encoding = 'ascii'; break; case 'utf': - message = 'ü'.repeat(conf.len / 2); + message = 'ü'.repeat(len / 2); encoding = 'utf8'; break; case 'buf': - message = Buffer.alloc(conf.len, 'b'); + message = Buffer.alloc(len, 'b'); break; default: - throw new Error(`unknown message type: ${conf.type}`); + throw new Error(`unknown message type: ${type}`); } const fn = api === 'stream' ? 
streamWrite : legacyWrite; bench.start(); - fn(conf.algo, message, encoding, conf.writes, conf.len, conf.out); + fn(algo, message, encoding, writes, len, out); } function legacyWrite(algo, message, encoding, writes, len, outEnc) { diff --git a/benchmark/crypto/hash-stream-throughput.js b/benchmark/crypto/hash-stream-throughput.js index 21ec3c7902b..934e7a0b11b 100644 --- a/benchmark/crypto/hash-stream-throughput.js +++ b/benchmark/crypto/hash-stream-throughput.js @@ -12,8 +12,7 @@ const bench = common.createBenchmark(main, { api: ['legacy', 'stream'] }); -function main(conf) { - var api = conf.api; +function main({ api, type, len, algo, writes }) { if (api === 'stream' && /^v0\.[0-8]\./.test(process.version)) { console.error('Crypto streams not available until v0.10'); // use the legacy, just so that we can compare them. @@ -22,26 +21,26 @@ function main(conf) { var message; var encoding; - switch (conf.type) { + switch (type) { case 'asc': - message = 'a'.repeat(conf.len); + message = 'a'.repeat(len); encoding = 'ascii'; break; case 'utf': - message = 'ü'.repeat(conf.len / 2); + message = 'ü'.repeat(len / 2); encoding = 'utf8'; break; case 'buf': - message = Buffer.alloc(conf.len, 'b'); + message = Buffer.alloc(len, 'b'); break; default: - throw new Error(`unknown message type: ${conf.type}`); + throw new Error(`unknown message type: ${type}`); } const fn = api === 'stream' ? 
streamWrite : legacyWrite; bench.start(); - fn(conf.algo, message, encoding, conf.writes, conf.len); + fn(algo, message, encoding, writes, len); } function legacyWrite(algo, message, encoding, writes, len) { diff --git a/benchmark/crypto/rsa-encrypt-decrypt-throughput.js b/benchmark/crypto/rsa-encrypt-decrypt-throughput.js index edab5ae08f7..40b69c31f97 100644 --- a/benchmark/crypto/rsa-encrypt-decrypt-throughput.js +++ b/benchmark/crypto/rsa-encrypt-decrypt-throughput.js @@ -22,10 +22,10 @@ const bench = common.createBenchmark(main, { len: [16, 32, 64] }); -function main(conf) { - const message = Buffer.alloc(conf.len, 'b'); +function main({ len, algo, keylen, n }) { + const message = Buffer.alloc(len, 'b'); bench.start(); - StreamWrite(conf.algo, conf.keylen, message, conf.n, conf.len); + StreamWrite(algo, keylen, message, n, len); } function StreamWrite(algo, keylen, message, n, len) { diff --git a/benchmark/crypto/rsa-sign-verify-throughput.js b/benchmark/crypto/rsa-sign-verify-throughput.js index bcde3a43d4d..3a0373b57d0 100644 --- a/benchmark/crypto/rsa-sign-verify-throughput.js +++ b/benchmark/crypto/rsa-sign-verify-throughput.js @@ -23,10 +23,10 @@ const bench = common.createBenchmark(main, { len: [1024, 102400, 2 * 102400, 3 * 102400, 1024 * 1024] }); -function main(conf) { - const message = Buffer.alloc(conf.len, 'b'); +function main({ len, algo, keylen, writes }) { + const message = Buffer.alloc(len, 'b'); bench.start(); - StreamWrite(conf.algo, conf.keylen, message, conf.writes, conf.len); + StreamWrite(algo, keylen, message, writes, len); } function StreamWrite(algo, keylen, message, writes, len) { diff --git a/benchmark/dgram/bind-params.js b/benchmark/dgram/bind-params.js index 5f7999f7a39..ea1f430eed9 100644 --- a/benchmark/dgram/bind-params.js +++ b/benchmark/dgram/bind-params.js @@ -15,10 +15,11 @@ const noop = () => {}; function main({ n, port, address }) { port = port === 'true' ? 0 : undefined; address = address === 'true' ? 
'0.0.0.0' : undefined; + var i; if (port !== undefined && address !== undefined) { bench.start(); - for (let i = 0; i < n; i++) { + for (i = 0; i < n; i++) { dgram.createSocket('udp4').bind(port, address) .on('error', noop) .unref(); @@ -26,7 +27,7 @@ function main({ n, port, address }) { bench.end(n); } else if (port !== undefined) { bench.start(); - for (let i = 0; i < n; i++) { + for (i = 0; i < n; i++) { dgram.createSocket('udp4') .bind(port) .on('error', noop) @@ -35,7 +36,7 @@ function main({ n, port, address }) { bench.end(n); } else if (port === undefined && address === undefined) { bench.start(); - for (let i = 0; i < n; i++) { + for (i = 0; i < n; i++) { dgram.createSocket('udp4') .bind() .on('error', noop) diff --git a/benchmark/domain/domain-fn-args.js b/benchmark/domain/domain-fn-args.js index fe912e34d20..c889b35442d 100644 --- a/benchmark/domain/domain-fn-args.js +++ b/benchmark/domain/domain-fn-args.js @@ -28,15 +28,6 @@ function main({ n, args }) { bench.end(n); } -function fn(a, b, c) { - if (!a) - a = 1; - - if (!b) - b = 2; - - if (!c) - c = 3; - +function fn(a = 1, b = 2, c = 3) { return a + b + c; } diff --git a/benchmark/es/defaultparams-bench.js b/benchmark/es/defaultparams-bench.js index ce2132718ca..a00b50137c1 100644 --- a/benchmark/es/defaultparams-bench.js +++ b/benchmark/es/defaultparams-bench.js @@ -20,37 +20,31 @@ function defaultParams(x = 1, y = 2) { assert.strictEqual(y, 2); } -function runOldStyleDefaults(n) { - - var i = 0; +function runOldStyleDefaults(millions) { bench.start(); - for (; i < n; i++) + for (var i = 0; i < millions * 1e6; i++) oldStyleDefaults(); - bench.end(n / 1e6); + bench.end(millions); } -function runDefaultParams(n) { - - var i = 0; +function runDefaultParams(millions) { bench.start(); - for (; i < n; i++) + for (var i = 0; i < millions * 1e6; i++) defaultParams(); - bench.end(n / 1e6); + bench.end(millions); } function main({ millions, method }) { - const n = millions * 1e6; - switch (method) { case '': // 
Empty string falls through to next line as default, mostly for tests. case 'withoutdefaults': - runOldStyleDefaults(n); + runOldStyleDefaults(millions); break; case 'withdefaults': - runDefaultParams(n); + runDefaultParams(millions); break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } } diff --git a/benchmark/es/destructuring-bench.js b/benchmark/es/destructuring-bench.js index f244506860d..2168940bdc4 100644 --- a/benchmark/es/destructuring-bench.js +++ b/benchmark/es/destructuring-bench.js @@ -8,10 +8,10 @@ const bench = common.createBenchmark(main, { millions: [100] }); -function runSwapManual(n) { +function runSwapManual(millions) { var x, y, r; bench.start(); - for (var i = 0; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { x = 1, y = 2; r = x; x = y; @@ -19,34 +19,32 @@ function runSwapManual(n) { assert.strictEqual(x, 2); assert.strictEqual(y, 1); } - bench.end(n / 1e6); + bench.end(millions); } -function runSwapDestructured(n) { +function runSwapDestructured(millions) { var x, y; bench.start(); - for (var i = 0; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { x = 1, y = 2; [x, y] = [y, x]; assert.strictEqual(x, 2); assert.strictEqual(y, 1); } - bench.end(n / 1e6); + bench.end(millions); } function main({ millions, method }) { - const n = millions * 1e6; - switch (method) { case '': // Empty string falls through to next line as default, mostly for tests. 
case 'swap': - runSwapManual(n); + runSwapManual(millions); break; case 'destructure': - runSwapDestructured(n); + runSwapDestructured(millions); break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } } diff --git a/benchmark/es/destructuring-object-bench.js b/benchmark/es/destructuring-object-bench.js index 73687f018de..a84977c59bc 100644 --- a/benchmark/es/destructuring-object-bench.js +++ b/benchmark/es/destructuring-object-bench.js @@ -7,45 +7,43 @@ const bench = common.createBenchmark(main, { millions: [100] }); -function runNormal(n) { +function runNormal(millions) { var i = 0; const o = { x: 0, y: 1 }; bench.start(); - for (; i < n; i++) { + for (; i < millions * 1e6; i++) { /* eslint-disable no-unused-vars */ const x = o.x; const y = o.y; const r = o.r || 2; /* eslint-enable no-unused-vars */ } - bench.end(n / 1e6); + bench.end(millions); } -function runDestructured(n) { +function runDestructured(millions) { var i = 0; const o = { x: 0, y: 1 }; bench.start(); - for (; i < n; i++) { + for (; i < millions * 1e6; i++) { /* eslint-disable no-unused-vars */ const { x, y, r = 2 } = o; /* eslint-enable no-unused-vars */ } - bench.end(n / 1e6); + bench.end(millions); } function main({ millions, method }) { - const n = millions * 1e6; - switch (method) { case '': // Empty string falls through to next line as default, mostly for tests. 
case 'normal': - runNormal(n); + runNormal(millions); break; case 'destructureObject': - runDestructured(n); + runDestructured(millions); break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } } diff --git a/benchmark/es/foreach-bench.js b/benchmark/es/foreach-bench.js index c7caa7cee6f..e9179ed8ded 100644 --- a/benchmark/es/foreach-bench.js +++ b/benchmark/es/foreach-bench.js @@ -8,56 +8,51 @@ const bench = common.createBenchmark(main, { millions: [5] }); -function useFor(n, items, count) { - var i, j; +function useFor(millions, items, count) { bench.start(); - for (i = 0; i < n; i++) { - for (j = 0; j < count; j++) { + for (var i = 0; i < millions * 1e6; i++) { + for (var j = 0; j < count; j++) { /* eslint-disable no-unused-vars */ const item = items[j]; /* esline-enable no-unused-vars */ } } - bench.end(n / 1e6); + bench.end(millions); } -function useForOf(n, items) { - var i, item; +function useForOf(millions, items) { + var item; bench.start(); - for (i = 0; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { for (item of items) {} } - bench.end(n / 1e6); + bench.end(millions); } -function useForIn(n, items) { - var i, j, item; +function useForIn(millions, items) { bench.start(); - for (i = 0; i < n; i++) { - for (j in items) { + for (var i = 0; i < millions * 1e6; i++) { + for (var j in items) { /* eslint-disable no-unused-vars */ - item = items[j]; + const item = items[j]; /* esline-enable no-unused-vars */ } } - bench.end(n / 1e6); + bench.end(millions); } -function useForEach(n, items) { - var i; +function useForEach(millions, items) { bench.start(); - for (i = 0; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { items.forEach((item) => {}); } - bench.end(n / 1e6); + bench.end(millions); } function main({ millions, count, method }) { - const n = millions * 1e6; const items = new Array(count); - var i; var fn; - for (i = 0; i < count; i++) + for (var i = 0; i < count; i++) items[i] = i; 
switch (method) { @@ -76,7 +71,7 @@ function main({ millions, count, method }) { fn = useForEach; break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } - fn(n, items, count); + fn(millions, items, count); } diff --git a/benchmark/es/map-bench.js b/benchmark/es/map-bench.js index ba8e35c2eb9..445031aa983 100644 --- a/benchmark/es/map-bench.js +++ b/benchmark/es/map-bench.js @@ -11,63 +11,59 @@ const bench = common.createBenchmark(main, { millions: [1] }); -function runObject(n) { +function runObject(millions) { const m = {}; - var i = 0; bench.start(); - for (; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { m[`i${i}`] = i; m[`s${i}`] = String(i); assert.strictEqual(String(m[`i${i}`]), m[`s${i}`]); m[`i${i}`] = undefined; m[`s${i}`] = undefined; } - bench.end(n / 1e6); + bench.end(millions); } -function runNullProtoObject(n) { +function runNullProtoObject(millions) { const m = Object.create(null); - var i = 0; bench.start(); - for (; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { m[`i${i}`] = i; m[`s${i}`] = String(i); assert.strictEqual(String(m[`i${i}`]), m[`s${i}`]); m[`i${i}`] = undefined; m[`s${i}`] = undefined; } - bench.end(n / 1e6); + bench.end(millions); } -function runNullProtoLiteralObject(n) { +function runNullProtoLiteralObject(millions) { const m = { __proto__: null }; - var i = 0; bench.start(); - for (; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { m[`i${i}`] = i; m[`s${i}`] = String(i); assert.strictEqual(String(m[`i${i}`]), m[`s${i}`]); m[`i${i}`] = undefined; m[`s${i}`] = undefined; } - bench.end(n / 1e6); + bench.end(millions); } function StorageObject() {} StorageObject.prototype = Object.create(null); -function runStorageObject(n) { +function runStorageObject(millions) { const m = new StorageObject(); - var i = 0; bench.start(); - for (; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { m[`i${i}`] = i; m[`s${i}`] = String(i); 
assert.strictEqual(String(m[`i${i}`]), m[`s${i}`]); m[`i${i}`] = undefined; m[`s${i}`] = undefined; } - bench.end(n / 1e6); + bench.end(millions); } function fakeMap() { @@ -80,59 +76,55 @@ function fakeMap() { }; } -function runFakeMap(n) { +function runFakeMap(millions) { const m = fakeMap(); - var i = 0; bench.start(); - for (; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { m.set(`i${i}`, i); m.set(`s${i}`, String(i)); assert.strictEqual(String(m.get(`i${i}`)), m.get(`s${i}`)); m.set(`i${i}`, undefined); m.set(`s${i}`, undefined); } - bench.end(n / 1e6); + bench.end(millions); } -function runMap(n) { +function runMap(millions) { const m = new Map(); - var i = 0; bench.start(); - for (; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { m.set(`i${i}`, i); m.set(`s${i}`, String(i)); assert.strictEqual(String(m.get(`i${i}`)), m.get(`s${i}`)); m.set(`i${i}`, undefined); m.set(`s${i}`, undefined); } - bench.end(n / 1e6); + bench.end(millions); } function main({ millions, method }) { - const n = millions * 1e6; - switch (method) { case '': // Empty string falls through to next line as default, mostly for tests. 
case 'object': - runObject(n); + runObject(millions); break; case 'nullProtoObject': - runNullProtoObject(n); + runNullProtoObject(millions); break; case 'nullProtoLiteralObject': - runNullProtoLiteralObject(n); + runNullProtoLiteralObject(millions); break; case 'storageObject': - runStorageObject(n); + runStorageObject(millions); break; case 'fakeMap': - runFakeMap(n); + runFakeMap(millions); break; case 'map': - runMap(n); + runMap(millions); break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } } diff --git a/benchmark/es/restparams-bench.js b/benchmark/es/restparams-bench.js index 78299d292ce..6ad766f10f1 100644 --- a/benchmark/es/restparams-bench.js +++ b/benchmark/es/restparams-bench.js @@ -33,49 +33,39 @@ function useArguments() { assert.strictEqual(arguments[3], 'b'); } -function runCopyArguments(n) { - - var i = 0; - bench.start(); - for (; i < n; i++) +function runCopyArguments(millions) { + for (var i = 0; i < millions * 1e6; i++) copyArguments(1, 2, 'a', 'b'); - bench.end(n / 1e6); } -function runRestArguments(n) { - - var i = 0; - bench.start(); - for (; i < n; i++) +function runRestArguments(millions) { + for (var i = 0; i < millions * 1e6; i++) restArguments(1, 2, 'a', 'b'); - bench.end(n / 1e6); } -function runUseArguments(n) { - - var i = 0; - bench.start(); - for (; i < n; i++) +function runUseArguments(millions) { + for (var i = 0; i < millions * 1e6; i++) useArguments(1, 2, 'a', 'b'); - bench.end(n / 1e6); } function main({ millions, method }) { - const n = millions * 1e6; - + var fn; switch (method) { case '': // Empty string falls through to next line as default, mostly for tests. 
case 'copy': - runCopyArguments(n); + fn = runCopyArguments; break; case 'rest': - runRestArguments(n); + fn = runRestArguments; break; case 'arguments': - runUseArguments(n); + fn = runUseArguments; break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } + bench.start(); + fn(millions); + bench.end(millions); } diff --git a/benchmark/es/spread-assign.js b/benchmark/es/spread-assign.js new file mode 100644 index 00000000000..00c634ff879 --- /dev/null +++ b/benchmark/es/spread-assign.js @@ -0,0 +1,46 @@ +'use strict'; + +const common = require('../common.js'); +const util = require('util'); + +const bench = common.createBenchmark(main, { + method: ['spread', 'assign', '_extend'], + count: [5, 10, 20], + millions: [1] +}); + +function main({ millions, context, count, rest, method }) { + const n = millions * 1e6; + + const src = {}; + for (let n = 0; n < count; n++) + src[`p${n}`] = n; + + let obj; // eslint-disable-line + let i; + + switch (method) { + case '': + // Empty string falls through to next line as default, mostly for tests. + case '_extend': + bench.start(); + for (i = 0; i < n; i++) + obj = util._extend({}, src); + bench.end(n); + break; + case 'assign': + bench.start(); + for (i = 0; i < n; i++) + obj = Object.assign({}, src); + bench.end(n); + break; + case 'spread': + bench.start(); + for (i = 0; i < n; i++) + obj = { ...src }; + bench.end(n); + break; + default: + throw new Error('Unexpected method'); + } +} diff --git a/benchmark/es/spread-bench.js b/benchmark/es/spread-bench.js index 3c6cc93ea4f..067299cd650 100644 --- a/benchmark/es/spread-bench.js +++ b/benchmark/es/spread-bench.js @@ -24,7 +24,6 @@ function makeTest(count, rest) { } function main({ millions, context, count, rest, method }) { - const n = millions * 1e6; const ctx = context === 'context' ? 
{} : null; var fn = makeTest(count, rest); const args = new Array(count); @@ -37,25 +36,25 @@ function main({ millions, context, count, rest, method }) { // Empty string falls through to next line as default, mostly for tests. case 'apply': bench.start(); - for (i = 0; i < n; i++) + for (i = 0; i < millions * 1e6; i++) fn.apply(ctx, args); - bench.end(n / 1e6); + bench.end(millions); break; case 'spread': if (ctx !== null) fn = fn.bind(ctx); bench.start(); - for (i = 0; i < n; i++) + for (i = 0; i < millions * 1e6; i++) fn(...args); - bench.end(n / 1e6); + bench.end(millions); break; case 'call-spread': bench.start(); - for (i = 0; i < n; i++) + for (i = 0; i < millions * 1e6; i++) fn.call(ctx, ...args); - bench.end(n / 1e6); + bench.end(millions); break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } } diff --git a/benchmark/es/string-concatenations.js b/benchmark/es/string-concatenations.js index a40b7fa8c3b..72fb7f9969b 100644 --- a/benchmark/es/string-concatenations.js +++ b/benchmark/es/string-concatenations.js @@ -16,7 +16,6 @@ const configs = { const bench = common.createBenchmark(main, configs); - function main({ n, mode }) { const str = 'abc'; const num = 123; @@ -63,7 +62,7 @@ function main({ n, mode }) { bench.end(n); break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${mode}"`); } return string; diff --git a/benchmark/es/string-repeat.js b/benchmark/es/string-repeat.js index e5bdbb5cc19..9e33e4acf47 100644 --- a/benchmark/es/string-repeat.js +++ b/benchmark/es/string-repeat.js @@ -33,7 +33,7 @@ function main({ n, size, encoding, mode }) { bench.end(n); break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${mode}"`); } assert.strictEqual([...str].length, size); diff --git a/benchmark/fs/bench-realpath.js b/benchmark/fs/bench-realpath.js index 6690d7e87b0..de03e71b42d 100644 --- a/benchmark/fs/bench-realpath.js +++ 
b/benchmark/fs/bench-realpath.js @@ -16,10 +16,8 @@ function main({ n, pathType }) { bench.start(); if (pathType === 'relative') relativePath(n); - else if (pathType === 'resolved') - resolvedPath(n); else - throw new Error(`unknown "pathType": ${pathType}`); + resolvedPath(n); } function relativePath(n) { diff --git a/benchmark/fs/bench-realpathSync.js b/benchmark/fs/bench-realpathSync.js index 1c751156f73..7a01bd18cb7 100644 --- a/benchmark/fs/bench-realpathSync.js +++ b/benchmark/fs/bench-realpathSync.js @@ -15,24 +15,10 @@ const bench = common.createBenchmark(main, { function main({ n, pathType }) { + const path = pathType === 'relative' ? relative_path : resolved_path; bench.start(); - if (pathType === 'relative') - relativePath(n); - else if (pathType === 'resolved') - resolvedPath(n); - else - throw new Error(`unknown "pathType": ${pathType}`); - bench.end(n); -} - -function relativePath(n) { - for (var i = 0; i < n; i++) { - fs.realpathSync(relative_path); - } -} - -function resolvedPath(n) { for (var i = 0; i < n; i++) { - fs.realpathSync(resolved_path); + fs.realpathSync(path); } + bench.end(n); } diff --git a/benchmark/fs/readfile-partitioned.js b/benchmark/fs/readfile-partitioned.js new file mode 100644 index 00000000000..be3b7fd057b --- /dev/null +++ b/benchmark/fs/readfile-partitioned.js @@ -0,0 +1,86 @@ +// Submit a mix of short and long jobs to the threadpool. +// Report total job throughput. +// If we partition the long job, overall job throughput goes up significantly. +// However, this comes at the cost of the long job throughput. +// +// Short jobs: small zip jobs. +// Long jobs: fs.readFile on a large file. 
+ +'use strict'; + +const path = require('path'); +const common = require('../common.js'); +const filename = path.resolve(__dirname, + `.removeme-benchmark-garbage-${process.pid}`); +const fs = require('fs'); +const zlib = require('zlib'); +const assert = require('assert'); + +const bench = common.createBenchmark(main, { + dur: [5], + len: [1024, 16 * 1024 * 1024], + concurrent: [1, 10] +}); + +function main(conf) { + const len = +conf.len; + try { fs.unlinkSync(filename); } catch (e) {} + var data = Buffer.alloc(len, 'x'); + fs.writeFileSync(filename, data); + data = null; + + var zipData = Buffer.alloc(1024, 'a'); + + var reads = 0; + var zips = 0; + var benchEnded = false; + bench.start(); + setTimeout(function() { + const totalOps = reads + zips; + benchEnded = true; + bench.end(totalOps); + try { fs.unlinkSync(filename); } catch (e) {} + }, +conf.dur * 1000); + + function read() { + fs.readFile(filename, afterRead); + } + + function afterRead(er, data) { + if (er) { + if (er.code === 'ENOENT') { + // Only OK if unlinked by the timer from main. 
+ assert.ok(benchEnded); + return; + } + throw er; + } + + if (data.length !== len) + throw new Error('wrong number of bytes returned'); + + reads++; + if (!benchEnded) + read(); + } + + function zip() { + zlib.deflate(zipData, afterZip); + } + + function afterZip(er, data) { + if (er) + throw er; + + zips++; + if (!benchEnded) + zip(); + } + + // Start reads + var cur = +conf.concurrent; + while (cur--) read(); + + // Start a competing zip + zip(); +} diff --git a/benchmark/fs/readfile.js b/benchmark/fs/readfile.js index 7da7758ed06..282b4ac7621 100644 --- a/benchmark/fs/readfile.js +++ b/benchmark/fs/readfile.js @@ -8,6 +8,7 @@ const common = require('../common.js'); const filename = path.resolve(process.env.NODE_TMPDIR || __dirname, `.removeme-benchmark-garbage-${process.pid}`); const fs = require('fs'); +const assert = require('assert'); const bench = common.createBenchmark(main, { dur: [5], @@ -22,10 +23,10 @@ function main({ len, dur, concurrent }) { data = null; var reads = 0; - var bench_ended = false; + var benchEnded = false; bench.start(); setTimeout(function() { - bench_ended = true; + benchEnded = true; bench.end(reads); try { fs.unlinkSync(filename); } catch (e) {} process.exit(0); @@ -36,14 +37,20 @@ function main({ len, dur, concurrent }) { } function afterRead(er, data) { - if (er) + if (er) { + if (er.code === 'ENOENT') { + // Only OK if unlinked by the timer from main. 
+ assert.ok(benchEnded); + return; + } throw er; + } if (data.length !== len) throw new Error('wrong number of bytes returned'); reads++; - if (!bench_ended) + if (!benchEnded) read(); } diff --git a/benchmark/fs/write-stream-throughput.js b/benchmark/fs/write-stream-throughput.js index 6fe00cde48c..60ad47bc4ea 100644 --- a/benchmark/fs/write-stream-throughput.js +++ b/benchmark/fs/write-stream-throughput.js @@ -36,7 +36,6 @@ function main({ dur, encodingType, size }) { try { fs.unlinkSync(filename); } catch (e) {} var started = false; - var ending = false; var ended = false; var f = fs.createWriteStream(filename); @@ -52,15 +51,9 @@ function main({ dur, encodingType, size }) { function write() { - // don't try to write after we end, even if a 'drain' event comes. - // v0.8 streams are so sloppy! - if (ending) - return; - if (!started) { started = true; setTimeout(function() { - ending = true; f.end(); }, dur * 1000); bench.start(); diff --git a/benchmark/http/bench-parser.js b/benchmark/http/bench-parser.js index 4c691d71345..d629fe67e59 100644 --- a/benchmark/http/bench-parser.js +++ b/benchmark/http/bench-parser.js @@ -14,7 +14,6 @@ const bench = common.createBenchmark(main, { n: [1e5], }); - function main({ len, n }) { var header = `GET /hello HTTP/1.1${CRLF}Content-Type: text/plain${CRLF}`; @@ -26,7 +25,6 @@ function main({ len, n }) { processHeader(Buffer.from(header), n); } - function processHeader(header, n) { const parser = newParser(REQUEST); @@ -38,7 +36,6 @@ function processHeader(header, n) { bench.end(n); } - function newParser(type) { const parser = new HTTPParser(type); diff --git a/benchmark/http/check_invalid_header_char.js b/benchmark/http/check_invalid_header_char.js index b9933d690e2..399e71b2dfc 100644 --- a/benchmark/http/check_invalid_header_char.js +++ b/benchmark/http/check_invalid_header_char.js @@ -3,6 +3,10 @@ const common = require('../common.js'); const _checkInvalidHeaderChar = require('_http_common')._checkInvalidHeaderChar; +// Put 
it here so the benchmark result lines will not be super long. +const LONG_AND_INVALID = 'Here is a value that is really a folded header ' + + 'value\r\n this should be supported, but it is not currently'; + const bench = common.createBenchmark(main, { key: [ // Valid @@ -21,8 +25,7 @@ const bench = common.createBenchmark(main, { 'en-US', // Invalid - 'Here is a value that is really a folded header value\r\n this should be \ - supported, but it is not currently', + 'LONG_AND_INVALID', '中文呢', // unicode 'foo\nbar', '\x7F' @@ -31,6 +34,9 @@ const bench = common.createBenchmark(main, { }); function main({ n, key }) { + if (key === 'LONG_AND_INVALID') { + key = LONG_AND_INVALID; + } bench.start(); for (var i = 0; i < n; i++) { _checkInvalidHeaderChar(key); diff --git a/benchmark/http/http_server_for_chunky_client.js b/benchmark/http/http_server_for_chunky_client.js index f079544e03d..1e5a4583669 100644 --- a/benchmark/http/http_server_for_chunky_client.js +++ b/benchmark/http/http_server_for_chunky_client.js @@ -2,22 +2,15 @@ const assert = require('assert'); const http = require('http'); -const fs = require('fs'); const { fork } = require('child_process'); const common = require('../common.js'); -const { PIPE, tmpDir } = require('../../test/common'); +const { PIPE } = require('../../test/common'); +const tmpdir = require('../../test/common/tmpdir'); process.env.PIPE_NAME = PIPE; -try { - fs.accessSync(tmpDir, fs.F_OK); -} catch (e) { - fs.mkdirSync(tmpDir); -} +tmpdir.refresh(); var server; -try { - fs.unlinkSync(process.env.PIPE_NAME); -} catch (e) { /* ignore */ } server = http.createServer(function(req, res) { const headers = { diff --git a/benchmark/http/set-header.js b/benchmark/http/set-header.js new file mode 100644 index 00000000000..f0987f2cc77 --- /dev/null +++ b/benchmark/http/set-header.js @@ -0,0 +1,32 @@ +'use strict'; +const common = require('../common.js'); +const PORT = common.PORT; + +const bench = common.createBenchmark(main, { + res: ['normal', 
'setHeader', 'setHeaderWH'] +}); + +const type = 'bytes'; +const len = 4; +const chunks = 0; +const chunkedEnc = 0; +const c = 50; + +// normal: writeHead(status, {...}) +// setHeader: statusCode = status, setHeader(...) x2 +// setHeaderWH: setHeader(...), writeHead(status, ...) +function main({ res }) { + process.env.PORT = PORT; + var server = require('../fixtures/simple-http-server.js') + .listen(PORT) + .on('listening', function() { + const path = `/${type}/${len}/${chunks}/normal/${chunkedEnc}`; + + bench.http({ + path: path, + connections: c + }, function() { + server.close(); + }); + }); +} diff --git a/benchmark/http/simple.js b/benchmark/http/simple.js index d5351815fc1..6d1851c45e1 100644 --- a/benchmark/http/simple.js +++ b/benchmark/http/simple.js @@ -1,6 +1,5 @@ 'use strict'; const common = require('../common.js'); -const PORT = common.PORT; const bench = common.createBenchmark(main, { // unicode confuses ab on os x. @@ -8,16 +7,14 @@ const bench = common.createBenchmark(main, { len: [4, 1024, 102400], chunks: [1, 4], c: [50, 500], - chunkedEnc: [1, 0], - res: ['normal', 'setHeader', 'setHeaderWH'] + chunkedEnc: [1, 0] }); function main({ type, len, chunks, c, chunkedEnc, res }) { - process.env.PORT = PORT; var server = require('../fixtures/simple-http-server.js') - .listen(PORT) + .listen(common.PORT) .on('listening', function() { - const path = `/${type}/${len}/${chunks}/${res}/${chunkedEnc}`; + const path = `/${type}/${len}/${chunks}/normal/${chunkedEnc}`; bench.http({ path: path, diff --git a/benchmark/http/upgrade.js b/benchmark/http/upgrade.js index 0feaecc8ff1..6b39323396a 100644 --- a/benchmark/http/upgrade.js +++ b/benchmark/http/upgrade.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common.js'); -const PORT = common.PORT; const net = require('net'); const bench = common.createBenchmark(main, { @@ -20,7 +19,6 @@ const resData = 'HTTP/1.1 101 Web Socket Protocol Handshake\r\n' + '\r\n\r\n'; function main({ n }) { - process.env.PORT 
= PORT; var server = require('../fixtures/simple-http-server.js') .listen(common.PORT) .on('listening', function() { diff --git a/benchmark/http2/respond-with-fd.js b/benchmark/http2/respond-with-fd.js index 6076cf91be9..fa7b2fbd16b 100644 --- a/benchmark/http2/respond-with-fd.js +++ b/benchmark/http2/respond-with-fd.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common.js'); -const PORT = common.PORT; const path = require('path'); const fs = require('fs'); @@ -25,7 +24,7 @@ function main({ requests, streams, clients }) { stream.respondWithFD(fd); stream.on('error', (err) => {}); }); - server.listen(PORT, () => { + server.listen(common.PORT, () => { bench.http({ path: '/', requests, diff --git a/benchmark/http2/simple.js b/benchmark/http2/simple.js index 37c78d34018..cf017e67354 100644 --- a/benchmark/http2/simple.js +++ b/benchmark/http2/simple.js @@ -1,11 +1,8 @@ 'use strict'; const common = require('../common.js'); -const PORT = common.PORT; - const path = require('path'); const fs = require('fs'); - const file = path.join(path.resolve(__dirname, '../fixtures'), 'alice.html'); const bench = common.createBenchmark(main, { @@ -24,7 +21,7 @@ function main({ requests, streams, clients }) { out.pipe(stream); stream.on('error', (err) => {}); }); - server.listen(PORT, () => { + server.listen(common.PORT, () => { bench.http({ path: '/', requests, diff --git a/benchmark/http2/write.js b/benchmark/http2/write.js index 7a802ef84fd..6fcb1254ca3 100644 --- a/benchmark/http2/write.js +++ b/benchmark/http2/write.js @@ -1,7 +1,6 @@ 'use strict'; const common = require('../common.js'); -const PORT = common.PORT; const bench = common.createBenchmark(main, { streams: [100, 200, 1000], @@ -26,7 +25,7 @@ function main({ streams, length, size }) { } write(); }); - server.listen(PORT, () => { + server.listen(common.PORT, () => { bench.http({ path: '/', requests: 10000, diff --git a/benchmark/misc/freelist.js b/benchmark/misc/freelist.js index 0530255728f..8c3281cc407 
100644 --- a/benchmark/misc/freelist.js +++ b/benchmark/misc/freelist.js @@ -12,7 +12,6 @@ function main({ n }) { const FreeList = require('internal/freelist'); const poolSize = 1000; const list = new FreeList('test', poolSize, Object); - var i; var j; const used = []; @@ -23,7 +22,7 @@ function main({ n }) { bench.start(); - for (i = 0; i < n; i++) { + for (var i = 0; i < n; i++) { // Return all the items to the pool for (j = 0; j < poolSize; j++) { list.free(used[j]); diff --git a/benchmark/misc/function_call/index.js b/benchmark/misc/function_call/index.js index 91efa573597..28561bc48cd 100644 --- a/benchmark/misc/function_call/index.js +++ b/benchmark/misc/function_call/index.js @@ -32,11 +32,9 @@ const bench = common.createBenchmark(main, { }); function main({ millions, type }) { - const n = millions * 1e6; - const fn = type === 'cxx' ? cxx : js; bench.start(); - for (var i = 0; i < n; i++) { + for (var i = 0; i < millions * 1e6; i++) { fn(); } bench.end(millions); diff --git a/benchmark/misc/object-property-bench.js b/benchmark/misc/object-property-bench.js index 37da82d8875..ddc6faed7fc 100644 --- a/benchmark/misc/object-property-bench.js +++ b/benchmark/misc/object-property-bench.js @@ -78,6 +78,6 @@ function main({ millions, method }) { runSymbol(n); break; default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } } diff --git a/benchmark/misc/punycode.js b/benchmark/misc/punycode.js index 7016fa11712..369adcf17d3 100644 --- a/benchmark/misc/punycode.js +++ b/benchmark/misc/punycode.js @@ -55,9 +55,8 @@ function runPunycode(n, val) { } function runICU(n, val) { - var i = 0; bench.start(); - for (; i < n; i++) + for (var i = 0; i < n; i++) usingICU(val); bench.end(n); } @@ -76,6 +75,6 @@ function main({ n, val, method }) { } // fallthrough default: - throw new Error('Unexpected method'); + throw new Error(`Unexpected method "${method}"`); } } diff --git a/benchmark/module/module-loader.js 
b/benchmark/module/module-loader.js index 8393d1f92e0..58d4dcf81c9 100644 --- a/benchmark/module/module-loader.js +++ b/benchmark/module/module-loader.js @@ -3,8 +3,8 @@ const fs = require('fs'); const path = require('path'); const common = require('../common.js'); -const { refreshTmpDir, tmpDir } = require('../../test/common'); -const benchmarkDirectory = path.join(tmpDir, 'nodejs-benchmark-module'); +const tmpdir = require('../../test/common/tmpdir'); +const benchmarkDirectory = path.join(tmpdir.path, 'nodejs-benchmark-module'); const bench = common.createBenchmark(main, { thousands: [50], @@ -13,12 +13,10 @@ const bench = common.createBenchmark(main, { }); function main({ thousands, fullPath, useCache }) { - const n = thousands * 1e3; - - refreshTmpDir(); + tmpdir.refresh(); try { fs.mkdirSync(benchmarkDirectory); } catch (e) {} - for (var i = 0; i <= n; i++) { + for (var i = 0; i <= thousands * 1e3; i++) { fs.mkdirSync(`${benchmarkDirectory}${i}`); fs.writeFileSync( `${benchmarkDirectory}${i}/package.json`, @@ -31,37 +29,37 @@ function main({ thousands, fullPath, useCache }) { } if (fullPath === 'true') - measureFull(n, useCache === 'true'); + measureFull(thousands, useCache === 'true'); else - measureDir(n, useCache === 'true'); + measureDir(thousands, useCache === 'true'); - refreshTmpDir(); + tmpdir.refresh(); } -function measureFull(n, useCache) { +function measureFull(thousands, useCache) { var i; if (useCache) { - for (i = 0; i <= n; i++) { + for (i = 0; i <= thousands * 1e3; i++) { require(`${benchmarkDirectory}${i}/index.js`); } } bench.start(); - for (i = 0; i <= n; i++) { + for (i = 0; i <= thousands * 1e3; i++) { require(`${benchmarkDirectory}${i}/index.js`); } - bench.end(n / 1e3); + bench.end(thousands); } -function measureDir(n, useCache) { +function measureDir(thousands, useCache) { var i; if (useCache) { - for (i = 0; i <= n; i++) { + for (i = 0; i <= thousands * 1e3; i++) { require(`${benchmarkDirectory}${i}`); } } bench.start(); - for (i = 0; 
i <= n; i++) { + for (i = 0; i <= thousands * 1e3; i++) { require(`${benchmarkDirectory}${i}`); } - bench.end(n / 1e3); + bench.end(thousands); } diff --git a/benchmark/path/basename-win32.js b/benchmark/path/basename-win32.js index 8a66f56d6e3..937dc6f6948 100644 --- a/benchmark/path/basename-win32.js +++ b/benchmark/path/basename-win32.js @@ -1,6 +1,6 @@ 'use strict'; const common = require('../common.js'); -const { posix } = require('path'); +const { win32 } = require('path'); const bench = common.createBenchmark(main, { pathext: [ @@ -28,7 +28,7 @@ function main({ n, pathext }) { bench.start(); for (var i = 0; i < n; i++) { - posix.basename(pathext, ext); + win32.basename(pathext, ext); } bench.end(n); } diff --git a/benchmark/timers/set-immediate-breadth.js b/benchmark/timers/set-immediate-breadth.js index a4b217b5bff..4f7d2cd2761 100644 --- a/benchmark/timers/set-immediate-breadth.js +++ b/benchmark/timers/set-immediate-breadth.js @@ -9,7 +9,7 @@ function main({ millions }) { const N = millions * 1e6; process.on('exit', function() { - bench.end(N / 1e6); + bench.end(millions); }); function cb() {} diff --git a/benchmark/timers/set-immediate-depth-args.js b/benchmark/timers/set-immediate-depth-args.js index fe1340c4bd5..aa5ec95f7da 100644 --- a/benchmark/timers/set-immediate-depth-args.js +++ b/benchmark/timers/set-immediate-depth-args.js @@ -9,7 +9,7 @@ function main({ millions }) { const N = millions * 1e6; process.on('exit', function() { - bench.end(N / 1e6); + bench.end(millions); }); function cb3(n, arg2, arg3) { diff --git a/benchmark/timers/timers-cancel-pooled.js b/benchmark/timers/timers-cancel-pooled.js index 33897507c83..3e262f820a3 100644 --- a/benchmark/timers/timers-cancel-pooled.js +++ b/benchmark/timers/timers-cancel-pooled.js @@ -28,5 +28,5 @@ function main({ millions }) { } function cb() { - assert(false, 'Timer should not call callback'); + assert.fail('Timer should not call callback'); } diff --git 
a/benchmark/timers/timers-cancel-unpooled.js b/benchmark/timers/timers-cancel-unpooled.js index 57e0139dfe1..15866731133 100644 --- a/benchmark/timers/timers-cancel-unpooled.js +++ b/benchmark/timers/timers-cancel-unpooled.js @@ -22,5 +22,5 @@ function main({ millions }) { } function cb() { - assert(false, `Timer ${this._idleTimeout} should not call callback`); + assert.fail(`Timer ${this._idleTimeout} should not call callback`); } diff --git a/benchmark/timers/timers-insert-unpooled.js b/benchmark/timers/timers-insert-unpooled.js index 56526633358..fbbeebb759f 100644 --- a/benchmark/timers/timers-insert-unpooled.js +++ b/benchmark/timers/timers-insert-unpooled.js @@ -23,5 +23,5 @@ function main({ millions }) { } function cb() { - assert(false, `Timer ${this._idleTimeout} should not call callback`); + assert.fail(`Timer ${this._idleTimeout} should not call callback`); } diff --git a/benchmark/tls/convertprotocols.js b/benchmark/tls/convertprotocols.js index 1ee2672bee7..9f4969344d1 100644 --- a/benchmark/tls/convertprotocols.js +++ b/benchmark/tls/convertprotocols.js @@ -8,14 +8,15 @@ const bench = common.createBenchmark(main, { }); function main({ n }) { - var i = 0; + const input = ['ABC', 'XYZ123', 'FOO']; var m = {}; // First call dominates results if (n > 1) { - tls.convertNPNProtocols(['ABC', 'XYZ123', 'FOO'], m); + tls.convertNPNProtocols(input, m); m = {}; } bench.start(); - for (; i < n; i++) tls.convertNPNProtocols(['ABC', 'XYZ123', 'FOO'], m); + for (var i = 0; i < n; i++) + tls.convertNPNProtocols(input, m); bench.end(n); } diff --git a/benchmark/tls/tls-connect.js b/benchmark/tls/tls-connect.js index 67f2d5f8a93..da0f5e08d5e 100644 --- a/benchmark/tls/tls-connect.js +++ b/benchmark/tls/tls-connect.js @@ -11,12 +11,13 @@ const bench = common.createBenchmark(main, { var clientConn = 0; var serverConn = 0; -var server; var dur; var concurrency; var running = true; -function main({ dur, concurrency }) { +function main(conf) { + dur = conf.dur; + 
concurrency = conf.concurrency; const cert_dir = path.resolve(__dirname, '../../test/fixtures'); const options = { key: fs.readFileSync(`${cert_dir}/test_key.pem`), @@ -25,7 +26,7 @@ function main({ dur, concurrency }) { ciphers: 'AES256-GCM-SHA384' }; - server = tls.createServer(options, onConnection); + const server = tls.createServer(options, onConnection); server.listen(common.PORT, onListening); } diff --git a/benchmark/url/legacy-vs-whatwg-url-get-prop.js b/benchmark/url/legacy-vs-whatwg-url-get-prop.js index 93603c258cf..2cc3ab8c75e 100644 --- a/benchmark/url/legacy-vs-whatwg-url-get-prop.js +++ b/benchmark/url/legacy-vs-whatwg-url-get-prop.js @@ -74,7 +74,7 @@ function useWHATWG(n, input) { function main({ type, n, method }) { const input = inputs[type]; if (!input) { - throw new Error('Unknown input type'); + throw new Error(`Unknown input type "${type}"`); } var noDead; // Avoid dead code elimination. @@ -86,7 +86,7 @@ function main({ type, n, method }) { noDead = useWHATWG(n, input); break; default: - throw new Error('Unknown method'); + throw new Error(`Unknown method "${method}"`); } assert.ok(noDead); diff --git a/benchmark/url/legacy-vs-whatwg-url-parse.js b/benchmark/url/legacy-vs-whatwg-url-parse.js index da42d5a189a..2be55e17cc3 100644 --- a/benchmark/url/legacy-vs-whatwg-url-parse.js +++ b/benchmark/url/legacy-vs-whatwg-url-parse.js @@ -34,7 +34,7 @@ function useWHATWG(n, input) { function main({ type, n, method }) { const input = inputs[type]; if (!input) { - throw new Error('Unknown input type'); + throw new Error(`Unknown input type "${type}"`); } var noDead; // Avoid dead code elimination. 
@@ -46,7 +46,7 @@ function main({ type, n, method }) { noDead = useWHATWG(n, input); break; default: - throw new Error('Unknown method'); + throw new Error(`Unknown method ${method}`); } assert.ok(noDead); diff --git a/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js b/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js index 51953ec8707..e915ceb54f9 100644 --- a/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js +++ b/benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js @@ -31,7 +31,7 @@ function useWHATWG(n, input) { function main({ type, n, method }) { const input = inputs[type]; if (!input) { - throw new Error('Unknown input type'); + throw new Error(`Unknown input type "${type}"`); } switch (method) { @@ -42,6 +42,6 @@ function main({ type, n, method }) { useWHATWG(n, input); break; default: - throw new Error('Unknown method'); + throw new Error(`Unknown method ${method}`); } } diff --git a/benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js b/benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js index 3490782a1bf..8fe3e546f07 100644 --- a/benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js +++ b/benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js @@ -33,7 +33,7 @@ function useWHATWG(n, input, prop) { function main({ type, n, method }) { const input = inputs[type]; if (!input) { - throw new Error('Unknown input type'); + throw new Error(`Unknown input type "${type}"`); } switch (method) { @@ -44,6 +44,6 @@ function main({ type, n, method }) { useWHATWG(n, input); break; default: - throw new Error('Unknown method'); + throw new Error(`Unknown method ${method}`); } } diff --git a/benchmark/url/legacy-vs-whatwg-url-serialize.js b/benchmark/url/legacy-vs-whatwg-url-serialize.js index e92b941b5d5..017ec4328c5 100644 --- a/benchmark/url/legacy-vs-whatwg-url-serialize.js +++ b/benchmark/url/legacy-vs-whatwg-url-serialize.js @@ -36,7 +36,7 @@ function useWHATWG(n, input, prop) { function main({ type, n, method 
}) { const input = inputs[type]; if (!input) { - throw new Error('Unknown input type'); + throw new Error(`Unknown input type "${type}"`); } var noDead; // Avoid dead code elimination. @@ -48,7 +48,7 @@ function main({ type, n, method }) { noDead = useWHATWG(n, input); break; default: - throw new Error('Unknown method'); + throw new Error(`Unknown method ${method}`); } assert.ok(noDead); diff --git a/benchmark/url/url-searchparams-iteration.js b/benchmark/url/url-searchparams-iteration.js index 2b13992bdfc..cae2ef5df61 100644 --- a/benchmark/url/url-searchparams-iteration.js +++ b/benchmark/url/url-searchparams-iteration.js @@ -53,6 +53,6 @@ function main({ method, n }) { iterator(n); break; default: - throw new Error('Unknown method'); + throw new Error(`Unknown method ${method}`); } } diff --git a/benchmark/url/url-searchparams-read.js b/benchmark/url/url-searchparams-read.js index 29235ee81e0..0cf66dabbc3 100644 --- a/benchmark/url/url-searchparams-read.js +++ b/benchmark/url/url-searchparams-read.js @@ -10,45 +10,14 @@ const bench = common.createBenchmark(main, { const str = 'one=single&two=first&three=first&two=2nd&three=2nd&three=3rd'; -function get(n, param) { - const params = new URLSearchParams(str); - - bench.start(); - for (var i = 0; i < n; i += 1) - params.get(param); - bench.end(n); -} - -function getAll(n, param) { - const params = new URLSearchParams(str); - - bench.start(); - for (var i = 0; i < n; i += 1) - params.getAll(param); - bench.end(n); -} - -function has(n, param) { +function main({ method, param, n }) { const params = new URLSearchParams(str); + const fn = params[method]; + if (!fn) + throw new Error(`Unknown method ${method}`); bench.start(); for (var i = 0; i < n; i += 1) - params.has(param); + fn(param); bench.end(n); } - -function main({ method, param, n }) { - switch (method) { - case 'get': - get(n, param); - break; - case 'getAll': - getAll(n, param); - break; - case 'has': - has(n, param); - break; - default: - throw new 
Error('Unknown method'); - } -} diff --git a/benchmark/url/url-searchparams-sort.js b/benchmark/url/url-searchparams-sort.js index 524dacb6d52..fe152bf8234 100644 --- a/benchmark/url/url-searchparams-sort.js +++ b/benchmark/url/url-searchparams-sort.js @@ -37,9 +37,8 @@ function main({ type, n }) { const params = new URLSearchParams(); const array = getParams(input); - var i; bench.start(); - for (i = 0; i < n; i++) { + for (var i = 0; i < n; i++) { params[searchParams] = array.slice(); params.sort(); } diff --git a/benchmark/util/format.js b/benchmark/util/format.js index 5f9c4c3b594..042b8a93ccf 100644 --- a/benchmark/util/format.js +++ b/benchmark/util/format.js @@ -22,9 +22,7 @@ const bench = common.createBenchmark(main, { function main({ n, type }) { // For testing, if supplied with an empty type, default to string. - type = type || 'string'; - - const [first, second] = inputs[type]; + const [first, second] = inputs[type || 'string']; bench.start(); for (var i = 0; i < n; i++) { diff --git a/benchmark/util/inspect-array.js b/benchmark/util/inspect-array.js index 74332d18579..8b3c54aeb94 100644 --- a/benchmark/util/inspect-array.js +++ b/benchmark/util/inspect-array.js @@ -18,14 +18,13 @@ function main({ n, len, type }) { var arr = Array(len); var i, opts; - // For testing, if supplied with an empty type, default to denseArray. - type = type || 'denseArray'; - switch (type) { case 'denseArray_showHidden': opts = { showHidden: true }; arr = arr.fill('denseArray'); break; + // For testing, if supplied with an empty type, default to denseArray. 
+ case '': case 'denseArray': arr = arr.fill('denseArray'); break; diff --git a/benchmark/v8/get-stats.js b/benchmark/v8/get-stats.js index 6ee74285862..84a0655f5db 100644 --- a/benchmark/v8/get-stats.js +++ b/benchmark/v8/get-stats.js @@ -12,9 +12,8 @@ const bench = common.createBenchmark(main, { }); function main({ method, n }) { - var i = 0; bench.start(); - for (; i < n; i++) + for (var i = 0; i < n; i++) v8[method](); bench.end(n); } diff --git a/benchmark/vm/run-in-context.js b/benchmark/vm/run-in-context.js index da8f56a6e01..9b57067a19c 100644 --- a/benchmark/vm/run-in-context.js +++ b/benchmark/vm/run-in-context.js @@ -17,12 +17,10 @@ function main({ n, breakOnSigint, withSigintListener }) { if (withSigintListener) process.on('SIGINT', () => {}); - var i = 0; - const contextifiedSandbox = vm.createContext(); bench.start(); - for (; i < n; i++) + for (var i = 0; i < n; i++) vm.runInContext('0', contextifiedSandbox, options); bench.end(n); } diff --git a/benchmark/vm/run-in-this-context.js b/benchmark/vm/run-in-this-context.js index 33fd3a34d81..0754287376d 100644 --- a/benchmark/vm/run-in-this-context.js +++ b/benchmark/vm/run-in-this-context.js @@ -17,10 +17,8 @@ function main({ n, breakOnSigint, withSigintListener }) { if (withSigintListener) process.on('SIGINT', () => {}); - var i = 0; - bench.start(); - for (; i < n; i++) + for (var i = 0; i < n; i++) vm.runInThisContext('0', options); bench.end(n); } diff --git a/common.gypi b/common.gypi index e686caf17a3..343e48eefc9 100644 --- a/common.gypi +++ b/common.gypi @@ -46,32 +46,32 @@ 'conditions': [ ['GENERATOR=="ninja"', { - 'OBJ_DIR': '<(PRODUCT_DIR)/obj', - 'V8_BASE': '<(PRODUCT_DIR)/obj/deps/v8/src/libv8_base.a', - 'CHAKRASHIM_BASE': '<(PRODUCT_DIR)/obj/deps/chakrashim/libchakrashim.a', + 'obj_dir': '<(PRODUCT_DIR)/obj', + 'v8_base': '<(PRODUCT_DIR)/obj/deps/v8/src/libv8_base.a', + 'chakrashim_base': '<(PRODUCT_DIR)/obj/deps/chakrashim/libchakrashim.a', }, { - 'OBJ_DIR%': '<(PRODUCT_DIR)/obj.target', 
- 'V8_BASE%': '<(PRODUCT_DIR)/obj.target/deps/v8/src/libv8_base.a', - 'CHAKRASHIM_BASE': '<(PRODUCT_DIR)/obj.target/deps/chakrashim/libchakrashim.a', + 'obj_dir%': '<(PRODUCT_DIR)/obj.target', + 'v8_base%': '<(PRODUCT_DIR)/obj.target/deps/v8/src/libv8_base.a', + 'chakrashim_base': '<(PRODUCT_DIR)/obj.target/deps/chakrashim/libchakrashim.a', }], ['OS == "win"', { 'os_posix': 0, 'v8_postmortem_support%': 'false', - 'OBJ_DIR': '<(PRODUCT_DIR)/obj', - 'V8_BASE': '<(PRODUCT_DIR)/lib/v8_libbase.lib', + 'obj_dir': '<(PRODUCT_DIR)/obj', + 'v8_base': '<(PRODUCT_DIR)/lib/v8_libbase.lib', }, { 'os_posix': 1, 'v8_postmortem_support%': 'true', }], ['OS== "mac"', { - 'CHAKRASHIM_BASE': '<(PRODUCT_DIR)/libchakrashim.a', - 'OBJ_DIR%': '<(PRODUCT_DIR)/obj.target', - 'V8_BASE': '<(PRODUCT_DIR)/libv8_base.a', + 'chakrashim_base': '<(PRODUCT_DIR)/libchakrashim.a', + 'obj_dir%': '<(PRODUCT_DIR)/obj.target', + 'v8_base': '<(PRODUCT_DIR)/libv8_base.a', }], ['openssl_fips != ""', { - 'OPENSSL_PRODUCT': '<(STATIC_LIB_PREFIX)crypto<(STATIC_LIB_SUFFIX)', + 'openssl_product': '<(STATIC_LIB_PREFIX)crypto<(STATIC_LIB_SUFFIX)', }, { - 'OPENSSL_PRODUCT': '<(STATIC_LIB_PREFIX)openssl<(STATIC_LIB_SUFFIX)', + 'openssl_product': '<(STATIC_LIB_PREFIX)openssl<(STATIC_LIB_SUFFIX)', }], ['OS=="mac"', { 'clang%': 1, diff --git a/deps/node-inspect/CHANGELOG.md b/deps/node-inspect/CHANGELOG.md index 41ed928e781..0db3a7842eb 100644 --- a/deps/node-inspect/CHANGELOG.md +++ b/deps/node-inspect/CHANGELOG.md @@ -1,3 +1,12 @@ +### 1.11.3 + +* [`93caa0f`](https://github.com/nodejs/node-inspect/commit/93caa0f5267c7ab452b258d3b03329a0bb5ac7f7) **docs:** Add missing oc in protocol +* [`2d87cbe`](https://github.com/nodejs/node-inspect/commit/2d87cbe76aa968dfc1ac69d9571af1be81abd8e0) **fix:** Make --inspect-port=0 work +* [`ebfd02e`](https://github.com/nodejs/node-inspect/commit/ebfd02ece9b642586023f7791da71defeb13d746) **chore:** Bump tap to 10.7 +* 
[`c07adb1`](https://github.com/nodejs/node-inspect/commit/c07adb17b164c1cf3da8d38659ea9f5d7ff42e9c) **test:** Use useful break location +* [`94f0bf9`](https://github.com/nodejs/node-inspect/commit/94f0bf97d24c376baf3ecced2088d81715a73464) **fix:** Fix `takeHeapSnapshot()` truncation bug + + ### 1.11.2 * [`42e0cd1`](https://github.com/nodejs/node-inspect/commit/42e0cd111d89ed09faba1c0ec45089b0b44de011) **fix:** look for generic hint text diff --git a/deps/node-inspect/README.md b/deps/node-inspect/README.md index ecd939b3ea2..b52cc188a62 100644 --- a/deps/node-inspect/README.md +++ b/deps/node-inspect/README.md @@ -10,7 +10,7 @@ node has two options: 1. `node --debug `: Start `file` with remote debugging enabled. 2. `node debug `: Start an interactive CLI debugger for ``. -But for the Chrome inspector protol, +But for the Chrome inspector protocol, there's only one: `node --inspect `. This project tries to provide the missing second option diff --git a/deps/node-inspect/lib/_inspect.js b/deps/node-inspect/lib/_inspect.js index 26912274cda..d846efbe6a4 100644 --- a/deps/node-inspect/lib/_inspect.js +++ b/deps/node-inspect/lib/_inspect.js @@ -42,18 +42,9 @@ const [ InspectClient, createRepl ] = const debuglog = util.debuglog('inspect'); -const DEBUG_PORT_PATTERN = /^--(?:debug|inspect)(?:-port|-brk)?=(\d{1,5})$/; -function getDefaultPort() { - for (const arg of process.execArgv) { - const match = arg.match(DEBUG_PORT_PATTERN); - if (match) { - return +match[1]; - } - } - return 9229; -} - function portIsFree(host, port, timeout = 2000) { + if (port === 0) return Promise.resolve(); // Binding to a random port. 
+ const retryDelay = 150; let didTimeOut = false; @@ -110,9 +101,11 @@ function runScript(script, scriptArgs, inspectHost, inspectPort, childPrint) { let output = ''; function waitForListenHint(text) { output += text; - if (/Debugger listening on/.test(output)) { + if (/Debugger listening on ws:\/\/\[?(.+?)\]?:(\d+)\//.test(output)) { + const host = RegExp.$1; + const port = Number.parseInt(RegExp.$2); child.stderr.removeListener('data', waitForListenHint); - resolve(child); + resolve([child, port, host]); } } @@ -160,10 +153,11 @@ class NodeInspector { options.port, this.childPrint.bind(this)); } else { - this._runScript = () => Promise.resolve(null); + this._runScript = + () => Promise.resolve([null, options.port, options.host]); } - this.client = new InspectClient(options.port, options.host); + this.client = new InspectClient(); this.domainNames = ['Debugger', 'HeapProfiler', 'Profiler', 'Runtime']; this.domainNames.forEach((domain) => { @@ -223,9 +217,8 @@ class NodeInspector { run() { this.killChild(); - const { host, port } = this.options; - return this._runScript().then((child) => { + return this._runScript().then(([child, port, host]) => { this.child = child; let connectionAttempts = 0; @@ -233,7 +226,7 @@ class NodeInspector { ++connectionAttempts; debuglog('connection attempt #%d', connectionAttempts); this.stdout.write('.'); - return this.client.connect() + return this.client.connect(port, host) .then(() => { debuglog('connection established'); this.stdout.write(' ok'); @@ -288,7 +281,7 @@ class NodeInspector { function parseArgv([target, ...args]) { let host = '127.0.0.1'; - let port = getDefaultPort(); + let port = 9229; let isRemote = false; let script = target; let scriptArgs = args; diff --git a/deps/node-inspect/lib/internal/inspect_client.js b/deps/node-inspect/lib/internal/inspect_client.js index c247e2add87..9b8529de21a 100644 --- a/deps/node-inspect/lib/internal/inspect_client.js +++ b/deps/node-inspect/lib/internal/inspect_client.js @@ -164,12 
+164,12 @@ function decodeFrameHybi17(data) { } class Client extends EventEmitter { - constructor(port, host) { + constructor() { super(); this.handleChunk = this._handleChunk.bind(this); - this._port = port; - this._host = host; + this._port = undefined; + this._host = undefined; this.reset(); } @@ -284,7 +284,9 @@ class Client extends EventEmitter { }); } - connect() { + connect(port, host) { + this._port = port; + this._host = host; return this._discoverWebsocketPath() .then((urlPath) => this._connectWebsocket(urlPath)); } diff --git a/deps/node-inspect/lib/internal/inspect_repl.js b/deps/node-inspect/lib/internal/inspect_repl.js index 937c1843d3a..38fe4684cf6 100644 --- a/deps/node-inspect/lib/internal/inspect_repl.js +++ b/deps/node-inspect/lib/internal/inspect_repl.js @@ -900,10 +900,8 @@ function createRepl(inspector) { return new Promise((resolve, reject) => { const absoluteFile = Path.resolve(filename); const writer = FS.createWriteStream(absoluteFile); - let totalSize; let sizeWritten = 0; function onProgress({ done, total, finished }) { - totalSize = total; if (finished) { print('Heap snaphost prepared.'); } else { @@ -913,13 +911,18 @@ function createRepl(inspector) { function onChunk({ chunk }) { sizeWritten += chunk.length; writer.write(chunk); - print(`Writing snapshot: ${sizeWritten}/${totalSize}`, true); - if (sizeWritten >= totalSize) { - writer.end(); + print(`Writing snapshot: ${sizeWritten}`, true); + } + function onResolve() { + writer.end(() => { teardown(); print(`Wrote snapshot: ${absoluteFile}`); resolve(); - } + }); + } + function onReject(error) { + teardown(); + reject(error); } function teardown() { HeapProfiler.removeListener( @@ -932,10 +935,7 @@ function createRepl(inspector) { print('Heap snapshot: 0/0', true); HeapProfiler.takeHeapSnapshot({ reportProgress: true }) - .then(null, (error) => { - teardown(); - reject(error); - }); + .then(onResolve, onReject); }); }, diff --git a/deps/node-inspect/package.json 
b/deps/node-inspect/package.json index 070abfa8fe5..d25376b5d4b 100644 --- a/deps/node-inspect/package.json +++ b/deps/node-inspect/package.json @@ -1,6 +1,6 @@ { "name": "node-inspect", - "version": "1.11.2", + "version": "1.11.3", "description": "Node Inspect", "license": "MIT", "main": "lib/_inspect.js", @@ -29,7 +29,7 @@ "devDependencies": { "eslint": "^3.10.2", "nlm": "^3.0.0", - "tap": "^7.1.2" + "tap": "^10.7.0" }, "author": { "name": "Jan Krems", diff --git a/deps/node-inspect/test/cli/break.test.js b/deps/node-inspect/test/cli/break.test.js index 59b12cde388..ce8c8d6d7d9 100644 --- a/deps/node-inspect/test/cli/break.test.js +++ b/deps/node-inspect/test/cli/break.test.js @@ -134,7 +134,7 @@ test('sb before loading file', (t) => { return cli.waitForInitialBreak() .then(() => cli.waitForPrompt()) - .then(() => cli.command('sb("other.js", 3)')) + .then(() => cli.command('sb("other.js", 2)')) .then(() => { t.match( cli.output, @@ -145,7 +145,7 @@ test('sb before loading file', (t) => { .then(() => { t.match( cli.output, - `break in ${otherScript}:3`, + `break in ${otherScript}:2`, 'found breakpoint in file that was not loaded yet'); }) .then(() => cli.quit()) diff --git a/deps/node-inspect/test/cli/heap-profiler.test.js b/deps/node-inspect/test/cli/heap-profiler.test.js new file mode 100644 index 00000000000..ebd734e03cb --- /dev/null +++ b/deps/node-inspect/test/cli/heap-profiler.test.js @@ -0,0 +1,34 @@ +'use strict'; +const { test } = require('tap'); +const { readFileSync, unlinkSync } = require('fs'); + +const startCLI = require('./start-cli'); +const filename = 'node.heapsnapshot'; + +function cleanup() { + try { + unlinkSync(filename); + } catch (_) { + // Ignore. + } +} + +cleanup(); + +test('Heap profiler take snapshot', (t) => { + const cli = startCLI(['examples/empty.js']); + + function onFatal(error) { + cli.quit(); + throw error; + } + + // Check that the snapshot is valid JSON. 
+ return cli.waitForInitialBreak() + .then(() => cli.waitForPrompt()) + .then(() => cli.command('takeHeapSnapshot()')) + .then(() => JSON.parse(readFileSync(filename, 'utf8'))) + .then(() => cleanup()) + .then(() => cli.quit()) + .then(null, onFatal); +}); diff --git a/deps/node-inspect/test/cli/launch.test.js b/deps/node-inspect/test/cli/launch.test.js index f7efc6eb3f2..8808d47a08b 100644 --- a/deps/node-inspect/test/cli/launch.test.js +++ b/deps/node-inspect/test/cli/launch.test.js @@ -26,6 +26,46 @@ test('custom port', (t) => { }); }); +test('random port', (t) => { + const script = Path.join('examples', 'three-lines.js'); + + const cli = startCLI(['--port=0', script]); + + return cli.waitForInitialBreak() + .then(() => cli.waitForPrompt()) + .then(() => { + t.match(cli.output, 'debug>', 'prints a prompt'); + t.match( + cli.output, + /< Debugger listening on /, + 'forwards child output'); + }) + .then(() => cli.quit()) + .then((code) => { + t.equal(code, 0, 'exits with success'); + }); +}); + +test('random port with --inspect-port=0', (t) => { + const script = Path.join('examples', 'three-lines.js'); + + const cli = startCLI([script], ['--inspect-port=0']); + + return cli.waitForInitialBreak() + .then(() => cli.waitForPrompt()) + .then(() => { + t.match(cli.output, 'debug>', 'prints a prompt'); + t.match( + cli.output, + /< Debugger listening on /, + 'forwards child output'); + }) + .then(() => cli.quit()) + .then((code) => { + t.equal(code, 0, 'exits with success'); + }); +}); + test('examples/three-lines.js', (t) => { const script = Path.join('examples', 'three-lines.js'); const cli = startCLI([script]); diff --git a/deps/node-inspect/test/cli/start-cli.js b/deps/node-inspect/test/cli/start-cli.js index ae904308e02..b086dcd8ba2 100644 --- a/deps/node-inspect/test/cli/start-cli.js +++ b/deps/node-inspect/test/cli/start-cli.js @@ -16,8 +16,8 @@ const BREAK_MESSAGE = new RegExp('(?:' + [ 'exception', 'other', 'promiseRejection', ].join('|') + ') in', 'i'); 
-function startCLI(args) { - const child = spawn(process.execPath, [CLI, ...args]); +function startCLI(args, flags = []) { + const child = spawn(process.execPath, [...flags, CLI, ...args]); let isFirstStdoutChunk = true; const outputBuffer = []; diff --git a/doc/api/assert.md b/doc/api/assert.md index 04f312aa663..dc9a9026951 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -249,6 +249,8 @@ are recursively evaluated also by the following rules. * Map keys and Set items are compared unordered. * Recursion stops when both sides differ or both sides encounter a circular reference. +* [`WeakMap`][] and [`WeakSet`][] comparison does not rely on their values. See + below for further details. ```js const assert = require('assert').strict; @@ -290,6 +292,16 @@ assert.deepStrictEqual({ [symbol1]: 1 }, { [symbol1]: 1 }); // OK, because it is the same symbol on both objects. assert.deepStrictEqual({ [symbol1]: 1 }, { [symbol2]: 1 }); // Fails because symbol1 !== symbol2! + +const weakMap1 = new WeakMap(); +const weakMap2 = new WeakMap([[{}, {}]]); +const weakMap3 = new WeakMap(); +weakMap3.unequal = true; + +assert.deepStrictEqual(weakMap1, weakMap2); +// OK, because it is impossible to compare the entries +assert.deepStrictEqual(weakMap1, weakMap3); +// Fails because weakMap3 has a property that weakMap1 does not contain! ``` If the values are not equal, an `AssertionError` is thrown with a `message` @@ -692,9 +704,8 @@ parameter is an instance of an [`Error`][] then it will be thrown instead of the added: v0.1.21 changes: - version: REPLACEME - pr-url: https://github.com/nodejs/node/pull/17581 - description: assert.ok() will throw a `ERR_MISSING_ARGS` error. - Use assert.fail() instead. + pr-url: https://github.com/nodejs/node/pull/REPLACEME + description: assert.ok() (no arguments) will now use a predefined error msg. --> * `value` {any} * `message` {any} @@ -707,6 +718,8 @@ property set equal to the value of the `message` parameter. 
If the `message` parameter is `undefined`, a default error message is assigned. If the `message` parameter is an instance of an [`Error`][] then it will be thrown instead of the `AssertionError`. +If no arguments are passed in at all `message` will be set to the string: +"No value argument passed to assert.ok". Be aware that in the `repl` the error message will be different to the one thrown in a file! See below for further details. @@ -719,6 +732,10 @@ assert.ok(true); assert.ok(1); // OK +assert.ok(); +// throws: +// "AssertionError: No value argument passed to `assert.ok`. + assert.ok(false, 'it\'s false'); // throws "AssertionError: it's false" @@ -915,6 +932,8 @@ second argument. This might lead to difficult-to-spot errors. [`Set`]: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Set [`Symbol`]: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Symbol [`TypeError`]: errors.html#errors_class_typeerror +[`WeakMap`]: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/WeakMap +[`WeakSet`]: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/WeakSet [`assert.deepEqual()`]: #assert_assert_deepequal_actual_expected_message [`assert.deepStrictEqual()`]: #assert_assert_deepstrictequal_actual_expected_message [`assert.notDeepStrictEqual()`]: #assert_assert_notdeepstrictequal_actual_expected_message diff --git a/doc/api/async_hooks.md b/doc/api/async_hooks.md index 4fa23f28d11..781509900cc 100644 --- a/doc/api/async_hooks.md +++ b/doc/api/async_hooks.md @@ -86,7 +86,7 @@ added: v8.1.0 * `before` {Function} The [`before` callback][]. * `after` {Function} The [`after` callback][]. * `destroy` {Function} The [`destroy` callback][]. -* Returns: `{AsyncHook}` Instance used for disabling and enabling hooks +* Returns: {AsyncHook} Instance used for disabling and enabling hooks Registers functions to be called for different lifetime events of each async operation. 
diff --git a/doc/api/cli.md b/doc/api/cli.md index b8ea4826dec..6bae0a67c3d 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -475,6 +475,8 @@ Node options that are allowed are: V8 options that are allowed are: - `--abort-on-uncaught-exception` - `--max-old-space-size` +- `--perf-basic-prof` +- `--perf-prof` - `--stack-trace-limit` ### `NODE_PENDING_DEPRECATION=1` diff --git a/doc/api/cluster.md b/doc/api/cluster.md index 1817ac82027..3063af83bdb 100644 --- a/doc/api/cluster.md +++ b/doc/api/cluster.md @@ -267,7 +267,7 @@ changes: description: This method now returns a reference to `worker`. --> -* Returns: {Worker} A reference to `worker`. +* Returns: {cluster.Worker} A reference to `worker`. In a worker, this function will close all servers, wait for the `'close'` event on those servers, and then disconnect the IPC channel. diff --git a/doc/api/console.md b/doc/api/console.md index 76f6c6ac462..68cab5b68f3 100644 --- a/doc/api/console.md +++ b/doc/api/console.md @@ -78,8 +78,8 @@ const { Console } = console; ``` ### new Console(stdout[, stderr]) -* `stdout` {Writable} -* `stderr` {Writable} +* `stdout` {stream.Writable} +* `stderr` {stream.Writable} Creates a new `Console` with one or two writable stream instances. `stdout` is a writable stream to print log or info output. `stderr` is used for warning or diff --git a/doc/api/crypto.md b/doc/api/crypto.md index ce91007640a..5e2af21dab1 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -1222,6 +1222,7 @@ related operations. The specific constants currently defined are described in ### crypto.DEFAULT_ENCODING The default encoding to use for functions that can take either strings @@ -1231,8 +1232,9 @@ default to [`Buffer`][] objects. The `crypto.DEFAULT_ENCODING` mechanism is provided for backwards compatibility with legacy programs that expect `'latin1'` to be the default encoding. -New applications should expect the default to be `'buffer'`. 
This property may -become deprecated in a future Node.js release. +New applications should expect the default to be `'buffer'`. + +This property is deprecated. ### crypto.fips -* Extends: {Duplex} +* Extends: {stream.Duplex} Each instance of the `Http2Stream` class represents a bidirectional HTTP/2 communications stream over an `Http2Session` instance. Any single `Http2Session` @@ -991,7 +991,7 @@ calling `http2stream.close()`, or `http2stream.destroy()`. Will be #### http2stream.sentHeaders * Value: {HTTP2 Headers Object} @@ -1000,7 +1000,7 @@ An object containing the outbound headers sent for this `Http2Stream`. #### http2stream.sentInfoHeaders * Value: {HTTP2 Headers Object[]} @@ -1010,7 +1010,7 @@ sent for this `Http2Stream`. #### http2stream.sentTrailers * Value: {HTTP2 Headers Object} diff --git a/doc/api/perf_hooks.md b/doc/api/perf_hooks.md index 2f2910af93d..608bee8d574 100644 --- a/doc/api/perf_hooks.md +++ b/doc/api/perf_hooks.md @@ -31,7 +31,7 @@ instance of this class is provided via the `performance` property. ### performance.clearEntries(name) Remove all performance entry objects with `entryType` equal to `name` from the @@ -125,6 +125,20 @@ Creates a new `PerformanceMark` entry in the Performance Timeline. A `performanceEntry.duration` is always `0`. Performance marks are used to mark specific significant moments in the Performance Timeline. +### performance.maxEntries + + +Value: {number} + +The maximum number of Performance Entry items that should be added to the +Performance Timeline. This limit is not strictly enforced, but a process +warning will be emitted if the number of entries in the timeline exceeds +this limit. + +Defaults to 150. + ### performance.measure(name, startMark, endMark) -* `stream` {Writable} +* `stream` {stream.Writable} * `dir` {number} * `-1` - to the left from cursor * `1` - to the right from cursor @@ -338,7 +338,7 @@ in a specified direction identified by `dir`. 
added: v0.7.7 --> -* `stream` {Writable} +* `stream` {stream.Writable} The `readline.clearScreenDown()` method clears the given [TTY][] stream from the current position of the cursor down. @@ -362,9 +362,9 @@ changes: --> * `options` {Object} - * `input` {Readable} The [Readable][] stream to listen to. This option is + * `input` {stream.Readable} The [Readable][] stream to listen to. This option is *required*. - * `output` {Writable} The [Writable][] stream to write readline data to. + * `output` {stream.Writable} The [Writable][] stream to write readline data to. * `completer` {Function} An optional function used for Tab autocompletion. * `terminal` {boolean} `true` if the `input` and `output` streams should be treated like a TTY, and have ANSI/VT100 escape codes written to it. @@ -444,7 +444,7 @@ function completer(linePartial, callback) { added: v0.7.7 --> -* `stream` {Writable} +* `stream` {stream.Writable} * `x` {number} * `y` {number} @@ -456,7 +456,7 @@ given [TTY][] `stream`. added: v0.7.7 --> -* `stream` {Readable} +* `stream` {stream.Readable} * `interface` {readline.Interface} The `readline.emitKeypressEvents()` method causes the given [Readable][] @@ -482,7 +482,7 @@ if (process.stdin.isTTY) added: v0.7.7 --> -* `stream` {Writable} +* `stream` {stream.Writable} * `dx` {number} * `dy` {number} diff --git a/doc/api/repl.md b/doc/api/repl.md index a1dfffa9cc0..506f54a4b8a 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -412,9 +412,9 @@ changes: * `options` {Object|string} * `prompt` {string} The input prompt to display. Defaults to `> ` (with a trailing space). - * `input` {Readable} The Readable stream from which REPL input will be read. + * `input` {stream.Readable} The Readable stream from which REPL input will be read. Defaults to `process.stdin`. - * `output` {Writable} The Writable stream to which REPL output will be + * `output` {stream.Writable} The Writable stream to which REPL output will be written. Defaults to `process.stdout`. 
* `terminal` {boolean} If `true`, specifies that the `output` should be treated as a TTY terminal, and have ANSI/VT100 escape codes written to it. diff --git a/doc/api/stream.md b/doc/api/stream.md index 222f8dbd49e..345e0d824d5 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -394,7 +394,7 @@ changes: --> * `encoding` {string} The new default encoding -* Returns: `this` +* Returns: {this} The `writable.setDefaultEncoding()` method sets the default `encoding` for a [Writable][] stream. @@ -533,7 +533,7 @@ A Writable stream in object mode will always ignore the `encoding` argument. added: v8.0.0 --> -* Returns: `this` +* Returns: {this} Destroy the stream, and emit the passed error. After this call, the writable stream has ended. Implementors should not override this method, @@ -580,8 +580,8 @@ The Readable can switch back to paused mode using one of the following: * If there are no pipe destinations, by calling the [`stream.pause()`][stream-pause] method. -* If there are pipe destinations, by removing any [`'data'`][] event - handlers, and removing all pipe destinations by calling the +* If there are pipe destinations, by removing all pipe destinations. + Multiple pipe destinations may be removed by calling the [`stream.unpipe()`][] method. The important concept to remember is that a Readable will not generate data @@ -824,7 +824,7 @@ readable.isPaused(); // === false added: v0.9.4 --> -* Returns: `this` +* Returns: {this} The `readable.pause()` method will cause a stream in flowing mode to stop emitting [`'data'`][] events, switching out of flowing mode. Any data that @@ -973,7 +973,7 @@ the status of the `highWaterMark`. added: v0.9.4 --> -* Returns: `this` +* Returns: {this} The `readable.resume()` method causes an explicitly paused Readable stream to resume emitting [`'data'`][] events, switching the stream into flowing mode. @@ -996,7 +996,7 @@ added: v0.9.4 --> * `encoding` {string} The encoding to use. 
-* Returns: `this` +* Returns: {this} The `readable.setEncoding()` method sets the character encoding for data read from the Readable stream. @@ -1459,7 +1459,7 @@ write succeeded. All calls to `writable.write()` that occur between the time `writable._write()` is called and the `callback` is called will cause the written data to be -buffered. Once the `callback` is invoked, the stream will emit a [`'drain'`][] +buffered. When the `callback` is invoked, the stream might emit a [`'drain'`][] event. If a stream implementation is capable of processing multiple chunks of data at once, the `writable._writev()` method should be implemented. diff --git a/doc/api/tty.md b/doc/api/tty.md index ce6dbae8fa6..64612477fc7 100644 --- a/doc/api/tty.md +++ b/doc/api/tty.md @@ -126,7 +126,7 @@ is updated whenever the `'resize'` event is emitted. added: REPLACEME --> -* `env` {object} A object containing the environment variables to check. +* `env` {Object} A object containing the environment variables to check. Defaults to `process.env`. * Returns: {number} diff --git a/doc/changelogs/CHANGELOG_V9.md b/doc/changelogs/CHANGELOG_V9.md index 57fb7825f02..2f823638f80 100644 --- a/doc/changelogs/CHANGELOG_V9.md +++ b/doc/changelogs/CHANGELOG_V9.md @@ -8,6 +8,7 @@ +9.5.0
9.4.0
9.3.0
9.2.1
@@ -29,6 +30,189 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2018-01-31, Version 9.5.0 (Current), @evanlucas + +### Notable Changes + +* **cluster** + - add cwd to cluster.settings (cjihrig) [#18399](https://github.com/nodejs/node/pull/18399) +* **deps** + - upgrade libuv to 1.19.1 (cjihrig) [#18260](https://github.com/nodejs/node/pull/18260) +* **meta** + - add Leko to collaborators (Leko) [#18117](https://github.com/nodejs/node/pull/18117) + - add vdeturckheim as collaborator (vdeturckheim) [#18432](https://github.com/nodejs/node/pull/18432) +* **n-api** + - expose n-api version in process.versions (Michael Dawson) [#18067](https://github.com/nodejs/node/pull/18067) +* **perf_hooks** + - add performance.clear() (James M Snell) [#18046](https://github.com/nodejs/node/pull/18046) +* **stream** + - avoid writeAfterEnd() while ending (陈刚) [#18170](https://github.com/nodejs/node/pull/18170) + +### Commits + +* [[`0a68018ad0`](https://github.com/nodejs/node/commit/0a68018ad0)] - **async_hooks**: update defaultTriggerAsyncIdScope for perf (Anatoli Papirovski) [#18004](https://github.com/nodejs/node/pull/18004) +* [[`dd56bd1591`](https://github.com/nodejs/node/commit/dd56bd1591)] - **async_hooks**: use typed array stack as fast path (Anna Henningsen) [#17780](https://github.com/nodejs/node/pull/17780) +* [[`a880e272ff`](https://github.com/nodejs/node/commit/a880e272ff)] - **async_hooks**: use scope for defaultTriggerAsyncId (Andreas Madsen) [#17273](https://github.com/nodejs/node/pull/17273) +* [[`f56eb2a41e`](https://github.com/nodejs/node/commit/f56eb2a41e)] - **async_hooks**: separate missing from default context (Andreas Madsen) [#17273](https://github.com/nodejs/node/pull/17273) +* [[`2a4f849c39`](https://github.com/nodejs/node/commit/2a4f849c39)] - **async_hooks**: rename initTriggerId (Andreas Madsen) 
[#17273](https://github.com/nodejs/node/pull/17273) +* [[`ac2f98d6a6`](https://github.com/nodejs/node/commit/ac2f98d6a6)] - **(SEMVER-MINOR)** **async_hooks,http**: set HTTPParser trigger to socket (Andreas Madsen) [#18003](https://github.com/nodejs/node/pull/18003) +* [[`e9397d67a3`](https://github.com/nodejs/node/commit/e9397d67a3)] - **async_hooks,test**: only use IPv6 in http test (Andreas Madsen) [#18143](https://github.com/nodejs/node/pull/18143) +* [[`2efa7d1bfd`](https://github.com/nodejs/node/commit/2efa7d1bfd)] - **benchmark**: implement duration in http test double (Joyee Cheung) [#18380](https://github.com/nodejs/node/pull/18380) +* [[`b5ec6ea3d0`](https://github.com/nodejs/node/commit/b5ec6ea3d0)] - **benchmark**: make compare.R easier to understand (Andreas Madsen) [#18373](https://github.com/nodejs/node/pull/18373) +* [[`ea19f7db0d`](https://github.com/nodejs/node/commit/ea19f7db0d)] - **benchmark**: use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`cd9bc8bc50`](https://github.com/nodejs/node/commit/cd9bc8bc50)] - **benchmark**: (dgram) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`e19c77b14e`](https://github.com/nodejs/node/commit/e19c77b14e)] - **benchmark**: (child_process) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`5cf5ab154e`](https://github.com/nodejs/node/commit/5cf5ab154e)] - **benchmark**: (buffers) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`71faa5c1b4`](https://github.com/nodejs/node/commit/71faa5c1b4)] - **benchmark**: (events) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`c25d4d66dc`](https://github.com/nodejs/node/commit/c25d4d66dc)] - **benchmark**: 
(es) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`59271c8f7f`](https://github.com/nodejs/node/commit/59271c8f7f)] - **benchmark**: (fs) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`4e19cbef86`](https://github.com/nodejs/node/commit/4e19cbef86)] - **benchmark**: (http) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`e9c426b35b`](https://github.com/nodejs/node/commit/e9c426b35b)] - **benchmark**: (misc) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`d13d900eee`](https://github.com/nodejs/node/commit/d13d900eee)] - **benchmark**: (http2) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`97e882061d`](https://github.com/nodejs/node/commit/97e882061d)] - **benchmark**: (string_decoder) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`5b0e3b9860`](https://github.com/nodejs/node/commit/5b0e3b9860)] - **benchmark**: (path) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`7bc5bad74f`](https://github.com/nodejs/node/commit/7bc5bad74f)] - **benchmark**: (os) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`cf666d8529`](https://github.com/nodejs/node/commit/cf666d8529)] - **benchmark**: (net) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`88f4bf219d`](https://github.com/nodejs/node/commit/88f4bf219d)] - **benchmark**: (process) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`f4918289e7`](https://github.com/nodejs/node/commit/f4918289e7)] - **benchmark**: 
(querystring) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`81abea592f`](https://github.com/nodejs/node/commit/81abea592f)] - **benchmark**: (streams) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`11d6458fd7`](https://github.com/nodejs/node/commit/11d6458fd7)] - **benchmark**: (timers) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`3e3254a2e7`](https://github.com/nodejs/node/commit/3e3254a2e7)] - **benchmark**: (tls) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`c0707c54a5`](https://github.com/nodejs/node/commit/c0707c54a5)] - **benchmark**: (util/v8/vm) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`76f671b84e`](https://github.com/nodejs/node/commit/76f671b84e)] - **benchmark**: (zlib) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`aa47fe0ef9`](https://github.com/nodejs/node/commit/aa47fe0ef9)] - **benchmark**: (url) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`e00dac7b06`](https://github.com/nodejs/node/commit/e00dac7b06)] - **benchmark**: (assert) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`3543458988`](https://github.com/nodejs/node/commit/3543458988)] - **benchmark**: (arrays) use destructuring (Ruben Bridgewater) [#18250](https://github.com/nodejs/node/pull/18250) +* [[`aa21d55403`](https://github.com/nodejs/node/commit/aa21d55403)] - **benchmark**: remove redundant + (sreepurnajasti) [#17803](https://github.com/nodejs/node/pull/17803) +* [[`a4ba791566`](https://github.com/nodejs/node/commit/a4ba791566)] - **benchmark**: add 
JSStreamWrap benchmark (Anna Henningsen) [#17983](https://github.com/nodejs/node/pull/17983) +* [[`deac028cb6`](https://github.com/nodejs/node/commit/deac028cb6)] - **build**: fix rm commands in tarball rule (Ben Noordhuis) [#18332](https://github.com/nodejs/node/pull/18332) +* [[`2a9afc4c0e`](https://github.com/nodejs/node/commit/2a9afc4c0e)] - **build**: make lint-js independent of local node (Joyee Cheung) [#18272](https://github.com/nodejs/node/pull/18272) +* [[`ce1eb0be7e`](https://github.com/nodejs/node/commit/ce1eb0be7e)] - **build**: make lint-md independent of local node (Joyee Cheung) [#18272](https://github.com/nodejs/node/pull/18272) +* [[`f050521a71`](https://github.com/nodejs/node/commit/f050521a71)] - **build**: define NOMINMAX on windows (Ben Noordhuis) [#18216](https://github.com/nodejs/node/pull/18216) +* [[`70d6fda9f3`](https://github.com/nodejs/node/commit/70d6fda9f3)] - **build**: remove unused vars from configure (Ben Noordhuis) [#18206](https://github.com/nodejs/node/pull/18206) +* [[`f81c62246d`](https://github.com/nodejs/node/commit/f81c62246d)] - **build**: refine static and shared lib build (Yihong Wang) [#17604](https://github.com/nodejs/node/pull/17604) +* [[`1506eb5f25`](https://github.com/nodejs/node/commit/1506eb5f25)] - **build**: remove bench-* targets (Joyee Cheung) [#18150](https://github.com/nodejs/node/pull/18150) +* [[`969c89bf55`](https://github.com/nodejs/node/commit/969c89bf55)] - **build**: fix Makefile wrt finding node executable (Yang Guo) [#18040](https://github.com/nodejs/node/pull/18040) +* [[`dd72f9c9b7`](https://github.com/nodejs/node/commit/dd72f9c9b7)] - **build**: fix cctest target with --enable-static (Qingyan Li) [#17992](https://github.com/nodejs/node/pull/17992) +* [[`2c4e0216de`](https://github.com/nodejs/node/commit/2c4e0216de)] - **build,win**: update lint-cpp on 
Windows (Kyle Farnung) [#18012](https://github.com/nodejs/node/pull/18012) +* [[`d8ac817cb6`](https://github.com/nodejs/node/commit/d8ac817cb6)] - **build,win**: restore vcbuild TAG functionality (Rod Vagg) [#18031](https://github.com/nodejs/node/pull/18031) +* [[`799fd24acb`](https://github.com/nodejs/node/commit/799fd24acb)] - **(SEMVER-MINOR)** **cluster**: add cwd to cluster.settings (cjihrig) [#18399](https://github.com/nodejs/node/pull/18399) +* [[`6b687cf3c9`](https://github.com/nodejs/node/commit/6b687cf3c9)] - **cluster**: resolve relative unix socket paths (laino) [#16749](https://github.com/nodejs/node/pull/16749) +* [[`693159e627`](https://github.com/nodejs/node/commit/693159e627)] - **(SEMVER-MINOR)** **deps**: upgrade libuv to 1.19.1 (cjihrig) [#18260](https://github.com/nodejs/node/pull/18260) +* [[`506d85bfba`](https://github.com/nodejs/node/commit/506d85bfba)] - **deps**: cherry-pick c3458a8 from upstream V8 (Michaël Zasso) [#18060](https://github.com/nodejs/node/pull/18060) +* [[`45051fa48c`](https://github.com/nodejs/node/commit/45051fa48c)] - **doc**: add vdeturckheim as collaborator (vdeturckheim) [#18432](https://github.com/nodejs/node/pull/18432) +* [[`03cb06944b`](https://github.com/nodejs/node/commit/03cb06944b)] - **doc**: unify type linkification (Vse Mozhet Byt) [#18407](https://github.com/nodejs/node/pull/18407) +* [[`d829237b92`](https://github.com/nodejs/node/commit/d829237b92)] - **doc**: fix typo in REPL docs (Adam Engebretson) [#18404](https://github.com/nodejs/node/pull/18404) +* [[`6ae7bb143a`](https://github.com/nodejs/node/commit/6ae7bb143a)] - **doc**: fix e.g., to e.g. 
in docs (sreepurnajasti) [#18369](https://github.com/nodejs/node/pull/18369) +* [[`574d3b9ce8`](https://github.com/nodejs/node/commit/574d3b9ce8)] - **doc**: fix documentation of http2Stream.pushstream() (Peter Dalgaard-Jensen) [#18258](https://github.com/nodejs/node/pull/18258) +* [[`4d3121b6ed`](https://github.com/nodejs/node/commit/4d3121b6ed)] - **doc**: fix return value for require.resolve.paths() (Peter Dalgaard-Jensen) [#18350](https://github.com/nodejs/node/pull/18350) +* [[`987480c232`](https://github.com/nodejs/node/commit/987480c232)] - **doc**: add missing word in modules.md (Robert Adamian) [#18343](https://github.com/nodejs/node/pull/18343) +* [[`224cc64d0c`](https://github.com/nodejs/node/commit/224cc64d0c)] - **doc**: add doc for performance.clearGC() (Antony Tran) [#18331](https://github.com/nodejs/node/pull/18331) +* [[`e5f6159958`](https://github.com/nodejs/node/commit/e5f6159958)] - **doc**: document the collaborator nomination process (Joyee Cheung) [#18268](https://github.com/nodejs/node/pull/18268) +* [[`c9e09adc8d`](https://github.com/nodejs/node/commit/c9e09adc8d)] - **doc**: improve the instructions of onboarding PR (Joyee Cheung) [#18268](https://github.com/nodejs/node/pull/18268) +* [[`b055c9efe5`](https://github.com/nodejs/node/commit/b055c9efe5)] - **doc**: split CONTRIBUTING.md (Joyee Cheung) [#18271](https://github.com/nodejs/node/pull/18271) +* [[`485d60eea2`](https://github.com/nodejs/node/commit/485d60eea2)] - **doc**: fix typos in async_hooks (Matthew Turner) [#18314](https://github.com/nodejs/node/pull/18314) +* [[`e3cc0919f6`](https://github.com/nodejs/node/commit/e3cc0919f6)] - **doc**: add missing URL argument types in fs.md (Vse Mozhet Byt) [#18309](https://github.com/nodejs/node/pull/18309) +* [[`1efb9cd271`](https://github.com/nodejs/node/commit/1efb9cd271)] - **doc**: remove confusing 
signature in fs.md (Vse Mozhet Byt) [#18310](https://github.com/nodejs/node/pull/18310) +* [[`195bed21eb`](https://github.com/nodejs/node/commit/195bed21eb)] - **doc**: use PBKDF2 in text (Tobias Nießen) [#18279](https://github.com/nodejs/node/pull/18279) +* [[`17ef69e6e2`](https://github.com/nodejs/node/commit/17ef69e6e2)] - **doc**: fix typo in async_hooks.md (Matthew Turner) [#18286](https://github.com/nodejs/node/pull/18286) +* [[`01599e2959`](https://github.com/nodejs/node/commit/01599e2959)] - **doc**: Add example of null to assert.ifError (Leko) [#18236](https://github.com/nodejs/node/pull/18236) +* [[`5c5aa4969c`](https://github.com/nodejs/node/commit/5c5aa4969c)] - **doc**: improve process.platform (Mars Wong) [#18057](https://github.com/nodejs/node/pull/18057) +* [[`61df843c95`](https://github.com/nodejs/node/commit/61df843c95)] - **doc**: cjs format is now commonjs (Gus Caplan) [#18165](https://github.com/nodejs/node/pull/18165) +* [[`361fd33709`](https://github.com/nodejs/node/commit/361fd33709)] - **doc**: V8 branch used in 8.x not active anymore (Franziska Hinkelmann) [#18155](https://github.com/nodejs/node/pull/18155) +* [[`b553daa29b`](https://github.com/nodejs/node/commit/b553daa29b)] - **doc**: add change info for async_hooks.executionAsyncId() (Stephen Belanger) [#17813](https://github.com/nodejs/node/pull/17813) +* [[`4b918d79df`](https://github.com/nodejs/node/commit/4b918d79df)] - **doc**: remove uannecessary Require (Michael Dawson) [#18184](https://github.com/nodejs/node/pull/18184) +* [[`926467ab80`](https://github.com/nodejs/node/commit/926467ab80)] - **doc**: add builtin module in building.md (Suixinlei) [#17705](https://github.com/nodejs/node/pull/17705) +* [[`1ef8f4e22e`](https://github.com/nodejs/node/commit/1ef8f4e22e)] - **doc**: warn users about non-ASCII paths on build (Matheus Marchini) 
[#16735](https://github.com/nodejs/node/pull/16735) +* [[`a1096a6b05`](https://github.com/nodejs/node/commit/a1096a6b05)] - **doc**: simplify sentences that use "considered" (Rich Trott) [#18095](https://github.com/nodejs/node/pull/18095) +* [[`1d74c33148`](https://github.com/nodejs/node/commit/1d74c33148)] - **doc**: update sample output for process.versions (Michael Dawson) [#18167](https://github.com/nodejs/node/pull/18167) +* [[`2fb5f19894`](https://github.com/nodejs/node/commit/2fb5f19894)] - **doc**: fix typo in TextEncoding section (Yosuke Furukawa) [#18201](https://github.com/nodejs/node/pull/18201) +* [[`b4e7260d3e`](https://github.com/nodejs/node/commit/b4e7260d3e)] - **doc**: fix typo in http2stream.close param default (Moritz Peters) [#18166](https://github.com/nodejs/node/pull/18166) +* [[`b05f09a587`](https://github.com/nodejs/node/commit/b05f09a587)] - **doc**: suggest not to throw JS errors from C++ (Joyee Cheung) [#18149](https://github.com/nodejs/node/pull/18149) +* [[`5a95905d91`](https://github.com/nodejs/node/commit/5a95905d91)] - **doc**: napi: make header style consistent (Ali Ijaz Sheikh) [#18122](https://github.com/nodejs/node/pull/18122) +* [[`990abbf06c`](https://github.com/nodejs/node/commit/990abbf06c)] - **doc**: napi: fix unbalanced emphasis (Ali Ijaz Sheikh) [#18122](https://github.com/nodejs/node/pull/18122) +* [[`f8f809b7fa`](https://github.com/nodejs/node/commit/f8f809b7fa)] - **doc**: add documentation for deprecation properties (Jon Moss) [#16539](https://github.com/nodejs/node/pull/16539) +* [[`0e8596e2a6`](https://github.com/nodejs/node/commit/0e8596e2a6)] - **doc**: prefer make test-only when verifying the build (Joyee Cheung) [#18061](https://github.com/nodejs/node/pull/18061) +* [[`bbdc3c4ae8`](https://github.com/nodejs/node/commit/bbdc3c4ae8)] - **doc**: add Leko to collaborators 
(Leko) [#18117](https://github.com/nodejs/node/pull/18117) +* [[`afc30a56e3`](https://github.com/nodejs/node/commit/afc30a56e3)] - **doc**: decapitalize primitive types (Vse Mozhet Byt) [#18110](https://github.com/nodejs/node/pull/18110) +* [[`30e2221a15`](https://github.com/nodejs/node/commit/30e2221a15)] - **doc**: fix s/rstStream/close in example (James M Snell) [#18088](https://github.com/nodejs/node/pull/18088) +* [[`1c81a055df`](https://github.com/nodejs/node/commit/1c81a055df)] - **doc**: update pushStream docs to use err first (James M Snell) [#18088](https://github.com/nodejs/node/pull/18088) +* [[`de70a363eb`](https://github.com/nodejs/node/commit/de70a363eb)] - **doc**: be less tentative about undefined behavior (Rich Trott) [#18091](https://github.com/nodejs/node/pull/18091) +* [[`5ebd0178a6`](https://github.com/nodejs/node/commit/5ebd0178a6)] - **doc**: add descriptions of state properties (James M Snell) [#18044](https://github.com/nodejs/node/pull/18044) +* [[`7911b9b493`](https://github.com/nodejs/node/commit/7911b9b493)] - **doc**: examples for fast-tracking regression fixes (Refael Ackermann) [#17379](https://github.com/nodejs/node/pull/17379) +* [[`f0a0fdd83a`](https://github.com/nodejs/node/commit/f0a0fdd83a)] - **doc**: multiple updates to BUILDING.md (Rich Trott) [#17985](https://github.com/nodejs/node/pull/17985) +* [[`278450fc72`](https://github.com/nodejs/node/commit/278450fc72)] - **doc**: multiple updates to child_process.md (Rich Trott) [#17990](https://github.com/nodejs/node/pull/17990) +* [[`722fe464bc`](https://github.com/nodejs/node/commit/722fe464bc)] - ***Revert*** "**doc**: import() is supported now" (Myles Borins) [#18141](https://github.com/nodejs/node/pull/18141) +* [[`39970e9caf`](https://github.com/nodejs/node/commit/39970e9caf)] - **doc**: un-mark Socket#write “removal” as notable change 
(Anna Henningsen) [#18083](https://github.com/nodejs/node/pull/18083) +* [[`df8cb401a0`](https://github.com/nodejs/node/commit/df8cb401a0)] - **errors**: remove ERR_OUTOFMEMORY (Tobias Nießen) [#17877](https://github.com/nodejs/node/pull/17877) +* [[`230a102647`](https://github.com/nodejs/node/commit/230a102647)] - **fs**: cleanup fd lchown and lchownSync (James M Snell) [#18329](https://github.com/nodejs/node/pull/18329) +* [[`778d57c2c2`](https://github.com/nodejs/node/commit/778d57c2c2)] - **fs**: fix options.end of fs.ReadStream() (陈刚) [#18121](https://github.com/nodejs/node/pull/18121) +* [[`7fc395a0d7`](https://github.com/nodejs/node/commit/7fc395a0d7)] - **http**: there is no `corked` property of `stream` (Fedor Indutny) [#18325](https://github.com/nodejs/node/pull/18325) +* [[`b87939cf53`](https://github.com/nodejs/node/commit/b87939cf53)] - **http**: use strict comparison (leeseean) [#17011](https://github.com/nodejs/node/pull/17011) +* [[`0250e1b9c0`](https://github.com/nodejs/node/commit/0250e1b9c0)] - **http**: free the parser before emitting 'upgrade' (Luigi Pinca) [#18209](https://github.com/nodejs/node/pull/18209) +* [[`155622847f`](https://github.com/nodejs/node/commit/155622847f)] - **http**: fix parsing of binary upgrade response body (Ben Noordhuis) [#17806](https://github.com/nodejs/node/pull/17806) +* [[`8e084d8bfb`](https://github.com/nodejs/node/commit/8e084d8bfb)] - **http**: simplify parser lifetime tracking (Anna Henningsen) [#18135](https://github.com/nodejs/node/pull/18135) +* [[`ee6217a4c7`](https://github.com/nodejs/node/commit/ee6217a4c7)] - **http2**: add checks for server close callback (James M Snell) [#18182](https://github.com/nodejs/node/pull/18182) +* [[`b3332cce46`](https://github.com/nodejs/node/commit/b3332cce46)] - **http2**: refactor read mechanism (Anna Henningsen) 
[#18030](https://github.com/nodejs/node/pull/18030) +* [[`eee40c71c9`](https://github.com/nodejs/node/commit/eee40c71c9)] - **http2**: remember sent headers (James M Snell) [#18045](https://github.com/nodejs/node/pull/18045) +* [[`39612a8657`](https://github.com/nodejs/node/commit/39612a8657)] - **http2,perf_hooks**: perf state using AliasedBuffer (Kyle Farnung) [#18300](https://github.com/nodejs/node/pull/18300) +* [[`14f7f607f6`](https://github.com/nodejs/node/commit/14f7f607f6)] - **(SEMVER-MINOR)** **lib**: add internal removeColors helper (Ruben Bridgewater) [#17615](https://github.com/nodejs/node/pull/17615) +* [[`74c1f4ef78`](https://github.com/nodejs/node/commit/74c1f4ef78)] - **lib**: fix typo in trace_events_async_hooks.js (Gilles De Mey) [#18280](https://github.com/nodejs/node/pull/18280) +* [[`485d656013`](https://github.com/nodejs/node/commit/485d656013)] - **lib**: use american spelling as per style guide (sreepurnajasti) [#18226](https://github.com/nodejs/node/pull/18226) +* [[`dcdb646ada`](https://github.com/nodejs/node/commit/dcdb646ada)] - **lib**: fix spelling in comments (Tobias Nießen) [#18018](https://github.com/nodejs/node/pull/18018) +* [[`8f8e7479cb`](https://github.com/nodejs/node/commit/8f8e7479cb)] - **lib**: remove queue implementation from JSStreamWrap (Anna Henningsen) [#17918](https://github.com/nodejs/node/pull/17918) +* [[`9edf023694`](https://github.com/nodejs/node/commit/9edf023694)] - **n-api**: throw RangeError napi_create_typedarray() (Jinho Bang) [#18037](https://github.com/nodejs/node/pull/18037) +* [[`0668a75c39`](https://github.com/nodejs/node/commit/0668a75c39)] - **(SEMVER-MINOR)** **n-api**: expose n-api version in process.versions (Michael Dawson) [#18067](https://github.com/nodejs/node/pull/18067) +* [[`f693e81ee5`](https://github.com/nodejs/node/commit/f693e81ee5)] - **n-api**: 
throw RangeError in napi_create_dataview() with invalid range (Jinho Bang) [#17869](https://github.com/nodejs/node/pull/17869) +* [[`470832f203`](https://github.com/nodejs/node/commit/470832f203)] - **path**: fix path.normalize for relative paths (Weijia Wang) [#17974](https://github.com/nodejs/node/pull/17974) +* [[`645be73b9d`](https://github.com/nodejs/node/commit/645be73b9d)] - **(SEMVER-MINOR)** **perf_hooks,http2**: add performance.clear() (James M Snell) [#18046](https://github.com/nodejs/node/pull/18046) +* [[`11982aecd4`](https://github.com/nodejs/node/commit/11982aecd4)] - **process**: JS fast path for bindings (Anatoli Papirovski) [#18365](https://github.com/nodejs/node/pull/18365) +* [[`ce7ce9d1ee`](https://github.com/nodejs/node/commit/ce7ce9d1ee)] - **process**: clean up signal handler setup (Anatoli Papirovski) [#18330](https://github.com/nodejs/node/pull/18330) +* [[`a5b35db5d2`](https://github.com/nodejs/node/commit/a5b35db5d2)] - **process**: remove dead code (Anatoli Papirovski) [#18330](https://github.com/nodejs/node/pull/18330) +* [[`56a9ae7773`](https://github.com/nodejs/node/commit/56a9ae7773)] - **readline**: update references to archived repository (Tobias Nießen) [#17924](https://github.com/nodejs/node/pull/17924) +* [[`144cfb4b99`](https://github.com/nodejs/node/commit/144cfb4b99)] - **src**: remove outdated domain reference (Anatoli Papirovski) [#18291](https://github.com/nodejs/node/pull/18291) +* [[`3ab391d3d3`](https://github.com/nodejs/node/commit/3ab391d3d3)] - **src**: remove unnecessary block scope (Anatoli Papirovski) [#18291](https://github.com/nodejs/node/pull/18291) +* [[`84f8e62f97`](https://github.com/nodejs/node/commit/84f8e62f97)] - **src**: DRY ip address parsing code in cares_wrap.cc (Ben Noordhuis) [#18398](https://github.com/nodejs/node/pull/18398) +* 
[[`ecf5bea485`](https://github.com/nodejs/node/commit/ecf5bea485)] - **src**: remove unused variable (cjihrig) [#18385](https://github.com/nodejs/node/pull/18385) +* [[`1c8df28752`](https://github.com/nodejs/node/commit/1c8df28752)] - **src**: fix -Wimplicit-fallthrough warning (Ben Noordhuis) [#18205](https://github.com/nodejs/node/pull/18205) +* [[`4513cbb4fe`](https://github.com/nodejs/node/commit/4513cbb4fe)] - **src**: refactor callback #defines into C++ templates (Anna Henningsen) [#18133](https://github.com/nodejs/node/pull/18133) +* [[`077bcbd202`](https://github.com/nodejs/node/commit/077bcbd202)] - **src**: introduce internal buffer slice constructor (Anna Henningsen) [#18030](https://github.com/nodejs/node/pull/18030) +* [[`87e3d3db89`](https://github.com/nodejs/node/commit/87e3d3db89)] - **src**: fix code coverage cleanup (Michael Dawson) [#18081](https://github.com/nodejs/node/pull/18081) +* [[`15aaf18b72`](https://github.com/nodejs/node/commit/15aaf18b72)] - **src**: remove declarations for missing functions (Anna Henningsen) [#18134](https://github.com/nodejs/node/pull/18134) +* [[`ac0a0a6775`](https://github.com/nodejs/node/commit/ac0a0a6775)] - **src**: harden JSStream callbacks (Anna Henningsen) [#18028](https://github.com/nodejs/node/pull/18028) +* [[`217ddd8ba2`](https://github.com/nodejs/node/commit/217ddd8ba2)] - **src,doc,test**: Fix common misspellings (Roman Reiss) [#18151](https://github.com/nodejs/node/pull/18151) +* [[`c4abdcdc30`](https://github.com/nodejs/node/commit/c4abdcdc30)] - **(SEMVER-MINOR)** **stream**: avoid writeAfterEnd() while ending (陈刚) [#18170](https://github.com/nodejs/node/pull/18170) +* [[`25bebae61c`](https://github.com/nodejs/node/commit/25bebae61c)] - **stream**: simplify `src._readableState` to `state` (陈刚) [#18264](https://github.com/nodejs/node/pull/18264) +* 
[[`f7d57d039a`](https://github.com/nodejs/node/commit/f7d57d039a)] - **stream**: remove unreachable code (Luigi Pinca) [#18239](https://github.com/nodejs/node/pull/18239) +* [[`117b20e621`](https://github.com/nodejs/node/commit/117b20e621)] - **test**: adds tests for vm invalid arguments (Gilles De Mey) [#18282](https://github.com/nodejs/node/pull/18282) +* [[`c84dd03120`](https://github.com/nodejs/node/commit/c84dd03120)] - **test**: refactor addons-napi/test_exception/test.js (Rich Trott) [#18340](https://github.com/nodejs/node/pull/18340) +* [[`1458e51d2f`](https://github.com/nodejs/node/commit/1458e51d2f)] - **test**: fix test-tls-server-verify.js on Windows CI (Rich Trott) [#18382](https://github.com/nodejs/node/pull/18382) +* [[`7d27228e90`](https://github.com/nodejs/node/commit/7d27228e90)] - **test**: use correct size in test-stream-buffer-list (Luigi Pinca) [#18239](https://github.com/nodejs/node/pull/18239) +* [[`5855a57d52`](https://github.com/nodejs/node/commit/5855a57d52)] - **test**: change assert message to default (ryanmahan) [#18259](https://github.com/nodejs/node/pull/18259) +* [[`fc89cea5cd`](https://github.com/nodejs/node/commit/fc89cea5cd)] - **test**: use countdown timer (Mandeep Singh) [#17326](https://github.com/nodejs/node/pull/17326) +* [[`761f26eb12`](https://github.com/nodejs/node/commit/761f26eb12)] - **test**: make async-wrap-getasyncid parallelizable (Joyee Cheung) [#18245](https://github.com/nodejs/node/pull/18245) +* [[`506c6e841c`](https://github.com/nodejs/node/commit/506c6e841c)] - **test**: refactor test-http-parser (Jon Moss) [#18219](https://github.com/nodejs/node/pull/18219) +* [[`5b5f5b1b32`](https://github.com/nodejs/node/commit/5b5f5b1b32)] - **test**: add assertions for TextEncoder/Decoder (Sho Miyamoto) [#18132](https://github.com/nodejs/node/pull/18132) +* 
[[`3299a1a19b`](https://github.com/nodejs/node/commit/3299a1a19b)] - **test**: remove trivial buffer imports (sreepurnajasti) [#18034](https://github.com/nodejs/node/pull/18034) +* [[`78e05da071`](https://github.com/nodejs/node/commit/78e05da071)] - **test**: use shorthand properties (Tobias Nießen) [#18105](https://github.com/nodejs/node/pull/18105) +* [[`63be0d6daa`](https://github.com/nodejs/node/commit/63be0d6daa)] - **test**: simplify loadDHParam in TLS test (Tobias Nießen) [#18103](https://github.com/nodejs/node/pull/18103) +* [[`1dcae5756e`](https://github.com/nodejs/node/commit/1dcae5756e)] - **test**: improve to use template string (sreepurnajasti) [#18097](https://github.com/nodejs/node/pull/18097) +* [[`0c8b5d5bfb`](https://github.com/nodejs/node/commit/0c8b5d5bfb)] - **test**: fixed typos in napi test (furstenheim) [#18148](https://github.com/nodejs/node/pull/18148) +* [[`2aeb025999`](https://github.com/nodejs/node/commit/2aeb025999)] - **test**: add common.crashOnUnhandledRejection to addons/callback-scope (Sho Miyamoto) [#18076](https://github.com/nodejs/node/pull/18076) +* [[`7706e5f1ea`](https://github.com/nodejs/node/commit/7706e5f1ea)] - **test**: remove orphaned entries from status (Kyle Farnung) [#18092](https://github.com/nodejs/node/pull/18092) +* [[`5fccb6ea3a`](https://github.com/nodejs/node/commit/5fccb6ea3a)] - **test**: fix spelling in test case comments (Tobias Nießen) [#18018](https://github.com/nodejs/node/pull/18018) +* [[`3456e61b44`](https://github.com/nodejs/node/commit/3456e61b44)] - **test**: use smaller input file for test-zlib.js (Rich Trott) [#17988](https://github.com/nodejs/node/pull/17988) +* [[`733df362fa`](https://github.com/nodejs/node/commit/733df362fa)] - **test**: update references to archived repository (Tobias Nießen) [#17924](https://github.com/nodejs/node/pull/17924) +* 
[[`2eb1aa81fa`](https://github.com/nodejs/node/commit/2eb1aa81fa)] - **test**: move common.fires() to inspector-helper (Rich Trott) [#17401](https://github.com/nodejs/node/pull/17401) +* [[`167e9c6dcd`](https://github.com/nodejs/node/commit/167e9c6dcd)] - **test**: refactor test-repl (Anna Henningsen) [#17926](https://github.com/nodejs/node/pull/17926) +* [[`7b73e704ca`](https://github.com/nodejs/node/commit/7b73e704ca)] - **timers**: attach listOnTimeout function to TimerWrap (Matteo Collina) [#18388](https://github.com/nodejs/node/pull/18388) +* [[`96b072233a`](https://github.com/nodejs/node/commit/96b072233a)] - **tls**: refactor write queues away (Anna Henningsen) [#17883](https://github.com/nodejs/node/pull/17883) +* [[`be9958afb6`](https://github.com/nodejs/node/commit/be9958afb6)] - **tools**: use babel-eslint as ESLint parser (Michaël Zasso) [#17820](https://github.com/nodejs/node/pull/17820) +* [[`715e673d06`](https://github.com/nodejs/node/commit/715e673d06)] - **tools**: add babel-eslint (Michaël Zasso) [#17820](https://github.com/nodejs/node/pull/17820) +* [[`d349fcae11`](https://github.com/nodejs/node/commit/d349fcae11)] - **tools**: update ESLint to 4.15.0 (Michaël Zasso) [#17820](https://github.com/nodejs/node/pull/17820) +* [[`4bc4d004b1`](https://github.com/nodejs/node/commit/4bc4d004b1)] - **tools**: move eslint from tools to tools/node_modules (Michaël Zasso) [#17820](https://github.com/nodejs/node/pull/17820) + ## 2018-01-10, Version 9.4.0 (Current), @MylesBorins diff --git a/lib/_http_client.js b/lib/_http_client.js index a9ee686c69a..ebfd809e21e 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -132,6 +132,40 @@ function ClientRequest(options, cb) { this.once('response', cb); } + if (method === 'GET' || + method === 'HEAD' || + method === 'DELETE' || + method === 'OPTIONS' || + method === 'CONNECT') { + 
this.useChunkedEncodingByDefault = false; + } else { + this.useChunkedEncodingByDefault = true; + } + + this._ended = false; + this.res = null; + this.aborted = undefined; + this.timeoutCb = null; + this.upgradeOrConnect = false; + this.parser = null; + this.maxHeadersCount = null; + + var called = false; + + if (this.agent) { + // If there is an agent we should default to Connection:keep-alive, + // but only if the Agent will actually reuse the connection! + // If it's not a keepAlive agent, and the maxSockets==Infinity, then + // there's never a case where this socket will actually be reused + if (!this.agent.keepAlive && !Number.isFinite(this.agent.maxSockets)) { + this._last = true; + this.shouldKeepAlive = false; + } else { + this._last = false; + this.shouldKeepAlive = true; + } + } + var headersArray = Array.isArray(options.headers); if (!headersArray) { if (options.headers) { @@ -141,6 +175,7 @@ function ClientRequest(options, cb) { this.setHeader(key, options.headers[key]); } } + if (host && !this.getHeader('host') && setHost) { var hostHeader = host; @@ -159,45 +194,25 @@ function ClientRequest(options, cb) { } this.setHeader('Host', hostHeader); } - } - if (options.auth && !this.getHeader('Authorization')) { - this.setHeader('Authorization', 'Basic ' + - Buffer.from(options.auth).toString('base64')); - } + if (options.auth && !this.getHeader('Authorization')) { + this.setHeader('Authorization', 'Basic ' + + Buffer.from(options.auth).toString('base64')); + } - if (method === 'GET' || - method === 'HEAD' || - method === 'DELETE' || - method === 'OPTIONS' || - method === 'CONNECT') { - this.useChunkedEncodingByDefault = false; - } else { - this.useChunkedEncodingByDefault = true; - } + if (this.getHeader('expect')) { + if (this._header) { + throw new errors.Error('ERR_HTTP_HEADERS_SENT', 'render'); + } - if (headersArray) { - this._storeHeader(this.method + ' ' + this.path + ' HTTP/1.1\r\n', - options.headers); - } else if (this.getHeader('expect')) { - if 
(this._header) { - throw new errors.Error('ERR_HTTP_HEADERS_SENT', 'render'); + this._storeHeader(this.method + ' ' + this.path + ' HTTP/1.1\r\n', + this[outHeadersKey]); } - + } else { this._storeHeader(this.method + ' ' + this.path + ' HTTP/1.1\r\n', - this[outHeadersKey]); + options.headers); } - this._ended = false; - this.res = null; - this.aborted = undefined; - this.timeoutCb = null; - this.upgradeOrConnect = false; - this.parser = null; - this.maxHeadersCount = null; - - var called = false; - var oncreate = (err, socket) => { if (called) return; @@ -210,18 +225,8 @@ function ClientRequest(options, cb) { this._deferToConnect(null, null, () => this._flush()); }; + // initiate connection if (this.agent) { - // If there is an agent we should default to Connection:keep-alive, - // but only if the Agent will actually reuse the connection! - // If it's not a keepAlive agent, and the maxSockets==Infinity, then - // there's never a case where this socket will actually be reused - if (!this.agent.keepAlive && !Number.isFinite(this.agent.maxSockets)) { - this._last = true; - this.shouldKeepAlive = false; - } else { - this._last = false; - this.shouldKeepAlive = true; - } this.agent.addRequest(this, options); } else { // No agent, default to Connection:close. 
diff --git a/lib/_http_server.js b/lib/_http_server.js index c60119822a9..496ebf285c8 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -666,7 +666,7 @@ function onSocketPause() { function unconsume(parser, socket) { if (socket._handle) { if (parser._consumed) - parser.unconsume(socket._handle._externalStream); + parser.unconsume(); parser._consumed = false; socket.removeListener('pause', onSocketPause); socket.removeListener('resume', onSocketResume); diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 6854b3d9ebc..19e9e7a7439 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -987,106 +987,18 @@ function fromList(n, state) { if (state.decoder) ret = state.buffer.join(''); else if (state.buffer.length === 1) - ret = state.buffer.head.data; + ret = state.buffer.first(); else ret = state.buffer.concat(state.length); state.buffer.clear(); } else { // read part of list - ret = fromListPartial(n, state.buffer, state.decoder); + ret = state.buffer.consume(n, state.decoder); } return ret; } -// Extracts only enough buffered data to satisfy the amount requested. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromListPartial(n, list, hasStrings) { - var ret; - if (n < list.head.data.length) { - // slice is the same for buffers and strings - ret = list.head.data.slice(0, n); - list.head.data = list.head.data.slice(n); - } else if (n === list.head.data.length) { - // first chunk is a perfect match - ret = list.shift(); - } else { - // result spans more than one buffer - ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); - } - return ret; -} - -// Copies a specified amount of characters from the list of buffered data -// chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. 
-function copyFromBufferString(n, list) { - var p = list.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - const str = p.data; - const nb = (n > str.length ? str.length : n); - if (nb === str.length) - ret += str; - else - ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) - list.head = p.next; - else - list.head = list.tail = null; - } else { - list.head = p; - p.data = str.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} - -// Copies a specified amount of bytes from the list of buffered data chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBuffer(n, list) { - const ret = Buffer.allocUnsafe(n); - var p = list.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - const buf = p.data; - const nb = (n > buf.length ? buf.length : n); - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) - list.head = p.next; - else - list.head = list.tail = null; - } else { - list.head = p; - p.data = buf.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} - function endReadable(stream) { var state = stream._readableState; diff --git a/lib/assert.js b/lib/assert.js index 149e9402ad4..340aed850f5 100644 --- a/lib/assert.js +++ b/lib/assert.js @@ -141,11 +141,11 @@ function getBuffer(fd, assertLine) { function innerOk(args, fn) { var [value, message] = args; - if (args.length === 0) - throw new TypeError('ERR_MISSING_ARGS', 'value'); - if (!value) { - if (message == null && process.jsEngine !== 'chakracore') { + + if (args.length === 0) { + message = 'No value argument passed to `assert.ok()`'; + } else if (message == null && process.jsEngine !== 'chakracore') { // Use the call as error message if possible. // This does not work with e.g. the repl. 
const err = new Error(); diff --git a/lib/crypto.js b/lib/crypto.js index d7c59f553ed..aa6d4f463d7 100644 --- a/lib/crypto.js +++ b/lib/crypto.js @@ -205,8 +205,10 @@ Object.defineProperties(exports, { DEFAULT_ENCODING: { enumerable: true, configurable: true, - get: getDefaultEncoding, - set: setDefaultEncoding + get: deprecate(getDefaultEncoding, + 'crypto.DEFAULT_ENCODING is deprecated.', 'DEP0091'), + set: deprecate(setDefaultEncoding, + 'crypto.DEFAULT_ENCODING is deprecated.', 'DEP0091') }, constants: { configurable: false, diff --git a/lib/fs.js b/lib/fs.js index 0103500f0b5..0b9cf0cc9b8 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -508,7 +508,7 @@ ReadFileContext.prototype.read = function() { } else { buffer = this.buffer; offset = this.pos; - length = this.size - this.pos; + length = Math.min(kReadFileBufferLength, this.size - this.pos); } var req = new FSReqWrap(); @@ -924,8 +924,12 @@ fs.renameSync = function(oldPath, newPath) { nullCheck(newPath); validatePath(oldPath, 'oldPath'); validatePath(newPath, 'newPath'); - return binding.rename(pathModule.toNamespacedPath(oldPath), - pathModule.toNamespacedPath(newPath)); + const ctx = { path: oldPath, dest: newPath }; + binding.rename(pathModule.toNamespacedPath(oldPath), + pathModule.toNamespacedPath(newPath), undefined, ctx); + if (ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } }; fs.truncate = function(path, len, callback) { @@ -991,7 +995,11 @@ fs.ftruncateSync = function(fd, len = 0) { validateUint32(fd, 'fd'); validateLen(len); len = Math.max(0, len); - return binding.ftruncate(fd, len); + const ctx = {}; + binding.ftruncate(fd, len, undefined, ctx); + if (ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } }; fs.rmdir = function(path, callback) { @@ -1021,7 +1029,11 @@ fs.fdatasync = function(fd, callback) { fs.fdatasyncSync = function(fd) { validateUint32(fd, 'fd'); - return binding.fdatasync(fd); + const ctx = {}; + binding.fdatasync(fd, undefined, ctx); + if 
(ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } }; fs.fsync = function(fd, callback) { @@ -1033,7 +1045,11 @@ fs.fsync = function(fd, callback) { fs.fsyncSync = function(fd) { validateUint32(fd, 'fd'); - return binding.fsync(fd); + const ctx = {}; + binding.fsync(fd, undefined, ctx); + if (ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } }; fs.mkdir = function(path, mode, callback) { @@ -1163,7 +1179,13 @@ fs.readlinkSync = function(path, options) { handleError((path = getPathFromURL(path))); nullCheck(path); validatePath(path, 'oldPath'); - return binding.readlink(pathModule.toNamespacedPath(path), options.encoding); + const ctx = { path }; + const result = binding.readlink(pathModule.toNamespacedPath(path), + options.encoding, undefined, ctx); + if (ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } + return result; }; function preprocessSymlinkDestination(path, type, linkPath) { @@ -1220,6 +1242,7 @@ fs.symlink = function(target, path, type_, callback_) { const flags = stringToSymlinkType(type); const req = new FSReqWrap(); req.oncomplete = callback; + binding.symlink(preprocessSymlinkDestination(target, type, path), pathModule.toNamespacedPath(path), flags, req); }; @@ -1234,8 +1257,19 @@ fs.symlinkSync = function(target, path, type) { validatePath(target, 'target'); validatePath(path); const flags = stringToSymlinkType(type); - return binding.symlink(preprocessSymlinkDestination(target, type, path), - pathModule.toNamespacedPath(path), flags); + + const ctx = { path: target, dest: path }; + binding.symlink(preprocessSymlinkDestination(target, type, path), + pathModule.toNamespacedPath(path), flags, undefined, ctx); + + if (ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } else if (ctx.error) { + // TODO(joyeecheung): this is an encoding error usually caused by memory + // problems. We need to figure out proper error code(s) for this. 
+ Error.captureStackTrace(ctx.error); + throw ctx.error; + } }; fs.link = function(existingPath, newPath, callback) { @@ -1268,8 +1302,15 @@ fs.linkSync = function(existingPath, newPath) { nullCheck(newPath); validatePath(existingPath, 'existingPath'); validatePath(newPath, 'newPath'); - return binding.link(pathModule.toNamespacedPath(existingPath), - pathModule.toNamespacedPath(newPath)); + + const ctx = { path: existingPath, dest: newPath }; + const result = binding.link(pathModule.toNamespacedPath(existingPath), + pathModule.toNamespacedPath(newPath), + undefined, ctx); + if (ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } + return result; }; fs.unlink = function(path, callback) { @@ -1287,7 +1328,11 @@ fs.unlinkSync = function(path) { handleError((path = getPathFromURL(path))); nullCheck(path); validatePath(path); - return binding.unlink(pathModule.toNamespacedPath(path)); + const ctx = { path }; + binding.unlink(pathModule.toNamespacedPath(path), undefined, ctx); + if (ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } }; fs.fchmod = function(fd, mode, callback) { @@ -1963,7 +2008,10 @@ fs.realpathSync = function realpathSync(p, options) { if (ctx.errno !== undefined) { throw new errors.uvException(ctx); } - linkTarget = binding.readlink(baseLong); + linkTarget = binding.readlink(baseLong, undefined, undefined, ctx); + if (ctx.errno !== undefined) { + throw new errors.uvException(ctx); + } } resolvedLink = pathModule.resolve(previous, linkTarget); diff --git a/lib/internal/async_hooks.js b/lib/internal/async_hooks.js index f1c98f13ac0..92cd9ba1e42 100644 --- a/lib/internal/async_hooks.js +++ b/lib/internal/async_hooks.js @@ -11,16 +11,17 @@ const async_wrap = process.binding('async_wrap'); * the various asynchronous states of the application. These are: * kExecutionAsyncId: The async_id assigned to the resource responsible for the * current execution stack. 
- * kTriggerAsyncId: The trigger_async_id of the resource responsible for - * the current execution stack. + * kTriggerAsyncId: The async_id of the resource that caused (or 'triggered') + * the resource corresponding to the current execution stack. * kAsyncIdCounter: Incremental counter tracking the next assigned async_id. * kDefaultTriggerAsyncId: Written immediately before a resource's constructor - * that sets the value of the init()'s triggerAsyncId. The order of - * retrieving the triggerAsyncId value is passing directly to the - * constructor -> value set in kDefaultTriggerAsyncId -> executionAsyncId of - * the current resource. + * that sets the value of the init()'s triggerAsyncId. The precedence order + * of retrieving the triggerAsyncId value is: + * 1. the value passed directly to the constructor + * 2. value set in kDefaultTriggerAsyncId + * 3. executionAsyncId of the current resource. * - * async_ids_fast_stack is a Float64Array that contains part of the async ID + * async_ids_stack is a Float64Array that contains part of the async ID * stack. Each pushAsyncIds() call adds two doubles to it, and each * popAsyncIds() call removes two doubles from it. * It has a fixed size, so if that is exceeded, calls to the native @@ -28,10 +29,10 @@ const async_wrap = process.binding('async_wrap'); */ const { async_id_symbol, async_hook_fields, async_id_fields } = async_wrap; // Store the pair executionAsyncId and triggerAsyncId in a std::stack on -// Environment::AsyncHooks::ids_stack_ tracks the resource responsible for the -// current execution stack. This is unwound as each resource exits. In the case -// of a fatal exception this stack is emptied after calling each hook's after() -// callback. +// Environment::AsyncHooks::async_ids_stack_ tracks the resource responsible for +// the current execution stack. This is unwound as each resource exits. In the +// case of a fatal exception this stack is emptied after calling each hook's +// after() callback. 
const { pushAsyncIds: pushAsyncIds_, popAsyncIds: popAsyncIds_ } = async_wrap; // For performance reasons, only track Promises when a hook is enabled. const { enablePromiseHook, disablePromiseHook } = async_wrap; diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js index 8bdb4916283..4c58e091779 100644 --- a/lib/internal/bootstrap_node.js +++ b/lib/internal/bootstrap_node.js @@ -76,11 +76,7 @@ NativeModule.require('internal/inspector_async_hook').setup(); NativeModule.require('trace_mgr'); //ENABLE_TTD; - // Do not initialize channel in debugger agent, it deletes env variable - // and the main thread won't see it. - if (process.argv[1] !== '--debug-agent') - _process.setupChannel(); - + _process.setupChannel(); _process.setupRawDebug(); const browserGlobals = !process._noBrowserGlobals; @@ -119,6 +115,7 @@ process.emitWarning( 'The ESM module loader is experimental.', 'ExperimentalWarning', undefined); + NativeModule.require('internal/process/modules').setup(); } diff --git a/lib/internal/child_process.js b/lib/internal/child_process.js index eb17ec21d40..2bade01f95f 100644 --- a/lib/internal/child_process.js +++ b/lib/internal/child_process.js @@ -465,7 +465,10 @@ function setupChannel(target, channel) { var jsonBuffer = ''; var pendingHandle = null; channel.buffering = false; - channel.onread = function(nread, pool, recvHandle) { + channel.pendingHandle = null; + channel.onread = function(nread, pool) { + const recvHandle = channel.pendingHandle; + channel.pendingHandle = null; // TODO(bnoordhuis) Check that nread > 0. 
if (pool) { if (recvHandle) diff --git a/lib/internal/encoding.js b/lib/internal/encoding.js index 763ee42426b..fa178f3a8c7 100644 --- a/lib/internal/encoding.js +++ b/lib/internal/encoding.js @@ -32,6 +32,21 @@ function lazyBuffer() { return Buffer; } +function validateEncoder(obj) { + if (obj == null || obj[kEncoder] !== true) + throw new errors.TypeError('ERR_INVALID_THIS', 'TextEncoder'); +} + +function validateDecoder(obj) { + if (obj == null || obj[kDecoder] !== true) + throw new errors.TypeError('ERR_INVALID_THIS', 'TextDecoder'); +} + +function validateArgument(prop, expected, propName, expectedName) { + if (typeof prop !== expected) + throw new errors.Error('ERR_INVALID_ARG_TYPE', propName, expectedName); +} + const CONVERTER_FLAGS_FLUSH = 0x1; const CONVERTER_FLAGS_FATAL = 0x2; const CONVERTER_FLAGS_IGNORE_BOM = 0x4; @@ -288,20 +303,17 @@ class TextEncoder { } get encoding() { - if (this == null || this[kEncoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextEncoder'); + validateEncoder(this); return 'utf-8'; } encode(input = '') { - if (this == null || this[kEncoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextEncoder'); + validateEncoder(this); return encodeUtf8String(`${input}`); } [inspect](depth, opts) { - if (this == null || this[kEncoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextEncoder'); + validateEncoder(this); if (typeof depth === 'number' && depth < 0) return opts.stylize('[Object]', 'special'); var ctor = getConstructorOf(this); @@ -329,8 +341,7 @@ const { hasConverter, TextDecoder } = makeTextDecoderJS(); function hasTextDecoder(encoding = 'utf-8') { - if (typeof encoding !== 'string') - throw new errors.Error('ERR_INVALID_ARG_TYPE', 'encoding', 'string'); + validateArgument(encoding, 'string', 'encoding', 'string'); return hasConverter(getEncodingFromLabel(encoding)); } @@ -344,8 +355,7 @@ function makeTextDecoderICU() { class TextDecoder { constructor(encoding = 'utf-8', options 
= {}) { encoding = `${encoding}`; - if (typeof options !== 'object') - throw new errors.Error('ERR_INVALID_ARG_TYPE', 'options', 'Object'); + validateArgument(options, 'object', 'options', 'Object'); const enc = getEncodingFromLabel(encoding); if (enc === undefined) @@ -369,17 +379,14 @@ function makeTextDecoderICU() { decode(input = empty, options = {}) { - if (this == null || this[kDecoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextDecoder'); + validateDecoder(this); if (isArrayBuffer(input)) { input = lazyBuffer().from(input); } else if (!isArrayBufferView(input)) { throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'input', ['ArrayBuffer', 'ArrayBufferView']); } - if (typeof options !== 'object') { - throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'options', 'Object'); - } + validateArgument(options, 'object', 'options', 'Object'); var flags = 0; if (options !== null) @@ -416,8 +423,7 @@ function makeTextDecoderJS() { class TextDecoder { constructor(encoding = 'utf-8', options = {}) { encoding = `${encoding}`; - if (typeof options !== 'object') - throw new errors.Error('ERR_INVALID_ARG_TYPE', 'options', 'Object'); + validateArgument(options, 'object', 'options', 'Object'); const enc = getEncodingFromLabel(encoding); if (enc === undefined || !hasConverter(enc)) @@ -440,8 +446,7 @@ function makeTextDecoderJS() { } decode(input = empty, options = {}) { - if (this == null || this[kDecoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextDecoder'); + validateDecoder(this); if (isArrayBuffer(input)) { input = lazyBuffer().from(input); } else if (isArrayBufferView(input)) { @@ -451,9 +456,7 @@ function makeTextDecoderJS() { throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'input', ['ArrayBuffer', 'ArrayBufferView']); } - if (typeof options !== 'object') { - throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'options', 'Object'); - } + validateArgument(options, 'object', 'options', 'Object'); if (this[kFlags] & 
CONVERTER_FLAGS_FLUSH) { this[kBOMSeen] = false; @@ -496,27 +499,23 @@ function makeTextDecoderJS() { TextDecoder.prototype, Object.getOwnPropertyDescriptors({ get encoding() { - if (this == null || this[kDecoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextDecoder'); + validateDecoder(this); return this[kEncoding]; }, get fatal() { - if (this == null || this[kDecoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextDecoder'); + validateDecoder(this); return (this[kFlags] & CONVERTER_FLAGS_FATAL) === CONVERTER_FLAGS_FATAL; }, get ignoreBOM() { - if (this == null || this[kDecoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextDecoder'); + validateDecoder(this); return (this[kFlags] & CONVERTER_FLAGS_IGNORE_BOM) === CONVERTER_FLAGS_IGNORE_BOM; }, [inspect](depth, opts) { - if (this == null || this[kDecoder] !== true) - throw new errors.TypeError('ERR_INVALID_THIS', 'TextDecoder'); + validateDecoder(this); if (typeof depth === 'number' && depth < 0) return opts.stylize('[Object]', 'special'); var ctor = getConstructorOf(this); diff --git a/lib/internal/loader/ModuleJob.js b/lib/internal/loader/ModuleJob.js index 2d6325b85c6..db37765b20b 100644 --- a/lib/internal/loader/ModuleJob.js +++ b/lib/internal/loader/ModuleJob.js @@ -6,9 +6,6 @@ const { decorateErrorStack } = require('internal/util'); const assert = require('assert'); const resolvedPromise = SafePromise.resolve(); -const enableDebug = (process.env.NODE_DEBUG || '').match(/\besm\b/) || - process.features.debug; - /* A ModuleJob tracks the loading of a single Module, and the ModuleJobs of * its dependencies, over time. */ class ModuleJob { @@ -27,7 +24,6 @@ class ModuleJob { // Wait for the ModuleWrap instance being linked with all dependencies. 
const link = async () => { - const dependencyJobs = []; ({ module: this.module, reflect: this.reflect } = await this.modulePromise); if (inspectBrk) { @@ -35,17 +31,17 @@ class ModuleJob { initWrapper(this.module.instantiate, this.module); } assert(this.module instanceof ModuleWrap); - this.module.link(async (dependencySpecifier) => { - const dependencyJobPromise = - this.loader.getModuleJob(dependencySpecifier, url); - dependencyJobs.push(dependencyJobPromise); - const dependencyJob = await dependencyJobPromise; - return (await dependencyJob.modulePromise).module; + + const dependencyJobs = []; + const promises = this.module.link(async (specifier) => { + const jobPromise = this.loader.getModuleJob(specifier, url); + dependencyJobs.push(jobPromise); + return (await (await jobPromise).modulePromise).module; }); - if (enableDebug) { - // Make sure all dependencies are entered into the list synchronously. - Object.freeze(dependencyJobs); - } + + if (promises !== undefined) + await SafePromise.all(promises); + return SafePromise.all(dependencyJobs); }; // Promise for the list of all dependencyJobs. diff --git a/lib/internal/process/modules.js b/lib/internal/process/modules.js new file mode 100644 index 00000000000..eda47f80cdd --- /dev/null +++ b/lib/internal/process/modules.js @@ -0,0 +1,17 @@ +'use strict'; + +const { + setInitializeImportMetaObjectCallback +} = internalBinding('module_wrap'); + +function initializeImportMetaObject(wrap, meta) { + meta.url = wrap.url; +} + +function setupModules() { + setInitializeImportMetaObjectCallback(initializeImportMetaObject); +} + +module.exports = { + setup: setupModules +}; diff --git a/lib/internal/streams/BufferList.js b/lib/internal/streams/BufferList.js index b3980e007a4..a72bf37a314 100644 --- a/lib/internal/streams/BufferList.js +++ b/lib/internal/streams/BufferList.js @@ -73,6 +73,91 @@ module.exports = class BufferList { return ret; } + // Consumes a specified amount of bytes or characters from the buffered data. 
+ consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + + first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + const str = p.data; + const nb = (n > str.length ? str.length : n); + if (nb === str.length) + ret += str; + else + ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) + this.head = p.next; + else + this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + const buf = p.data; + const nb = (n > buf.length ? 
buf.length : n); + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) + this.head = p.next; + else + this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + [inspect.custom]() { const obj = inspect({ length: this.length }); return `${this.constructor.name} ${obj}`; diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js index 15256a63c0b..6fd6e4a6b76 100644 --- a/lib/perf_hooks.js +++ b/lib/perf_hooks.js @@ -53,6 +53,9 @@ const kClearEntry = Symbol('clear-entry'); const kGetEntries = Symbol('get-entries'); const kIndex = Symbol('index'); const kMarks = Symbol('marks'); +const kCount = Symbol('count'); +const kMaxCount = Symbol('max-count'); +const kDefaultMaxCount = 150; observerCounts[NODE_PERFORMANCE_ENTRY_TYPE_MARK] = 1; observerCounts[NODE_PERFORMANCE_ENTRY_TYPE_MEASURE] = 1; @@ -250,10 +253,17 @@ const nodeTiming = new PerformanceNodeTiming(); // Maintains a list of entries as a linked list stored in insertion order. 
class PerformanceObserverEntryList { constructor() { - Object.defineProperty(this, kEntries, { - writable: true, - enumerable: false, - value: {} + Object.defineProperties(this, { + [kEntries]: { + writable: true, + enumerable: false, + value: {} + }, + [kCount]: { + writable: true, + enumerable: false, + value: 0 + } }); L.init(this[kEntries]); } @@ -261,9 +271,14 @@ class PerformanceObserverEntryList { [kInsertEntry](entry) { const item = { entry }; L.append(this[kEntries], item); + this[kCount]++; this[kIndexEntry](item); } + get length() { + return this[kCount]; + } + [kIndexEntry](entry) { // Default implementation does nothing } @@ -384,9 +399,22 @@ class Performance extends PerformanceObserverEntryList { this[kIndex] = { [kMarks]: new Set() }; + this[kMaxCount] = kDefaultMaxCount; this[kInsertEntry](nodeTiming); } + set maxEntries(val) { + if (typeof val !== 'number' || val >>> 0 !== val) { + const errors = lazyErrors(); + throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'val', 'number'); + } + this[kMaxCount] = Math.max(1, val >>> 0); + } + + get maxEntries() { + return this[kMaxCount]; + } + [kIndexEntry](item) { const index = this[kIndex]; const type = item.entry.entryType; @@ -397,6 +425,17 @@ class Performance extends PerformanceObserverEntryList { } const entry = item.entry; L.append(items, { entry, item }); + const count = this[kCount]; + if (count > this[kMaxCount]) { + const text = count === 1 ? 'is 1 entry' : `are ${count} entries`; + process.emitWarning('Possible perf_hooks memory leak detected. ' + + `There ${text} in the ` + + 'Performance Timeline. 
Use the clear methods ' + + 'to remove entries that are no longer needed or ' + + 'set performance.maxEntries equal to a higher ' + + 'value (currently the maxEntries is ' + + `${this[kMaxCount]}).`); + } } [kClearEntry](type, name) { @@ -411,10 +450,12 @@ class Performance extends PerformanceObserverEntryList { if (entry.name === `${name}`) { L.remove(item); // remove from the index L.remove(item.item); // remove from the master + this[kCount]--; } } else { L.remove(item); // remove from the index L.remove(item.item); // remove from the master + this[kCount]--; } item = next; } diff --git a/lib/util.js b/lib/util.js index 0f0ed408ba4..4525792b2ec 100644 --- a/lib/util.js +++ b/lib/util.js @@ -342,6 +342,7 @@ inspect.colors = Object.assign(Object.create(null), { inspect.styles = Object.assign(Object.create(null), { 'special': 'cyan', 'number': 'yellow', + 'bigint': 'yellow', 'boolean': 'yellow', 'undefined': 'grey', 'null': 'bold', @@ -650,6 +651,9 @@ function formatPrimitive(fn, value, ctx) { } if (typeof value === 'number') return formatNumber(fn, value); + // eslint-disable-next-line valid-typeof + if (typeof value === 'bigint') + return fn(`${value}n`, 'bigint'); if (typeof value === 'boolean') return fn(`${value}`, 'boolean'); if (typeof value === 'undefined') diff --git a/node.gyp b/node.gyp index 7623a9ebb04..68fcb67d70d 100644 --- a/node.gyp +++ b/node.gyp @@ -114,6 +114,7 @@ 'lib/internal/net.js', 'lib/internal/module.js', 'lib/internal/os.js', + 'lib/internal/process/modules.js', 'lib/internal/process/next_tick.js', 'lib/internal/process/promises.js', 'lib/internal/process/stdio.js', @@ -249,7 +250,7 @@ 'conditions': [ ['OS in "linux freebsd openbsd solaris android"', { 'ldflags': [ - '-Wl,--whole-archive,<(OBJ_DIR)/<(STATIC_LIB_PREFIX)' + '-Wl,--whole-archive,<(obj_dir)/<(STATIC_LIB_PREFIX)' '<(node_core_target_name)<(STATIC_LIB_SUFFIX)', '-Wl,--no-whole-archive', ], @@ -817,10 +818,10 @@ { 'action_name': 'node_dtrace_provider_o', 'inputs': [ - 
'<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace.o', + '<(obj_dir)/<(node_lib_target_name)/src/node_dtrace.o', ], 'outputs': [ - '<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace_provider.o' + '<(obj_dir)/<(node_lib_target_name)/src/node_dtrace_provider.o' ], 'action': [ 'dtrace', '-G', '-xnolibs', '-s', 'src/node_provider.d', '<@(_inputs)', '-o', '<@(_outputs)' ] @@ -852,7 +853,7 @@ { 'action_name': 'node_dtrace_ustack_constants', 'inputs': [ - '<(V8_BASE)' + '<(v8_base)' ], 'outputs': [ '<(SHARED_INTERMEDIATE_DIR)/v8constants.h' @@ -870,7 +871,7 @@ '<(SHARED_INTERMEDIATE_DIR)/v8constants.h' ], 'outputs': [ - '<(OBJ_DIR)/<(node_lib_target_name)/src/node_dtrace_ustack.o' + '<(obj_dir)/<(node_lib_target_name)/src/node_dtrace_ustack.o' ], 'conditions': [ [ 'target_arch=="ia32" or target_arch=="arm"', { @@ -960,32 +961,32 @@ ], 'variables': { - 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/src', - 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/gen', - 'OBJ_TRACING_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/src/tracing', - 'OBJ_SUFFIX': 'o', - 'OBJ_SEPARATOR': '/', + 'obj_path': '<(obj_dir)/<(node_lib_target_name)/src', + 'obj_gen_path': '<(obj_dir)/<(node_lib_target_name)/gen', + 'obj_tracing_path': '<(obj_dir)/<(node_lib_target_name)/src/tracing', + 'obj_suffix': 'o', + 'obj_separator': '/', 'conditions': [ ['OS=="win"', { - 'OBJ_SUFFIX': 'obj', + 'obj_suffix': 'obj', }], ['GENERATOR=="ninja"', { - 'OBJ_PATH': '<(OBJ_DIR)/src', - 'OBJ_GEN_PATH': '<(OBJ_DIR)/gen', - 'OBJ_TRACING_PATH': '<(OBJ_DIR)/src/tracing', - 'OBJ_SEPARATOR': '/<(node_lib_target_name).', + 'obj_path': '<(obj_dir)/src', + 'obj_gen_path': '<(obj_dir)/gen', + 'obj_tracing_path': '<(obj_dir)/src/tracing', + 'obj_separator': '/<(node_lib_target_name).', }, { 'conditions': [ ['OS=="win"', { - 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)', - 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)', - 'OBJ_TRACING_PATH': '<(OBJ_DIR)/<(node_lib_target_name)', + 'obj_path': 
'<(obj_dir)/<(node_lib_target_name)', + 'obj_gen_path': '<(obj_dir)/<(node_lib_target_name)', + 'obj_tracing_path': '<(obj_dir)/<(node_lib_target_name)', }], ['OS=="aix"', { - 'OBJ_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/src', - 'OBJ_GEN_PATH': '<(OBJ_DIR)/<(node_lib_target_name)/gen', - 'OBJ_TRACING_PATH': - '<(OBJ_DIR)/<(node_lib_target_name)/src/tracing', + 'obj_path': '<(obj_dir)/<(node_lib_target_name)/src', + 'obj_gen_path': '<(obj_dir)/<(node_lib_target_name)/gen', + 'obj_tracing_path': + '<(obj_dir)/<(node_lib_target_name)/src/tracing', }], ]} ] @@ -1017,26 +1018,26 @@ 'test/cctest/test_url.cc' ], 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)async_wrap.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)handle_wrap.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)env.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_buffer.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_debug_options.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_i18n.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_perf.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_platform.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_url.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)util.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)string_bytes.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)string_search.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)stream_base.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_constants.<(OBJ_SUFFIX)', - '<(OBJ_TRACING_PATH)<(OBJ_SEPARATOR)agent.<(OBJ_SUFFIX)', - '<(OBJ_TRACING_PATH)<(OBJ_SEPARATOR)node_trace_buffer.<(OBJ_SUFFIX)', - '<(OBJ_TRACING_PATH)<(OBJ_SEPARATOR)node_trace_writer.<(OBJ_SUFFIX)', - '<(OBJ_TRACING_PATH)<(OBJ_SEPARATOR)trace_event.<(OBJ_SUFFIX)', - '<(OBJ_GEN_PATH)<(OBJ_SEPARATOR)node_javascript.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)async_wrap.<(obj_suffix)', + '<(obj_path)<(obj_separator)handle_wrap.<(obj_suffix)', + '<(obj_path)<(obj_separator)env.<(obj_suffix)', + 
'<(obj_path)<(obj_separator)node.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_buffer.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_debug_options.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_i18n.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_perf.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_platform.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_url.<(obj_suffix)', + '<(obj_path)<(obj_separator)util.<(obj_suffix)', + '<(obj_path)<(obj_separator)string_bytes.<(obj_suffix)', + '<(obj_path)<(obj_separator)string_search.<(obj_suffix)', + '<(obj_path)<(obj_separator)stream_base.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_constants.<(obj_suffix)', + '<(obj_tracing_path)<(obj_separator)agent.<(obj_suffix)', + '<(obj_tracing_path)<(obj_separator)node_trace_buffer.<(obj_suffix)', + '<(obj_tracing_path)<(obj_separator)node_trace_writer.<(obj_suffix)', + '<(obj_tracing_path)<(obj_separator)trace_event.<(obj_suffix)', + '<(obj_gen_path)<(obj_separator)node_javascript.<(obj_suffix)', ], 'conditions': [ @@ -1073,10 +1074,10 @@ 'conditions': [ ['node_target_type!="static_library"', { 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_crypto.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_crypto_bio.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_crypto_clienthello.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)tls_wrap.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)node_crypto.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_crypto_bio.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_crypto_clienthello.<(obj_suffix)', + '<(obj_path)<(obj_separator)tls_wrap.<(obj_suffix)', ], }], ], @@ -1090,9 +1091,9 @@ [ 'node_use_perfctr=="true"', { 'defines': [ 'HAVE_PERFCTR=1' ], 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_counters.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)' - 'node_win32_perfctr_provider.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)node_counters.<(obj_suffix)', + '<(obj_path)<(obj_separator)' + 
'node_win32_perfctr_provider.<(obj_suffix)', ], }], ['v8_enable_inspector==1', { @@ -1103,11 +1104,11 @@ 'conditions': [ ['node_target_type!="static_library"', { 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)inspector_agent.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)inspector_io.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)inspector_js_api.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)inspector_socket.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)inspector_socket_server.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)inspector_agent.<(obj_suffix)', + '<(obj_path)<(obj_separator)inspector_io.<(obj_suffix)', + '<(obj_path)<(obj_separator)inspector_js_api.<(obj_suffix)', + '<(obj_path)<(obj_separator)inspector_socket.<(obj_suffix)', + '<(obj_path)<(obj_separator)inspector_socket_server.<(obj_suffix)', ], }], ], @@ -1117,19 +1118,19 @@ }], [ 'node_use_dtrace=="true" and node_target_type!="static_library"', { 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_dtrace.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)node_dtrace.<(obj_suffix)', ], 'conditions': [ ['OS!="mac" and OS!="linux"', { 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_dtrace_provider.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_dtrace_ustack.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)node_dtrace_provider.<(obj_suffix)', + '<(obj_path)<(obj_separator)node_dtrace_ustack.<(obj_suffix)', ] }], ['OS=="linux"', { 'libraries': [ - '<(SHARED_INTERMEDIATE_DIR)<(OBJ_SEPARATOR)' - 'node_dtrace_provider.<(OBJ_SUFFIX)', + '<(SHARED_INTERMEDIATE_DIR)<(obj_separator)' + 'node_dtrace_provider.<(obj_suffix)', ] }], ], @@ -1137,16 +1138,16 @@ 'conditions': [ [ 'node_use_etw=="true" and OS=="win"', { 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)node_dtrace.<(OBJ_SUFFIX)', - '<(OBJ_PATH)<(OBJ_SEPARATOR)' - 'node_win32_etw_provider.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)node_dtrace.<(obj_suffix)', + '<(obj_path)<(obj_separator)' + 'node_win32_etw_provider.<(obj_suffix)', ], }] ] }], [ 'OS=="win" and 
node_target_type!="static_library"', { 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)backtrace_win32.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)backtrace_win32.<(obj_suffix)', ], 'conditions': [ # this is only necessary for chakra on windows because chakra is dynamically linked on windows @@ -1158,7 +1159,7 @@ 'conditions': [ ['node_target_type!="static_library"', { 'libraries': [ - '<(OBJ_PATH)<(OBJ_SEPARATOR)backtrace_posix.<(OBJ_SUFFIX)', + '<(obj_path)<(obj_separator)backtrace_posix.<(obj_suffix)', ], }], ], diff --git a/node.gypi b/node.gypi index 7e972b833eb..50237106a6c 100644 --- a/node.gypi +++ b/node.gypi @@ -99,7 +99,7 @@ [ 'force_load=="true"', { 'xcode_settings': { 'OTHER_LDFLAGS': [ - '-Wl,-force_load,<(V8_BASE)', + '-Wl,-force_load,<(v8_base)', ], }, }], @@ -117,7 +117,7 @@ [ 'force_load=="true"', { 'xcode_settings': { 'OTHER_LDFLAGS': [ - '-Wl,-force_load,<(CHAKRASHIM_BASE)', + '-Wl,-force_load,<(chakrashim_base)', ], }, }], @@ -172,7 +172,7 @@ { 'action_name': 'expfile', 'inputs': [ - '<(OBJ_DIR)' + '<(obj_dir)' ], 'outputs': [ '<(PRODUCT_DIR)/node.exp' @@ -206,11 +206,11 @@ 'ldflags': [ '-Wl,-z,noexecstack' ], 'conditions': [ [ 'node_engine=="v8"', { - 'ldflags': [ '-Wl,--whole-archive <(V8_BASE)', + 'ldflags': [ '-Wl,--whole-archive <(v8_base)', '-Wl,--no-whole-archive' ], }], ['node_engine=="chakracore"', { - 'ldflags': [ '-Wl,--whole-archive <(CHAKRASHIM_BASE)', + 'ldflags': [ '-Wl,--whole-archive <(chakrashim_base)', '-Wl,--no-whole-archive' ], }], ] @@ -218,6 +218,8 @@ [ '(OS=="freebsd" or OS=="linux") and node_shared=="false"' ' and coverage=="true" and force_load=="true"', { 'ldflags': [ '-Wl,-z,noexecstack', + '-Wl,--whole-archive <(v8_base)', + '-Wl,--no-whole-archive', '--coverage', '-g', '-O0' ], @@ -261,15 +263,15 @@ [ 'force_load=="true"', { 'xcode_settings': { 'OTHER_LDFLAGS': [ - '-Wl,-force_load,<(PRODUCT_DIR)/<(OPENSSL_PRODUCT)', + '-Wl,-force_load,<(PRODUCT_DIR)/<(openssl_product)', ], }, 'conditions': [ ['OS in "linux freebsd" 
and node_shared=="false"', { 'ldflags': [ '-Wl,--whole-archive,' - '<(OBJ_DIR)/deps/openssl/' - '<(OPENSSL_PRODUCT)', + '<(obj_dir)/deps/openssl/' + '<(openssl_product)', '-Wl,--no-whole-archive', ], }], diff --git a/src/async_wrap.cc b/src/async_wrap.cc index 5258674ff33..706cd879901 100644 --- a/src/async_wrap.cc +++ b/src/async_wrap.cc @@ -540,12 +540,12 @@ void AsyncWrap::Initialize(Local target, // this way to allow JS and C++ to read/write each value as quickly as // possible. The fields are represented as follows: // - // kAsyncUid: Maintains the state of the next unique id to be assigned. + // kAsyncIdCounter: Maintains the state of the next unique id to be assigned. // // kDefaultTriggerAsyncId: Write the id of the resource responsible for a // handle's creation just before calling the new handle's constructor. // After the new handle is constructed kDefaultTriggerAsyncId is set back - // to 0. + // to -1. FORCE_SET_TARGET_FIELD(target, "async_id_fields", env->async_hooks()->async_id_fields().GetJSArray()); diff --git a/src/connection_wrap.cc b/src/connection_wrap.cc index 8de77f361dc..a6cf67ceee2 100644 --- a/src/connection_wrap.cc +++ b/src/connection_wrap.cc @@ -3,6 +3,7 @@ #include "connect_wrap.h" #include "env-inl.h" #include "pipe_wrap.h" +#include "stream_base-inl.h" #include "stream_wrap.h" #include "tcp_wrap.h" #include "util-inl.h" diff --git a/src/env.h b/src/env.h index d73be8156ec..79fc848386c 100644 --- a/src/env.h +++ b/src/env.h @@ -210,6 +210,7 @@ class ModuleWrap; V(owner_string, "owner") \ V(parse_error_string, "Parse Error") \ V(path_string, "path") \ + V(pending_handle_string, "pendingHandle") \ V(pbkdf2_error_string, "PBKDF2 Error") \ V(pid_string, "pid") \ V(pipe_string, "pipe") \ @@ -249,6 +250,7 @@ class ModuleWrap; V(type_string, "type") \ V(uid_string, "uid") \ V(unknown_string, "") \ + V(url_string, "url") \ V(user_string, "user") \ V(username_string, "username") \ V(valid_from_string, "valid_from") \ @@ -278,6 +280,7 @@ class 
ModuleWrap; V(context, v8::Context) \ V(domain_callback, v8::Function) \ V(host_import_module_dynamically_callback, v8::Function) \ + V(host_initialize_import_meta_object_callback, v8::Function) \ V(http2ping_constructor_template, v8::ObjectTemplate) \ V(http2stream_constructor_template, v8::ObjectTemplate) \ V(http2settings_constructor_template, v8::ObjectTemplate) \ diff --git a/src/js_stream.cc b/src/js_stream.cc index 59a70ca257e..1558e86373e 100644 --- a/src/js_stream.cc +++ b/src/js_stream.cc @@ -25,9 +25,6 @@ JSStream::JSStream(Environment* env, Local obj) StreamBase(env) { node::Wrap(obj, this); MakeWeak(this); - - set_alloc_cb({ OnAllocImpl, this }); - set_read_cb({ OnReadImpl, this }); } @@ -35,45 +32,6 @@ JSStream::~JSStream() { } -void JSStream::OnAllocImpl(size_t size, uv_buf_t* buf, void* ctx) { - buf->base = Malloc(size); - buf->len = size; -} - - -void JSStream::OnReadImpl(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx) { - JSStream* wrap = static_cast(ctx); - CHECK_NE(wrap, nullptr); - Environment* env = wrap->env(); - HandleScope handle_scope(env->isolate()); - Context::Scope context_scope(env->context()); - - if (nread < 0) { - if (buf != nullptr && buf->base != nullptr) - free(buf->base); - wrap->EmitData(nread, Local(), Local()); - return; - } - - if (nread == 0) { - if (buf->base != nullptr) - free(buf->base); - return; - } - - CHECK_LE(static_cast(nread), buf->len); - char* base = node::Realloc(buf->base, nread); - - CHECK_EQ(pending, UV_UNKNOWN_HANDLE); - - Local obj = Buffer::New(env, base, nread).ToLocalChecked(); - wrap->EmitData(nread, obj, Local()); -} - - AsyncWrap* JSStream::GetAsyncWrap() { return static_cast(this); } @@ -213,18 +171,19 @@ void JSStream::ReadBuffer(const FunctionCallbackInfo& args) { char* data = Buffer::Data(args[0]); int len = Buffer::Length(args[0]); - do { - uv_buf_t buf; + // Repeatedly ask the stream's owner for memory, copy the data that we + // just read from JS into those buffers 
and emit them as reads. + while (len != 0) { + uv_buf_t buf = wrap->EmitAlloc(len); ssize_t avail = len; - wrap->EmitAlloc(len, &buf); if (static_cast(buf.len) < avail) avail = buf.len; memcpy(buf.base, data, avail); data += avail; len -= avail; - wrap->EmitRead(avail, &buf); - } while (len != 0); + wrap->EmitRead(avail, buf); + } } @@ -232,7 +191,7 @@ void JSStream::EmitEOF(const FunctionCallbackInfo& args) { JSStream* wrap; ASSIGN_OR_RETURN_UNWRAP(&wrap, args.Holder()); - wrap->EmitRead(UV_EOF, nullptr); + wrap->EmitRead(UV_EOF); } diff --git a/src/module_wrap.cc b/src/module_wrap.cc index ba07fcdc79d..0fda1250d70 100644 --- a/src/module_wrap.cc +++ b/src/module_wrap.cc @@ -37,6 +37,7 @@ using v8::ScriptCompiler; using v8::ScriptOrigin; using v8::String; using v8::TryCatch; +using v8::Undefined; using v8::Value; static const char* const EXTENSIONS[] = {".mjs", ".js", ".json", ".node"}; @@ -64,6 +65,19 @@ ModuleWrap::~ModuleWrap() { context_.Reset(); } +ModuleWrap* ModuleWrap::GetFromModule(Environment* env, + Local module) { + ModuleWrap* ret = nullptr; + auto range = env->module_map.equal_range(module->GetIdentityHash()); + for (auto it = range.first; it != range.second; ++it) { + if (it->second->module_ == module) { + ret = it->second; + break; + } + } + return ret; +} + void ModuleWrap::New(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); @@ -133,9 +147,7 @@ void ModuleWrap::New(const FunctionCallbackInfo& args) { } } - Local url_str = FIXED_ONE_BYTE_STRING(isolate, "url"); - - if (!that->Set(context, url_str, url).FromMaybe(false)) { + if (!that->Set(context, env->url_string(), url).FromMaybe(false)) { return; } @@ -171,6 +183,9 @@ void ModuleWrap::Link(const FunctionCallbackInfo& args) { Local mod_context = obj->context_.Get(isolate); Local module = obj->module_.Get(isolate); + Local promises = Array::New(isolate, + module->GetModuleRequestsLength()); + // call the dependency resolve callbacks for (int i = 0; i < 
module->GetModuleRequestsLength(); i++) { Local specifier = module->GetModuleRequest(i); @@ -193,9 +208,11 @@ void ModuleWrap::Link(const FunctionCallbackInfo& args) { } Local resolve_promise = resolve_return_value.As(); obj->resolve_cache_[specifier_std].Reset(env->isolate(), resolve_promise); + + promises->Set(mod_context, specifier, resolve_promise).FromJust(); } - args.GetReturnValue().Set(that); + args.GetReturnValue().Set(promises); } void ModuleWrap::Instantiate(const FunctionCallbackInfo& args) { @@ -356,14 +373,7 @@ MaybeLocal ModuleWrap::ResolveCallback(Local context, return MaybeLocal(); } - ModuleWrap* dependent = nullptr; - auto range = env->module_map.equal_range(referrer->GetIdentityHash()); - for (auto it = range.first; it != range.second; ++it) { - if (it->second->module_ == referrer) { - dependent = it->second; - break; - } - } + ModuleWrap* dependent = ModuleWrap::GetFromModule(env, referrer); if (dependent == nullptr) { env->ThrowError("linking error, null dep"); @@ -723,6 +733,40 @@ void ModuleWrap::SetImportModuleDynamicallyCallback( iso->SetHostImportModuleDynamicallyCallback(ImportModuleDynamically); } +void ModuleWrap::HostInitializeImportMetaObjectCallback( + Local context, Local module, Local meta) { + Isolate* isolate = context->GetIsolate(); + Environment* env = Environment::GetCurrent(context); + ModuleWrap* module_wrap = ModuleWrap::GetFromModule(env, module); + + if (module_wrap == nullptr) { + return; + } + + Local wrap = module_wrap->object(); + Local callback = + env->host_initialize_import_meta_object_callback(); + Local args[] = { wrap, meta }; + callback->Call(context, Undefined(isolate), arraysize(args), args) + .ToLocalChecked(); +} + +void ModuleWrap::SetInitializeImportMetaObjectCallback( + const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + Isolate* isolate = env->isolate(); + if (!args[0]->IsFunction()) { + env->ThrowError("first argument is not a function"); + return; + } + + Local 
import_meta_callback = args[0].As(); + env->set_host_initialize_import_meta_object_callback(import_meta_callback); + + isolate->SetHostInitializeImportMetaObjectCallback( + HostInitializeImportMetaObjectCallback); +} + void ModuleWrap::Initialize(Local target, Local unused, Local context) { @@ -747,6 +791,9 @@ void ModuleWrap::Initialize(Local target, env->SetMethod(target, "setImportModuleDynamicallyCallback", node::loader::ModuleWrap::SetImportModuleDynamicallyCallback); + env->SetMethod(target, + "setInitializeImportMetaObjectCallback", + ModuleWrap::SetInitializeImportMetaObjectCallback); #define V(name) \ target->Set(context, \ diff --git a/src/module_wrap.h b/src/module_wrap.h index bedf665165c..5950c5a1be0 100644 --- a/src/module_wrap.h +++ b/src/module_wrap.h @@ -23,6 +23,10 @@ class ModuleWrap : public BaseObject { static void Initialize(v8::Local target, v8::Local unused, v8::Local context); + static void HostInitializeImportMetaObjectCallback( + v8::Local context, + v8::Local module, + v8::Local meta); private: ModuleWrap(Environment* env, @@ -44,10 +48,14 @@ class ModuleWrap : public BaseObject { static void Resolve(const v8::FunctionCallbackInfo& args); static void SetImportModuleDynamicallyCallback( const v8::FunctionCallbackInfo& args); + static void SetInitializeImportMetaObjectCallback( + const v8::FunctionCallbackInfo& args); static v8::MaybeLocal ResolveCallback( v8::Local context, v8::Local specifier, v8::Local referrer); + static ModuleWrap* GetFromModule(node::Environment*, v8::Local); + v8::Persistent module_; v8::Persistent url_; diff --git a/src/node.cc b/src/node.cc index 501035d9680..2aa6d2bb030 100644 --- a/src/node.cc +++ b/src/node.cc @@ -3603,6 +3603,8 @@ static void CheckIfAllowedInEnv(const char* exe, bool is_env, "--icu-data-dir", // V8 options (define with '_', which allows '-' or '_') + "--perf_prof", + "--perf_basic_prof", "--abort_on_uncaught_exception", "--max_old_space_size", "--stack_trace_limit", @@ -3791,6 +3793,10 @@ 
static void ParseArgs(int* argc, config_preserve_symlinks = true; } else if (strcmp(arg, "--experimental-modules") == 0) { config_experimental_modules = true; + new_v8_argv[new_v8_argc] = "--harmony-dynamic-import"; + new_v8_argc += 1; + new_v8_argv[new_v8_argc] = "--harmony-import-meta"; + new_v8_argc += 1; } else if (strcmp(arg, "--experimental-vm-modules") == 0) { config_experimental_vm_modules = true; } else if (strcmp(arg, "--loader") == 0) { diff --git a/src/node_crypto.cc b/src/node_crypto.cc index b572c90aa50..1f185dd7158 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -1724,26 +1724,31 @@ void SSLWrap::OnClientHello(void* arg, Base* w = static_cast(arg); Environment* env = w->ssl_env(); HandleScope handle_scope(env->isolate()); - Context::Scope context_scope(env->context()); + Local context = env->context(); + Context::Scope context_scope(context); Local hello_obj = Object::New(env->isolate()); Local buff = Buffer::Copy( env, reinterpret_cast(hello.session_id()), hello.session_size()).ToLocalChecked(); - hello_obj->Set(env->session_id_string(), buff); + hello_obj->Set(context, env->session_id_string(), buff).FromJust(); if (hello.servername() == nullptr) { - hello_obj->Set(env->servername_string(), String::Empty(env->isolate())); + hello_obj->Set(context, + env->servername_string(), + String::Empty(env->isolate())).FromJust(); } else { Local servername = OneByteString(env->isolate(), hello.servername(), hello.servername_size()); - hello_obj->Set(env->servername_string(), servername); + hello_obj->Set(context, env->servername_string(), servername).FromJust(); } - hello_obj->Set(env->tls_ticket_string(), - Boolean::New(env->isolate(), hello.has_ticket())); - hello_obj->Set(env->ocsp_request_string(), - Boolean::New(env->isolate(), hello.ocsp_request())); + hello_obj->Set(context, + env->tls_ticket_string(), + Boolean::New(env->isolate(), hello.has_ticket())).FromJust(); + hello_obj->Set(context, + env->ocsp_request_string(), + 
Boolean::New(env->isolate(), hello.ocsp_request())).FromJust(); Local argv[] = { hello_obj }; w->MakeCallback(env->onclienthello_string(), arraysize(argv), argv); @@ -1788,7 +1793,7 @@ static bool SafeX509ExtPrint(BIO* out, X509_EXTENSION* ext) { static Local X509ToObject(Environment* env, X509* cert) { EscapableHandleScope scope(env->isolate()); - + Local context = env->context(); Local info = Object::New(env->isolate()); BIO* bio = BIO_new(BIO_s_mem()); @@ -1798,18 +1803,20 @@ static Local X509ToObject(Environment* env, X509* cert) { 0, X509_NAME_FLAGS) > 0) { BIO_get_mem_ptr(bio, &mem); - info->Set(env->subject_string(), + info->Set(context, env->subject_string(), String::NewFromUtf8(env->isolate(), mem->data, - String::kNormalString, mem->length)); + String::kNormalString, + mem->length)).FromJust(); } USE(BIO_reset(bio)); X509_NAME* issuer_name = X509_get_issuer_name(cert); if (X509_NAME_print_ex(bio, issuer_name, 0, X509_NAME_FLAGS) > 0) { BIO_get_mem_ptr(bio, &mem); - info->Set(env->issuer_string(), + info->Set(context, env->issuer_string(), String::NewFromUtf8(env->isolate(), mem->data, - String::kNormalString, mem->length)); + String::kNormalString, + mem->length)).FromJust(); } USE(BIO_reset(bio)); @@ -1834,9 +1841,10 @@ static Local X509ToObject(Environment* env, X509* cert) { } BIO_get_mem_ptr(bio, &mem); - info->Set(keys[i], + info->Set(context, keys[i], String::NewFromUtf8(env->isolate(), mem->data, - String::kNormalString, mem->length)); + String::kNormalString, + mem->length)).FromJust(); USE(BIO_reset(bio)); } @@ -1852,9 +1860,10 @@ static Local X509ToObject(Environment* env, X509* cert) { RSA_get0_key(rsa, &n, &e, nullptr); BN_print(bio, n); BIO_get_mem_ptr(bio, &mem); - info->Set(env->modulus_string(), + info->Set(context, env->modulus_string(), String::NewFromUtf8(env->isolate(), mem->data, - String::kNormalString, mem->length)); + String::kNormalString, + mem->length)).FromJust(); USE(BIO_reset(bio)); uint64_t exponent_word = 
static_cast(BN_get_word(e)); @@ -1866,9 +1875,10 @@ static Local X509ToObject(Environment* env, X509* cert) { BIO_printf(bio, "0x%x%08x", hi, lo); } BIO_get_mem_ptr(bio, &mem); - info->Set(env->exponent_string(), + info->Set(context, env->exponent_string(), String::NewFromUtf8(env->isolate(), mem->data, - String::kNormalString, mem->length)); + String::kNormalString, + mem->length)).FromJust(); USE(BIO_reset(bio)); } @@ -1883,16 +1893,18 @@ static Local X509ToObject(Environment* env, X509* cert) { ASN1_TIME_print(bio, X509_get_notBefore(cert)); BIO_get_mem_ptr(bio, &mem); - info->Set(env->valid_from_string(), + info->Set(context, env->valid_from_string(), String::NewFromUtf8(env->isolate(), mem->data, - String::kNormalString, mem->length)); + String::kNormalString, + mem->length)).FromJust(); USE(BIO_reset(bio)); ASN1_TIME_print(bio, X509_get_notAfter(cert)); BIO_get_mem_ptr(bio, &mem); - info->Set(env->valid_to_string(), + info->Set(context, env->valid_to_string(), String::NewFromUtf8(env->isolate(), mem->data, - String::kNormalString, mem->length)); + String::kNormalString, + mem->length)).FromJust(); BIO_free_all(bio); unsigned int md_size, i; @@ -1913,8 +1925,8 @@ static Local X509ToObject(Environment* env, X509* cert) { fingerprint[0] = '\0'; } - info->Set(env->fingerprint_string(), - OneByteString(env->isolate(), fingerprint)); + info->Set(context, env->fingerprint_string(), + OneByteString(env->isolate(), fingerprint)).FromJust(); } STACK_OF(ASN1_OBJECT)* eku = static_cast( @@ -1926,18 +1938,20 @@ static Local X509ToObject(Environment* env, X509* cert) { int j = 0; for (int i = 0; i < sk_ASN1_OBJECT_num(eku); i++) { if (OBJ_obj2txt(buf, sizeof(buf), sk_ASN1_OBJECT_value(eku, i), 1) >= 0) - ext_key_usage->Set(j++, OneByteString(env->isolate(), buf)); + ext_key_usage->Set(context, + j++, + OneByteString(env->isolate(), buf)).FromJust(); } sk_ASN1_OBJECT_pop_free(eku, ASN1_OBJECT_free); - info->Set(env->ext_key_usage_string(), ext_key_usage); + 
info->Set(context, env->ext_key_usage_string(), ext_key_usage).FromJust(); } if (ASN1_INTEGER* serial_number = X509_get_serialNumber(cert)) { if (BIGNUM* bn = ASN1_INTEGER_to_BN(serial_number, nullptr)) { if (char* buf = BN_bn2hex(bn)) { - info->Set(env->serial_number_string(), - OneByteString(env->isolate(), buf)); + info->Set(context, env->serial_number_string(), + OneByteString(env->isolate(), buf)).FromJust(); OPENSSL_free(buf); } BN_free(bn); @@ -1950,7 +1964,7 @@ static Local X509ToObject(Environment* env, X509* cert) { unsigned char* serialized = reinterpret_cast( Buffer::Data(buff)); i2d_X509(cert, &serialized); - info->Set(env->raw_string(), buff); + info->Set(context, env->raw_string(), buff).FromJust(); return scope.Escape(info); } @@ -1963,6 +1977,7 @@ void SSLWrap::GetPeerCertificate( Base* w; ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); Environment* env = w->ssl_env(); + Local context = env->context(); ClearErrorOnReturn clear_error_on_return; @@ -2014,7 +2029,7 @@ void SSLWrap::GetPeerCertificate( continue; Local ca_info = X509ToObject(env, ca); - info->Set(env->issuercert_string(), ca_info); + info->Set(context, env->issuercert_string(), ca_info).FromJust(); info = ca_info; // NOTE: Intentionally freeing cert that is not used anymore @@ -2037,7 +2052,7 @@ void SSLWrap::GetPeerCertificate( break; Local ca_info = X509ToObject(env, ca); - info->Set(env->issuercert_string(), ca_info); + info->Set(context, env->issuercert_string(), ca_info).FromJust(); info = ca_info; // NOTE: Intentionally freeing cert that is not used anymore @@ -2049,7 +2064,7 @@ void SSLWrap::GetPeerCertificate( // Self-issued certificate if (X509_check_issued(cert, cert) == X509_V_OK) - info->Set(env->issuercert_string(), info); + info->Set(context, env->issuercert_string(), info).FromJust(); CHECK_NE(cert, nullptr); @@ -2245,6 +2260,7 @@ void SSLWrap::GetEphemeralKeyInfo( Base* w; ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); Environment* env = Environment::GetCurrent(args); + Local 
context = env->context(); CHECK_NE(w->ssl_, nullptr); @@ -2259,22 +2275,24 @@ void SSLWrap::GetEphemeralKeyInfo( if (SSL_get_server_tmp_key(w->ssl_, &key)) { switch (EVP_PKEY_id(key)) { case EVP_PKEY_DH: - info->Set(env->type_string(), - FIXED_ONE_BYTE_STRING(env->isolate(), "DH")); - info->Set(env->size_string(), - Integer::New(env->isolate(), EVP_PKEY_bits(key))); + info->Set(context, env->type_string(), + FIXED_ONE_BYTE_STRING(env->isolate(), "DH")).FromJust(); + info->Set(context, env->size_string(), + Integer::New(env->isolate(), EVP_PKEY_bits(key))).FromJust(); break; case EVP_PKEY_EC: { EC_KEY* ec = EVP_PKEY_get1_EC_KEY(key); int nid = EC_GROUP_get_curve_name(EC_KEY_get0_group(ec)); EC_KEY_free(ec); - info->Set(env->type_string(), - FIXED_ONE_BYTE_STRING(env->isolate(), "ECDH")); - info->Set(env->name_string(), - OneByteString(args.GetIsolate(), OBJ_nid2sn(nid))); - info->Set(env->size_string(), - Integer::New(env->isolate(), EVP_PKEY_bits(key))); + info->Set(context, env->type_string(), + FIXED_ONE_BYTE_STRING(env->isolate(), "ECDH")).FromJust(); + info->Set(context, env->name_string(), + OneByteString(args.GetIsolate(), + OBJ_nid2sn(nid))).FromJust(); + info->Set(context, env->size_string(), + Integer::New(env->isolate(), + EVP_PKEY_bits(key))).FromJust(); } } EVP_PKEY_free(key); @@ -2367,7 +2385,8 @@ void SSLWrap::VerifyError(const FunctionCallbackInfo& args) { Local reason_string = OneByteString(isolate, reason); Local exception_value = Exception::Error(reason_string); Local exception_object = exception_value->ToObject(isolate); - exception_object->Set(w->env()->code_string(), OneByteString(isolate, code)); + exception_object->Set(w->env()->context(), w->env()->code_string(), + OneByteString(isolate, code)).FromJust(); args.GetReturnValue().Set(exception_object); } @@ -2377,6 +2396,7 @@ void SSLWrap::GetCurrentCipher(const FunctionCallbackInfo& args) { Base* w; ASSIGN_OR_RETURN_UNWRAP(&w, args.Holder()); Environment* env = w->ssl_env(); + Local context = 
env->context(); const SSL_CIPHER* c = SSL_get_current_cipher(w->ssl_); if (c == nullptr) @@ -2384,9 +2404,10 @@ void SSLWrap::GetCurrentCipher(const FunctionCallbackInfo& args) { Local info = Object::New(env->isolate()); const char* cipher_name = SSL_CIPHER_get_name(c); - info->Set(env->name_string(), OneByteString(args.GetIsolate(), cipher_name)); - info->Set(env->version_string(), - OneByteString(args.GetIsolate(), "TLSv1/SSLv3")); + info->Set(context, env->name_string(), + OneByteString(args.GetIsolate(), cipher_name)).FromJust(); + info->Set(context, env->version_string(), + OneByteString(args.GetIsolate(), "TLSv1/SSLv3")).FromJust(); args.GetReturnValue().Set(info); } @@ -2695,19 +2716,22 @@ int SSLWrap::SSLCertCallback(SSL* s, void* arg) { return -1; Environment* env = w->env(); + Local context = env->context(); HandleScope handle_scope(env->isolate()); - Context::Scope context_scope(env->context()); + Context::Scope context_scope(context); w->cert_cb_running_ = true; Local info = Object::New(env->isolate()); const char* servername = SSL_get_servername(s, TLSEXT_NAMETYPE_host_name); if (servername == nullptr) { - info->Set(env->servername_string(), String::Empty(env->isolate())); + info->Set(context, + env->servername_string(), + String::Empty(env->isolate())).FromJust(); } else { Local str = OneByteString(env->isolate(), servername, strlen(servername)); - info->Set(env->servername_string(), str); + info->Set(context, env->servername_string(), str).FromJust(); } bool ocsp = false; @@ -2715,7 +2739,8 @@ int SSLWrap::SSLCertCallback(SSL* s, void* arg) { ocsp = SSL_get_tlsext_status_type(s) == TLSEXT_STATUSTYPE_ocsp; #endif - info->Set(env->ocsp_request_string(), Boolean::New(env->isolate(), ocsp)); + info->Set(context, env->ocsp_request_string(), + Boolean::New(env->isolate(), ocsp)).FromJust(); Local argv[] = { info }; w->MakeCallback(env->oncertcb_string(), arraysize(argv), argv); @@ -4997,7 +5022,7 @@ void PBKDF2(const FunctionCallbackInfo& args) { keylen)); 
if (args[5]->IsFunction()) { - obj->Set(env->ondone_string(), args[5]); + obj->Set(env->context(), env->ondone_string(), args[5]).FromJust(); uv_queue_work(env->event_loop(), req.release()->work_req(), @@ -5185,7 +5210,7 @@ void RandomBytes(const FunctionCallbackInfo& args) { RandomBytesRequest::FREE_DATA)); if (args[1]->IsFunction()) { - obj->Set(env->ondone_string(), args[1]); + obj->Set(env->context(), env->ondone_string(), args[1]).FromJust(); uv_queue_work(env->event_loop(), req.release()->work_req(), @@ -5254,7 +5279,10 @@ void GetSSLCiphers(const FunctionCallbackInfo& args) { for (int i = 0; i < sk_SSL_CIPHER_num(ciphers); ++i) { const SSL_CIPHER* cipher = sk_SSL_CIPHER_value(ciphers, i); - arr->Set(i, OneByteString(args.GetIsolate(), SSL_CIPHER_get_name(cipher))); + arr->Set(env->context(), + i, + OneByteString(args.GetIsolate(), + SSL_CIPHER_get_name(cipher))).FromJust(); } SSL_free(ssl); @@ -5317,7 +5345,10 @@ void GetCurves(const FunctionCallbackInfo& args) { if (EC_get_builtin_curves(curves, num_curves)) { for (size_t i = 0; i < num_curves; i++) { - arr->Set(i, OneByteString(env->isolate(), OBJ_nid2sn(curves[i].nid))); + arr->Set(env->context(), + i, + OneByteString(env->isolate(), + OBJ_nid2sn(curves[i].nid))).FromJust(); } } diff --git a/src/node_file.cc b/src/node_file.cc index ec1f62137a4..5d625918724 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -610,29 +610,34 @@ static void FStat(const FunctionCallbackInfo& args) { static void Symlink(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - CHECK_GE(args.Length(), 3); + int argc = args.Length(); + CHECK_GE(argc, 4); BufferValue target(env->isolate(), args[0]); CHECK_NE(*target, nullptr); BufferValue path(env->isolate(), args[1]); CHECK_NE(*path, nullptr); - CHECK(args[2]->IsUint32()); - int flags = args[2]->Uint32Value(env->context()).ToChecked(); + CHECK(args[2]->IsInt32()); + int flags = args[2].As()->Value(); if (args[3]->IsObject()) { // 
symlink(target, path, flags, req) CHECK_EQ(args.Length(), 4); AsyncDestCall(env, args, "symlink", *path, path.length(), UTF8, AfterNoArgs, uv_fs_symlink, *target, *path, flags); - } else { // symlink(target, path, flags) - SYNC_DEST_CALL(symlink, *target, *path, *target, *path, flags) + } else { // symlink(target, path, flags, undefinec, ctx) + CHECK_EQ(argc, 5); + fs_req_wrap req_wrap; + SyncCall(env, args[4], &req_wrap, "symlink", + uv_fs_symlink, *target, *path, flags); } } static void Link(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - CHECK_GE(args.Length(), 2); + int argc = args.Length(); + CHECK_GE(argc, 3); BufferValue src(env->isolate(), args[0]); CHECK_NE(*src, nullptr); @@ -641,20 +646,22 @@ static void Link(const FunctionCallbackInfo& args) { CHECK_NE(*dest, nullptr); if (args[2]->IsObject()) { // link(src, dest, req) - CHECK_EQ(args.Length(), 3); + CHECK_EQ(argc, 3); AsyncDestCall(env, args, "link", *dest, dest.length(), UTF8, AfterNoArgs, uv_fs_link, *src, *dest); - } else { // link(src, dest) - SYNC_DEST_CALL(link, *src, *dest, *src, *dest) + } else { // link(src, dest, undefined, ctx) + CHECK_EQ(argc, 4); + fs_req_wrap req_wrap; + SyncCall(env, args[3], &req_wrap, "link", + uv_fs_link, *src, *dest); } } static void ReadLink(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - const int argc = args.Length(); - - CHECK_GE(argc, 1); + int argc = args.Length(); + CHECK_GE(argc, 3); BufferValue path(env->isolate(), args[0]); CHECK_NE(*path, nullptr); @@ -662,12 +669,18 @@ static void ReadLink(const FunctionCallbackInfo& args) { const enum encoding encoding = ParseEncoding(env->isolate(), args[1], UTF8); if (args[2]->IsObject()) { // readlink(path, encoding, req) - CHECK_EQ(args.Length(), 3); + CHECK_EQ(argc, 3); AsyncCall(env, args, "readlink", encoding, AfterStringPtr, uv_fs_readlink, *path); - } else { - SYNC_CALL(readlink, *path, *path) - const char* link_path = 
static_cast(SYNC_REQ.ptr); + } else { // readlink(path, encoding, undefined, ctx) + CHECK_EQ(argc, 4); + fs_req_wrap req_wrap; + int err = SyncCall(env, args[3], &req_wrap, "readlink", + uv_fs_readlink, *path); + if (err) { + return; // syscall failed, no need to continue, error info is in ctx + } + const char* link_path = static_cast(req_wrap.req.ptr); Local error; MaybeLocal rc = StringBytes::Encode(env->isolate(), @@ -675,9 +688,11 @@ static void ReadLink(const FunctionCallbackInfo& args) { encoding, &error); if (rc.IsEmpty()) { - env->isolate()->ThrowException(error); + Local ctx = args[3].As(); + ctx->Set(env->context(), env->error_string(), error).FromJust(); return; } + args.GetReturnValue().Set(rc.ToLocalChecked()); } } @@ -685,86 +700,110 @@ static void ReadLink(const FunctionCallbackInfo& args) { static void Rename(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - CHECK_GE(args.Length(), 2); + int argc = args.Length(); + CHECK_GE(argc, 3); BufferValue old_path(env->isolate(), args[0]); CHECK_NE(*old_path, nullptr); BufferValue new_path(env->isolate(), args[1]); CHECK_NE(*new_path, nullptr); - if (args[2]->IsObject()) { - CHECK_EQ(args.Length(), 3); + if (args[2]->IsObject()) { // rename(old_path, new_path, req) + CHECK_EQ(argc, 3); AsyncDestCall(env, args, "rename", *new_path, new_path.length(), UTF8, AfterNoArgs, uv_fs_rename, *old_path, *new_path); - } else { - SYNC_DEST_CALL(rename, *old_path, *new_path, *old_path, *new_path) + } else { // rename(old_path, new_path, undefined, ctx) + CHECK_EQ(argc, 4); + fs_req_wrap req_wrap; + SyncCall(env, args[3], &req_wrap, "rename", + uv_fs_rename, *old_path, *new_path); } } static void FTruncate(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); + const int argc = args.Length(); + CHECK_GE(argc, 3); + CHECK(args[0]->IsInt32()); - CHECK(args[1]->IsNumber()); + const int fd = args[0].As()->Value(); - int fd = args[0]->Int32Value(); - const 
int64_t len = args[1]->IntegerValue(); + CHECK(args[1]->IsNumber()); + const int64_t len = args[1].As()->Value(); - if (args[2]->IsObject()) { - CHECK_EQ(args.Length(), 3); + if (args[2]->IsObject()) { // ftruncate(fd, len, req) + CHECK_EQ(argc, 3); AsyncCall(env, args, "ftruncate", UTF8, AfterNoArgs, uv_fs_ftruncate, fd, len); - } else { - SYNC_CALL(ftruncate, 0, fd, len) + } else { // ftruncate(fd, len, undefined, ctx) + CHECK_EQ(argc, 4); + fs_req_wrap req_wrap; + SyncCall(env, args[3], &req_wrap, "ftruncate", + uv_fs_ftruncate, fd, len); } } static void Fdatasync(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - CHECK(args[0]->IsInt32()); + const int argc = args.Length(); + CHECK_GE(argc, 2); - int fd = args[0]->Int32Value(); + CHECK(args[0]->IsInt32()); + const int fd = args[0].As()->Value(); - if (args[1]->IsObject()) { - CHECK_EQ(args.Length(), 2); + if (args[1]->IsObject()) { // fdatasync(fd, req) + CHECK_EQ(argc, 2); AsyncCall(env, args, "fdatasync", UTF8, AfterNoArgs, uv_fs_fdatasync, fd); - } else { - SYNC_CALL(fdatasync, 0, fd) + } else { // fdatasync(fd, undefined, ctx) + CHECK_EQ(argc, 3); + fs_req_wrap req_wrap; + SyncCall(env, args[2], &req_wrap, "fdatasync", + uv_fs_fdatasync, fd); } } static void Fsync(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - CHECK(args[0]->IsInt32()); + const int argc = args.Length(); + CHECK_GE(argc, 2); - int fd = args[0]->Int32Value(); + CHECK(args[0]->IsInt32()); + const int fd = args[0].As()->Value(); - if (args[1]->IsObject()) { - CHECK_EQ(args.Length(), 2); + if (args[1]->IsObject()) { // fsync(fd, req) + CHECK_EQ(argc, 2); AsyncCall(env, args, "fsync", UTF8, AfterNoArgs, uv_fs_fsync, fd); - } else { - SYNC_CALL(fsync, 0, fd) + } else { // fsync(fd, undefined, ctx) + CHECK_EQ(argc, 3); + fs_req_wrap req_wrap; + SyncCall(env, args[2], &req_wrap, "fsync", + uv_fs_fsync, fd); } } static void Unlink(const FunctionCallbackInfo& args) { 
Environment* env = Environment::GetCurrent(args); - CHECK_GE(args.Length(), 1); + const int argc = args.Length(); + CHECK_GE(argc, 2); BufferValue path(env->isolate(), args[0]); CHECK_NE(*path, nullptr); - if (args[1]->IsObject()) { - CHECK_EQ(args.Length(), 2); + if (args[1]->IsObject()) { // unlink(fd, req) + CHECK_EQ(argc, 2); AsyncCall(env, args, "unlink", UTF8, AfterNoArgs, uv_fs_unlink, *path); - } else { - SYNC_CALL(unlink, *path, *path) + } else { // unlink(fd, undefined, ctx) + CHECK_EQ(argc, 3); + fs_req_wrap req_wrap; + SyncCall(env, args[2], &req_wrap, "unlink", + uv_fs_unlink, *path); } } diff --git a/src/node_http2.cc b/src/node_http2.cc index 1a2e8aada43..44d5d478564 100644 --- a/src/node_http2.cc +++ b/src/node_http2.cc @@ -538,24 +538,12 @@ Http2Session::Http2Session(Environment* env, outgoing_buffers_.reserve(32); } -void Http2Session::Unconsume() { - if (stream_ != nullptr) { - DEBUG_HTTP2SESSION(this, "unconsuming the i/o stream"); - stream_->set_destruct_cb({ nullptr, nullptr }); - stream_->set_alloc_cb({ nullptr, nullptr }); - stream_->set_read_cb({ nullptr, nullptr }); - stream_->Unconsume(); - stream_ = nullptr; - } -} - Http2Session::~Http2Session() { CHECK_EQ(flags_ & SESSION_STATE_HAS_SCOPE, 0); if (!object().IsEmpty()) ClearWrap(object()); persistent().Reset(); CHECK(persistent().IsEmpty()); - Unconsume(); DEBUG_HTTP2SESSION(this, "freeing nghttp2 session"); nghttp2_session_del(session_); } @@ -653,7 +641,8 @@ void Http2Session::Close(uint32_t code, bool socket_closed) { DEBUG_HTTP2SESSION2(this, "terminating session with code %d", code); CHECK_EQ(nghttp2_session_terminate_session(session_, code), 0); } else { - Unconsume(); + if (stream_ != nullptr) + stream_->RemoveStreamListener(this); } // If there are outstanding pings, those will need to be canceled, do @@ -1051,22 +1040,38 @@ inline int Http2Session::OnDataChunkReceived(nghttp2_session* handle, stream->statistics_.received_bytes += len; - // There is a single large array buffer 
for the entire data read from the - // network; create a slice of that array buffer and emit it as the - // received data buffer. - CHECK(!session->stream_buf_ab_.IsEmpty()); - size_t offset = reinterpret_cast(data) - session->stream_buf_; - // Verify that the data offset is inside the current read buffer. - CHECK_LE(offset, session->stream_buf_size_); - - Local buf = - Buffer::New(env, session->stream_buf_ab_, offset, len).ToLocalChecked(); - - stream->EmitData(len, buf, Local()); - if (!stream->IsReading()) - stream->inbound_consumed_data_while_paused_ += len; - else - nghttp2_session_consume_stream(handle, id, len); + // Repeatedly ask the stream's owner for memory, and copy the read data + // into those buffers. + // The typical case is actually the exception here; Http2StreamListeners + // know about the HTTP2 session associated with this stream, so they know + // about the larger from-socket read buffer, so they do not require copying. + do { + uv_buf_t buf = stream->EmitAlloc(len); + ssize_t avail = len; + if (static_cast(buf.len) < avail) + avail = buf.len; + + // `buf.base == nullptr` is the default Http2StreamListener's way + // of saying that it wants a pointer to the raw original. + // Since it has access to the original socket buffer from which the data + // was read in the first place, it can use that to minizime ArrayBuffer + // allocations. + if (LIKELY(buf.base == nullptr)) + buf.base = reinterpret_cast(const_cast(data)); + else + memcpy(buf.base, data, avail); + data += avail; + len -= avail; + stream->EmitRead(avail, buf); + + // If the stream owner (e.g. the JS Http2Stream) wants more data, just + // tell nghttp2 that all data has been consumed. Otherwise, defer until + // more data is being requested. 
+ if (stream->IsReading()) + nghttp2_session_consume_stream(handle, id, avail); + else + stream->inbound_consumed_data_while_paused_ += avail; + } while (len != 0); } return 0; } @@ -1136,6 +1141,38 @@ inline void Http2Session::GetTrailers(Http2Stream* stream, uint32_t* flags) { } } +uv_buf_t Http2StreamListener::OnStreamAlloc(size_t size) { + // See the comments in Http2Session::OnDataChunkReceived + // (which is the only possible call site for this method). + return uv_buf_init(nullptr, size); +} + +void Http2StreamListener::OnStreamRead(ssize_t nread, const uv_buf_t& buf) { + Http2Stream* stream = static_cast(stream_); + Http2Session* session = stream->session(); + Environment* env = stream->env(); + + if (nread < 0) { + PassReadErrorToPreviousListener(nread); + return; + } + + CHECK(!session->stream_buf_ab_.IsEmpty()); + + // There is a single large array buffer for the entire data read from the + // network; create a slice of that array buffer and emit it as the + // received data buffer. + size_t offset = buf.base - session->stream_buf_.base; + + // Verify that the data offset is inside the current read buffer. + CHECK_LE(offset, session->stream_buf_.len); + CHECK_LE(offset + buf.len, session->stream_buf_.len); + + Local buffer = + Buffer::New(env, session->stream_buf_ab_, offset, nread).ToLocalChecked(); + + stream->CallJSOnreadMethod(nread, buffer); +} Http2Stream::SubmitTrailers::SubmitTrailers( Http2Session* session, @@ -1264,7 +1301,7 @@ inline void Http2Session::HandleDataFrame(const nghttp2_frame* frame) { return; if (frame->hd.flags & NGHTTP2_FLAG_END_STREAM) { - stream->EmitData(UV_EOF, Local(), Local()); + stream->EmitRead(UV_EOF); } } @@ -1385,16 +1422,15 @@ inline void Http2Session::HandleSettingsFrame(const nghttp2_frame* frame) { } // Callback used when data has been written to the stream. 
-void Http2Session::OnStreamAfterWriteImpl(WriteWrap* w, int status, void* ctx) { - Http2Session* session = static_cast(ctx); - DEBUG_HTTP2SESSION2(session, "write finished with status %d", status); +void Http2Session::OnStreamAfterWrite(WriteWrap* w, int status) { + DEBUG_HTTP2SESSION2(this, "write finished with status %d", status); // Inform all pending writes about their completion. - session->ClearOutgoing(status); + ClearOutgoing(status); - if (!(session->flags_ & SESSION_STATE_WRITE_SCHEDULED)) { + if (!(flags_ & SESSION_STATE_WRITE_SCHEDULED)) { // Schedule a new write if nghttp2 wants to send data. - session->MaybeScheduleWrite(); + MaybeScheduleWrite(); } } @@ -1632,97 +1668,76 @@ WriteWrap* Http2Session::AllocateSend() { Local obj = env()->write_wrap_constructor_function() ->NewInstance(env()->context()).ToLocalChecked(); - return WriteWrap::New(env(), obj, stream_); -} - -// Allocates the data buffer used to receive inbound data from the i/o stream -void Http2Session::OnStreamAllocImpl(size_t suggested_size, - uv_buf_t* buf, - void* ctx) { - Http2Session* session = static_cast(ctx); - CHECK_EQ(session->stream_buf_, nullptr); - CHECK_EQ(session->stream_buf_size_, 0); - buf->base = session->stream_buf_ = Malloc(suggested_size); - buf->len = session->stream_buf_size_ = suggested_size; - session->IncrementCurrentSessionMemory(suggested_size); + return WriteWrap::New(env(), obj, static_cast(stream_)); } // Callback used to receive inbound data from the i/o stream -void Http2Session::OnStreamReadImpl(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx) { - Http2Session* session = static_cast(ctx); - Http2Scope h2scope(session); - CHECK_NE(session->stream_, nullptr); - DEBUG_HTTP2SESSION2(session, "receiving %d bytes", nread); +void Http2Session::OnStreamRead(ssize_t nread, const uv_buf_t& buf) { + Http2Scope h2scope(this); + CHECK_NE(stream_, nullptr); + DEBUG_HTTP2SESSION2(this, "receiving %d bytes", nread); + 
IncrementCurrentSessionMemory(buf.len); + CHECK(stream_buf_ab_.IsEmpty()); + if (nread <= 0) { - free(session->stream_buf_); + free(buf.base); if (nread < 0) { - uv_buf_t tmp_buf = uv_buf_init(nullptr, 0); - session->prev_read_cb_.fn(nread, - &tmp_buf, - pending, - session->prev_read_cb_.ctx); + PassReadErrorToPreviousListener(nread); } } else { // Only pass data on if nread > 0 + // Makre sure that there was no read previously active. + CHECK_EQ(stream_buf_.base, nullptr); + CHECK_EQ(stream_buf_.len, 0); + + // Remember the current buffer, so that OnDataChunkReceived knows the + // offset of a DATA frame's data into the socket read buffer. + stream_buf_ = uv_buf_init(buf.base, nread); + // Verify that currently: There is memory allocated into which // the data has been read, and that memory buffer is at least as large // as the amount of data we have read, but we have not yet made an // ArrayBuffer out of it. - CHECK_NE(session->stream_buf_, nullptr); - CHECK_EQ(session->stream_buf_, buf->base); - CHECK_EQ(session->stream_buf_size_, buf->len); - CHECK_GE(session->stream_buf_size_, static_cast(nread)); - CHECK(session->stream_buf_ab_.IsEmpty()); + CHECK_LE(static_cast(nread), stream_buf_.len); - Environment* env = session->env(); - Isolate* isolate = env->isolate(); + Isolate* isolate = env()->isolate(); HandleScope scope(isolate); - Local context = env->context(); - Context::Scope context_scope(context); + Context::Scope context_scope(env()->context()); // Create an array buffer for the read data. DATA frames will be emitted // as slices of this array buffer to avoid having to copy memory. 
- session->stream_buf_ab_ = + stream_buf_ab_ = ArrayBuffer::New(isolate, - session->stream_buf_, - session->stream_buf_size_, + buf.base, + nread, v8::ArrayBufferCreationMode::kInternalized); - uv_buf_t buf_ = uv_buf_init(buf->base, nread); - session->statistics_.data_received += nread; - ssize_t ret = session->Write(&buf_, 1); + statistics_.data_received += nread; + ssize_t ret = Write(&stream_buf_, 1); // Note: if ssize_t is not defined (e.g. on Win32), nghttp2 will typedef // ssize_t to int. Cast here so that the < 0 check actually works on // Windows. if (static_cast(ret) < 0) { - DEBUG_HTTP2SESSION2(session, "fatal error receiving data: %d", ret); + DEBUG_HTTP2SESSION2(this, "fatal error receiving data: %d", ret); - Local argv[1] = { + Local argv[] = { Integer::New(isolate, ret), }; - session->MakeCallback(env->error_string(), arraysize(argv), argv); + MakeCallback(env()->error_string(), arraysize(argv), argv); } else { - DEBUG_HTTP2SESSION2(session, "processed %d bytes. wants more? %d", ret, - nghttp2_session_want_read(**session)); + DEBUG_HTTP2SESSION2(this, "processed %d bytes. wants more? %d", ret, + nghttp2_session_want_read(session_)); } } // Since we are finished handling this write, reset the stream buffer. // The memory has either been free()d or was handed over to V8. - session->DecrementCurrentSessionMemory(session->stream_buf_size_); - session->stream_buf_ = nullptr; - session->stream_buf_size_ = 0; - session->stream_buf_ab_ = Local(); -} + DecrementCurrentSessionMemory(buf.len); -void Http2Session::OnStreamDestructImpl(void* ctx) { - Http2Session* session = static_cast(ctx); - session->stream_ = nullptr; + stream_buf_ab_ = Local(); + stream_buf_ = uv_buf_init(nullptr, 0); } // Every Http2Session session is tightly bound to a single i/o StreamBase @@ -1731,14 +1746,7 @@ void Http2Session::OnStreamDestructImpl(void* ctx) { // C++ layer via the StreamBase API. 
void Http2Session::Consume(Local external) { StreamBase* stream = static_cast(external->Value()); - stream->Consume(); - stream_ = stream; - prev_alloc_cb_ = stream->alloc_cb(); - prev_read_cb_ = stream->read_cb(); - stream->set_alloc_cb({ Http2Session::OnStreamAllocImpl, this }); - stream->set_read_cb({ Http2Session::OnStreamReadImpl, this }); - stream->set_after_write_cb({ Http2Session::OnStreamAfterWriteImpl, this }); - stream->set_destruct_cb({ Http2Session::OnStreamDestructImpl, this }); + stream->PushStreamListener(this); DEBUG_HTTP2SESSION(this, "i/o stream consumed"); } @@ -1776,6 +1784,8 @@ Http2Stream::Http2Stream( if (options & STREAM_OPTION_GET_TRAILERS) flags_ |= NGHTTP2_STREAM_FLAG_TRAILERS; + PushStreamListener(&stream_listener_); + if (options & STREAM_OPTION_EMPTY_PAYLOAD) Shutdown(); session->AddStream(this); diff --git a/src/node_http2.h b/src/node_http2.h index 9027ed7feb7..bf41d74ed49 100644 --- a/src/node_http2.h +++ b/src/node_http2.h @@ -535,6 +535,12 @@ class Http2Priority { nghttp2_priority_spec spec; }; +class Http2StreamListener : public StreamListener { + public: + uv_buf_t OnStreamAlloc(size_t suggested_size) override; + void OnStreamRead(ssize_t nread, const uv_buf_t& buf) override; +}; + class Http2Stream : public AsyncWrap, public StreamBase { public: @@ -747,6 +753,8 @@ class Http2Stream : public AsyncWrap, int64_t fd_offset_ = 0; int64_t fd_length_ = -1; + Http2StreamListener stream_listener_; + friend class Http2Session; }; @@ -798,7 +806,7 @@ class Http2Stream::Provider::Stream : public Http2Stream::Provider { }; -class Http2Session : public AsyncWrap { +class Http2Session : public AsyncWrap, public StreamListener { public: Http2Session(Environment* env, Local wrap, @@ -872,21 +880,11 @@ class Http2Session : public AsyncWrap { size_t self_size() const override { return sizeof(*this); } - char* stream_alloc() { - return stream_buf_; - } - inline void GetTrailers(Http2Stream* stream, uint32_t* flags); - static void 
OnStreamAllocImpl(size_t suggested_size, - uv_buf_t* buf, - void* ctx); - static void OnStreamReadImpl(ssize_t nread, - const uv_buf_t* bufs, - uv_handle_type pending, - void* ctx); - static void OnStreamAfterWriteImpl(WriteWrap* w, int status, void* ctx); - static void OnStreamDestructImpl(void* ctx); + // Handle reads/writes from the underlying network transport. + void OnStreamRead(ssize_t nread, const uv_buf_t& buf) override; + void OnStreamAfterWrite(WriteWrap* w, int status) override; // The JavaScript API static void New(const FunctionCallbackInfo& args); @@ -1074,16 +1072,12 @@ class Http2Session : public AsyncWrap { int flags_ = SESSION_STATE_NONE; // The StreamBase instance being used for i/o - StreamBase* stream_; - StreamResource::Callback prev_alloc_cb_; - StreamResource::Callback prev_read_cb_; padding_strategy_type padding_strategy_ = PADDING_STRATEGY_NONE; // use this to allow timeout tracking during long-lasting writes uint32_t chunks_sent_since_last_write_ = 0; - char* stream_buf_ = nullptr; - size_t stream_buf_size_ = 0; + uv_buf_t stream_buf_ = uv_buf_init(nullptr, 0); v8::Local stream_buf_ab_; size_t max_outstanding_pings_ = DEFAULT_MAX_PINGS; @@ -1099,6 +1093,7 @@ class Http2Session : public AsyncWrap { void ClearOutgoing(int status); friend class Http2Scope; + friend class Http2StreamListener; }; class Http2SessionPerformanceEntry : public PerformanceEntry { diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc index 9debb8a205e..d4044f8bbee 100644 --- a/src/node_http_parser.cc +++ b/src/node_http_parser.cc @@ -144,7 +144,7 @@ struct StringPtr { }; -class Parser : public AsyncWrap { +class Parser : public AsyncWrap, public StreamListener { public: Parser(Environment* env, Local wrap, enum http_parser_type type) : AsyncWrap(env, wrap, AsyncWrap::PROVIDER_HTTPPARSER), @@ -494,14 +494,7 @@ class Parser : public AsyncWrap { Local stream_obj = args[0].As(); StreamBase* stream = static_cast(stream_obj->Value()); CHECK_NE(stream, nullptr); 
- - stream->Consume(); - - parser->prev_alloc_cb_ = stream->alloc_cb(); - parser->prev_read_cb_ = stream->read_cb(); - - stream->set_alloc_cb({ OnAllocImpl, parser }); - stream->set_read_cb({ OnReadImpl, parser }); + stream->PushStreamListener(parser); } @@ -510,22 +503,10 @@ class Parser : public AsyncWrap { ASSIGN_OR_RETURN_UNWRAP(&parser, args.Holder()); // Already unconsumed - if (parser->prev_alloc_cb_.is_empty()) + if (parser->stream_ == nullptr) return; - // Restore stream's callbacks - if (args.Length() == 1 && args[0]->IsExternal()) { - Local stream_obj = args[0].As(); - StreamBase* stream = static_cast(stream_obj->Value()); - CHECK_NE(stream, nullptr); - - stream->set_alloc_cb(parser->prev_alloc_cb_); - stream->set_read_cb(parser->prev_read_cb_); - stream->Unconsume(); - } - - parser->prev_alloc_cb_.clear(); - parser->prev_read_cb_.clear(); + parser->stream_->RemoveStreamListener(parser); } @@ -544,33 +525,19 @@ class Parser : public AsyncWrap { protected: static const size_t kAllocBufferSize = 64 * 1024; - static void OnAllocImpl(size_t suggested_size, uv_buf_t* buf, void* ctx) { - Parser* parser = static_cast(ctx); - Environment* env = parser->env(); + uv_buf_t OnStreamAlloc(size_t suggested_size) override { + if (env()->http_parser_buffer() == nullptr) + env()->set_http_parser_buffer(new char[kAllocBufferSize]); - if (env->http_parser_buffer() == nullptr) - env->set_http_parser_buffer(new char[kAllocBufferSize]); - - buf->base = env->http_parser_buffer(); - buf->len = kAllocBufferSize; + return uv_buf_init(env()->http_parser_buffer(), kAllocBufferSize); } - static void OnReadImpl(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx) { - Parser* parser = static_cast(ctx); - HandleScope scope(parser->env()->isolate()); + void OnStreamRead(ssize_t nread, const uv_buf_t& buf) override { + HandleScope scope(env()->isolate()); if (nread < 0) { - uv_buf_t tmp_buf; - tmp_buf.base = nullptr; - tmp_buf.len = 0; - 
parser->prev_read_cb_.fn(nread, - &tmp_buf, - pending, - parser->prev_read_cb_.ctx); + PassReadErrorToPreviousListener(nread); return; } @@ -578,27 +545,27 @@ class Parser : public AsyncWrap { if (nread == 0) return; - parser->current_buffer_.Clear(); - Local ret = parser->Execute(buf->base, nread); + current_buffer_.Clear(); + Local ret = Execute(buf.base, nread); // Exception if (ret.IsEmpty()) return; - Local obj = parser->object(); - Local cb = obj->Get(kOnExecute); + Local cb = + object()->Get(env()->context(), kOnExecute).ToLocalChecked(); if (!cb->IsFunction()) return; // Hooks for GetCurrentBuffer - parser->current_buffer_len_ = nread; - parser->current_buffer_data_ = buf->base; + current_buffer_len_ = nread; + current_buffer_data_ = buf.base; - parser->MakeCallback(cb.As(), 1, &ret); + MakeCallback(cb.As(), 1, &ret); - parser->current_buffer_len_ = 0; - parser->current_buffer_data_ = nullptr; + current_buffer_len_ = 0; + current_buffer_data_ = nullptr; } @@ -713,8 +680,6 @@ class Parser : public AsyncWrap { Local current_buffer_; size_t current_buffer_len_; char* current_buffer_data_; - StreamResource::Callback prev_alloc_cb_; - StreamResource::Callback prev_read_cb_; // These are helper functions for filling `http_parser_settings`, which turn // a member function of Parser into a C-style HTTP parser callback. diff --git a/src/node_url.cc b/src/node_url.cc index 853a23d40d7..cac2831af6e 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -92,6 +92,16 @@ class URLHost { Value value_; HostType type_ = HostType::H_FAILED; + inline void Reset() { + using string = std::string; + switch (type_) { + case HostType::H_DOMAIN: value_.domain.~string(); break; + case HostType::H_OPAQUE: value_.opaque.~string(); break; + default: break; + } + type_ = HostType::H_FAILED; + } + // Setting the string members of the union with = is brittle because // it relies on them being initialized to a state that requires no // destruction of old data. 
@@ -101,23 +111,20 @@ class URLHost { // These helpers are the easiest solution but we might want to consider // just not forcing strings into an union. inline void SetOpaque(std::string&& string) { + Reset(); type_ = HostType::H_OPAQUE; new(&value_.opaque) std::string(std::move(string)); } inline void SetDomain(std::string&& string) { + Reset(); type_ = HostType::H_DOMAIN; new(&value_.domain) std::string(std::move(string)); } }; URLHost::~URLHost() { - using string = std::string; - switch (type_) { - case HostType::H_DOMAIN: value_.domain.~string(); break; - case HostType::H_OPAQUE: value_.opaque.~string(); break; - default: break; - } + Reset(); } #define ARGS(XX) \ diff --git a/src/pipe_wrap.cc b/src/pipe_wrap.cc index c5958a2271a..016ce480b6a 100644 --- a/src/pipe_wrap.cc +++ b/src/pipe_wrap.cc @@ -29,6 +29,7 @@ #include "node_buffer.h" #include "node_wrap.h" #include "connect_wrap.h" +#include "stream_base-inl.h" #include "stream_wrap.h" #include "util-inl.h" diff --git a/src/process_wrap.cc b/src/process_wrap.cc index b01ef562707..314131e1dd3 100644 --- a/src/process_wrap.cc +++ b/src/process_wrap.cc @@ -22,6 +22,7 @@ #include "env-inl.h" #include "handle_wrap.h" #include "node_wrap.h" +#include "stream_base-inl.h" #include "util-inl.h" #include diff --git a/src/stream_base-inl.h b/src/stream_base-inl.h index cdcff67cc55..76922c1d8af 100644 --- a/src/stream_base-inl.h +++ b/src/stream_base-inl.h @@ -25,6 +25,83 @@ using v8::Value; using AsyncHooks = Environment::AsyncHooks; + +inline StreamListener::~StreamListener() { + if (stream_ != nullptr) + stream_->RemoveStreamListener(this); +} + +inline void StreamListener::PassReadErrorToPreviousListener(ssize_t nread) { + CHECK_NE(previous_listener_, nullptr); + previous_listener_->OnStreamRead(nread, uv_buf_init(nullptr, 0)); +} + + +inline StreamResource::~StreamResource() { + while (listener_ != nullptr) { + listener_->OnStreamDestroy(); + RemoveStreamListener(listener_); + } +} + +inline void 
StreamResource::PushStreamListener(StreamListener* listener) { + CHECK_NE(listener, nullptr); + CHECK_EQ(listener->stream_, nullptr); + + listener->previous_listener_ = listener_; + listener->stream_ = this; + + listener_ = listener; +} + +inline void StreamResource::RemoveStreamListener(StreamListener* listener) { + CHECK_NE(listener, nullptr); + + StreamListener* previous; + StreamListener* current; + + // Remove from the linked list. + for (current = listener_, previous = nullptr; + /* No loop condition because we want a crash if listener is not found */ + ; previous = current, current = current->previous_listener_) { + CHECK_NE(current, nullptr); + if (current == listener) { + if (previous != nullptr) + previous->previous_listener_ = current->previous_listener_; + else + listener_ = listener->previous_listener_; + break; + } + } + + listener->stream_ = nullptr; + listener->previous_listener_ = nullptr; +} + + +inline uv_buf_t StreamResource::EmitAlloc(size_t suggested_size) { + return listener_->OnStreamAlloc(suggested_size); +} + +inline void StreamResource::EmitRead(ssize_t nread, const uv_buf_t& buf) { + if (nread > 0) + bytes_read_ += static_cast(nread); + listener_->OnStreamRead(nread, buf); +} + +inline void StreamResource::EmitAfterWrite(WriteWrap* w, int status) { + listener_->OnStreamAfterWrite(w, status); +} + + +inline StreamBase::StreamBase(Environment* env) : env_(env) { + PushStreamListener(&default_listener_); +} + +inline Environment* StreamBase::stream_env() const { + return env_; +} + template void StreamBase::AddMethods(Environment* env, Local t, @@ -70,8 +147,8 @@ void StreamBase::AddMethods(Environment* env, Local(), attributes); - env->SetProtoMethod(t, "readStart", JSMethod); - env->SetProtoMethod(t, "readStop", JSMethod); + env->SetProtoMethod(t, "readStart", JSMethod); + env->SetProtoMethod(t, "readStop", JSMethod); if ((flags & kFlagNoShutdown) == 0) env->SetProtoMethod(t, "shutdown", JSMethod); if ((flags & kFlagHasWritev) != 0) diff 
--git a/src/stream_base.cc b/src/stream_base.cc index 0fb801ddd57..8bdcebe88ab 100644 --- a/src/stream_base.cc +++ b/src/stream_base.cc @@ -34,12 +34,12 @@ template int StreamBase::WriteString( const FunctionCallbackInfo& args); -int StreamBase::ReadStart(const FunctionCallbackInfo& args) { +int StreamBase::ReadStartJS(const FunctionCallbackInfo& args) { return ReadStart(); } -int StreamBase::ReadStop(const FunctionCallbackInfo& args) { +int StreamBase::ReadStopJS(const FunctionCallbackInfo& args) { return ReadStop(); } @@ -437,23 +437,17 @@ void StreamBase::AfterWrite(WriteWrap* req_wrap, int status) { } -void StreamBase::EmitData(ssize_t nread, - Local buf, - Local handle) { +void StreamBase::CallJSOnreadMethod(ssize_t nread, Local buf) { Environment* env = env_; Local argv[] = { Integer::New(env->isolate(), nread), - buf, - handle + buf }; if (argv[1].IsEmpty()) argv[1] = Undefined(env->isolate()); - if (argv[2].IsEmpty()) - argv[2] = Undefined(env->isolate()); - AsyncWrap* wrap = GetAsyncWrap(); CHECK_NE(wrap, nullptr); wrap->MakeCallback(env->onread_string(), arraysize(argv), argv); @@ -490,4 +484,30 @@ void StreamResource::ClearError() { // No-op } + +uv_buf_t StreamListener::OnStreamAlloc(size_t suggested_size) { + return uv_buf_init(Malloc(suggested_size), suggested_size); +} + + +void EmitToJSStreamListener::OnStreamRead(ssize_t nread, const uv_buf_t& buf) { + CHECK_NE(stream_, nullptr); + StreamBase* stream = static_cast(stream_); + Environment* env = stream->stream_env(); + HandleScope handle_scope(env->isolate()); + Context::Scope context_scope(env->context()); + + if (nread <= 0) { + free(buf.base); + if (nread < 0) + stream->CallJSOnreadMethod(nread, Local()); + return; + } + + CHECK_LE(static_cast(nread), buf.len); + + Local obj = Buffer::New(env, buf.base, nread).ToLocalChecked(); + stream->CallJSOnreadMethod(nread, obj); +} + } // namespace node diff --git a/src/stream_base.h b/src/stream_base.h index d063176b04a..f18b6bda0a0 100644 --- 
a/src/stream_base.h +++ b/src/stream_base.h @@ -15,6 +15,7 @@ namespace node { // Forward declarations class StreamBase; +class StreamResource; template class StreamReq { @@ -123,38 +124,69 @@ class WriteWrap : public ReqWrap, const size_t storage_size_; }; -class StreamResource { + +// This is the generic interface for objects that control Node.js' C++ streams. +// For example, the default `EmitToJSStreamListener` emits a stream's data +// as Buffers in JS, or `TLSWrap` reads and decrypts data from a stream. +class StreamListener { public: - template - struct Callback { - Callback() : fn(nullptr), ctx(nullptr) {} - Callback(T fn, void* ctx) : fn(fn), ctx(ctx) {} - Callback(const Callback&) = default; - - inline bool is_empty() { return fn == nullptr; } - inline void clear() { - fn = nullptr; - ctx = nullptr; - } + virtual ~StreamListener(); + + // This is called when a stream wants to allocate memory immediately before + // reading data into the freshly allocated buffer (i.e. it is always followed + // by a `OnStreamRead()` call). + // This memory may be statically or dynamically allocated; for example, + // a protocol parser may want to read data into a static buffer if it knows + // that all data is going to be fully handled during the next + // `OnStreamRead()` call. + // The returned buffer does not need to contain `suggested_size` bytes. + // The default implementation of this method returns a buffer that has exactly + // the suggested size and is allocated using malloc(). + virtual uv_buf_t OnStreamAlloc(size_t suggested_size); + + // `OnStreamRead()` is called when data is available on the socket and has + // been read into the buffer provided by `OnStreamAlloc()`. + // The `buf` argument is the return value of `uv_buf_t`, or may be a buffer + // with base nullpptr in case of an error. + // `nread` is the number of read bytes (which is at most the buffer length), + // or, if negative, a libuv error code. 
+ virtual void OnStreamRead(ssize_t nread, + const uv_buf_t& buf) = 0; + + // This is called once a Write has finished. `status` may be 0 or, + // if negative, a libuv error code. + virtual void OnStreamAfterWrite(WriteWrap* w, int status) {} + + // This is called immediately before the stream is destroyed. + virtual void OnStreamDestroy() {} - T fn; - void* ctx; - }; + protected: + // Pass along a read error to the `StreamListener` instance that was active + // before this one. For example, a protocol parser does not care about read + // errors and may instead want to let the original handler + // (e.g. the JS handler) take care of the situation. + void PassReadErrorToPreviousListener(ssize_t nread); - typedef void (*AfterWriteCb)(WriteWrap* w, int status, void* ctx); - typedef void (*AllocCb)(size_t size, uv_buf_t* buf, void* ctx); - typedef void (*ReadCb)(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx); - typedef void (*DestructCb)(void* ctx); + StreamResource* stream_ = nullptr; + StreamListener* previous_listener_ = nullptr; - StreamResource() : bytes_read_(0) { - } - virtual ~StreamResource() { - if (!destruct_cb_.is_empty()) - destruct_cb_.fn(destruct_cb_.ctx); - } + friend class StreamResource; +}; + + +// A default emitter that just pushes data chunks as Buffer instances to +// JS land via the handle’s .ondata method. +class EmitToJSStreamListener : public StreamListener { + public: + void OnStreamRead(ssize_t nread, const uv_buf_t& buf) override; +}; + + +// A generic stream, comparable to JS land’s `Duplex` streams. +// A stream is always controlled through one `StreamListener` instance. 
+class StreamResource { + public: + virtual ~StreamResource(); virtual int DoShutdown(ShutdownWrap* req_wrap) = 0; virtual int DoTryWrite(uv_buf_t** bufs, size_t* count); @@ -162,50 +194,43 @@ class StreamResource { uv_buf_t* bufs, size_t count, uv_stream_t* send_handle) = 0; - virtual const char* Error() const; - virtual void ClearError(); - - // Events - inline void EmitAfterWrite(WriteWrap* w, int status) { - if (!after_write_cb_.is_empty()) - after_write_cb_.fn(w, status, after_write_cb_.ctx); - } - inline void EmitAlloc(size_t size, uv_buf_t* buf) { - if (!alloc_cb_.is_empty()) - alloc_cb_.fn(size, buf, alloc_cb_.ctx); - } - - inline void EmitRead(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending = UV_UNKNOWN_HANDLE) { - if (nread > 0) - bytes_read_ += static_cast(nread); - if (!read_cb_.is_empty()) - read_cb_.fn(nread, buf, pending, read_cb_.ctx); - } - - inline void set_after_write_cb(Callback c) { - after_write_cb_ = c; - } + // Start reading from the underlying resource. This is called by the consumer + // when more data is desired. + virtual int ReadStart() = 0; + // Stop reading from the underlying resource. This is called by the + // consumer when its buffers are full and no more data can be handled. + virtual int ReadStop() = 0; - inline void set_alloc_cb(Callback c) { alloc_cb_ = c; } - inline void set_read_cb(Callback c) { read_cb_ = c; } - inline void set_destruct_cb(Callback c) { destruct_cb_ = c; } + // Optionally, this may provide an error message to be used for + // failing writes. + virtual const char* Error() const; + // Clear the current error (i.e. that would be returned by Error()). + virtual void ClearError(); - inline Callback after_write_cb() { return after_write_cb_; } - inline Callback alloc_cb() { return alloc_cb_; } - inline Callback read_cb() { return read_cb_; } - inline Callback destruct_cb() { return destruct_cb_; } + // Transfer ownership of this tream to `listener`. 
The previous listener + // will not receive any more callbacks while the new listener was active. + void PushStreamListener(StreamListener* listener); + // Remove a listener, and, if this was the currently active one, + // transfer ownership back to the previous listener. + void RemoveStreamListener(StreamListener* listener); protected: - Callback after_write_cb_; - Callback alloc_cb_; - Callback read_cb_; - Callback destruct_cb_; - uint64_t bytes_read_; + // Call the current listener's OnStreamAlloc() method. + uv_buf_t EmitAlloc(size_t suggested_size); + // Call the current listener's OnStreamRead() method and update the + // stream's read byte counter. + void EmitRead(ssize_t nread, const uv_buf_t& buf = uv_buf_init(nullptr, 0)); + // Call the current listener's OnStreamAfterWrite() method. + void EmitAfterWrite(WriteWrap* w, int status); + + StreamListener* listener_ = nullptr; + uint64_t bytes_read_ = 0; + + friend class StreamListener; }; + class StreamBase : public StreamResource { public: enum Flags { @@ -224,40 +249,26 @@ class StreamBase : public StreamResource { virtual bool IsIPCPipe(); virtual int GetFD(); - virtual int ReadStart() = 0; - virtual int ReadStop() = 0; - - inline void Consume() { - CHECK_EQ(consumed_, false); - consumed_ = true; - } - - inline void Unconsume() { - CHECK_EQ(consumed_, true); - consumed_ = false; - } - - void EmitData(ssize_t nread, - v8::Local buf, - v8::Local handle); + void CallJSOnreadMethod(ssize_t nread, v8::Local buf); // These are called by the respective {Write,Shutdown}Wrap class. virtual void AfterShutdown(ShutdownWrap* req, int status); virtual void AfterWrite(WriteWrap* req, int status); - protected: - explicit StreamBase(Environment* env) : env_(env), consumed_(false) { - } + // This is named `stream_env` to avoid name clashes, because a lot of + // subclasses are also `BaseObject`s. 
+ Environment* stream_env() const; - virtual ~StreamBase() = default; + protected: + explicit StreamBase(Environment* env); // One of these must be implemented virtual AsyncWrap* GetAsyncWrap() = 0; virtual v8::Local GetObject(); // JS Methods - int ReadStart(const v8::FunctionCallbackInfo& args); - int ReadStop(const v8::FunctionCallbackInfo& args); + int ReadStartJS(const v8::FunctionCallbackInfo& args); + int ReadStopJS(const v8::FunctionCallbackInfo& args); int Shutdown(const v8::FunctionCallbackInfo& args); int Writev(const v8::FunctionCallbackInfo& args); int WriteBuffer(const v8::FunctionCallbackInfo& args); @@ -280,7 +291,7 @@ class StreamBase : public StreamResource { private: Environment* env_; - bool consumed_; + EmitToJSStreamListener default_listener_; }; } // namespace node diff --git a/src/stream_wrap.cc b/src/stream_wrap.cc index b639d945004..bc10cf80e82 100644 --- a/src/stream_wrap.cc +++ b/src/stream_wrap.cc @@ -93,8 +93,6 @@ LibuvStreamWrap::LibuvStreamWrap(Environment* env, provider), StreamBase(env), stream_(stream) { - set_alloc_cb({ OnAllocImpl, this }); - set_read_cb({ OnReadImpl, this }); } @@ -147,7 +145,13 @@ bool LibuvStreamWrap::IsIPCPipe() { int LibuvStreamWrap::ReadStart() { - return uv_read_start(stream(), OnAlloc, OnRead); + return uv_read_start(stream(), [](uv_handle_t* handle, + size_t suggested_size, + uv_buf_t* buf) { + static_cast(handle->data)->OnUvAlloc(suggested_size, buf); + }, [](uv_stream_t* stream, ssize_t nread, const uv_buf_t* buf) { + static_cast(stream->data)->OnUvRead(nread, buf); + }); } @@ -156,24 +160,14 @@ int LibuvStreamWrap::ReadStop() { } -void LibuvStreamWrap::OnAlloc(uv_handle_t* handle, - size_t suggested_size, - uv_buf_t* buf) { - LibuvStreamWrap* wrap = static_cast(handle->data); - HandleScope scope(wrap->env()->isolate()); - Context::Scope context_scope(wrap->env()->context()); +void LibuvStreamWrap::OnUvAlloc(size_t suggested_size, uv_buf_t* buf) { + HandleScope scope(env()->isolate()); + 
Context::Scope context_scope(env()->context()); - CHECK_EQ(wrap->stream(), reinterpret_cast(handle)); - - return wrap->EmitAlloc(suggested_size, buf); + *buf = EmitAlloc(suggested_size); } -void LibuvStreamWrap::OnAllocImpl(size_t size, uv_buf_t* buf, void* ctx) { - buf->base = node::Malloc(size); - buf->len = size; -} - template static Local AcceptHandle(Environment* env, LibuvStreamWrap* parent) { @@ -196,74 +190,47 @@ static Local AcceptHandle(Environment* env, LibuvStreamWrap* parent) { } -void LibuvStreamWrap::OnReadImpl(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx) { - LibuvStreamWrap* wrap = static_cast(ctx); - Environment* env = wrap->env(); - HandleScope handle_scope(env->isolate()); - Context::Scope context_scope(env->context()); - - Local pending_obj; - - if (nread < 0) { - if (buf->base != nullptr) - free(buf->base); - wrap->EmitData(nread, Local(), pending_obj); - return; - } - - if (nread == 0) { - if (buf->base != nullptr) - free(buf->base); - return; - } - - CHECK_LE(static_cast(nread), buf->len); - char* base = node::Realloc(buf->base, nread); - - if (pending == UV_TCP) { - pending_obj = AcceptHandle(env, wrap); - } else if (pending == UV_NAMED_PIPE) { - pending_obj = AcceptHandle(env, wrap); - } else if (pending == UV_UDP) { - pending_obj = AcceptHandle(env, wrap); - } else { - CHECK_EQ(pending, UV_UNKNOWN_HANDLE); - } - - Local obj = Buffer::New(env, base, nread).ToLocalChecked(); - wrap->EmitData(nread, obj, pending_obj); -} - - -void LibuvStreamWrap::OnRead(uv_stream_t* handle, - ssize_t nread, - const uv_buf_t* buf) { - LibuvStreamWrap* wrap = static_cast(handle->data); - HandleScope scope(wrap->env()->isolate()); - Context::Scope context_scope(wrap->env()->context()); +void LibuvStreamWrap::OnUvRead(ssize_t nread, const uv_buf_t* buf) { + HandleScope scope(env()->isolate()); + Context::Scope context_scope(env()->context()); uv_handle_type type = UV_UNKNOWN_HANDLE; - if (wrap->is_named_pipe_ipc() && - 
uv_pipe_pending_count(reinterpret_cast(handle)) > 0) { - type = uv_pipe_pending_type(reinterpret_cast(handle)); + if (is_named_pipe_ipc() && + uv_pipe_pending_count(reinterpret_cast(stream())) > 0) { + type = uv_pipe_pending_type(reinterpret_cast(stream())); } // We should not be getting this callback if someone as already called // uv_close() on the handle. - CHECK_EQ(wrap->persistent().IsEmpty(), false); + CHECK_EQ(persistent().IsEmpty(), false); if (nread > 0) { - if (wrap->is_tcp()) { + if (is_tcp()) { NODE_COUNT_NET_BYTES_RECV(nread); - } else if (wrap->is_named_pipe()) { + } else if (is_named_pipe()) { NODE_COUNT_PIPE_BYTES_RECV(nread); } + + Local pending_obj; + + if (type == UV_TCP) { + pending_obj = AcceptHandle(env(), this); + } else if (type == UV_NAMED_PIPE) { + pending_obj = AcceptHandle(env(), this); + } else if (type == UV_UDP) { + pending_obj = AcceptHandle(env(), this); + } else { + CHECK_EQ(type, UV_UNKNOWN_HANDLE); + } + + if (!pending_obj.IsEmpty()) { + object()->Set(env()->context(), + env()->pending_handle_string(), + pending_obj).FromJust(); + } } - wrap->EmitRead(nread, buf, type); + EmitRead(nread, *buf); } @@ -389,11 +356,6 @@ void LibuvStreamWrap::AfterUvWrite(uv_write_t* req, int status) { req_wrap->Done(status); } - -void LibuvStreamWrap::AfterWrite(WriteWrap* w, int status) { - StreamBase::AfterWrite(w, status); -} - } // namespace node NODE_BUILTIN_MODULE_CONTEXT_AWARE(stream_wrap, diff --git a/src/stream_wrap.h b/src/stream_wrap.h index 0146d41c6e8..e5ad25b91e6 100644 --- a/src/stream_wrap.h +++ b/src/stream_wrap.h @@ -79,9 +79,6 @@ class LibuvStreamWrap : public HandleWrap, public StreamBase { uv_stream_t* stream, AsyncWrap::ProviderType provider); - ~LibuvStreamWrap() { - } - AsyncWrap* GetAsyncWrap() override; static void AddMethods(Environment* env, @@ -94,25 +91,12 @@ class LibuvStreamWrap : public HandleWrap, public StreamBase { static void SetBlocking(const v8::FunctionCallbackInfo& args); // Callbacks for libuv - static void 
OnAlloc(uv_handle_t* handle, - size_t suggested_size, - uv_buf_t* buf); + void OnUvAlloc(size_t suggested_size, uv_buf_t* buf); + void OnUvRead(ssize_t nread, const uv_buf_t* buf); - static void OnRead(uv_stream_t* handle, - ssize_t nread, - const uv_buf_t* buf); static void AfterUvWrite(uv_write_t* req, int status); static void AfterUvShutdown(uv_shutdown_t* req, int status); - // Resource interface implementation - static void OnAllocImpl(size_t size, uv_buf_t* buf, void* ctx); - static void OnReadImpl(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx); - - void AfterWrite(WriteWrap* req_wrap, int status) override; - uv_stream_t* const stream_; }; diff --git a/src/tcp_wrap.cc b/src/tcp_wrap.cc index 3a0a3f295e2..a0a58fb1b5c 100644 --- a/src/tcp_wrap.cc +++ b/src/tcp_wrap.cc @@ -27,6 +27,7 @@ #include "node_buffer.h" #include "node_wrap.h" #include "connect_wrap.h" +#include "stream_base-inl.h" #include "stream_wrap.h" #include "util-inl.h" diff --git a/src/tls_wrap.cc b/src/tls_wrap.cc index 18b3cf01f40..971dbb857f7 100644 --- a/src/tls_wrap.cc +++ b/src/tls_wrap.cc @@ -59,7 +59,6 @@ TLSWrap::TLSWrap(Environment* env, SSLWrap(env, sc, kind), StreamBase(env), sc_(sc), - stream_(stream), enc_in_(nullptr), enc_out_(nullptr), write_size_(0), @@ -78,14 +77,7 @@ TLSWrap::TLSWrap(Environment* env, SSL_CTX_sess_set_get_cb(sc_->ctx_, SSLWrap::GetSessionCallback); SSL_CTX_sess_set_new_cb(sc_->ctx_, SSLWrap::NewSessionCallback); - stream_->Consume(); - stream_->set_after_write_cb({ OnAfterWriteImpl, this }); - stream_->set_alloc_cb({ OnAllocImpl, this }); - stream_->set_read_cb({ OnReadImpl, this }); - stream_->set_destruct_cb({ OnDestructImpl, this }); - - set_alloc_cb({ OnAllocSelf, this }); - set_read_cb({ OnReadSelf, this }); + stream->PushStreamListener(this); InitSSL(); } @@ -100,19 +92,6 @@ TLSWrap::~TLSWrap() { #ifdef SSL_CTRL_SET_TLSEXT_SERVERNAME_CB sni_context_.Reset(); #endif // SSL_CTRL_SET_TLSEXT_SERVERNAME_CB - - // See 
test/parallel/test-tls-transport-destroy-after-own-gc.js: - // If this TLSWrap is garbage collected, we cannot allow callbacks to be - // called on this stream. - - if (stream_ == nullptr) - return; - stream_->set_destruct_cb({ nullptr, nullptr }); - stream_->set_after_write_cb({ nullptr, nullptr }); - stream_->set_alloc_cb({ nullptr, nullptr }); - stream_->set_read_cb({ nullptr, nullptr }); - stream_->set_destruct_cb({ nullptr, nullptr }); - stream_->Unconsume(); } @@ -208,15 +187,13 @@ void TLSWrap::Receive(const FunctionCallbackInfo& args) { char* data = Buffer::Data(args[0]); size_t len = Buffer::Length(args[0]); - uv_buf_t buf; - // Copy given buffer entirely or partiall if handle becomes closed while (len > 0 && wrap->IsAlive() && !wrap->IsClosing()) { - wrap->stream_->EmitAlloc(len, &buf); + uv_buf_t buf = wrap->OnStreamAlloc(len); size_t copy = buf.len > len ? len : buf.len; memcpy(buf.base, data, copy); buf.len = copy; - wrap->stream_->EmitRead(buf.len, &buf); + wrap->OnStreamRead(copy, buf); data += copy; len -= copy; @@ -307,7 +284,7 @@ void TLSWrap::EncOut() { ->NewInstance(env()->context()).ToLocalChecked(); WriteWrap* write_req = WriteWrap::New(env(), req_wrap_obj, - stream_); + static_cast(stream_)); uv_buf_t buf[arraysize(data)]; for (size_t i = 0; i < count; i++) @@ -324,7 +301,7 @@ void TLSWrap::EncOut() { } -void TLSWrap::EncOutAfterWrite(WriteWrap* req_wrap, int status) { +void TLSWrap::OnStreamAfterWrite(WriteWrap* req_wrap, int status) { // We should not be getting here after `DestroySSL`, because all queued writes // must be invoked with UV_ECANCELED CHECK_NE(ssl_, nullptr); @@ -421,12 +398,11 @@ void TLSWrap::ClearOut() { while (read > 0) { int avail = read; - uv_buf_t buf; - EmitAlloc(avail, &buf); + uv_buf_t buf = EmitAlloc(avail); if (static_cast(buf.len) < avail) avail = buf.len; memcpy(buf.base, current, avail); - EmitRead(avail, &buf); + EmitRead(avail, buf); // Caveat emptor: OnRead() calls into JS land which can result in // the SSL 
context object being destroyed. We have to carefully @@ -442,7 +418,7 @@ void TLSWrap::ClearOut() { int flags = SSL_get_shutdown(ssl_); if (!eof_ && flags & SSL_RECEIVED_SHUTDOWN) { eof_ = true; - EmitRead(UV_EOF, nullptr); + EmitRead(UV_EOF); } // We need to check whether an error occurred or the connection was @@ -524,22 +500,24 @@ AsyncWrap* TLSWrap::GetAsyncWrap() { bool TLSWrap::IsIPCPipe() { - return stream_->IsIPCPipe(); + return static_cast(stream_)->IsIPCPipe(); } int TLSWrap::GetFD() { - return stream_->GetFD(); + return static_cast(stream_)->GetFD(); } bool TLSWrap::IsAlive() { - return ssl_ != nullptr && stream_ != nullptr && stream_->IsAlive(); + return ssl_ != nullptr && + stream_ != nullptr && + static_cast(stream_)->IsAlive(); } bool TLSWrap::IsClosing() { - return stream_->IsClosing(); + return static_cast(stream_)->IsClosing(); } @@ -638,62 +616,16 @@ int TLSWrap::DoWrite(WriteWrap* w, } -void TLSWrap::OnAfterWriteImpl(WriteWrap* w, int status, void* ctx) { - TLSWrap* wrap = static_cast(ctx); - wrap->EncOutAfterWrite(w, status); -} - - -void TLSWrap::OnAllocImpl(size_t suggested_size, uv_buf_t* buf, void* ctx) { - TLSWrap* wrap = static_cast(ctx); - - if (wrap->ssl_ == nullptr) { - *buf = uv_buf_init(nullptr, 0); - return; - } - - size_t size = 0; - buf->base = crypto::NodeBIO::FromBIO(wrap->enc_in_)->PeekWritable(&size); - buf->len = size; -} - - -void TLSWrap::OnReadImpl(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx) { - TLSWrap* wrap = static_cast(ctx); - wrap->DoRead(nread, buf, pending); -} - - -void TLSWrap::OnDestructImpl(void* ctx) { - TLSWrap* wrap = static_cast(ctx); - wrap->clear_stream(); -} - - -void TLSWrap::OnAllocSelf(size_t suggested_size, uv_buf_t* buf, void* ctx) { - buf->base = node::Malloc(suggested_size); - buf->len = suggested_size; -} - +uv_buf_t TLSWrap::OnStreamAlloc(size_t suggested_size) { + CHECK_NE(ssl_, nullptr); -void TLSWrap::OnReadSelf(ssize_t nread, - const uv_buf_t* buf, - 
uv_handle_type pending, - void* ctx) { - TLSWrap* wrap = static_cast(ctx); - Local buf_obj; - if (buf != nullptr) - buf_obj = Buffer::New(wrap->env(), buf->base, buf->len).ToLocalChecked(); - wrap->EmitData(nread, buf_obj, Local()); + size_t size = suggested_size; + char* base = crypto::NodeBIO::FromBIO(enc_in_)->PeekWritable(&size); + return uv_buf_init(base, size); } -void TLSWrap::DoRead(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending) { +void TLSWrap::OnStreamRead(ssize_t nread, const uv_buf_t& buf) { if (nread < 0) { // Error should be emitted only after all data was read ClearOut(); @@ -705,13 +637,13 @@ void TLSWrap::DoRead(ssize_t nread, eof_ = true; } - EmitRead(nread, nullptr); + EmitRead(nread); return; } // Only client connections can receive data if (ssl_ == nullptr) { - EmitRead(UV_EPROTO, nullptr); + EmitRead(UV_EPROTO); return; } @@ -800,6 +732,9 @@ void TLSWrap::DestroySSL(const FunctionCallbackInfo& args) { // Destroy the SSL structure and friends wrap->SSLWrap::DestroySSL(); + + if (wrap->stream_ != nullptr) + wrap->stream_->RemoveStreamListener(wrap); } diff --git a/src/tls_wrap.h b/src/tls_wrap.h index ae83c82c322..a1f0b99e86b 100644 --- a/src/tls_wrap.h +++ b/src/tls_wrap.h @@ -48,7 +48,8 @@ class NodeBIO; class TLSWrap : public AsyncWrap, public crypto::SSLWrap, - public StreamBase { + public StreamBase, + public StreamListener { public: ~TLSWrap() override; @@ -76,8 +77,6 @@ class TLSWrap : public AsyncWrap, size_t self_size() const override { return sizeof(*this); } - void clear_stream() { stream_ = nullptr; } - protected: static const int kClearOutChunkSize = 16384; @@ -98,7 +97,6 @@ class TLSWrap : public AsyncWrap, static void SSLInfoCallback(const SSL* ssl_, int where, int ret); void InitSSL(); void EncOut(); - void EncOutAfterWrite(WriteWrap* req_wrap, int status); bool ClearIn(); void ClearOut(); bool InvokeQueued(int status, const char* error_str = nullptr); @@ -119,20 +117,9 @@ class TLSWrap : public AsyncWrap, bool 
IsIPCPipe() override; // Resource implementation - static void OnAfterWriteImpl(WriteWrap* w, int status, void* ctx); - static void OnAllocImpl(size_t size, uv_buf_t* buf, void* ctx); - static void OnReadImpl(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx); - static void OnAllocSelf(size_t size, uv_buf_t* buf, void* ctx); - static void OnReadSelf(ssize_t nread, - const uv_buf_t* buf, - uv_handle_type pending, - void* ctx); - static void OnDestructImpl(void* ctx); - - void DoRead(ssize_t nread, const uv_buf_t* buf, uv_handle_type pending); + void OnStreamAfterWrite(WriteWrap* w, int status) override; + uv_buf_t OnStreamAlloc(size_t size) override; + void OnStreamRead(ssize_t nread, const uv_buf_t& buf) override; v8::Local GetSSLError(int status, int* err, std::string* msg); @@ -154,7 +141,6 @@ class TLSWrap : public AsyncWrap, #endif // SSL_CTRL_SET_TLSEXT_SERVERNAME_CB crypto::SecureContext* sc_; - StreamBase* stream_; BIO* enc_in_; BIO* enc_out_; std::vector pending_cleartext_input_; diff --git a/src/tty_wrap.cc b/src/tty_wrap.cc index fae39158ef2..9977738afcb 100644 --- a/src/tty_wrap.cc +++ b/src/tty_wrap.cc @@ -26,6 +26,7 @@ #include "node_buffer.h" #include "node_wrap.h" #include "req_wrap-inl.h" +#include "stream_base-inl.h" #include "stream_wrap.h" #include "util-inl.h" diff --git a/test/addons-napi/test_exception/test.js b/test/addons-napi/test_exception/test.js index 83961411df7..787b7d78b1c 100644 --- a/test/addons-napi/test_exception/test.js +++ b/test/addons-napi/test_exception/test.js @@ -20,9 +20,10 @@ const theError = new Error('Some error'); // Test that the exception thrown above was marked as pending // before it was handled on the JS side - assert.strictEqual(test_exception.wasPending(), true, - 'VM was marked as having an exception pending' + - ' when it was allowed through'); + const exception_pending = test_exception.wasPending(); + assert.strictEqual(exception_pending, true, + 'Exception not pending as expected,' 
+ + ` .wasPending() returned ${exception_pending}`); // Test that the native side does not capture a non-existing exception returnedError = test_exception.returnException(common.mustCall()); @@ -44,7 +45,8 @@ const theError = new Error('Some error'); ` ${caughtError} was passed`); // Test that the exception state remains clear when no exception is thrown - assert.strictEqual(test_exception.wasPending(), false, - 'VM was not marked as having an exception pending' + - ' when none was allowed through'); + const exception_pending = test_exception.wasPending(); + assert.strictEqual(exception_pending, false, + 'Exception state did not remain clear as expected,' + + ` .wasPending() returned ${exception_pending}`); } diff --git a/test/addons-napi/test_general/test.js b/test/addons-napi/test_general/test.js index ca04f1da148..1b43778d1db 100644 --- a/test/addons-napi/test_general/test.js +++ b/test/addons-napi/test_general/test.js @@ -28,9 +28,9 @@ assert.strictEqual(test_general.testGetPrototype(baseObject), Object.getPrototypeOf(baseObject)); assert.strictEqual(test_general.testGetPrototype(extendedObject), Object.getPrototypeOf(extendedObject)); -assert.ok(test_general.testGetPrototype(baseObject) !== - test_general.testGetPrototype(extendedObject), - 'Prototypes for base and extended should be different'); +// Prototypes for base and extended should be different. +assert.notStrictEqual(test_general.testGetPrototype(baseObject), + test_general.testGetPrototype(extendedObject)); // test version management functions // expected version is currently 1 @@ -73,17 +73,15 @@ assert.strictEqual(test_general.derefItemWasCalled(), true, // Assert that wrapping twice fails. const x = {}; test_general.wrap(x); -assert.throws(function() { - test_general.wrap(x); -}, Error); +assert.throws(() => test_general.wrap(x), Error); // Ensure that wrapping, removing the wrap, and then wrapping again works. 
const y = {}; test_general.wrap(y); test_general.removeWrap(y); -assert.doesNotThrow(function() { - test_general.wrap(y); -}, Error, 'Wrapping twice succeeds if a remove_wrap() separates the instances'); +assert.doesNotThrow(() => test_general.wrap(y), Error, + 'Wrapping twice succeeds if a remove_wrap()' + + ' separates the instances'); // Ensure that removing a wrap and garbage collecting does not fire the // finalize callback. diff --git a/test/addons/load-long-path/test.js b/test/addons/load-long-path/test.js index accb90d2638..ee09230676b 100644 --- a/test/addons/load-long-path/test.js +++ b/test/addons/load-long-path/test.js @@ -7,12 +7,13 @@ const fs = require('fs'); const path = require('path'); const assert = require('assert'); -common.refreshTmpDir(); +const tmpdir = require('../../common/tmpdir'); +tmpdir.refresh(); // make a path that is more than 260 chars long. // Any given folder cannot have a name longer than 260 characters, // so create 10 nested folders each with 30 character long names. 
-let addonDestinationDir = path.resolve(common.tmpDir); +let addonDestinationDir = path.resolve(tmpdir.path); for (let i = 0; i < 10; i++) { addonDestinationDir = path.join(addonDestinationDir, 'x'.repeat(30)); diff --git a/test/addons/openssl-client-cert-engine/binding.gyp b/test/addons/openssl-client-cert-engine/binding.gyp index b069e43429c..f43be602199 100644 --- a/test/addons/openssl-client-cert-engine/binding.gyp +++ b/test/addons/openssl-client-cert-engine/binding.gyp @@ -14,7 +14,7 @@ 'include_dirs': ['../../../deps/openssl/openssl/include'], 'link_settings': { 'libraries': [ - '../../../../out/<(PRODUCT_DIR)/<(OPENSSL_PRODUCT)' + '../../../../out/<(PRODUCT_DIR)/<(openssl_product)' ] }, }] diff --git a/test/addons/symlinked-module/test.js b/test/addons/symlinked-module/test.js index d9455c027bd..53306399cb5 100644 --- a/test/addons/symlinked-module/test.js +++ b/test/addons/symlinked-module/test.js @@ -12,10 +12,11 @@ const assert = require('assert'); // This test should pass in Node.js v4 and v5. This test will pass in Node.js // with https://github.com/nodejs/node/pull/5950 reverted. 
-common.refreshTmpDir(); +const tmpdir = require('../../common/tmpdir'); +tmpdir.refresh(); const addonPath = path.join(__dirname, 'build', common.buildType); -const addonLink = path.join(common.tmpDir, 'addon'); +const addonLink = path.join(tmpdir.path, 'addon'); try { fs.symlinkSync(addonPath, addonLink); diff --git a/test/async-hooks/test-graph.pipeconnect.js b/test/async-hooks/test-graph.pipeconnect.js index b3ea5c6e421..03d2902c835 100644 --- a/test/async-hooks/test-graph.pipeconnect.js +++ b/test/async-hooks/test-graph.pipeconnect.js @@ -6,7 +6,8 @@ const verifyGraph = require('./verify-graph'); const net = require('net'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const hooks = initHooks(); hooks.enable(); diff --git a/test/async-hooks/test-pipeconnectwrap.js b/test/async-hooks/test-pipeconnectwrap.js index a993f0c8fee..df4b8110e67 100644 --- a/test/async-hooks/test-pipeconnectwrap.js +++ b/test/async-hooks/test-pipeconnectwrap.js @@ -8,7 +8,8 @@ const { checkInvocations } = require('./hook-checks'); const net = require('net'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const hooks = initHooks(); hooks.enable(); diff --git a/test/common/README.md b/test/common/README.md index 3e3e5543b40..73779fd5372 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -12,6 +12,7 @@ This directory contains modules used to test the Node.js implementation. * [Fixtures module](#fixtures-module) * [HTTP2 module](#http2-module) * [Internet module](#internet-module) +* [tmpdir module](#tmpdir-module) * [WPT module](#wpt-module) ## Benchmark Module @@ -332,11 +333,6 @@ A port number for tests to use if one is needed. Logs '1..0 # Skipped: ' + `msg` -### refreshTmpDir() -* return [<String>] - -Deletes the testing 'tmp' directory and recreates it. - ### restoreStderr() Restore the original `process.stderr.write`. 
Used to restore `stderr` to its @@ -379,11 +375,6 @@ Platform normalizes the `pwd` command. Synchronous version of `spawnPwd`. -### tmpDir -* [<String>] - -The realpath of the 'tmp' directory. - ## Countdown Module The `Countdown` module provides a simple countdown mechanism for tests that @@ -665,6 +656,19 @@ via `NODE_TEST_*` environment variables. For example, to configure `internet.addresses.INET_HOST`, set the environment variable `NODE_TEST_INET_HOST` to a specified host. +## tmpdir Module + +The `tmpdir` module supports the use of a temporary directory for testing. + +### path +* [<String>] + +The realpath of the testing temporary directory. + +### refresh() + +Deletes and recreates the testing temporary directory. + ## WPT Module The wpt.js module is a port of parts of diff --git a/test/common/index.js b/test/common/index.js index 582cbb2dd33..c123bac31f8 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -31,16 +31,10 @@ const stream = require('stream'); const util = require('util'); const Timer = process.binding('timer_wrap').Timer; const { fixturesDir } = require('./fixtures'); - -const testRoot = process.env.NODE_TEST_DIR ? - fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..'); +const tmpdir = require('./tmpdir'); const noop = () => {}; -// Using a `.` prefixed name, which is the convention for "hidden" on POSIX, -// gets tools to ignore it by default or by simple rules, especially eslint. 
-let tmpDirName = '.tmp'; - Object.defineProperty(exports, 'PORT', { get: () => { if (+process.env.TEST_PARALLEL) { @@ -121,62 +115,6 @@ if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) { }).enable(); } -function rimrafSync(p) { - let st; - try { - st = fs.lstatSync(p); - } catch (e) { - if (e.code === 'ENOENT') - return; - } - - try { - if (st && st.isDirectory()) - rmdirSync(p, null); - else - fs.unlinkSync(p); - } catch (e) { - if (e.code === 'ENOENT') - return; - if (e.code === 'EPERM') - return rmdirSync(p, e); - if (e.code !== 'EISDIR') - throw e; - rmdirSync(p, e); - } -} - -function rmdirSync(p, originalEr) { - try { - fs.rmdirSync(p); - } catch (e) { - if (e.code === 'ENOTDIR') - throw originalEr; - if (e.code === 'ENOTEMPTY' || e.code === 'EEXIST' || e.code === 'EPERM') { - const enc = exports.isLinux ? 'buffer' : 'utf8'; - fs.readdirSync(p, enc).forEach((f) => { - if (f instanceof Buffer) { - const buf = Buffer.concat([Buffer.from(p), Buffer.from(path.sep), f]); - rimrafSync(buf); - } else { - rimrafSync(path.join(p, f)); - } - }); - fs.rmdirSync(p); - } - } -} - -exports.refreshTmpDir = function() { - rimrafSync(exports.tmpDir); - fs.mkdirSync(exports.tmpDir); -}; - -if (process.env.TEST_THREAD_ID) { - tmpDirName += `.${process.env.TEST_THREAD_ID}`; -} -exports.tmpDir = path.join(testRoot, tmpDirName); - let opensslCli = null; let inFreeBSDJail = null; let localhostIPv4 = null; @@ -270,7 +208,7 @@ Object.defineProperty(exports, 'hasFipsCrypto', { }); { - const localRelative = path.relative(process.cwd(), `${exports.tmpDir}/`); + const localRelative = path.relative(process.cwd(), `${tmpdir.path}/`); const pipePrefix = exports.isWindows ? 
'\\\\.\\pipe\\' : localRelative; const pipeName = `node-test.${process.pid}.sock`; exports.PIPE = path.join(pipePrefix, pipeName); diff --git a/test/common/tmpdir.js b/test/common/tmpdir.js new file mode 100644 index 00000000000..ed731b3e7a1 --- /dev/null +++ b/test/common/tmpdir.js @@ -0,0 +1,67 @@ +/* eslint-disable required-modules */ +'use strict'; + +const fs = require('fs'); +const path = require('path'); + +function rimrafSync(p) { + let st; + try { + st = fs.lstatSync(p); + } catch (e) { + if (e.code === 'ENOENT') + return; + } + + try { + if (st && st.isDirectory()) + rmdirSync(p, null); + else + fs.unlinkSync(p); + } catch (e) { + if (e.code === 'ENOENT') + return; + if (e.code === 'EPERM') + return rmdirSync(p, e); + if (e.code !== 'EISDIR') + throw e; + rmdirSync(p, e); + } +} + +function rmdirSync(p, originalEr) { + try { + fs.rmdirSync(p); + } catch (e) { + if (e.code === 'ENOTDIR') + throw originalEr; + if (e.code === 'ENOTEMPTY' || e.code === 'EEXIST' || e.code === 'EPERM') { + const enc = process.platform === 'linux' ? 'buffer' : 'utf8'; + fs.readdirSync(p, enc).forEach((f) => { + if (f instanceof Buffer) { + const buf = Buffer.concat([Buffer.from(p), Buffer.from(path.sep), f]); + rimrafSync(buf); + } else { + rimrafSync(path.join(p, f)); + } + }); + fs.rmdirSync(p); + } + } +} + +const testRoot = process.env.NODE_TEST_DIR ? + fs.realpathSync(process.env.NODE_TEST_DIR) : path.resolve(__dirname, '..'); + +// Using a `.` prefixed name, which is the convention for "hidden" on POSIX, +// gets tools to ignore it by default or by simple rules, especially eslint. 
+let tmpdirName = '.tmp'; +if (process.env.TEST_THREAD_ID) { + tmpdirName += `.${process.env.TEST_THREAD_ID}`; +} +exports.path = path.join(testRoot, tmpdirName); + +exports.refresh = () => { + rimrafSync(exports.path); + fs.mkdirSync(exports.path); +}; diff --git a/test/es-module/test-esm-dynamic-import.js b/test/es-module/test-esm-dynamic-import.js index 997eed289eb..9c4d48aaf02 100644 --- a/test/es-module/test-esm-dynamic-import.js +++ b/test/es-module/test-esm-dynamic-import.js @@ -1,4 +1,4 @@ -// Flags: --experimental-modules --harmony-dynamic-import +// Flags: --experimental-modules 'use strict'; const common = require('../common'); const assert = require('assert'); diff --git a/test/es-module/test-esm-import-meta.mjs b/test/es-module/test-esm-import-meta.mjs new file mode 100644 index 00000000000..c17e0e20d49 --- /dev/null +++ b/test/es-module/test-esm-import-meta.mjs @@ -0,0 +1,22 @@ +// Flags: --experimental-modules + +import '../common'; +import assert from 'assert'; + +assert.strictEqual(Object.getPrototypeOf(import.meta), null); + +const keys = ['url']; +assert.deepStrictEqual(Reflect.ownKeys(import.meta), keys); + +const descriptors = Object.getOwnPropertyDescriptors(import.meta); +for (const descriptor of Object.values(descriptors)) { + delete descriptor.value; // Values are verified below. 
+ assert.deepStrictEqual(descriptor, { + enumerable: true, + writable: true, + configurable: true + }); +} + +const urlReg = /^file:\/\/\/.*\/test\/es-module\/test-esm-import-meta\.mjs$/; +assert(import.meta.url.match(urlReg)); diff --git a/test/es-module/test-esm-preserve-symlinks.js b/test/es-module/test-esm-preserve-symlinks.js index eea5bf061b2..e8473c36473 100644 --- a/test/es-module/test-esm-preserve-symlinks.js +++ b/test/es-module/test-esm-preserve-symlinks.js @@ -7,8 +7,9 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -common.refreshTmpDir(); -const tmpDir = common.tmpDir; +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +const tmpDir = tmpdir.path; const entry = path.join(tmpDir, 'entry.js'); const real = path.join(tmpDir, 'real.js'); diff --git a/test/es-module/test-esm-symlink.js b/test/es-module/test-esm-symlink.js index 3b7d689bf8f..074230ac06c 100644 --- a/test/es-module/test-esm-symlink.js +++ b/test/es-module/test-esm-symlink.js @@ -6,8 +6,9 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -common.refreshTmpDir(); -const tmpDir = common.tmpDir; +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +const tmpDir = tmpdir.path; const entry = path.join(tmpDir, 'entry.mjs'); const real = path.join(tmpDir, 'index.mjs'); diff --git a/test/known_issues/test-cwd-enoent-file.js b/test/known_issues/test-cwd-enoent-file.js index 01e6e8359fb..0f75896134f 100644 --- a/test/known_issues/test-cwd-enoent-file.js +++ b/test/known_issues/test-cwd-enoent-file.js @@ -17,8 +17,9 @@ const fs = require('fs'); if (process.argv[2] === 'child') { // Do nothing. 
} else { - common.refreshTmpDir(); - const dir = fs.mkdtempSync(`${common.tmpDir}/`); + const tmpdir = require('../common/tmpdir'); + tmpdir.refresh(); + const dir = fs.mkdtempSync(`${tmpdir.path}/`); process.chdir(dir); fs.rmdirSync(dir); assert.throws(process.cwd, diff --git a/test/known_issues/test-module-deleted-extensions.js b/test/known_issues/test-module-deleted-extensions.js index 45ec41ad604..3a51e8725ee 100644 --- a/test/known_issues/test-module-deleted-extensions.js +++ b/test/known_issues/test-module-deleted-extensions.js @@ -4,9 +4,10 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); -const file = path.join(common.tmpDir, 'test-extensions.foo.bar'); +const tmpdir = require('../common/tmpdir'); +const file = path.join(tmpdir.path, 'test-extensions.foo.bar'); -common.refreshTmpDir(); +tmpdir.refresh(); fs.writeFileSync(file, '', 'utf8'); require.extensions['.foo.bar'] = (module, path) => {}; delete require.extensions['.foo.bar']; @@ -14,4 +15,4 @@ require.extensions['.bar'] = common.mustCall((module, path) => { assert.strictEqual(module.id, file); assert.strictEqual(path, file); }); -require(path.join(common.tmpDir, 'test-extensions')); +require(path.join(tmpdir.path, 'test-extensions')); diff --git a/test/parallel/test-assert.js b/test/parallel/test-assert.js index 849e07f5789..1223282a166 100644 --- a/test/parallel/test-assert.js +++ b/test/parallel/test-assert.js @@ -745,18 +745,18 @@ function engineSpecificAssert(v8, cc) { assert.equal(assert.notDeepEqual, assert.notDeepStrictEqual); assert.equal(Object.keys(assert).length, Object.keys(a).length); assert(7); - common.expectsError( - () => assert(), + assert.throws( + () => assert(...[]), { - code: 'ERR_MISSING_ARGS', - type: TypeError + message: 'No value argument passed to `assert.ok()`', + name: 'AssertionError [ERR_ASSERTION]' } ); - common.expectsError( + assert.throws( () => a(), { - code: 'ERR_MISSING_ARGS', - type: 
TypeError + message: 'No value argument passed to `assert.ok()`', + name: 'AssertionError [ERR_ASSERTION]' } ); diff --git a/test/parallel/test-benchmark-fs.js b/test/parallel/test-benchmark-fs.js index e960482a636..ad01b4e5803 100644 --- a/test/parallel/test-benchmark-fs.js +++ b/test/parallel/test-benchmark-fs.js @@ -1,9 +1,10 @@ 'use strict'; -const common = require('../common'); +require('../common'); const runBenchmark = require('../common/benchmark'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); runBenchmark('fs', [ 'n=1', @@ -16,4 +17,4 @@ runBenchmark('fs', [ 'statSyncType=fstatSync', 'encodingType=buf', 'filesize=1024' -], { NODE_TMPDIR: common.tmpDir, NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); +], { NODE_TMPDIR: tmpdir.path, NODEJS_BENCHMARK_ZERO_ALLOWED: 1 }); diff --git a/test/parallel/test-child-process-fork-exec-path.js b/test/parallel/test-child-process-fork-exec-path.js index dbcb3af9bfc..c21e5769480 100644 --- a/test/parallel/test-child-process-fork-exec-path.js +++ b/test/parallel/test-child-process-fork-exec-path.js @@ -24,10 +24,11 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); +const tmpdir = require('../common/tmpdir'); const msg = { test: 'this' }; const nodePath = process.execPath; -const nodeCopyPath = path.join(common.tmpDir, 'node-copy.exe'); -const chakracoreCopyPath = path.join(common.tmpDir, 'chakracore.dll'); +const nodeCopyPath = path.join(tmpdir.path, 'node-copy.exe'); +const chakracoreCopyPath = path.join(tmpdir.path, 'chakracore.dll'); const exePaths = [ { srcPath: nodePath, destPath: nodeCopyPath }]; @@ -44,7 +45,7 @@ if (process.env.FORK) { process.send(msg); process.exit(); } else { - common.refreshTmpDir(); + tmpdir.refresh(); try { exePaths.forEach(function(value) { fs.unlinkSync(value.destPath); diff --git a/test/parallel/test-cli-node-options-disallowed.js 
b/test/parallel/test-cli-node-options-disallowed.js index b55543bfa24..e4ae2d1aea2 100644 --- a/test/parallel/test-cli-node-options-disallowed.js +++ b/test/parallel/test-cli-node-options-disallowed.js @@ -8,8 +8,9 @@ if (process.config.variables.node_without_node_options) const assert = require('assert'); const exec = require('child_process').execFile; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +process.chdir(tmpdir.path); disallow('--version'); disallow('-v'); diff --git a/test/parallel/test-cli-node-options.js b/test/parallel/test-cli-node-options.js index f7ac0378649..edf47879715 100644 --- a/test/parallel/test-cli-node-options.js +++ b/test/parallel/test-cli-node-options.js @@ -8,8 +8,9 @@ if (process.config.variables.node_without_node_options) const assert = require('assert'); const exec = require('child_process').execFile; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +process.chdir(tmpdir.path); expect(`-r ${require.resolve('../fixtures/printA.js')}`, 'A\nB\n'); expect('--no-deprecation', 'B\n'); @@ -28,6 +29,15 @@ expect('--zero-fill-buffers', 'B\n'); expect('--v8-pool-size=10', 'B\n'); expect('--trace-event-categories node', 'B\n'); +if (!common.isWindows) { + expect('--perf-basic-prof', 'B\n'); +} + +if (common.isLinux && ['arm', 'x64', 'mips'].includes(process.arch)) { + // PerfJitLogger is only implemented in Linux. 
+ expect('--perf-prof', 'B\n'); +} + if (common.hasCrypto) { expect('--use-openssl-ca', 'B\n'); expect('--use-bundled-ca', 'B\n'); diff --git a/test/parallel/test-cluster-cwd.js b/test/parallel/test-cluster-cwd.js index ce3fdca51e9..485276befaf 100644 --- a/test/parallel/test-cluster-cwd.js +++ b/test/parallel/test-cluster-cwd.js @@ -2,19 +2,20 @@ const common = require('../common'); const assert = require('assert'); const cluster = require('cluster'); +const tmpdir = require('../common/tmpdir'); if (cluster.isMaster) { - common.refreshTmpDir(); + tmpdir.refresh(); assert.strictEqual(cluster.settings.cwd, undefined); cluster.fork().on('message', common.mustCall((msg) => { assert.strictEqual(msg, process.cwd()); })); - cluster.setupMaster({ cwd: common.tmpDir }); - assert.strictEqual(cluster.settings.cwd, common.tmpDir); + cluster.setupMaster({ cwd: tmpdir.path }); + assert.strictEqual(cluster.settings.cwd, tmpdir.path); cluster.fork().on('message', common.mustCall((msg) => { - assert.strictEqual(msg, common.tmpDir); + assert.strictEqual(msg, tmpdir.path); })); } else { process.send(process.cwd()); diff --git a/test/parallel/test-cluster-eaccess.js b/test/parallel/test-cluster-eaccess.js index ecf0862fa3b..c6a2a8ac25e 100644 --- a/test/parallel/test-cluster-eaccess.js +++ b/test/parallel/test-cluster-eaccess.js @@ -33,7 +33,8 @@ const net = require('net'); if (cluster.isMaster && process.argv.length !== 3) { // cluster.isMaster - common.refreshTmpDir(); + const tmpdir = require('../common/tmpdir'); + tmpdir.refresh(); const PIPE_NAME = common.PIPE; const worker = cluster.fork({ PIPE_NAME }); diff --git a/test/parallel/test-cluster-http-pipe.js b/test/parallel/test-cluster-http-pipe.js index 96f741e8044..9e58fb297b2 100644 --- a/test/parallel/test-cluster-http-pipe.js +++ b/test/parallel/test-cluster-http-pipe.js @@ -32,7 +32,8 @@ const cluster = require('cluster'); const http = require('http'); if (cluster.isMaster) { - common.refreshTmpDir(); + const tmpdir = 
require('../common/tmpdir'); + tmpdir.refresh(); const worker = cluster.fork(); worker.on('message', common.mustCall((msg) => { assert.strictEqual(msg, 'DONE'); diff --git a/test/parallel/test-cluster-net-listen-relative-path.js b/test/parallel/test-cluster-net-listen-relative-path.js index 2f95d05203d..ce9ead9e2eb 100644 --- a/test/parallel/test-cluster-net-listen-relative-path.js +++ b/test/parallel/test-cluster-net-listen-relative-path.js @@ -6,6 +6,8 @@ const net = require('net'); const path = require('path'); const fs = require('fs'); +const tmpdir = require('../common/tmpdir'); + if (common.isWindows) common.skip('On Windows named pipes live in their own ' + 'filesystem and don\'t have a ~100 byte limit'); @@ -20,8 +22,8 @@ assert.strictEqual(path.resolve(socketDir, socketName).length > 100, true, if (cluster.isMaster) { // ensure that the worker exits peacefully - common.refreshTmpDir(); - process.chdir(common.tmpDir); + tmpdir.refresh(); + process.chdir(tmpdir.path); fs.mkdirSync(socketDir); cluster.fork().on('exit', common.mustCall(function(statusCode) { assert.strictEqual(statusCode, 0); diff --git a/test/parallel/test-crypto-authenticated.js b/test/parallel/test-crypto-authenticated.js index 384044210d5..c016b3500de 100644 --- a/test/parallel/test-crypto-authenticated.js +++ b/test/parallel/test-crypto-authenticated.js @@ -342,6 +342,8 @@ const expectedDeprecationWarnings = [0, 1, 2, 6, 9, 10, 11, 17] .map((i) => `Permitting authentication tag lengths of ${i} bytes is ` + 'deprecated. 
Valid GCM tag lengths are 4, 8, 12, 13, 14, 15, 16.'); +expectedDeprecationWarnings.push('crypto.DEFAULT_ENCODING is deprecated.'); + common.expectWarning({ Warning: expectedWarnings, DeprecationWarning: expectedDeprecationWarnings diff --git a/test/parallel/test-crypto-sign-verify.js b/test/parallel/test-crypto-sign-verify.js index 48b21d0d85a..7619d911916 100644 --- a/test/parallel/test-crypto-sign-verify.js +++ b/test/parallel/test-crypto-sign-verify.js @@ -297,11 +297,12 @@ common.expectsError( padding: crypto.constants.RSA_PKCS1_PSS_PADDING }); - common.refreshTmpDir(); + const tmpdir = require('../common/tmpdir'); + tmpdir.refresh(); - const sigfile = path.join(common.tmpDir, 's5.sig'); + const sigfile = path.join(tmpdir.path, 's5.sig'); fs.writeFileSync(sigfile, s5); - const msgfile = path.join(common.tmpDir, 's5.msg'); + const msgfile = path.join(tmpdir.path, 's5.msg'); fs.writeFileSync(msgfile, msg); const cmd = diff --git a/test/parallel/test-cwd-enoent-preload.js b/test/parallel/test-cwd-enoent-preload.js index ec9f1fee754..b83ff6ff883 100644 --- a/test/parallel/test-cwd-enoent-preload.js +++ b/test/parallel/test-cwd-enoent-preload.js @@ -8,10 +8,11 @@ const assert = require('assert'); const fs = require('fs'); const spawn = require('child_process').spawn; const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); -const dirname = `${common.tmpDir}/cwd-does-not-exist-${process.pid}`; +const dirname = `${tmpdir.path}/cwd-does-not-exist-${process.pid}`; const abspathFile = fixtures.path('a.js'); -common.refreshTmpDir(); +tmpdir.refresh(); fs.mkdirSync(dirname); process.chdir(dirname); fs.rmdirSync(dirname); diff --git a/test/parallel/test-cwd-enoent-repl.js b/test/parallel/test-cwd-enoent-repl.js index 8f846af9030..d42679d8688 100644 --- a/test/parallel/test-cwd-enoent-repl.js +++ b/test/parallel/test-cwd-enoent-repl.js @@ -8,8 +8,10 @@ const assert = require('assert'); const fs = require('fs'); const spawn = 
require('child_process').spawn; -const dirname = `${common.tmpDir}/cwd-does-not-exist-${process.pid}`; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); + +const dirname = `${tmpdir.path}/cwd-does-not-exist-${process.pid}`; +tmpdir.refresh(); fs.mkdirSync(dirname); process.chdir(dirname); fs.rmdirSync(dirname); diff --git a/test/parallel/test-cwd-enoent.js b/test/parallel/test-cwd-enoent.js index c1b520aedd3..e5d93f46ce6 100644 --- a/test/parallel/test-cwd-enoent.js +++ b/test/parallel/test-cwd-enoent.js @@ -8,8 +8,10 @@ const assert = require('assert'); const fs = require('fs'); const spawn = require('child_process').spawn; -const dirname = `${common.tmpDir}/cwd-does-not-exist-${process.pid}`; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); + +const dirname = `${tmpdir.path}/cwd-does-not-exist-${process.pid}`; +tmpdir.refresh(); fs.mkdirSync(dirname); process.chdir(dirname); fs.rmdirSync(dirname); diff --git a/test/parallel/test-eslint-prefer-assert-methods.js b/test/parallel/test-eslint-prefer-assert-methods.js index 57596132d88..2d05a4851bf 100644 --- a/test/parallel/test-eslint-prefer-assert-methods.js +++ b/test/parallel/test-eslint-prefer-assert-methods.js @@ -7,31 +7,46 @@ const rule = require('../../tools/eslint-rules/prefer-assert-methods'); new RuleTester().run('prefer-assert-methods', rule, { valid: [ - 'assert.strictEqual(foo, bar)', - 'assert(foo === bar && baz)' + 'assert.strictEqual(foo, bar);', + 'assert(foo === bar && baz);', + 'assert.notStrictEqual(foo, bar);', + 'assert(foo !== bar && baz);', + 'assert.equal(foo, bar);', + 'assert(foo == bar && baz);', + 'assert.notEqual(foo, bar);', + 'assert(foo != bar && baz);', + 'assert.ok(foo);', + 'assert.ok(foo != bar);', + 'assert.ok(foo === bar && baz);' ], invalid: [ { - code: 'assert(foo == bar)', - errors: [{ message: "'assert.equal' should be used instead of '=='" }] + code: 'assert(foo == bar);', + errors: [{ + message: "'assert.equal' should be used 
instead of '=='" + }], + output: 'assert.equal(foo, bar);' }, { - code: 'assert(foo === bar)', + code: 'assert(foo === bar);', errors: [{ message: "'assert.strictEqual' should be used instead of '==='" - }] + }], + output: 'assert.strictEqual(foo, bar);' }, { - code: 'assert(foo != bar)', + code: 'assert(foo != bar);', errors: [{ message: "'assert.notEqual' should be used instead of '!='" - }] + }], + output: 'assert.notEqual(foo, bar);' }, { - code: 'assert(foo !== bar)', + code: 'assert(foo !== bar);', errors: [{ message: "'assert.notStrictEqual' should be used instead of '!=='" - }] - }, + }], + output: 'assert.notStrictEqual(foo, bar);' + } ] }); diff --git a/test/parallel/test-file-write-stream.js b/test/parallel/test-file-write-stream.js index 645c4a637b0..4860417dd29 100644 --- a/test/parallel/test-file-write-stream.js +++ b/test/parallel/test-file-write-stream.js @@ -25,8 +25,9 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const fn = path.join(common.tmpDir, 'write.txt'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +const fn = path.join(tmpdir.path, 'write.txt'); +tmpdir.refresh(); const file = fs.createWriteStream(fn, { highWaterMark: 10 }); diff --git a/test/parallel/test-file-write-stream2.js b/test/parallel/test-file-write-stream2.js index 1f838f08692..2db06640e18 100644 --- a/test/parallel/test-file-write-stream2.js +++ b/test/parallel/test-file-write-stream2.js @@ -20,14 +20,16 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const path = require('path'); const fs = require('fs'); +const tmpdir = require('../common/tmpdir'); -const filepath = path.join(common.tmpDir, 'write.txt'); + +const filepath = path.join(tmpdir.path, 'write.txt'); const EXPECTED = '012345678910'; @@ -58,7 +60,7 @@ function removeTestFile() { } -common.refreshTmpDir(); +tmpdir.refresh(); // drain at 0, return false at 10. const file = fs.createWriteStream(filepath, { diff --git a/test/parallel/test-file-write-stream3.js b/test/parallel/test-file-write-stream3.js index d62e9d5d3c9..6bb64e6092b 100644 --- a/test/parallel/test-file-write-stream3.js +++ b/test/parallel/test-file-write-stream3.js @@ -25,8 +25,10 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); +const tmpdir = require('../common/tmpdir'); -const filepath = path.join(common.tmpDir, 'write_pos.txt'); + +const filepath = path.join(tmpdir.path, 'write_pos.txt'); const cb_expected = 'write open close write open close write open close '; @@ -51,7 +53,7 @@ process.on('exit', function() { }); -common.refreshTmpDir(); +tmpdir.refresh(); function run_test_1() { diff --git a/test/parallel/test-fs-access.js b/test/parallel/test-fs-access.js index cde52eb3035..94031c2cbc1 100644 --- a/test/parallel/test-fs-access.js +++ b/test/parallel/test-fs-access.js @@ -7,18 +7,20 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); + const uv = process.binding('uv'); -const doesNotExist = path.join(common.tmpDir, '__this_should_not_exist'); -const readOnlyFile = path.join(common.tmpDir, 'read_only_file'); -const readWriteFile = path.join(common.tmpDir, 'read_write_file'); +const tmpdir = require('../common/tmpdir'); +const doesNotExist = path.join(tmpdir.path, '__this_should_not_exist'); +const readOnlyFile = path.join(tmpdir.path, 'read_only_file'); 
+const readWriteFile = path.join(tmpdir.path, 'read_write_file'); function createFileWithPerms(file, mode) { fs.writeFileSync(file, ''); fs.chmodSync(file, mode); } -common.refreshTmpDir(); +tmpdir.refresh(); createFileWithPerms(readOnlyFile, 0o444); createFileWithPerms(readWriteFile, 0o666); diff --git a/test/parallel/test-fs-append-file-sync.js b/test/parallel/test-fs-append-file-sync.js index 31e95c2e368..b836d81bd59 100644 --- a/test/parallel/test-fs-append-file-sync.js +++ b/test/parallel/test-fs-append-file-sync.js @@ -36,10 +36,11 @@ const data = '南越国是前203年至前111年存在于岭南地区的一个国 '历经五代君主。南越国是岭南地区的第一个有记载的政权国家,采用封建制和郡县制并存的制度,' + '它的建立保证了秦末乱世岭南地区社会秩序的稳定,有效的改善了岭南地区落后的政治、##济现状。\n'; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); // test that empty file will be created and have content added -const filename = join(common.tmpDir, 'append-sync.txt'); +const filename = join(tmpdir.path, 'append-sync.txt'); fs.appendFileSync(filename, data); @@ -48,7 +49,7 @@ const fileData = fs.readFileSync(filename); assert.strictEqual(Buffer.byteLength(data), fileData.length); // test that appends data to a non empty file -const filename2 = join(common.tmpDir, 'append-sync2.txt'); +const filename2 = join(tmpdir.path, 'append-sync2.txt'); fs.writeFileSync(filename2, currentFileData); fs.appendFileSync(filename2, data); @@ -59,7 +60,7 @@ assert.strictEqual(Buffer.byteLength(data) + currentFileData.length, fileData2.length); // test that appendFileSync accepts buffers -const filename3 = join(common.tmpDir, 'append-sync3.txt'); +const filename3 = join(tmpdir.path, 'append-sync3.txt'); fs.writeFileSync(filename3, currentFileData); const buf = Buffer.from(data, 'utf8'); @@ -70,7 +71,7 @@ const fileData3 = fs.readFileSync(filename3); assert.strictEqual(buf.length + currentFileData.length, fileData3.length); // test that appendFile accepts numbers. 
-const filename4 = join(common.tmpDir, 'append-sync4.txt'); +const filename4 = join(tmpdir.path, 'append-sync4.txt'); fs.writeFileSync(filename4, currentFileData, { mode: m }); fs.appendFileSync(filename4, num, { mode: m }); @@ -87,7 +88,7 @@ assert.strictEqual(Buffer.byteLength(String(num)) + currentFileData.length, fileData4.length); // test that appendFile accepts file descriptors -const filename5 = join(common.tmpDir, 'append-sync5.txt'); +const filename5 = join(tmpdir.path, 'append-sync5.txt'); fs.writeFileSync(filename5, currentFileData); const filename5fd = fs.openSync(filename5, 'a+', 0o600); diff --git a/test/parallel/test-fs-append-file.js b/test/parallel/test-fs-append-file.js index e3e4c273d32..8e9a0619b0a 100644 --- a/test/parallel/test-fs-append-file.js +++ b/test/parallel/test-fs-append-file.js @@ -25,7 +25,9 @@ const assert = require('assert'); const fs = require('fs'); const join = require('path').join; -const filename = join(common.tmpDir, 'append.txt'); +const tmpdir = require('../common/tmpdir'); + +const filename = join(tmpdir.path, 'append.txt'); const currentFileData = 'ABCD'; @@ -40,7 +42,7 @@ const s = '南越国是前203年至前111年存在于岭南地区的一个国家 let ncallbacks = 0; -common.refreshTmpDir(); +tmpdir.refresh(); // test that empty file will be created and have content added fs.appendFile(filename, s, function(e) { @@ -56,7 +58,7 @@ fs.appendFile(filename, s, function(e) { }); // test that appends data to a non empty file -const filename2 = join(common.tmpDir, 'append2.txt'); +const filename2 = join(tmpdir.path, 'append2.txt'); fs.writeFileSync(filename2, currentFileData); fs.appendFile(filename2, s, function(e) { @@ -73,7 +75,7 @@ fs.appendFile(filename2, s, function(e) { }); // test that appendFile accepts buffers -const filename3 = join(common.tmpDir, 'append3.txt'); +const filename3 = join(tmpdir.path, 'append3.txt'); fs.writeFileSync(filename3, currentFileData); const buf = Buffer.from(s, 'utf8'); @@ -91,7 +93,7 @@ fs.appendFile(filename3, buf, 
function(e) { }); // test that appendFile accepts numbers. -const filename4 = join(common.tmpDir, 'append4.txt'); +const filename4 = join(tmpdir.path, 'append4.txt'); fs.writeFileSync(filename4, currentFileData); const m = 0o600; @@ -115,7 +117,7 @@ fs.appendFile(filename4, n, { mode: m }, function(e) { }); // test that appendFile accepts file descriptors -const filename5 = join(common.tmpDir, 'append5.txt'); +const filename5 = join(tmpdir.path, 'append5.txt'); fs.writeFileSync(filename5, currentFileData); fs.open(filename5, 'a+', function(e, fd) { @@ -146,7 +148,7 @@ fs.open(filename5, 'a+', function(e, fd) { // test that a missing callback emits a warning, even if the last argument is a // function. -const filename6 = join(common.tmpDir, 'append6.txt'); +const filename6 = join(tmpdir.path, 'append6.txt'); const warn = 'Calling an asynchronous function without callback is deprecated.'; common.expectWarning('DeprecationWarning', warn); fs.appendFile(filename6, console.log); diff --git a/test/parallel/test-fs-buffer.js b/test/parallel/test-fs-buffer.js index bc2add52187..bb10c164dad 100644 --- a/test/parallel/test-fs-buffer.js +++ b/test/parallel/test-fs-buffer.js @@ -6,16 +6,17 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); assert.doesNotThrow(() => { - fs.access(Buffer.from(common.tmpDir), common.mustCall((err) => { + fs.access(Buffer.from(tmpdir.path), common.mustCall((err) => { assert.ifError(err); })); }); assert.doesNotThrow(() => { - const buf = Buffer.from(path.join(common.tmpDir, 'a.txt')); + const buf = Buffer.from(path.join(tmpdir.path, 'a.txt')); fs.open(buf, 'w+', common.mustCall((err, fd) => { assert.ifError(err); assert(fd); diff --git a/test/parallel/test-fs-buffertype-writesync.js b/test/parallel/test-fs-buffertype-writesync.js index 73a6f211893..d5257d214bd 100644 --- a/test/parallel/test-fs-buffertype-writesync.js 
+++ b/test/parallel/test-fs-buffertype-writesync.js @@ -1,5 +1,5 @@ 'use strict'; -const common = require('../common'); +require('../common'); // This test ensures that writeSync does support inputs which // are then correctly converted into string buffers. @@ -8,10 +8,12 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -const filePath = path.join(common.tmpDir, 'test_buffer_type'); +const tmpdir = require('../common/tmpdir'); + +const filePath = path.join(tmpdir.path, 'test_buffer_type'); const v = [true, false, 0, 1, Infinity, () => {}, {}, [], undefined, null]; -common.refreshTmpDir(); +tmpdir.refresh(); v.forEach((value) => { const fd = fs.openSync(filePath, 'w'); diff --git a/test/parallel/test-fs-chmod.js b/test/parallel/test-fs-chmod.js index 92d420a0815..7c1b14ff24c 100644 --- a/test/parallel/test-fs-chmod.js +++ b/test/parallel/test-fs-chmod.js @@ -71,10 +71,11 @@ if (common.isWindows) { mode_sync = 0o644; } -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const file1 = path.join(common.tmpDir, 'a.js'); -const file2 = path.join(common.tmpDir, 'a1.js'); +const file1 = path.join(tmpdir.path, 'a.js'); +const file2 = path.join(tmpdir.path, 'a1.js'); // Create file1. 
fs.closeSync(fs.openSync(file1, 'w')); @@ -130,7 +131,7 @@ fs.open(file2, 'w', common.mustCall((err, fd) => { // lchmod if (fs.lchmod) { - const link = path.join(common.tmpDir, 'symbolic-link'); + const link = path.join(tmpdir.path, 'symbolic-link'); fs.symlinkSync(file2, link); diff --git a/test/parallel/test-fs-copyfile.js b/test/parallel/test-fs-copyfile.js index bfda5ccfd75..a335065bf43 100644 --- a/test/parallel/test-fs-copyfile.js +++ b/test/parallel/test-fs-copyfile.js @@ -1,11 +1,12 @@ 'use strict'; const common = require('../common'); const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); const src = fixtures.path('a.js'); -const dest = path.join(common.tmpDir, 'copyfile.out'); +const dest = path.join(tmpdir.path, 'copyfile.out'); const { COPYFILE_EXCL, UV_FS_COPYFILE_EXCL } = fs.constants; function verify(src, dest) { @@ -19,7 +20,7 @@ function verify(src, dest) { assert.strictEqual(srcStat.size, destStat.size); } -common.refreshTmpDir(); +tmpdir.refresh(); // Verify that flags are defined. 
assert.strictEqual(typeof COPYFILE_EXCL, 'number'); diff --git a/test/parallel/test-fs-error-messages.js b/test/parallel/test-fs-error-messages.js index ef38652d7a3..ba44c28d432 100644 --- a/test/parallel/test-fs-error-messages.js +++ b/test/parallel/test-fs-error-messages.js @@ -455,3 +455,72 @@ function re(literals, ...values) { validateError ); } + +// ftruncate +{ + const validateError = (err) => { + assert.strictEqual(err.syscall, 'ftruncate'); + // Could be EBADF or EINVAL, depending on the platform + if (err.code === 'EBADF') { + assert.strictEqual(err.message, 'EBADF: bad file descriptor, ftruncate'); + assert.strictEqual(err.errno, uv.UV_EBADF); + } else { + assert.strictEqual(err.message, 'EINVAL: invalid argument, ftruncate'); + assert.strictEqual(err.errno, uv.UV_EINVAL); + assert.strictEqual(err.code, 'EINVAL'); + } + return true; + }; + + const fd = fs.openSync(existingFile, 'r'); + fs.closeSync(fd); + + fs.ftruncate(fd, 4, common.mustCall(validateError)); + + assert.throws( + () => fs.ftruncateSync(fd, 4), + validateError + ); +} + +// fdatasync +{ + const validateError = (err) => { + assert.strictEqual(err.message, 'EBADF: bad file descriptor, fdatasync'); + assert.strictEqual(err.errno, uv.UV_EBADF); + assert.strictEqual(err.code, 'EBADF'); + assert.strictEqual(err.syscall, 'fdatasync'); + return true; + }; + + const fd = fs.openSync(existingFile, 'r'); + fs.closeSync(fd); + + fs.fdatasync(fd, common.mustCall(validateError)); + + assert.throws( + () => fs.fdatasyncSync(fd), + validateError + ); +} + +// fsync +{ + const validateError = (err) => { + assert.strictEqual(err.message, 'EBADF: bad file descriptor, fsync'); + assert.strictEqual(err.errno, uv.UV_EBADF); + assert.strictEqual(err.code, 'EBADF'); + assert.strictEqual(err.syscall, 'fsync'); + return true; + }; + + const fd = fs.openSync(existingFile, 'r'); + fs.closeSync(fd); + + fs.fsync(fd, common.mustCall(validateError)); + + assert.throws( + () => fs.fsyncSync(fd), + validateError + ); +} 
diff --git a/test/parallel/test-fs-fsync.js b/test/parallel/test-fs-fsync.js index ea80d4cbcdf..1f575881e37 100644 --- a/test/parallel/test-fs-fsync.js +++ b/test/parallel/test-fs-fsync.js @@ -23,15 +23,16 @@ const common = require('../common'); const assert = require('assert'); const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); const fs = require('fs'); const path = require('path'); const fileFixture = fixtures.path('a.js'); -const fileTemp = path.join(common.tmpDir, 'a.js'); +const fileTemp = path.join(tmpdir.path, 'a.js'); // Copy fixtures to temp. -common.refreshTmpDir(); +tmpdir.refresh(); fs.copyFileSync(fileFixture, fileTemp); fs.open(fileTemp, 'a', 0o777, common.mustCall(function(err, fd) { diff --git a/test/parallel/test-fs-link.js b/test/parallel/test-fs-link.js index 7cbfe5a15e2..d007f4e985b 100644 --- a/test/parallel/test-fs-link.js +++ b/test/parallel/test-fs-link.js @@ -4,11 +4,12 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); // test creating and reading hard link -const srcPath = path.join(common.tmpDir, 'hardlink-target.txt'); -const dstPath = path.join(common.tmpDir, 'link1.js'); +const srcPath = path.join(tmpdir.path, 'hardlink-target.txt'); +const dstPath = path.join(tmpdir.path, 'link1.js'); fs.writeFileSync(srcPath, 'hello world'); function callback(err) { diff --git a/test/parallel/test-fs-long-path.js b/test/parallel/test-fs-long-path.js index ae60b16f1a3..74f63868b81 100644 --- a/test/parallel/test-fs-long-path.js +++ b/test/parallel/test-fs-long-path.js @@ -28,12 +28,14 @@ const fs = require('fs'); const path = require('path'); const assert = require('assert'); +const tmpdir = require('../common/tmpdir'); + // make a path that will be at least 260 chars long. 
-const fileNameLen = Math.max(260 - common.tmpDir.length - 1, 1); -const fileName = path.join(common.tmpDir, 'x'.repeat(fileNameLen)); +const fileNameLen = Math.max(260 - tmpdir.path.length - 1, 1); +const fileName = path.join(tmpdir.path, 'x'.repeat(fileNameLen)); const fullPath = path.resolve(fileName); -common.refreshTmpDir(); +tmpdir.refresh(); console.log({ filenameLength: fileName.length, diff --git a/test/parallel/test-fs-make-callback.js b/test/parallel/test-fs-make-callback.js index 79cf4e0bed7..ca948ede182 100644 --- a/test/parallel/test-fs-make-callback.js +++ b/test/parallel/test-fs-make-callback.js @@ -7,12 +7,13 @@ const callbackThrowValues = [null, true, false, 0, 1, 'foo', /foo/, [], {}]; const { sep } = require('path'); const warn = 'Calling an asynchronous function without callback is deprecated.'; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); function testMakeCallback(cb) { return function() { // fs.mkdtemp() calls makeCallback() on its third argument - fs.mkdtemp(`${common.tmpDir}${sep}`, {}, cb); + fs.mkdtemp(`${tmpdir.path}${sep}`, {}, cb); }; } diff --git a/test/parallel/test-fs-mkdir-rmdir.js b/test/parallel/test-fs-mkdir-rmdir.js index 8c22331e85e..865a5dba951 100644 --- a/test/parallel/test-fs-mkdir-rmdir.js +++ b/test/parallel/test-fs-mkdir-rmdir.js @@ -4,9 +4,10 @@ const common = require('../common'); const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const d = path.join(common.tmpDir, 'dir'); +const tmpdir = require('../common/tmpdir'); +const d = path.join(tmpdir.path, 'dir'); -common.refreshTmpDir(); +tmpdir.refresh(); // Make sure the directory does not exist assert(!common.fileExists(d)); diff --git a/test/parallel/test-fs-mkdir.js b/test/parallel/test-fs-mkdir.js index 7cfb09295ed..937c73aca4a 100644 --- a/test/parallel/test-fs-mkdir.js +++ b/test/parallel/test-fs-mkdir.js @@ -24,10 +24,11 @@ const common = require('../common'); const assert = 
require('assert'); const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); { - const pathname = `${common.tmpDir}/test1`; + const pathname = `${tmpdir.path}/test1`; fs.mkdir(pathname, common.mustCall(function(err) { assert.strictEqual(err, null); @@ -36,7 +37,7 @@ common.refreshTmpDir(); } { - const pathname = `${common.tmpDir}/test2`; + const pathname = `${tmpdir.path}/test2`; fs.mkdir(pathname, 0o777, common.mustCall(function(err) { assert.strictEqual(err, null); @@ -45,7 +46,7 @@ common.refreshTmpDir(); } { - const pathname = `${common.tmpDir}/test3`; + const pathname = `${tmpdir.path}/test3`; fs.mkdirSync(pathname); diff --git a/test/parallel/test-fs-mkdtemp.js b/test/parallel/test-fs-mkdtemp.js index 38a306b85c7..b27ab864173 100644 --- a/test/parallel/test-fs-mkdtemp.js +++ b/test/parallel/test-fs-mkdtemp.js @@ -5,14 +5,15 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const tmpFolder = fs.mkdtempSync(path.join(common.tmpDir, 'foo.')); +const tmpFolder = fs.mkdtempSync(path.join(tmpdir.path, 'foo.')); assert.strictEqual(path.basename(tmpFolder).length, 'foo.XXXXXX'.length); assert(common.fileExists(tmpFolder)); -const utf8 = fs.mkdtempSync(path.join(common.tmpDir, '\u0222abc.')); +const utf8 = fs.mkdtempSync(path.join(tmpdir.path, '\u0222abc.')); assert.strictEqual(Buffer.byteLength(path.basename(utf8)), Buffer.byteLength('\u0222abc.XXXXXX')); assert(common.fileExists(utf8)); @@ -23,13 +24,13 @@ function handler(err, folder) { assert.strictEqual(this, undefined); } -fs.mkdtemp(path.join(common.tmpDir, 'bar.'), common.mustCall(handler)); +fs.mkdtemp(path.join(tmpdir.path, 'bar.'), common.mustCall(handler)); // Same test as above, but making sure that passing an options object doesn't // affect the way the callback function is handled. 
-fs.mkdtemp(path.join(common.tmpDir, 'bar.'), {}, common.mustCall(handler)); +fs.mkdtemp(path.join(tmpdir.path, 'bar.'), {}, common.mustCall(handler)); // Making sure that not passing a callback doesn't crash, as a default function // is passed internally. -assert.doesNotThrow(() => fs.mkdtemp(path.join(common.tmpDir, 'bar-'))); -assert.doesNotThrow(() => fs.mkdtemp(path.join(common.tmpDir, 'bar-'), {})); +assert.doesNotThrow(() => fs.mkdtemp(path.join(tmpdir.path, 'bar-'))); +assert.doesNotThrow(() => fs.mkdtemp(path.join(tmpdir.path, 'bar-'), {})); diff --git a/test/parallel/test-fs-non-number-arguments-throw.js b/test/parallel/test-fs-non-number-arguments-throw.js index 9e73502c290..61ed3f6c285 100644 --- a/test/parallel/test-fs-non-number-arguments-throw.js +++ b/test/parallel/test-fs-non-number-arguments-throw.js @@ -4,9 +4,10 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); -const tempFile = path.join(common.tmpDir, 'fs-non-number-arguments-throw'); +const tmpdir = require('../common/tmpdir'); +const tempFile = path.join(tmpdir.path, 'fs-non-number-arguments-throw'); -common.refreshTmpDir(); +tmpdir.refresh(); fs.writeFileSync(tempFile, 'abc\ndef'); // a sanity check when using numbers instead of strings diff --git a/test/parallel/test-fs-open-flags.js b/test/parallel/test-fs-open-flags.js index aa87f8ced87..acf5c739a93 100644 --- a/test/parallel/test-fs-open-flags.js +++ b/test/parallel/test-fs-open-flags.js @@ -84,8 +84,9 @@ common.expectsError( ); if (common.isLinux || common.isOSX) { - common.refreshTmpDir(); - const file = path.join(common.tmpDir, 'a.js'); + const tmpdir = require('../common/tmpdir'); + tmpdir.refresh(); + const file = path.join(tmpdir.path, 'a.js'); fs.copyFileSync(fixtures.path('a.js'), file); fs.open(file, O_DSYNC, common.mustCall(assert.ifError)); } diff --git a/test/parallel/test-fs-open-numeric-flags.js b/test/parallel/test-fs-open-numeric-flags.js 
index 1bd9a043927..0e5ab6997e3 100644 --- a/test/parallel/test-fs-open-numeric-flags.js +++ b/test/parallel/test-fs-open-numeric-flags.js @@ -1,14 +1,15 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); // O_WRONLY without O_CREAT shall fail with ENOENT -const pathNE = path.join(common.tmpDir, 'file-should-not-exist'); +const pathNE = path.join(tmpdir.path, 'file-should-not-exist'); assert.throws( () => fs.openSync(pathNE, fs.constants.O_WRONLY), (e) => e.code === 'ENOENT' diff --git a/test/parallel/test-fs-options-immutable.js b/test/parallel/test-fs-options-immutable.js index 9d88cf0fa42..ca5079b07da 100644 --- a/test/parallel/test-fs-options-immutable.js +++ b/test/parallel/test-fs-options-immutable.js @@ -14,7 +14,8 @@ const path = require('path'); const errHandler = (e) => assert.ifError(e); const options = Object.freeze({}); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); { assert.doesNotThrow(() => @@ -31,8 +32,8 @@ common.refreshTmpDir(); } if (common.canCreateSymLink()) { - const sourceFile = path.resolve(common.tmpDir, 'test-readlink'); - const linkFile = path.resolve(common.tmpDir, 'test-readlink-link'); + const sourceFile = path.resolve(tmpdir.path, 'test-readlink'); + const linkFile = path.resolve(tmpdir.path, 'test-readlink-link'); fs.writeFileSync(sourceFile, ''); fs.symlinkSync(sourceFile, linkFile); @@ -44,7 +45,7 @@ if (common.canCreateSymLink()) { } { - const fileName = path.resolve(common.tmpDir, 'writeFile'); + const fileName = path.resolve(tmpdir.path, 'writeFile'); assert.doesNotThrow(() => fs.writeFileSync(fileName, 'ABCD', options)); assert.doesNotThrow(() => fs.writeFile(fileName, 'ABCD', options, common.mustCall(errHandler)) @@ -52,7 +53,7 @@ if (common.canCreateSymLink()) { } { - const fileName 
= path.resolve(common.tmpDir, 'appendFile'); + const fileName = path.resolve(tmpdir.path, 'appendFile'); assert.doesNotThrow(() => fs.appendFileSync(fileName, 'ABCD', options)); assert.doesNotThrow(() => fs.appendFile(fileName, 'ABCD', options, common.mustCall(errHandler)) @@ -82,7 +83,7 @@ if (common.canCreateSymLink()) { } { - const tempFileName = path.resolve(common.tmpDir, 'mkdtemp-'); + const tempFileName = path.resolve(tmpdir.path, 'mkdtemp-'); assert.doesNotThrow(() => fs.mkdtempSync(tempFileName, options)); assert.doesNotThrow(() => fs.mkdtemp(tempFileName, options, common.mustCall(errHandler)) @@ -90,7 +91,7 @@ if (common.canCreateSymLink()) { } { - const fileName = path.resolve(common.tmpDir, 'streams'); + const fileName = path.resolve(tmpdir.path, 'streams'); assert.doesNotThrow(() => { fs.WriteStream(fileName, options).once('open', common.mustCall(() => { assert.doesNotThrow(() => fs.ReadStream(fileName, options)); diff --git a/test/parallel/test-fs-promisified.js b/test/parallel/test-fs-promisified.js index ac6e22f9690..13cf5e0e0f4 100644 --- a/test/parallel/test-fs-promisified.js +++ b/test/parallel/test-fs-promisified.js @@ -20,9 +20,10 @@ const exists = promisify(fs.exists); })); } -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); { - const filename = path.join(common.tmpDir, 'write-promise.txt'); + const filename = path.join(tmpdir.path, 'write-promise.txt'); const fd = fs.openSync(filename, 'w'); write(fd, Buffer.from('foobar')).then(common.mustCall((obj) => { assert.strictEqual(typeof obj.bytesWritten, 'number'); diff --git a/test/parallel/test-fs-read-stream-fd.js b/test/parallel/test-fs-read-stream-fd.js index c5ee6c05ef1..7d4b264002b 100644 --- a/test/parallel/test-fs-read-stream-fd.js +++ b/test/parallel/test-fs-read-stream-fd.js @@ -20,15 +20,16 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -const common = require('../common'); +require('../common'); const fs = require('fs'); const assert = require('assert'); const path = require('path'); -const file = path.join(common.tmpDir, '/read_stream_fd_test.txt'); +const tmpdir = require('../common/tmpdir'); +const file = path.join(tmpdir.path, '/read_stream_fd_test.txt'); const input = 'hello world'; let output = ''; -common.refreshTmpDir(); +tmpdir.refresh(); fs.writeFileSync(file, input); const fd = fs.openSync(file, 'r'); diff --git a/test/parallel/test-fs-readdir-ucs2.js b/test/parallel/test-fs-readdir-ucs2.js index debcfb7750b..b17dc8d7292 100644 --- a/test/parallel/test-fs-readdir-ucs2.js +++ b/test/parallel/test-fs-readdir-ucs2.js @@ -8,9 +8,10 @@ const path = require('path'); const fs = require('fs'); const assert = require('assert'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const filename = '\uD83D\uDC04'; -const root = Buffer.from(`${common.tmpDir}${path.sep}`); +const root = Buffer.from(`${tmpdir.path}${path.sep}`); const filebuff = Buffer.from(filename, 'ucs2'); const fullpath = Buffer.concat([root, filebuff]); @@ -22,7 +23,7 @@ try { throw e; } -fs.readdir(common.tmpDir, 'ucs2', common.mustCall((err, list) => { +fs.readdir(tmpdir.path, 'ucs2', common.mustCall((err, list) => { assert.ifError(err); assert.strictEqual(1, list.length); const fn = list[0]; diff --git a/test/parallel/test-fs-readdir.js b/test/parallel/test-fs-readdir.js index 76e6a4dc376..a24def6f12d 100644 --- a/test/parallel/test-fs-readdir.js +++ b/test/parallel/test-fs-readdir.js @@ -4,11 +4,13 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); -const readdirDir = common.tmpDir; +const tmpdir = require('../common/tmpdir'); + +const readdirDir = tmpdir.path; const files = ['empty', 'files', 'for', 'just', 'testing']; // Make sure tmp directory is clean -common.refreshTmpDir(); +tmpdir.refresh(); // Create the necessary 
files files.forEach(function(currentFile) { diff --git a/test/parallel/test-fs-readfile-pipe-large.js b/test/parallel/test-fs-readfile-pipe-large.js index 17831f81f6d..740a3876a2d 100644 --- a/test/parallel/test-fs-readfile-pipe-large.js +++ b/test/parallel/test-fs-readfile-pipe-large.js @@ -18,9 +18,11 @@ if (process.argv[2] === 'child') { return; } -const filename = path.join(common.tmpDir, '/readfile_pipe_large_test.txt'); +const tmpdir = require('../common/tmpdir'); + +const filename = path.join(tmpdir.path, '/readfile_pipe_large_test.txt'); const dataExpected = 'a'.repeat(999999); -common.refreshTmpDir(); +tmpdir.refresh(); fs.writeFileSync(filename, dataExpected); const exec = require('child_process').exec; diff --git a/test/parallel/test-fs-readfile-unlink.js b/test/parallel/test-fs-readfile-unlink.js index 9ec2e849bee..1ed6fefb5cc 100644 --- a/test/parallel/test-fs-readfile-unlink.js +++ b/test/parallel/test-fs-readfile-unlink.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); // Test that unlink succeeds immediately after readFile completes. @@ -28,10 +28,12 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -const fileName = path.resolve(common.tmpDir, 'test.bin'); +const tmpdir = require('../common/tmpdir'); + +const fileName = path.resolve(tmpdir.path, 'test.bin'); const buf = Buffer.alloc(512 * 1024, 42); -common.refreshTmpDir(); +tmpdir.refresh(); fs.writeFileSync(fileName, buf); diff --git a/test/parallel/test-fs-readfile.js b/test/parallel/test-fs-readfile.js new file mode 100644 index 00000000000..287834ab201 --- /dev/null +++ b/test/parallel/test-fs-readfile.js @@ -0,0 +1,59 @@ +'use strict'; +const common = require('../common'); + +// This test ensures that fs.readFile correctly returns the +// contents of varying-sized files. 
+ +const tmpdir = require('../../test/common/tmpdir'); +const assert = require('assert'); +const fs = require('fs'); +const path = require('path'); + +const prefix = `.removeme-fs-readfile-${process.pid}`; + +tmpdir.refresh(); + +const fileInfo = [ + { name: path.join(tmpdir.path, `${prefix}-1K.txt`), + len: 1024, + }, + { name: path.join(tmpdir.path, `${prefix}-64K.txt`), + len: 64 * 1024, + }, + { name: path.join(tmpdir.path, `${prefix}-64KLessOne.txt`), + len: (64 * 1024) - 1, + }, + { name: path.join(tmpdir.path, `${prefix}-1M.txt`), + len: 1 * 1024 * 1024, + }, + { name: path.join(tmpdir.path, `${prefix}-1MPlusOne.txt`), + len: (1 * 1024 * 1024) + 1, + }, +]; + +// Populate each fileInfo (and file) with unique fill. +const sectorSize = 512; +for (const e of fileInfo) { + e.contents = Buffer.allocUnsafe(e.len); + + // This accounts for anything unusual in Node's implementation of readFile. + // Using e.g. 'aa...aa' would miss bugs like Node re-reading + // the same section twice instead of two separate sections. + for (let offset = 0; offset < e.len; offset += sectorSize) { + const fillByte = 256 * Math.random(); + const nBytesToFill = Math.min(sectorSize, e.len - offset); + e.contents.fill(fillByte, offset, offset + nBytesToFill); + } + + fs.writeFileSync(e.name, e.contents); +} +// All files are now populated. + +// Test readFile on each size. 
+for (const e of fileInfo) { + fs.readFile(e.name, common.mustCall((err, buf) => { + console.log(`Validating readFile on file ${e.name} of length ${e.len}`); + assert.ifError(err, 'An error occurred'); + assert.deepStrictEqual(buf, e.contents, 'Incorrect file contents'); + })); +} diff --git a/test/parallel/test-fs-readfilesync-pipe-large.js b/test/parallel/test-fs-readfilesync-pipe-large.js index f9dea90d104..18a06b1ba11 100644 --- a/test/parallel/test-fs-readfilesync-pipe-large.js +++ b/test/parallel/test-fs-readfilesync-pipe-large.js @@ -15,9 +15,11 @@ if (process.argv[2] === 'child') { return; } -const filename = path.join(common.tmpDir, '/readfilesync_pipe_large_test.txt'); +const tmpdir = require('../common/tmpdir'); + +const filename = path.join(tmpdir.path, '/readfilesync_pipe_large_test.txt'); const dataExpected = 'a'.repeat(999999); -common.refreshTmpDir(); +tmpdir.refresh(); fs.writeFileSync(filename, dataExpected); const exec = require('child_process').exec; diff --git a/test/parallel/test-fs-realpath.js b/test/parallel/test-fs-realpath.js index cee45b2c899..6cfd79cecfb 100644 --- a/test/parallel/test-fs-realpath.js +++ b/test/parallel/test-fs-realpath.js @@ -22,6 +22,7 @@ 'use strict'; const common = require('../common'); const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); const assert = require('assert'); const fs = require('fs'); @@ -31,9 +32,9 @@ let async_completed = 0; let async_expected = 0; const unlink = []; let skipSymlinks = false; -const tmpDir = common.tmpDir; +const tmpDir = tmpdir.path; -common.refreshTmpDir(); +tmpdir.refresh(); let root = '/'; let assertEqualPath = assert.strictEqual; @@ -391,7 +392,8 @@ function test_up_multiple(realpath, realpathSync, cb) { common.printSkipMessage('symlink test (no privs)'); return cb(); } - common.refreshTmpDir(); + const tmpdir = require('../common/tmpdir'); + tmpdir.refresh(); fs.mkdirSync(tmp('a'), 0o755); fs.mkdirSync(tmp('a/b'), 0o755); 
fs.symlinkSync('..', tmp('a/d'), 'dir'); diff --git a/test/parallel/test-fs-sir-writes-alot.js b/test/parallel/test-fs-sir-writes-alot.js index 3a3458a552e..5d8c3dfec90 100644 --- a/test/parallel/test-fs-sir-writes-alot.js +++ b/test/parallel/test-fs-sir-writes-alot.js @@ -20,14 +20,16 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const fs = require('fs'); const assert = require('assert'); const join = require('path').join; -const filename = join(common.tmpDir, 'out.txt'); +const tmpdir = require('../common/tmpdir'); -common.refreshTmpDir(); +const filename = join(tmpdir.path, 'out.txt'); + +tmpdir.refresh(); const fd = fs.openSync(filename, 'w'); diff --git a/test/parallel/test-fs-stream-double-close.js b/test/parallel/test-fs-stream-double-close.js index 3a8086d0ac0..8c0037b2431 100644 --- a/test/parallel/test-fs-stream-double-close.js +++ b/test/parallel/test-fs-stream-double-close.js @@ -23,15 +23,16 @@ const common = require('../common'); const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); test1(fs.createReadStream(__filename)); test2(fs.createReadStream(__filename)); test3(fs.createReadStream(__filename)); -test1(fs.createWriteStream(`${common.tmpDir}/dummy1`)); -test2(fs.createWriteStream(`${common.tmpDir}/dummy2`)); -test3(fs.createWriteStream(`${common.tmpDir}/dummy3`)); +test1(fs.createWriteStream(`${tmpdir.path}/dummy1`)); +test2(fs.createWriteStream(`${tmpdir.path}/dummy2`)); +test3(fs.createWriteStream(`${tmpdir.path}/dummy3`)); function test1(stream) { stream.destroy(); diff --git a/test/parallel/test-fs-symlink-dir-junction-relative.js b/test/parallel/test-fs-symlink-dir-junction-relative.js index 7cb50b0291d..308ab040488 100644 --- a/test/parallel/test-fs-symlink-dir-junction-relative.js +++ b/test/parallel/test-fs-symlink-dir-junction-relative.js @@ -28,12 +28,14 @@ const assert = require('assert'); const path = 
require('path'); const fs = require('fs'); -const linkPath1 = path.join(common.tmpDir, 'junction1'); -const linkPath2 = path.join(common.tmpDir, 'junction2'); +const tmpdir = require('../common/tmpdir'); + +const linkPath1 = path.join(tmpdir.path, 'junction1'); +const linkPath2 = path.join(tmpdir.path, 'junction2'); const linkTarget = fixtures.fixturesDir; const linkData = fixtures.fixturesDir; -common.refreshTmpDir(); +tmpdir.refresh(); // Test fs.symlink() fs.symlink(linkData, linkPath1, 'junction', common.mustCall(function(err) { diff --git a/test/parallel/test-fs-symlink-dir-junction.js b/test/parallel/test-fs-symlink-dir-junction.js index f7ba3a6d384..cd9459bf44a 100644 --- a/test/parallel/test-fs-symlink-dir-junction.js +++ b/test/parallel/test-fs-symlink-dir-junction.js @@ -26,11 +26,13 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); +const tmpdir = require('../common/tmpdir'); + // test creating and reading symbolic link const linkData = fixtures.path('cycles/'); -const linkPath = path.join(common.tmpDir, 'cycles_link'); +const linkPath = path.join(tmpdir.path, 'cycles_link'); -common.refreshTmpDir(); +tmpdir.refresh(); fs.symlink(linkData, linkPath, 'junction', common.mustCall(function(err) { assert.ifError(err); diff --git a/test/parallel/test-fs-symlink.js b/test/parallel/test-fs-symlink.js index a830b12c246..19903fff58c 100644 --- a/test/parallel/test-fs-symlink.js +++ b/test/parallel/test-fs-symlink.js @@ -32,11 +32,12 @@ const fs = require('fs'); let linkTime; let fileTime; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); // test creating and reading symbolic link const linkData = fixtures.path('/cycles/root.js'); -const linkPath = path.join(common.tmpDir, 'symlink1.js'); +const linkPath = path.join(tmpdir.path, 'symlink1.js'); fs.symlink(linkData, linkPath, common.mustCall(function(err) { assert.ifError(err); diff --git a/test/parallel/test-fs-syncwritestream.js 
b/test/parallel/test-fs-syncwritestream.js index 236c412c45b..a014277a6ba 100644 --- a/test/parallel/test-fs-syncwritestream.js +++ b/test/parallel/test-fs-syncwritestream.js @@ -21,9 +21,10 @@ if (process.argv[2] === 'child') { return; } -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const filename = path.join(common.tmpDir, 'stdout'); +const filename = path.join(tmpdir.path, 'stdout'); const stdoutFd = fs.openSync(filename, 'w'); const proc = spawn(process.execPath, [__filename, 'child'], { diff --git a/test/parallel/test-fs-truncate-GH-6233.js b/test/parallel/test-fs-truncate-GH-6233.js index 07bd272024f..87663c63616 100644 --- a/test/parallel/test-fs-truncate-GH-6233.js +++ b/test/parallel/test-fs-truncate-GH-6233.js @@ -24,9 +24,11 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); -const filename = `${common.tmpDir}/truncate-file.txt`; +const tmpdir = require('../common/tmpdir'); -common.refreshTmpDir(); +const filename = `${tmpdir.path}/truncate-file.txt`; + +tmpdir.refresh(); // Synchronous test. 
{ diff --git a/test/parallel/test-fs-truncate-fd.js b/test/parallel/test-fs-truncate-fd.js index 6776dfb2668..ee6f66f720a 100644 --- a/test/parallel/test-fs-truncate-fd.js +++ b/test/parallel/test-fs-truncate-fd.js @@ -3,8 +3,9 @@ const common = require('../common'); const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const tmp = common.tmpDir; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +const tmp = tmpdir.path; +tmpdir.refresh(); const filename = path.resolve(tmp, 'truncate-file.txt'); fs.writeFileSync(filename, 'hello world', 'utf8'); diff --git a/test/parallel/test-fs-truncate-sync.js b/test/parallel/test-fs-truncate-sync.js index a7ce2f4d97f..66250cf4386 100644 --- a/test/parallel/test-fs-truncate-sync.js +++ b/test/parallel/test-fs-truncate-sync.js @@ -1,11 +1,12 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const tmp = common.tmpDir; +const tmpdir = require('../common/tmpdir'); +const tmp = tmpdir.path; -common.refreshTmpDir(); +tmpdir.refresh(); const filename = path.resolve(tmp, 'truncate-sync-file.txt'); diff --git a/test/parallel/test-fs-truncate.js b/test/parallel/test-fs-truncate.js index b32d1ceb2b4..28a9852d82a 100644 --- a/test/parallel/test-fs-truncate.js +++ b/test/parallel/test-fs-truncate.js @@ -24,11 +24,12 @@ const common = require('../common'); const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const tmp = common.tmpDir; +const tmpdir = require('../common/tmpdir'); +const tmp = tmpdir.path; const filename = path.resolve(tmp, 'truncate-file.txt'); const data = Buffer.alloc(1024 * 16, 'x'); -common.refreshTmpDir(); +tmpdir.refresh(); let stat; diff --git a/test/parallel/test-fs-utimes.js b/test/parallel/test-fs-utimes.js index ca3471cddf0..db8201d7cb9 100644 --- a/test/parallel/test-fs-utimes.js +++ 
b/test/parallel/test-fs-utimes.js @@ -25,7 +25,8 @@ const assert = require('assert'); const util = require('util'); const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); let tests_ok = 0; let tests_run = 0; @@ -73,8 +74,8 @@ function testIt(atime, mtime, callback) { // test synchronized code paths, these functions throw on failure // function syncTests() { - fs.utimesSync(common.tmpDir, atime, mtime); - expect_ok('utimesSync', common.tmpDir, undefined, atime, mtime); + fs.utimesSync(tmpdir.path, atime, mtime); + expect_ok('utimesSync', tmpdir.path, undefined, atime, mtime); tests_run++; // some systems don't have futimes @@ -110,17 +111,17 @@ function testIt(atime, mtime, callback) { // // test async code paths // - fs.utimes(common.tmpDir, atime, mtime, common.mustCall(function(err) { - expect_ok('utimes', common.tmpDir, err, atime, mtime); + fs.utimes(tmpdir.path, atime, mtime, common.mustCall(function(err) { + expect_ok('utimes', tmpdir.path, err, atime, mtime); fs.utimes('foobarbaz', atime, mtime, common.mustCall(function(err) { expect_errno('utimes', 'foobarbaz', err, 'ENOENT'); // don't close this fd if (common.isWindows) { - fd = fs.openSync(common.tmpDir, 'r+'); + fd = fs.openSync(tmpdir.path, 'r+'); } else { - fd = fs.openSync(common.tmpDir, 'r'); + fd = fs.openSync(tmpdir.path, 'r'); } fs.futimes(fd, atime, mtime, common.mustCall(function(err) { @@ -145,7 +146,7 @@ function testIt(atime, mtime, callback) { tests_run++; } -const stats = fs.statSync(common.tmpDir); +const stats = fs.statSync(tmpdir.path); // run tests const runTest = common.mustCall(testIt, 1); @@ -174,7 +175,7 @@ process.on('exit', function() { // Ref: https://github.com/nodejs/node/issues/13255 -const path = `${common.tmpDir}/test-utimes-precision`; +const path = `${tmpdir.path}/test-utimes-precision`; fs.writeFileSync(path, ''); // test Y2K38 for all platforms [except 'arm', and 'SunOS'] diff --git 
a/test/parallel/test-fs-watch-encoding.js b/test/parallel/test-fs-watch-encoding.js index 5226899d2f3..1cea6255098 100644 --- a/test/parallel/test-fs-watch-encoding.js +++ b/test/parallel/test-fs-watch-encoding.js @@ -22,10 +22,11 @@ if (common.isAIX) const fs = require('fs'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const fn = '新建文夹件.txt'; -const a = path.join(common.tmpDir, fn); +const a = path.join(tmpdir.path, fn); const watchers = new Set(); @@ -42,7 +43,7 @@ function unregisterWatcher(watcher) { } const watcher1 = fs.watch( - common.tmpDir, + tmpdir.path, { encoding: 'hex' }, (event, filename) => { if (['e696b0e5bbbae69687e5a4b9e4bbb62e747874', null].includes(filename)) @@ -52,7 +53,7 @@ const watcher1 = fs.watch( registerWatcher(watcher1); const watcher2 = fs.watch( - common.tmpDir, + tmpdir.path, (event, filename) => { if ([fn, null].includes(filename)) done(watcher2); @@ -61,7 +62,7 @@ const watcher2 = fs.watch( registerWatcher(watcher2); const watcher3 = fs.watch( - common.tmpDir, + tmpdir.path, { encoding: 'buffer' }, (event, filename) => { if (filename instanceof Buffer && filename.toString('utf8') === fn) diff --git a/test/parallel/test-fs-watch-recursive.js b/test/parallel/test-fs-watch-recursive.js index 3e3746df1ef..82d87aa2ecb 100644 --- a/test/parallel/test-fs-watch-recursive.js +++ b/test/parallel/test-fs-watch-recursive.js @@ -9,10 +9,12 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const testDir = common.tmpDir; +const tmpdir = require('../common/tmpdir'); + +const testDir = tmpdir.path; const filenameOne = 'watch.txt'; -common.refreshTmpDir(); +tmpdir.refresh(); const testsubdir = fs.mkdtempSync(testDir + path.sep); const relativePathOne = path.join(path.basename(testsubdir), filenameOne); diff --git a/test/parallel/test-fs-watch.js b/test/parallel/test-fs-watch.js index a82535d5378..7affe370c7e 100644 --- 
a/test/parallel/test-fs-watch.js +++ b/test/parallel/test-fs-watch.js @@ -14,7 +14,7 @@ class WatchTestCase { this.field = field; this.shouldSkip = !shouldInclude; } - get dirPath() { return join(common.tmpDir, this.dirName); } + get dirPath() { return join(tmpdir.path, this.dirName); } get filePath() { return join(this.dirPath, this.fileName); } } @@ -35,7 +35,8 @@ const cases = [ ) ]; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); for (const testCase of cases) { if (testCase.shouldSkip) continue; diff --git a/test/parallel/test-fs-watchfile.js b/test/parallel/test-fs-watchfile.js index 163eac5ae90..3c24ae84ac0 100644 --- a/test/parallel/test-fs-watchfile.js +++ b/test/parallel/test-fs-watchfile.js @@ -5,6 +5,8 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); +const tmpdir = require('../common/tmpdir'); + // Basic usage tests. common.expectsError( () => { @@ -28,7 +30,7 @@ common.expectsError(function() { fs.watchFile(new Object(), common.mustNotCall()); }, { code: 'ERR_INVALID_ARG_TYPE', type: TypeError }); -const enoentFile = path.join(common.tmpDir, 'non-existent-file'); +const enoentFile = path.join(tmpdir.path, 'non-existent-file'); const expectedStatObject = new fs.Stats( 0, // dev 0, // mode @@ -46,7 +48,7 @@ const expectedStatObject = new fs.Stats( Date.UTC(1970, 0, 1, 0, 0, 0) // birthtime ); -common.refreshTmpDir(); +tmpdir.refresh(); // If the file initially didn't exist, and gets created at a later point of // time, the callback should be invoked again with proper values in stat object @@ -80,7 +82,7 @@ watcher.start(); // should not crash // Watch events should callback with a filename on supported systems. // Omitting AIX. It works but not reliably. 
if (common.isLinux || common.isOSX || common.isWindows) { - const dir = path.join(common.tmpDir, 'watch'); + const dir = path.join(tmpdir.path, 'watch'); fs.mkdir(dir, common.mustCall(function(err) { if (err) assert.fail(err); diff --git a/test/parallel/test-fs-write-buffer.js b/test/parallel/test-fs-write-buffer.js index ed998958ae0..6e6154642a5 100644 --- a/test/parallel/test-fs-write-buffer.js +++ b/test/parallel/test-fs-write-buffer.js @@ -26,11 +26,12 @@ const path = require('path'); const fs = require('fs'); const expected = Buffer.from('hello'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); // fs.write with all parameters provided: { - const filename = path.join(common.tmpDir, 'write1.txt'); + const filename = path.join(tmpdir.path, 'write1.txt'); fs.open(filename, 'w', 0o644, common.mustCall((err, fd) => { assert.ifError(err); @@ -50,7 +51,7 @@ common.refreshTmpDir(); // fs.write with a buffer, without the length parameter: { - const filename = path.join(common.tmpDir, 'write2.txt'); + const filename = path.join(tmpdir.path, 'write2.txt'); fs.open(filename, 'w', 0o644, common.mustCall((err, fd) => { assert.ifError(err); @@ -70,7 +71,7 @@ common.refreshTmpDir(); // fs.write with a buffer, without the offset and length parameters: { - const filename = path.join(common.tmpDir, 'write3.txt'); + const filename = path.join(tmpdir.path, 'write3.txt'); fs.open(filename, 'w', 0o644, common.mustCall(function(err, fd) { assert.ifError(err); @@ -90,7 +91,7 @@ common.refreshTmpDir(); // fs.write with the offset passed as undefined followed by the callback: { - const filename = path.join(common.tmpDir, 'write4.txt'); + const filename = path.join(tmpdir.path, 'write4.txt'); fs.open(filename, 'w', 0o644, common.mustCall(function(err, fd) { assert.ifError(err); @@ -110,7 +111,7 @@ common.refreshTmpDir(); // fs.write with offset and length passed as undefined followed by the callback: { - const filename = path.join(common.tmpDir, 
'write5.txt'); + const filename = path.join(tmpdir.path, 'write5.txt'); fs.open(filename, 'w', 0o644, common.mustCall((err, fd) => { assert.ifError(err); @@ -130,7 +131,7 @@ common.refreshTmpDir(); // fs.write with a Uint8Array, without the offset and length parameters: { - const filename = path.join(common.tmpDir, 'write6.txt'); + const filename = path.join(tmpdir.path, 'write6.txt'); fs.open(filename, 'w', 0o644, common.mustCall((err, fd) => { assert.ifError(err); diff --git a/test/parallel/test-fs-write-file-buffer.js b/test/parallel/test-fs-write-file-buffer.js index f2039c87ab4..82fb7ad69a4 100644 --- a/test/parallel/test-fs-write-file-buffer.js +++ b/test/parallel/test-fs-write-file-buffer.js @@ -20,7 +20,7 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const join = require('path').join; const util = require('util'); const fs = require('fs'); @@ -46,9 +46,10 @@ let data = [ data = data.join('\n'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const buf = Buffer.from(data, 'base64'); -fs.writeFileSync(join(common.tmpDir, 'test.jpg'), buf); +fs.writeFileSync(join(tmpdir.path, 'test.jpg'), buf); util.log('Done!'); diff --git a/test/parallel/test-fs-write-file-invalid-path.js b/test/parallel/test-fs-write-file-invalid-path.js index c45eaccf2bc..a4c8ff5bf73 100644 --- a/test/parallel/test-fs-write-file-invalid-path.js +++ b/test/parallel/test-fs-write-file-invalid-path.js @@ -8,7 +8,8 @@ const path = require('path'); if (!common.isWindows) common.skip('This test is for Windows only.'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const DATA_VALUE = 'hello'; @@ -17,7 +18,7 @@ const DATA_VALUE = 'hello'; const RESERVED_CHARACTERS = '<>"|?*'; [...RESERVED_CHARACTERS].forEach((ch) => { - const pathname = path.join(common.tmpDir, `somefile_${ch}`); + const pathname = path.join(tmpdir.path, `somefile_${ch}`); 
assert.throws( () => { fs.writeFileSync(pathname, DATA_VALUE); @@ -28,7 +29,7 @@ const RESERVED_CHARACTERS = '<>"|?*'; // Test for ':' (NTFS data streams). // Refs: https://msdn.microsoft.com/en-us/library/windows/desktop/bb540537.aspx -const pathname = path.join(common.tmpDir, 'foo:bar'); +const pathname = path.join(tmpdir.path, 'foo:bar'); fs.writeFileSync(pathname, DATA_VALUE); let content = ''; diff --git a/test/parallel/test-fs-write-file-sync.js b/test/parallel/test-fs-write-file-sync.js index aa3864962c9..9a19b9f6e93 100644 --- a/test/parallel/test-fs-write-file-sync.js +++ b/test/parallel/test-fs-write-file-sync.js @@ -46,10 +46,11 @@ if (common.isWindows) { mode = 0o755; } -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); // Test writeFileSync -const file1 = path.join(common.tmpDir, 'testWriteFileSync.txt'); +const file1 = path.join(tmpdir.path, 'testWriteFileSync.txt'); fs.writeFileSync(file1, '123', { mode }); @@ -59,7 +60,7 @@ assert.strictEqual(content, '123'); assert.strictEqual(fs.statSync(file1).mode & 0o777, mode); // Test appendFileSync -const file2 = path.join(common.tmpDir, 'testAppendFileSync.txt'); +const file2 = path.join(tmpdir.path, 'testAppendFileSync.txt'); fs.appendFileSync(file2, 'abc', { mode }); @@ -69,7 +70,7 @@ assert.strictEqual(content, 'abc'); assert.strictEqual(fs.statSync(file2).mode & mode, mode); // Test writeFileSync with file descriptor -const file3 = path.join(common.tmpDir, 'testWriteFileSyncFd.txt'); +const file3 = path.join(tmpdir.path, 'testWriteFileSyncFd.txt'); const fd = fs.openSync(file3, 'w+', mode); fs.writeFileSync(fd, '123'); diff --git a/test/parallel/test-fs-write-file-uint8array.js b/test/parallel/test-fs-write-file-uint8array.js index 219379c77a9..592bdb05814 100644 --- a/test/parallel/test-fs-write-file-uint8array.js +++ b/test/parallel/test-fs-write-file-uint8array.js @@ -4,9 +4,10 @@ const assert = require('assert'); const fs = require('fs'); const join = 
require('path').join; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const filename = join(common.tmpDir, 'test.txt'); +const filename = join(tmpdir.path, 'test.txt'); const s = '南越国是前203年至前111年存在于岭南地区的一个国家,国都位于番禺,疆域包括今天中国的广东、' + '广西两省区的大部份地区,福建省、湖南、贵州、云南的一小部份地区和越南的北部。' + diff --git a/test/parallel/test-fs-write-file.js b/test/parallel/test-fs-write-file.js index 6dd1a58ecba..b137e555472 100644 --- a/test/parallel/test-fs-write-file.js +++ b/test/parallel/test-fs-write-file.js @@ -25,9 +25,10 @@ const assert = require('assert'); const fs = require('fs'); const join = require('path').join; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const filename = join(common.tmpDir, 'test.txt'); +const filename = join(tmpdir.path, 'test.txt'); const n = 220; const s = '南越国是前203年至前111年存在于岭南地区的一个国家,国都位于番禺,疆域包括今天中国的广东、' + @@ -48,7 +49,7 @@ fs.writeFile(filename, s, common.mustCall(function(e) { })); // test that writeFile accepts buffers -const filename2 = join(common.tmpDir, 'test2.txt'); +const filename2 = join(tmpdir.path, 'test2.txt'); const buf = Buffer.from(s, 'utf8'); fs.writeFile(filename2, buf, common.mustCall(function(e) { @@ -62,7 +63,7 @@ fs.writeFile(filename2, buf, common.mustCall(function(e) { })); // test that writeFile accepts numbers. 
-const filename3 = join(common.tmpDir, 'test3.txt'); +const filename3 = join(tmpdir.path, 'test3.txt'); const m = 0o600; fs.writeFile(filename3, n, { mode: m }, common.mustCall(function(e) { @@ -82,7 +83,7 @@ fs.writeFile(filename3, n, { mode: m }, common.mustCall(function(e) { })); // test that writeFile accepts file descriptors -const filename4 = join(common.tmpDir, 'test4.txt'); +const filename4 = join(tmpdir.path, 'test4.txt'); fs.open(filename4, 'w+', common.mustCall(function(e, fd) { assert.ifError(e); diff --git a/test/parallel/test-fs-write-stream-autoclose-option.js b/test/parallel/test-fs-write-stream-autoclose-option.js index cc22ef660a1..e39f4d615ab 100644 --- a/test/parallel/test-fs-write-stream-autoclose-option.js +++ b/test/parallel/test-fs-write-stream-autoclose-option.js @@ -4,8 +4,10 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const file = path.join(common.tmpDir, 'write-autoclose-opt1.txt'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); + +const file = path.join(tmpdir.path, 'write-autoclose-opt1.txt'); +tmpdir.refresh(); let stream = fs.createWriteStream(file, { flags: 'w+', autoClose: false }); stream.write('Test1'); stream.end(); diff --git a/test/parallel/test-fs-write-stream-change-open.js b/test/parallel/test-fs-write-stream-change-open.js index 50860f2e405..8f79e59427e 100644 --- a/test/parallel/test-fs-write-stream-change-open.js +++ b/test/parallel/test-fs-write-stream-change-open.js @@ -20,14 +20,16 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const file = path.join(common.tmpDir, 'write.txt'); +const tmpdir = require('../common/tmpdir'); -common.refreshTmpDir(); +const file = path.join(tmpdir.path, 'write.txt'); + +tmpdir.refresh(); const stream = fs.WriteStream(file); const _fs_close = fs.close; diff --git a/test/parallel/test-fs-write-stream-close-without-callback.js b/test/parallel/test-fs-write-stream-close-without-callback.js index 95b52fe7af7..b07c799efdb 100644 --- a/test/parallel/test-fs-write-stream-close-without-callback.js +++ b/test/parallel/test-fs-write-stream-close-without-callback.js @@ -1,12 +1,13 @@ 'use strict'; -const common = require('../common'); +require('../common'); const fs = require('fs'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const s = fs.createWriteStream(path.join(common.tmpDir, 'nocallback')); +const s = fs.createWriteStream(path.join(tmpdir.path, 'nocallback')); s.end('hello world'); s.close(); diff --git a/test/parallel/test-fs-write-stream-double-close.js b/test/parallel/test-fs-write-stream-double-close.js index 10ce9077a0a..28e53061f74 100644 --- a/test/parallel/test-fs-write-stream-double-close.js +++ b/test/parallel/test-fs-write-stream-double-close.js @@ -5,17 +5,18 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); { - const s = fs.createWriteStream(path.join(common.tmpDir, 'rw')); + const s = fs.createWriteStream(path.join(tmpdir.path, 'rw')); s.close(common.mustCall()); s.close(common.mustCall()); } { - const s = fs.createWriteStream(path.join(common.tmpDir, 'rw2')); + const s = fs.createWriteStream(path.join(tmpdir.path, 'rw2')); let emits = 0; s.on('close', () => { @@ -36,7 +37,7 @@ 
common.refreshTmpDir(); } { - const s = fs.createWriteStream(path.join(common.tmpDir, 'rw'), { + const s = fs.createWriteStream(path.join(tmpdir.path, 'rw'), { autoClose: false }); diff --git a/test/parallel/test-fs-write-stream-encoding.js b/test/parallel/test-fs-write-stream-encoding.js index 5fb81088772..5803d99fd7b 100644 --- a/test/parallel/test-fs-write-stream-encoding.js +++ b/test/parallel/test-fs-write-stream-encoding.js @@ -1,17 +1,18 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const fixtures = require('../common/fixtures'); const fs = require('fs'); const path = require('path'); const stream = require('stream'); +const tmpdir = require('../common/tmpdir'); const firstEncoding = 'base64'; const secondEncoding = 'latin1'; const examplePath = fixtures.path('x.txt'); -const dummyPath = path.join(common.tmpDir, 'x.txt'); +const dummyPath = path.join(tmpdir.path, 'x.txt'); -common.refreshTmpDir(); +tmpdir.refresh(); const exampleReadStream = fs.createReadStream(examplePath, { encoding: firstEncoding diff --git a/test/parallel/test-fs-write-stream-end.js b/test/parallel/test-fs-write-stream-end.js index 9c889b94e4c..36e7cb5504c 100644 --- a/test/parallel/test-fs-write-stream-end.js +++ b/test/parallel/test-fs-write-stream-end.js @@ -25,17 +25,18 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); { - const file = path.join(common.tmpDir, 'write-end-test0.txt'); + const file = path.join(tmpdir.path, 'write-end-test0.txt'); const stream = fs.createWriteStream(file); stream.end(); stream.on('close', common.mustCall()); } { - const file = path.join(common.tmpDir, 'write-end-test1.txt'); + const file = path.join(tmpdir.path, 'write-end-test1.txt'); const stream = fs.createWriteStream(file); stream.end('a\n', 'utf8'); stream.on('close', common.mustCall(function() { diff 
--git a/test/parallel/test-fs-write-stream-err.js b/test/parallel/test-fs-write-stream-err.js index 077bfb24b75..36bf9dbcfb0 100644 --- a/test/parallel/test-fs-write-stream-err.js +++ b/test/parallel/test-fs-write-stream-err.js @@ -24,9 +24,10 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const stream = fs.createWriteStream(`${common.tmpDir}/out`, { +const stream = fs.createWriteStream(`${tmpdir.path}/out`, { highWaterMark: 10 }); const err = new Error('BAM'); diff --git a/test/parallel/test-fs-write-stream-throw-type-error.js b/test/parallel/test-fs-write-stream-throw-type-error.js index 42538906a5b..73312afa6b6 100644 --- a/test/parallel/test-fs-write-stream-throw-type-error.js +++ b/test/parallel/test-fs-write-stream-throw-type-error.js @@ -4,9 +4,11 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -const example = path.join(common.tmpDir, 'dummy'); +const tmpdir = require('../common/tmpdir'); -common.refreshTmpDir(); +const example = path.join(tmpdir.path, 'dummy'); + +tmpdir.refresh(); assert.doesNotThrow(() => { fs.createWriteStream(example, undefined); diff --git a/test/parallel/test-fs-write-stream.js b/test/parallel/test-fs-write-stream.js index 1dc25547a9b..e93f65e604c 100644 --- a/test/parallel/test-fs-write-stream.js +++ b/test/parallel/test-fs-write-stream.js @@ -25,9 +25,11 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const file = path.join(common.tmpDir, 'write.txt'); +const tmpdir = require('../common/tmpdir'); -common.refreshTmpDir(); +const file = path.join(tmpdir.path, 'write.txt'); + +tmpdir.refresh(); { const stream = fs.WriteStream(file); diff --git a/test/parallel/test-fs-write-string-coerce.js b/test/parallel/test-fs-write-string-coerce.js index 9356bc71850..4581c319277 100644 --- 
a/test/parallel/test-fs-write-string-coerce.js +++ b/test/parallel/test-fs-write-string-coerce.js @@ -4,9 +4,10 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const fn = path.join(common.tmpDir, 'write-string-coerce.txt'); +const fn = path.join(tmpdir.path, 'write-string-coerce.txt'); const data = true; const expected = String(data); diff --git a/test/parallel/test-fs-write-sync.js b/test/parallel/test-fs-write-sync.js index 41a9f2c8887..4ca7a1dd570 100644 --- a/test/parallel/test-fs-write-sync.js +++ b/test/parallel/test-fs-write-sync.js @@ -20,13 +20,14 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const filename = path.join(common.tmpDir, 'write.txt'); +const tmpdir = require('../common/tmpdir'); +const filename = path.join(tmpdir.path, 'write.txt'); -common.refreshTmpDir(); +tmpdir.refresh(); // fs.writeSync with all parameters provided: { diff --git a/test/parallel/test-fs-write.js b/test/parallel/test-fs-write.js index 385ed60a20f..5253c4fd14e 100644 --- a/test/parallel/test-fs-write.js +++ b/test/parallel/test-fs-write.js @@ -25,9 +25,13 @@ const common = require('../common'); const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const fn = path.join(common.tmpDir, 'write.txt'); -const fn2 = path.join(common.tmpDir, 'write2.txt'); -const fn3 = path.join(common.tmpDir, 'write3.txt'); +const tmpdir = require('../common/tmpdir'); + +tmpdir.refresh(); + +const fn = path.join(tmpdir.path, 'write.txt'); +const fn2 = path.join(tmpdir.path, 'write2.txt'); +const fn3 = path.join(tmpdir.path, 'write3.txt'); const expected = 'ümlaut.'; const constants = fs.constants; @@ -35,8 +39,6 @@ if (!common.isChakraEngine) { /* eslint-disable 
no-undef */ common.allowGlobals(externalizeString, isOneByteString, x); - common.refreshTmpDir(); - { const expected = 'ümlaut eins'; // Must be a unique string. externalizeString(expected); @@ -77,6 +79,7 @@ if (!common.isChakraEngine) { assert.strictEqual(expected, fs.readFileSync(fn, 'utf8')); } } +/* eslint-enable no-undef */ fs.open(fn, 'w', 0o644, common.mustCall(function(err, fd) { assert.ifError(err); diff --git a/test/parallel/test-http-agent-getname.js b/test/parallel/test-http-agent-getname.js index 4b4e9ac26b4..31dc255ba55 100644 --- a/test/parallel/test-http-agent-getname.js +++ b/test/parallel/test-http-agent-getname.js @@ -1,10 +1,12 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const http = require('http'); const path = require('path'); +const tmpdir = require('../common/tmpdir'); + const agent = new http.Agent(); // default to localhost @@ -33,7 +35,7 @@ assert.strictEqual( ); // unix socket -const socketPath = path.join(common.tmpDir, 'foo', 'bar'); +const socketPath = path.join(tmpdir.path, 'foo', 'bar'); assert.strictEqual( agent.getName({ socketPath diff --git a/test/parallel/test-http-chunk-problem.js b/test/parallel/test-http-chunk-problem.js index 46a7406e745..f999f055fc0 100644 --- a/test/parallel/test-http-chunk-problem.js +++ b/test/parallel/test-http-chunk-problem.js @@ -37,7 +37,9 @@ if (process.argv[2] === 'shasum') { const http = require('http'); const cp = require('child_process'); -const filename = require('path').join(common.tmpDir, 'big'); +const tmpdir = require('../common/tmpdir'); + +const filename = require('path').join(tmpdir.path, 'big'); let server; function executeRequest(cb) { @@ -59,7 +61,7 @@ function executeRequest(cb) { } -common.refreshTmpDir(); +tmpdir.refresh(); const ddcmd = common.ddCommand(filename, 10240); diff --git a/test/parallel/test-http-client-abort-keep-alive-queued-unix-socket.js 
b/test/parallel/test-http-client-abort-keep-alive-queued-unix-socket.js index efcbfe8dc58..745ed4ceeee 100644 --- a/test/parallel/test-http-client-abort-keep-alive-queued-unix-socket.js +++ b/test/parallel/test-http-client-abort-keep-alive-queued-unix-socket.js @@ -16,7 +16,8 @@ class Agent extends http.Agent { const server = http.createServer((req, res) => res.end()); const socketPath = common.PIPE; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); server.listen(socketPath, common.mustCall(() => { const agent = new Agent({ diff --git a/test/parallel/test-http-client-abort-unix-socket.js b/test/parallel/test-http-client-abort-unix-socket.js index 3fb2cd9b869..bf666b7935a 100644 --- a/test/parallel/test-http-client-abort-unix-socket.js +++ b/test/parallel/test-http-client-abort-unix-socket.js @@ -12,7 +12,8 @@ class Agent extends http.Agent { } } -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); server.listen(common.PIPE, common.mustCall(() => { const req = http.get({ diff --git a/test/parallel/test-http-client-headers-array.js b/test/parallel/test-http-client-headers-array.js new file mode 100644 index 00000000000..dffe04bb108 --- /dev/null +++ b/test/parallel/test-http-client-headers-array.js @@ -0,0 +1,60 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const http = require('http'); + +function execute(options) { + http.createServer(function(req, res) { + const expectHeaders = { + 'x-foo': 'boom', + cookie: 'a=1; b=2; c=3', + connection: 'close' + }; + + // no Host header when you set headers an array + if (!Array.isArray(options.headers)) { + expectHeaders.host = `localhost:${this.address().port}`; + } + + // no Authorization header when you set headers an array + if (options.auth && !Array.isArray(options.headers)) { + expectHeaders.authorization = + `Basic ${Buffer.from(options.auth).toString('base64')}`; + } + + this.close(); + + 
assert.deepStrictEqual(req.headers, expectHeaders); + + res.end(); + }).listen(0, function() { + options = Object.assign(options, { + port: this.address().port, + path: '/' + }); + const req = http.request(options); + req.end(); + }); +} + +// should be the same except for implicit Host header on the first two +execute({ headers: { 'x-foo': 'boom', 'cookie': 'a=1; b=2; c=3' } }); +execute({ headers: { 'x-foo': 'boom', 'cookie': [ 'a=1', 'b=2', 'c=3' ] } }); +execute({ headers: [[ 'x-foo', 'boom' ], [ 'cookie', 'a=1; b=2; c=3' ]] }); +execute({ headers: [ + [ 'x-foo', 'boom' ], [ 'cookie', [ 'a=1', 'b=2', 'c=3' ]] +] }); +execute({ headers: [ + [ 'x-foo', 'boom' ], [ 'cookie', 'a=1' ], + [ 'cookie', 'b=2' ], [ 'cookie', 'c=3'] +] }); + +// Authorization and Host header both missing from the second +execute({ auth: 'foo:bar', headers: + { 'x-foo': 'boom', 'cookie': 'a=1; b=2; c=3' } }); +execute({ auth: 'foo:bar', headers: [ + [ 'x-foo', 'boom' ], [ 'cookie', 'a=1' ], + [ 'cookie', 'b=2' ], [ 'cookie', 'c=3'] +] }); diff --git a/test/parallel/test-http-client-pipe-end.js b/test/parallel/test-http-client-pipe-end.js index 4b9f168e990..9dcdbe4a49d 100644 --- a/test/parallel/test-http-client-pipe-end.js +++ b/test/parallel/test-http-client-pipe-end.js @@ -34,7 +34,8 @@ const server = http.createServer(function(req, res) { }); }); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); server.listen(common.PIPE, function() { const req = http.request({ diff --git a/test/parallel/test-http-client-response-domain.js b/test/parallel/test-http-client-response-domain.js index ff73fd51cc5..0a32e929141 100644 --- a/test/parallel/test-http-client-response-domain.js +++ b/test/parallel/test-http-client-response-domain.js @@ -27,7 +27,8 @@ const domain = require('domain'); let d; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); // first fire up a simple HTTP server const server = 
http.createServer(function(req, res) { diff --git a/test/parallel/test-http-dns-error.js b/test/parallel/test-http-dns-error.js index 723b7106476..900cf40e6b2 100644 --- a/test/parallel/test-http-dns-error.js +++ b/test/parallel/test-http-dns-error.js @@ -30,30 +30,41 @@ const http = require('http'); const https = require('https'); const host = '*'.repeat(256); +const MAX_TRIES = 5; -function do_not_call() { - throw new Error('This function should not have been called.'); -} - -function test(mod) { - +function tryGet(mod, tries) { // Bad host name should not throw an uncatchable exception. // Ensure that there is time to attach an error listener. - const req1 = mod.get({ host: host, port: 42 }, do_not_call); - req1.on('error', common.mustCall(function(err) { + const req = mod.get({ host: host, port: 42 }, common.mustNotCall()); + req.on('error', common.mustCall(function(err) { + if (err.code === 'EAGAIN' && tries < MAX_TRIES) { + tryGet(mod, ++tries); + return; + } assert.strictEqual(err.code, 'ENOTFOUND'); })); // http.get() called req1.end() for us +} - const req2 = mod.request({ +function tryRequest(mod, tries) { + const req = mod.request({ method: 'GET', host: host, port: 42 - }, do_not_call); - req2.on('error', common.mustCall(function(err) { + }, common.mustNotCall()); + req.on('error', common.mustCall(function(err) { + if (err.code === 'EAGAIN' && tries < MAX_TRIES) { + tryRequest(mod, ++tries); + return; + } assert.strictEqual(err.code, 'ENOTFOUND'); })); - req2.end(); + req.end(); +} + +function test(mod) { + tryGet(mod, 0); + tryRequest(mod, 0); } if (common.hasCrypto) { diff --git a/test/parallel/test-http-get-pipeline-problem.js b/test/parallel/test-http-get-pipeline-problem.js index 3182c1faacd..b8b11e7e77c 100644 --- a/test/parallel/test-http-get-pipeline-problem.js +++ b/test/parallel/test-http-get-pipeline-problem.js @@ -32,7 +32,8 @@ const Countdown = require('../common/countdown'); http.globalAgent.maxSockets = 1; -common.refreshTmpDir(); +const 
tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const image = fixtures.readSync('/person.jpg'); @@ -68,7 +69,7 @@ server.listen(0, function() { http.get(opts, function(res) { console.error(`recv ${x}`); - const s = fs.createWriteStream(`${common.tmpDir}/${x}.jpg`); + const s = fs.createWriteStream(`${tmpdir.path}/${x}.jpg`); res.pipe(s); s.on('finish', function() { @@ -85,13 +86,13 @@ server.listen(0, function() { function checkFiles() { // Should see 1.jpg, 2.jpg, ..., 100.jpg in tmpDir - const files = fs.readdirSync(common.tmpDir); + const files = fs.readdirSync(tmpdir.path); assert(total <= files.length); for (let i = 0; i < total; i++) { const fn = `${i}.jpg`; assert.ok(files.includes(fn), `couldn't find '${fn}'`); - const stat = fs.statSync(`${common.tmpDir}/${fn}`); + const stat = fs.statSync(`${tmpdir.path}/${fn}`); assert.strictEqual( image.length, stat.size, `size doesn't match on '${fn}'. Got ${stat.size} bytes`); diff --git a/test/parallel/test-http-pipe-fs.js b/test/parallel/test-http-pipe-fs.js index fd625bb4acc..dfb44ff3b25 100644 --- a/test/parallel/test-http-pipe-fs.js +++ b/test/parallel/test-http-pipe-fs.js @@ -29,9 +29,10 @@ const NUMBER_OF_STREAMS = 2; const countdown = new Countdown(NUMBER_OF_STREAMS, () => server.close()); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const file = path.join(common.tmpDir, 'http-pipe-fs-test.txt'); +const file = path.join(tmpdir.path, 'http-pipe-fs-test.txt'); const server = http.createServer(common.mustCall(function(req, res) { const stream = fs.createWriteStream(file); diff --git a/test/parallel/test-http-unix-socket-keep-alive.js b/test/parallel/test-http-unix-socket-keep-alive.js index 668c440325e..11b3d9b3926 100644 --- a/test/parallel/test-http-unix-socket-keep-alive.js +++ b/test/parallel/test-http-unix-socket-keep-alive.js @@ -5,7 +5,8 @@ const http = require('http'); const server = http.createServer((req, res) => res.end()); -common.refreshTmpDir(); 
+const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); server.listen(common.PIPE, common.mustCall(() => asyncLoop(makeKeepAliveRequest, 10, common.mustCall(() => diff --git a/test/parallel/test-http-unix-socket.js b/test/parallel/test-http-unix-socket.js index 7a17b9bc9ca..cfed45a43c2 100644 --- a/test/parallel/test-http-unix-socket.js +++ b/test/parallel/test-http-unix-socket.js @@ -34,7 +34,8 @@ const server = http.createServer(function(req, res) { res.end(); }); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); server.listen(common.PIPE, common.mustCall(function() { diff --git a/test/parallel/test-http2-compat-serverrequest-pipe.js b/test/parallel/test-http2-compat-serverrequest-pipe.js index becc62c6621..53e54cdf913 100644 --- a/test/parallel/test-http2-compat-serverrequest-pipe.js +++ b/test/parallel/test-http2-compat-serverrequest-pipe.js @@ -11,9 +11,10 @@ const path = require('path'); // piping should work as expected with createWriteStream -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const loc = fixtures.path('url-tests.js'); -const fn = path.join(common.tmpDir, 'http2-url-tests.js'); +const fn = path.join(tmpdir.path, 'http2-url-tests.js'); const server = http2.createServer(); diff --git a/test/parallel/test-http2-pipe.js b/test/parallel/test-http2-pipe.js index 891fc6e292b..2a759f98487 100644 --- a/test/parallel/test-http2-pipe.js +++ b/test/parallel/test-http2-pipe.js @@ -11,9 +11,10 @@ const path = require('path'); // piping should work as expected with createWriteStream -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const loc = fixtures.path('url-tests.js'); -const fn = path.join(common.tmpDir, 'http2-url-tests.js'); +const fn = path.join(tmpdir.path, 'http2-url-tests.js'); const server = http2.createServer(); diff --git a/test/parallel/test-https-unix-socket-self-signed.js 
b/test/parallel/test-https-unix-socket-self-signed.js index 6e7cf827472..48207a7a22f 100644 --- a/test/parallel/test-https-unix-socket-self-signed.js +++ b/test/parallel/test-https-unix-socket-self-signed.js @@ -4,7 +4,8 @@ const common = require('../common'); if (!common.hasCrypto) common.skip('missing crypto'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const fixtures = require('../common/fixtures'); const https = require('https'); diff --git a/test/parallel/test-internal-fs-syncwritestream.js b/test/parallel/test-internal-fs-syncwritestream.js index 9e0024df348..c474d21cb43 100644 --- a/test/parallel/test-internal-fs-syncwritestream.js +++ b/test/parallel/test-internal-fs-syncwritestream.js @@ -7,9 +7,10 @@ const fs = require('fs'); const path = require('path'); const SyncWriteStream = require('internal/fs').SyncWriteStream; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const filename = path.join(common.tmpDir, 'sync-write-stream.txt'); +const filename = path.join(tmpdir.path, 'sync-write-stream.txt'); // Verify constructing the instance with default options. 
{ diff --git a/test/parallel/test-module-circular-symlinks.js b/test/parallel/test-module-circular-symlinks.js index b5e04a9c622..e8d80640df0 100644 --- a/test/parallel/test-module-circular-symlinks.js +++ b/test/parallel/test-module-circular-symlinks.js @@ -29,8 +29,9 @@ const fs = require('fs'); // └── node_modules // └── moduleA -> {tmpDir}/node_modules/moduleA -common.refreshTmpDir(); -const tmpDir = common.tmpDir; +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +const tmpDir = tmpdir.path; const node_modules = path.join(tmpDir, 'node_modules'); const moduleA = path.join(node_modules, 'moduleA'); diff --git a/test/parallel/test-module-loading-globalpaths.js b/test/parallel/test-module-loading-globalpaths.js index cd3144f8cd4..e3c36cb21c2 100644 --- a/test/parallel/test-module-loading-globalpaths.js +++ b/test/parallel/test-module-loading-globalpaths.js @@ -10,10 +10,11 @@ const pkgName = 'foo'; if (process.argv[2] === 'child') { console.log(require(pkgName).string); } else { - common.refreshTmpDir(); + const tmpdir = require('../common/tmpdir'); + tmpdir.refresh(); // Copy node binary into a test $PREFIX directory. - const prefixPath = path.join(common.tmpDir, 'install'); + const prefixPath = path.join(tmpdir.path, 'install'); fs.mkdirSync(prefixPath); let testExecPath; if (common.isWindows) { @@ -43,7 +44,7 @@ if (process.argv[2] === 'child') { delete env['NODE_PATH']; // Test empty global path. 
- const noPkgHomeDir = path.join(common.tmpDir, 'home-no-pkg'); + const noPkgHomeDir = path.join(tmpdir.path, 'home-no-pkg'); fs.mkdirSync(noPkgHomeDir); env['HOME'] = env['USERPROFILE'] = noPkgHomeDir; assert.throws( diff --git a/test/parallel/test-module-symlinked-peer-modules.js b/test/parallel/test-module-symlinked-peer-modules.js index e3d538c42b0..f93dea720f9 100644 --- a/test/parallel/test-module-symlinked-peer-modules.js +++ b/test/parallel/test-module-symlinked-peer-modules.js @@ -13,9 +13,10 @@ const fs = require('fs'); const path = require('path'); const assert = require('assert'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const tmpDir = common.tmpDir; +const tmpDir = tmpdir.path; // Creates the following structure // {tmpDir} diff --git a/test/parallel/test-net-connect-options-fd.js b/test/parallel/test-net-connect-options-fd.js index 50c2a08efeb..76a5e30755b 100644 --- a/test/parallel/test-net-connect-options-fd.js +++ b/test/parallel/test-net-connect-options-fd.js @@ -8,7 +8,8 @@ const net = require('net'); const path = require('path'); const { Pipe, constants: PipeConstants } = process.binding('pipe_wrap'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); function testClients(getSocketOpt, getConnectOpt, getConnectCb) { const cloneOptions = (index) => diff --git a/test/parallel/test-net-connect-options-path.js b/test/parallel/test-net-connect-options-path.js index 3868b85a78a..9a2737c371b 100644 --- a/test/parallel/test-net-connect-options-path.js +++ b/test/parallel/test-net-connect-options-path.js @@ -5,7 +5,8 @@ const net = require('net'); // This file tests the option handling of net.connect, // net.createConnect, and new Socket().connect -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const CLIENT_VARIANTS = 12; diff --git a/test/parallel/test-net-pingpong.js b/test/parallel/test-net-pingpong.js index 
c83cfaf9434..9fc59db4e2f 100644 --- a/test/parallel/test-net-pingpong.js +++ b/test/parallel/test-net-pingpong.js @@ -128,7 +128,8 @@ function pingPongTest(port, host) { } /* All are run at once, so run on different ports */ -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); pingPongTest(common.PIPE); pingPongTest(0); pingPongTest(0, 'localhost'); diff --git a/test/parallel/test-net-pipe-connect-errors.js b/test/parallel/test-net-pipe-connect-errors.js index 119bca19fcc..8db45266999 100644 --- a/test/parallel/test-net-pipe-connect-errors.js +++ b/test/parallel/test-net-pipe-connect-errors.js @@ -36,12 +36,13 @@ if (common.isWindows) { // file instead emptyTxt = fixtures.path('empty.txt'); } else { - common.refreshTmpDir(); + const tmpdir = require('../common/tmpdir'); + tmpdir.refresh(); // Keep the file name very short so that we don't exceed the 108 char limit // on CI for a POSIX socket. Even though this isn't actually a socket file, // the error will be different from the one we are expecting if we exceed the // limit. 
- emptyTxt = `${common.tmpDir}0.txt`; + emptyTxt = `${tmpdir.path}0.txt`; function cleanup() { try { diff --git a/test/parallel/test-net-server-listen-handle.js b/test/parallel/test-net-server-listen-handle.js index 06f03e304cf..5bf8451302f 100644 --- a/test/parallel/test-net-server-listen-handle.js +++ b/test/parallel/test-net-server-listen-handle.js @@ -8,7 +8,8 @@ const { getSystemErrorName } = require('util'); const { TCP, constants: TCPConstants } = process.binding('tcp_wrap'); const { Pipe, constants: PipeConstants } = process.binding('pipe_wrap'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); function closeServer() { return common.mustCall(function() { diff --git a/test/parallel/test-net-server-listen-path.js b/test/parallel/test-net-server-listen-path.js index 53173fa66d1..b16b7c7ba81 100644 --- a/test/parallel/test-net-server-listen-path.js +++ b/test/parallel/test-net-server-listen-path.js @@ -3,7 +3,8 @@ const common = require('../common'); const net = require('net'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); function closeServer() { return common.mustCall(function() { diff --git a/test/parallel/test-npm-install.js b/test/parallel/test-npm-install.js index d826eb09ed4..dc9f60b799e 100644 --- a/test/parallel/test-npm-install.js +++ b/test/parallel/test-npm-install.js @@ -9,10 +9,11 @@ const assert = require('assert'); const fs = require('fs'); const fixtures = require('../common/fixtures'); -common.refreshTmpDir(); -const npmSandbox = path.join(common.tmpDir, 'npm-sandbox'); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +const npmSandbox = path.join(tmpdir.path, 'npm-sandbox'); fs.mkdirSync(npmSandbox); -const installDir = path.join(common.tmpDir, 'install-dir'); +const installDir = path.join(tmpdir.path, 'install-dir'); fs.mkdirSync(installDir); const npmPath = path.join( diff --git a/test/parallel/test-performance-warning.js 
b/test/parallel/test-performance-warning.js new file mode 100644 index 00000000000..f3104677a7c --- /dev/null +++ b/test/parallel/test-performance-warning.js @@ -0,0 +1,29 @@ +// Flags: --no-warnings +'use strict'; + +const common = require('../common'); +const { performance } = require('perf_hooks'); +const assert = require('assert'); + +assert.strictEqual(performance.length, 1); +assert.strictEqual(performance.maxEntries, 150); + +performance.maxEntries = 1; + +[-1, 0xffffffff + 1, '', null, undefined, Infinity].forEach((i) => { + common.expectsError( + () => performance.maxEntries = i, + { + code: 'ERR_INVALID_ARG_TYPE', + type: TypeError + } + ); +}); + +common.expectWarning('Warning', [ + 'Possible perf_hooks memory leak detected. There are 2 entries in the ' + + 'Performance Timeline. Use the clear methods to remove entries that are no ' + + 'longer needed or set performance.maxEntries equal to a higher value ' + + '(currently the maxEntries is 1).']); + +performance.mark('test'); diff --git a/test/parallel/test-pipe-address.js b/test/parallel/test-pipe-address.js index 10552abee7b..3550434932e 100644 --- a/test/parallel/test-pipe-address.js +++ b/test/parallel/test-pipe-address.js @@ -4,7 +4,8 @@ const assert = require('assert'); const net = require('net'); const server = net.createServer(common.mustNotCall()); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); server.listen(common.PIPE, common.mustCall(function() { assert.strictEqual(server.address(), common.PIPE); diff --git a/test/parallel/test-pipe-file-to-http.js b/test/parallel/test-pipe-file-to-http.js index 244dcd1a990..cfe289c30ca 100644 --- a/test/parallel/test-pipe-file-to-http.js +++ b/test/parallel/test-pipe-file-to-http.js @@ -27,9 +27,10 @@ const http = require('http'); const path = require('path'); const cp = require('child_process'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const filename = 
path.join(common.tmpDir || '/tmp', 'big'); +const filename = path.join(tmpdir.path || '/tmp', 'big'); let count = 0; const server = http.createServer(function(req, res) { diff --git a/test/parallel/test-pipe-stream.js b/test/parallel/test-pipe-stream.js index 8fd9d31d499..c7d9a0a6265 100644 --- a/test/parallel/test-pipe-stream.js +++ b/test/parallel/test-pipe-stream.js @@ -3,7 +3,8 @@ const common = require('../common'); const assert = require('assert'); const net = require('net'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); function test(clazz, cb) { let have_ping = false; diff --git a/test/parallel/test-pipe-unref.js b/test/parallel/test-pipe-unref.js index cfe7a97ca59..1e0245b5444 100644 --- a/test/parallel/test-pipe-unref.js +++ b/test/parallel/test-pipe-unref.js @@ -4,7 +4,8 @@ const net = require('net'); // This test should end immediately after `unref` is called -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const s = net.Server(); s.listen(common.PIPE); diff --git a/test/parallel/test-pipe-writev.js b/test/parallel/test-pipe-writev.js index db95a4b1818..5e5b42e6a78 100644 --- a/test/parallel/test-pipe-writev.js +++ b/test/parallel/test-pipe-writev.js @@ -7,7 +7,8 @@ if (common.isWindows) const assert = require('assert'); const net = require('net'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const server = net.createServer((connection) => { connection.on('error', (err) => { diff --git a/test/parallel/test-process-chdir.js b/test/parallel/test-process-chdir.js index 61707706a32..c0a245ffd34 100644 --- a/test/parallel/test-process-chdir.js +++ b/test/parallel/test-process-chdir.js @@ -1,10 +1,12 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); +const tmpdir = require('../common/tmpdir'); + process.chdir('..'); 
assert.notStrictEqual(process.cwd(), __dirname); process.chdir(__dirname); @@ -18,10 +20,10 @@ if (process.versions.icu) { // ICU is unavailable, use characters that can't be decomposed dirName = 'weird \ud83d\udc04 characters \ud83d\udc05'; } -const dir = path.resolve(common.tmpDir, dirName); +const dir = path.resolve(tmpdir.path, dirName); // Make sure that the tmp directory is clean -common.refreshTmpDir(); +tmpdir.refresh(); fs.mkdirSync(dir); process.chdir(dir); @@ -29,7 +31,7 @@ assert.strictEqual(process.cwd().normalize(), dir.normalize()); process.chdir('..'); assert.strictEqual(process.cwd().normalize(), - path.resolve(common.tmpDir).normalize()); + path.resolve(tmpdir.path).normalize()); const errMessage = /^TypeError: Bad argument\.$/; assert.throws(function() { process.chdir({}); }, diff --git a/test/parallel/test-process-execpath.js b/test/parallel/test-process-execpath.js index d70d1dfd389..68aef90b303 100644 --- a/test/parallel/test-process-execpath.js +++ b/test/parallel/test-process-execpath.js @@ -14,9 +14,10 @@ if (process.argv[2] === 'child') { // The console.log() output is part of the test here. 
console.log(process.execPath); } else { - common.refreshTmpDir(); + const tmpdir = require('../common/tmpdir'); + tmpdir.refresh(); - const symlinkedNode = path.join(common.tmpDir, 'symlinked-node'); + const symlinkedNode = path.join(tmpdir.path, 'symlinked-node'); fs.symlinkSync(process.execPath, symlinkedNode); const proc = child_process.spawnSync(symlinkedNode, [__filename, 'child']); diff --git a/test/parallel/test-process-redirect-warnings-env.js b/test/parallel/test-process-redirect-warnings-env.js index 59e236ab89f..5031152a48b 100644 --- a/test/parallel/test-process-redirect-warnings-env.js +++ b/test/parallel/test-process-redirect-warnings-env.js @@ -12,10 +12,11 @@ const fork = require('child_process').fork; const path = require('path'); const assert = require('assert'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const warnmod = require.resolve(fixtures.path('warnings.js')); -const warnpath = path.join(common.tmpDir, 'warnings.txt'); +const warnpath = path.join(tmpdir.path, 'warnings.txt'); fork(warnmod, { env: Object.assign({}, process.env, { NODE_REDIRECT_WARNINGS: warnpath }) }) diff --git a/test/parallel/test-process-redirect-warnings.js b/test/parallel/test-process-redirect-warnings.js index 76f376240ba..b4f55fa8345 100644 --- a/test/parallel/test-process-redirect-warnings.js +++ b/test/parallel/test-process-redirect-warnings.js @@ -12,10 +12,11 @@ const fork = require('child_process').fork; const path = require('path'); const assert = require('assert'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const warnmod = fixtures.path('warnings.js'); -const warnpath = path.join(common.tmpDir, 'warnings.txt'); +const warnpath = path.join(tmpdir.path, 'warnings.txt'); fork(warnmod, { execArgv: [`--redirect-warnings=${warnpath}`] }) .on('exit', common.mustCall(() => { diff --git a/test/parallel/test-regress-GH-3739.js b/test/parallel/test-regress-GH-3739.js index 
d41accc2e6e..dbf77ad785c 100644 --- a/test/parallel/test-regress-GH-3739.js +++ b/test/parallel/test-regress-GH-3739.js @@ -5,10 +5,12 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); -let dir = path.resolve(common.tmpDir); +const tmpdir = require('../common/tmpdir'); + +let dir = path.resolve(tmpdir.path); // Make sure that the tmp directory is clean -common.refreshTmpDir(); +tmpdir.refresh(); // Make a long path. for (let i = 0; i < 50; i++) { diff --git a/test/parallel/test-repl-history-perm.js b/test/parallel/test-repl-history-perm.js index 9f14ece568d..b125fa551dc 100644 --- a/test/parallel/test-repl-history-perm.js +++ b/test/parallel/test-repl-history-perm.js @@ -31,8 +31,9 @@ stream._write = function(c, e, cb) { }; stream.readable = stream.writable = true; -common.refreshTmpDir(); -const replHistoryPath = path.join(common.tmpDir, '.node_repl_history'); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +const replHistoryPath = path.join(tmpdir.path, '.node_repl_history'); const checkResults = common.mustCall(function(err, r) { assert.ifError(err); diff --git a/test/parallel/test-repl-persistent-history.js b/test/parallel/test-repl-persistent-history.js index 3ba71f1f175..396203d949f 100644 --- a/test/parallel/test-repl-persistent-history.js +++ b/test/parallel/test-repl-persistent-history.js @@ -11,11 +11,12 @@ const fs = require('fs'); const path = require('path'); const os = require('os'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); // Mock os.homedir() os.homedir = function() { - return common.tmpDir; + return tmpdir.path; }; // Create an input stream specialized for testing an array of actions @@ -55,16 +56,16 @@ const CLEAR = { ctrl: true, name: 'u' }; // File paths const historyFixturePath = fixtures.path('.node_repl_history'); -const historyPath = path.join(common.tmpDir, '.fixture_copy_repl_history'); -const historyPathFail = 
path.join(common.tmpDir, '.node_repl\u0000_history'); +const historyPath = path.join(tmpdir.path, '.fixture_copy_repl_history'); +const historyPathFail = path.join(tmpdir.path, '.node_repl\u0000_history'); const oldHistoryPathObj = fixtures.path('old-repl-history-file-obj.json'); const oldHistoryPathFaulty = fixtures.path('old-repl-history-file-faulty.json'); const oldHistoryPath = fixtures.path('old-repl-history-file.json'); const enoentHistoryPath = fixtures.path('enoent-repl-history-file.json'); const emptyHistoryPath = fixtures.path('.empty-repl-history-file'); -const defaultHistoryPath = path.join(common.tmpDir, '.node_repl_history'); +const defaultHistoryPath = path.join(tmpdir.path, '.node_repl_history'); const emptyHiddenHistoryPath = fixtures.path('.empty-hidden-repl-history-file'); -const devNullHistoryPath = path.join(common.tmpDir, +const devNullHistoryPath = path.join(tmpdir.path, '.dev-null-repl-history-file'); // Common message bits const prompt = '> '; diff --git a/test/parallel/test-repl-save-load.js b/test/parallel/test-repl-save-load.js index 2be272473b6..3778ffac3ec 100644 --- a/test/parallel/test-repl-save-load.js +++ b/test/parallel/test-repl-save-load.js @@ -25,7 +25,8 @@ const assert = require('assert'); const join = require('path').join; const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const repl = require('repl'); @@ -39,7 +40,7 @@ const testFile = [ 'var top = function() {', 'var inner = {one:1};' ]; -const saveFileName = join(common.tmpDir, 'test.save.js'); +const saveFileName = join(tmpdir.path, 'test.save.js'); // input some data putIn.run(testFile); @@ -91,7 +92,7 @@ testMe.complete('inner.o', function(error, data) { // clear the REPL putIn.run(['.clear']); -let loadFile = join(common.tmpDir, 'file.does.not.exist'); +let loadFile = join(tmpdir.path, 'file.does.not.exist'); // should not break putIn.write = function(data) { @@ -103,7 +104,7 @@ putIn.write = 
function(data) { putIn.run([`.load ${loadFile}`]); // throw error on loading directory -loadFile = common.tmpDir; +loadFile = tmpdir.path; putIn.write = function(data) { assert.strictEqual(data, `Failed to load:${loadFile} is not a valid file\n`); putIn.write = () => {}; @@ -115,7 +116,7 @@ putIn.run(['.clear']); // NUL (\0) is disallowed in filenames in UNIX-like operating systems and // Windows so we can use that to test failed saves -const invalidFileName = join(common.tmpDir, '\0\0\0\0\0'); +const invalidFileName = join(tmpdir.path, '\0\0\0\0\0'); // should not break putIn.write = function(data) { diff --git a/test/parallel/test-require-long-path.js b/test/parallel/test-require-long-path.js index aaaf07d48ae..548a0b5425d 100644 --- a/test/parallel/test-require-long-path.js +++ b/test/parallel/test-require-long-path.js @@ -6,15 +6,17 @@ if (!common.isWindows) const fs = require('fs'); const path = require('path'); +const tmpdir = require('../common/tmpdir'); + // make a path that is more than 260 chars long. 
-const dirNameLen = Math.max(260 - common.tmpDir.length, 1); -const dirName = path.join(common.tmpDir, 'x'.repeat(dirNameLen)); +const dirNameLen = Math.max(260 - tmpdir.path.length, 1); +const dirName = path.join(tmpdir.path, 'x'.repeat(dirNameLen)); const fullDirPath = path.resolve(dirName); const indexFile = path.join(fullDirPath, 'index.js'); const otherFile = path.join(fullDirPath, 'other.js'); -common.refreshTmpDir(); +tmpdir.refresh(); fs.mkdirSync(fullDirPath); fs.writeFileSync(indexFile, 'require("./other");'); @@ -23,4 +25,4 @@ fs.writeFileSync(otherFile, ''); require(indexFile); require(otherFile); -common.refreshTmpDir(); +tmpdir.refresh(); diff --git a/test/parallel/test-require-symlink.js b/test/parallel/test-require-symlink.js index 60962156f2f..d245c21dd1f 100644 --- a/test/parallel/test-require-symlink.js +++ b/test/parallel/test-require-symlink.js @@ -14,12 +14,13 @@ const process = require('process'); // Setup: Copy fixtures to tmp directory. const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); const dirName = 'module-require-symlink'; const fixtureSource = fixtures.path(dirName); -const tmpDirTarget = path.join(common.tmpDir, dirName); +const tmpDirTarget = path.join(tmpdir.path, dirName); // Copy fixtureSource to linkTarget recursively. -common.refreshTmpDir(); +tmpdir.refresh(); function copyDir(source, target) { fs.mkdirSync(target); @@ -40,7 +41,7 @@ copyDir(fixtureSource, tmpDirTarget); // Move to tmp dir and do everything with relative paths there so that the test // doesn't incorrectly fail due to a symlink somewhere else in the absolute // path. 
-process.chdir(common.tmpDir); +process.chdir(tmpdir.path); const linkDir = path.join(dirName, 'node_modules', diff --git a/test/parallel/test-require-unicode.js b/test/parallel/test-require-unicode.js index 93a8787cdfe..530ff3bb56c 100644 --- a/test/parallel/test-require-unicode.js +++ b/test/parallel/test-require-unicode.js @@ -1,13 +1,14 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const dirname = path.join(common.tmpDir, '\u4e2d\u6587\u76ee\u5f55'); +const dirname = path.join(tmpdir.path, '\u4e2d\u6587\u76ee\u5f55'); fs.mkdirSync(dirname); fs.writeFileSync(path.join(dirname, 'file.js'), 'module.exports = 42;'); fs.writeFileSync(path.join(dirname, 'package.json'), diff --git a/test/parallel/test-stdin-from-file.js b/test/parallel/test-stdin-from-file.js index 148464e51a8..eda8e068fe3 100644 --- a/test/parallel/test-stdin-from-file.js +++ b/test/parallel/test-stdin-from-file.js @@ -1,13 +1,14 @@ 'use strict'; const common = require('../common'); const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); const assert = require('assert'); const { join } = require('path'); const childProcess = require('child_process'); const fs = require('fs'); const stdoutScript = fixtures.path('echo-close-check.js'); -const tmpFile = join(common.tmpDir, 'stdin.txt'); +const tmpFile = join(tmpdir.path, 'stdin.txt'); const cmd = `"${process.argv[0]}" "${stdoutScript}" < "${tmpFile}"`; @@ -24,7 +25,7 @@ const string = 'abc\nümlaut.\nsomething else\n' + '有效的改善了岭南地区落后的政治、##济现状。\n'; -common.refreshTmpDir(); +tmpdir.refresh(); console.log(`${cmd}\n\n`); diff --git a/test/parallel/test-stdout-to-file.js b/test/parallel/test-stdout-to-file.js index 6869fafa1cf..a02531ca41f 100644 --- a/test/parallel/test-stdout-to-file.js +++ 
b/test/parallel/test-stdout-to-file.js @@ -5,12 +5,13 @@ const path = require('path'); const childProcess = require('child_process'); const fs = require('fs'); const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); const scriptString = fixtures.path('print-chars.js'); const scriptBuffer = fixtures.path('print-chars-from-buffer.js'); -const tmpFile = path.join(common.tmpDir, 'stdout.txt'); +const tmpFile = path.join(tmpdir.path, 'stdout.txt'); -common.refreshTmpDir(); +tmpdir.refresh(); function test(size, useBuffer, cb) { const cmd = `"${process.argv[0]}" "${ diff --git a/test/parallel/test-tls-connect-pipe.js b/test/parallel/test-tls-connect-pipe.js index f609659d195..88e78b7a2b2 100644 --- a/test/parallel/test-tls-connect-pipe.js +++ b/test/parallel/test-tls-connect-pipe.js @@ -33,7 +33,8 @@ const options = { cert: fixtures.readKey('agent1-cert.pem') }; -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const server = tls.Server(options, common.mustCall(function(socket) { server.close(); diff --git a/test/parallel/test-tls-net-connect-prefer-path.js b/test/parallel/test-tls-net-connect-prefer-path.js index 19a3ba4b37b..263501ae033 100644 --- a/test/parallel/test-tls-net-connect-prefer-path.js +++ b/test/parallel/test-tls-net-connect-prefer-path.js @@ -8,7 +8,8 @@ const fixtures = require('../common/fixtures'); if (!common.hasCrypto) common.skip('missing crypto'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const tls = require('tls'); const net = require('net'); diff --git a/test/parallel/test-tls-session-cache.js b/test/parallel/test-tls-session-cache.js index 2bbf3b642da..7778dd03100 100644 --- a/test/parallel/test-tls-session-cache.js +++ b/test/parallel/test-tls-session-cache.js @@ -69,11 +69,11 @@ function doTest(testOptions, callback) { server.on('newSession', function(id, data, cb) { ++newSessionCount; // Emulate asynchronous store - 
setTimeout(function() { + setImmediate(() => { assert.ok(!session); session = { id, data }; cb(); - }, 1000); + }); }); server.on('resumeSession', function(id, callback) { ++resumeCount; @@ -89,9 +89,9 @@ function doTest(testOptions, callback) { } // Just to check that async really works there - setTimeout(function() { + setImmediate(() => { callback(null, data); - }, 100); + }); }); server.listen(0, function() { @@ -132,7 +132,7 @@ function doTest(testOptions, callback) { } assert.strictEqual(code, 0); server.close(common.mustCall(function() { - setTimeout(callback, 100); + setImmediate(callback); })); })); } diff --git a/test/parallel/test-tls-socket-destroy.js b/test/parallel/test-tls-socket-destroy.js index f62b6f90529..6f1d4b4186b 100644 --- a/test/parallel/test-tls-socket-destroy.js +++ b/test/parallel/test-tls-socket-destroy.js @@ -19,6 +19,7 @@ const server = net.createServer(common.mustCall((conn) => { const socket = new tls.TLSSocket(conn, options); socket.once('data', common.mustCall(() => { socket._destroySSL(); // Should not crash. 
+ socket.destroy(); server.close(); })); })); diff --git a/test/parallel/test-tls-wrap-econnreset-pipe.js b/test/parallel/test-tls-wrap-econnreset-pipe.js index ef6efaedc34..b400e35d412 100644 --- a/test/parallel/test-tls-wrap-econnreset-pipe.js +++ b/test/parallel/test-tls-wrap-econnreset-pipe.js @@ -8,7 +8,8 @@ const assert = require('assert'); const tls = require('tls'); const net = require('net'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const server = net.createServer((c) => { c.end(); diff --git a/test/parallel/test-trace-events-all.js b/test/parallel/test-trace-events-all.js index 329f99f5912..07c53236597 100644 --- a/test/parallel/test-trace-events-all.js +++ b/test/parallel/test-trace-events-all.js @@ -8,8 +8,9 @@ const CODE = 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)'; const FILE_NAME = 'node_trace.1.log'; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +process.chdir(tmpdir.path); const proc = cp.spawn(process.execPath, [ '--trace-events-enabled', '-e', CODE ]); diff --git a/test/parallel/test-trace-events-async-hooks.js b/test/parallel/test-trace-events-async-hooks.js index e1f78f791a6..b15d83b07a5 100644 --- a/test/parallel/test-trace-events-async-hooks.js +++ b/test/parallel/test-trace-events-async-hooks.js @@ -8,8 +8,9 @@ const CODE = 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)'; const FILE_NAME = 'node_trace.1.log'; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +process.chdir(tmpdir.path); const proc = cp.spawn(process.execPath, [ '--trace-events-enabled', diff --git a/test/parallel/test-trace-events-binding.js b/test/parallel/test-trace-events-binding.js index 9a182821bac..fc4e7f99f87 100644 --- a/test/parallel/test-trace-events-binding.js +++ b/test/parallel/test-trace-events-binding.js @@ -20,8 
+20,9 @@ const CODE = ` `; const FILE_NAME = 'node_trace.1.log'; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +process.chdir(tmpdir.path); const proc = cp.spawn(process.execPath, [ '--trace-events-enabled', diff --git a/test/parallel/test-trace-events-category-used.js b/test/parallel/test-trace-events-category-used.js index 39d09ad862d..aa0662b7493 100644 --- a/test/parallel/test-trace-events-category-used.js +++ b/test/parallel/test-trace-events-category-used.js @@ -7,8 +7,9 @@ const CODE = `console.log( process.binding("trace_events").categoryGroupEnabled("custom") );`; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +process.chdir(tmpdir.path); const procEnabled = cp.spawn( process.execPath, diff --git a/test/parallel/test-trace-events-none.js b/test/parallel/test-trace-events-none.js index 9a4d587f2db..7a87fc5cbd3 100644 --- a/test/parallel/test-trace-events-none.js +++ b/test/parallel/test-trace-events-none.js @@ -7,8 +7,9 @@ const CODE = 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)'; const FILE_NAME = 'node_trace.1.log'; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +process.chdir(tmpdir.path); const proc_no_categories = cp.spawn( process.execPath, diff --git a/test/parallel/test-trace-events-process-exit.js b/test/parallel/test-trace-events-process-exit.js index be45cb1d3e0..9f164ee6279 100644 --- a/test/parallel/test-trace-events-process-exit.js +++ b/test/parallel/test-trace-events-process-exit.js @@ -4,10 +4,12 @@ const assert = require('assert'); const cp = require('child_process'); const fs = require('fs'); +const tmpdir = require('../common/tmpdir'); + const FILE_NAME = 'node_trace.1.log'; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +tmpdir.refresh(); +process.chdir(tmpdir.path); const proc = 
cp.spawn(process.execPath, [ '--trace-events-enabled', diff --git a/test/parallel/test-trace-events-v8.js b/test/parallel/test-trace-events-v8.js index b17b1473eca..49c34b8f17b 100644 --- a/test/parallel/test-trace-events-v8.js +++ b/test/parallel/test-trace-events-v8.js @@ -8,8 +8,9 @@ const CODE = 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)'; const FILE_NAME = 'node_trace.1.log'; -common.refreshTmpDir(); -process.chdir(common.tmpDir); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); +process.chdir(tmpdir.path); const proc = cp.spawn(process.execPath, [ '--trace-events-enabled', diff --git a/test/parallel/test-util-inspect-bigint.js b/test/parallel/test-util-inspect-bigint.js new file mode 100644 index 00000000000..cb50c1f6982 --- /dev/null +++ b/test/parallel/test-util-inspect-bigint.js @@ -0,0 +1,10 @@ +'use strict'; + +// Flags: --harmony-bigint + +require('../common'); +const assert = require('assert'); + +const { inspect } = require('util'); + +assert.strictEqual(inspect(1n), '1n'); diff --git a/test/parallel/test-zlib-from-gzip.js b/test/parallel/test-zlib-from-gzip.js index f62dd10f323..99c3f1757e0 100644 --- a/test/parallel/test-zlib-from-gzip.js +++ b/test/parallel/test-zlib-from-gzip.js @@ -29,7 +29,8 @@ const zlib = require('zlib'); const path = require('path'); const fixtures = require('../common/fixtures'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); const gunzip = zlib.createGunzip(); @@ -37,7 +38,7 @@ const fs = require('fs'); const fixture = fixtures.path('person.jpg.gz'); const unzippedFixture = fixtures.path('person.jpg'); -const outputFile = path.resolve(common.tmpDir, 'person.jpg'); +const outputFile = path.resolve(tmpdir.path, 'person.jpg'); const expect = fs.readFileSync(unzippedFixture); const inp = fs.createReadStream(fixture); const out = fs.createWriteStream(outputFile); diff --git a/test/pummel/test-fs-largefile.js b/test/pummel/test-fs-largefile.js 
index b0cb24a60fe..786e325ce33 100644 --- a/test/pummel/test-fs-largefile.js +++ b/test/pummel/test-fs-largefile.js @@ -20,15 +20,16 @@ // USE OR OTHER DEALINGS IN THE SOFTWARE. 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); const fs = require('fs'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const filepath = path.join(common.tmpDir, 'large.txt'); +const filepath = path.join(tmpdir.path, 'large.txt'); const fd = fs.openSync(filepath, 'w+'); const offset = 5 * 1024 * 1024 * 1024; // 5GB const message = 'Large File'; diff --git a/test/pummel/test-fs-watch-file-slow.js b/test/pummel/test-fs-watch-file-slow.js index 9ae9922ec8b..7b7065cffbf 100644 --- a/test/pummel/test-fs-watch-file-slow.js +++ b/test/pummel/test-fs-watch-file-slow.js @@ -25,7 +25,9 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -const FILENAME = path.join(common.tmpDir, 'watch-me'); +const tmpdir = require('../common/tmpdir'); + +const FILENAME = path.join(tmpdir.path, 'watch-me'); const TIMEOUT = 1300; let nevents = 0; diff --git a/test/pummel/test-fs-watch-file.js b/test/pummel/test-fs-watch-file.js index 3b036257b3b..c893c9dfa6e 100644 --- a/test/pummel/test-fs-watch-file.js +++ b/test/pummel/test-fs-watch-file.js @@ -25,12 +25,14 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); +const tmpdir = require('../common/tmpdir'); + let watchSeenOne = 0; let watchSeenTwo = 0; let watchSeenThree = 0; let watchSeenFour = 0; -const testDir = common.tmpDir; +const testDir = tmpdir.path; const filenameOne = 'watch.txt'; const filepathOne = path.join(testDir, filenameOne); diff --git a/test/pummel/test-fs-watch-non-recursive.js b/test/pummel/test-fs-watch-non-recursive.js index 02447cf5215..2b10f9b24da 100644 --- a/test/pummel/test-fs-watch-non-recursive.js +++ 
b/test/pummel/test-fs-watch-non-recursive.js @@ -24,7 +24,9 @@ const common = require('../common'); const path = require('path'); const fs = require('fs'); -const testDir = common.tmpDir; +const tmpdir = require('../common/tmpdir'); + +const testDir = tmpdir.path; const testsubdir = path.join(testDir, 'testsubdir'); const filepath = path.join(testsubdir, 'watch.txt'); diff --git a/test/pummel/test-regress-GH-814.js b/test/pummel/test-regress-GH-814.js index a43a67fe77c..a62df944863 100644 --- a/test/pummel/test-regress-GH-814.js +++ b/test/pummel/test-regress-GH-814.js @@ -22,9 +22,11 @@ 'use strict'; // Flags: --expose_gc -const common = require('../common'); +require('../common'); const assert = require('assert'); +const tmpdir = require('../common/tmpdir'); + function newBuffer(size, value) { const buffer = Buffer.allocUnsafe(size); while (size--) { @@ -36,7 +38,7 @@ function newBuffer(size, value) { } const fs = require('fs'); -const testFileName = require('path').join(common.tmpDir, 'GH-814_testFile.txt'); +const testFileName = require('path').join(tmpdir.path, 'GH-814_testFile.txt'); const testFileFD = fs.openSync(testFileName, 'w'); console.log(testFileName); diff --git a/test/pummel/test-regress-GH-814_2.js b/test/pummel/test-regress-GH-814_2.js index 516a8727c65..a183e082f86 100644 --- a/test/pummel/test-regress-GH-814_2.js +++ b/test/pummel/test-regress-GH-814_2.js @@ -22,11 +22,12 @@ 'use strict'; // Flags: --expose_gc -const common = require('../common'); +require('../common'); const assert = require('assert'); const fs = require('fs'); -const testFileName = require('path').join(common.tmpDir, 'GH-814_test.txt'); +const tmpdir = require('../common/tmpdir'); +const testFileName = require('path').join(tmpdir.path, 'GH-814_test.txt'); const testFD = fs.openSync(testFileName, 'w'); console.error(`${testFileName}\n`); diff --git a/test/pummel/test-tls-session-timeout.js b/test/pummel/test-tls-session-timeout.js index 56fdfa16ea7..49c38102fc5 100644 ---
a/test/pummel/test-tls-session-timeout.js +++ b/test/pummel/test-tls-session-timeout.js @@ -28,6 +28,8 @@ if (!common.opensslCli) if (!common.hasCrypto) common.skip('missing crypto'); +const tmpdir = require('../common/tmpdir'); + doTest(); // This test consists of three TLS requests -- @@ -65,7 +67,7 @@ function doTest() { const sessionFileName = (function() { const ticketFileName = 'tls-session-ticket.txt'; - const tmpPath = join(common.tmpDir, ticketFileName); + const tmpPath = join(tmpdir.path, ticketFileName); fs.writeFileSync(tmpPath, fixtures.readSync(ticketFileName)); return tmpPath; }()); diff --git a/test/sequential/sequential.status b/test/sequential/sequential.status index 5a6704c3924..54d68cda4ae 100644 --- a/test/sequential/sequential.status +++ b/test/sequential/sequential.status @@ -13,6 +13,7 @@ test-inspector-debug-end : PASS, FLAKY test-inspector-async-hook-setup-at-signal: PASS, FLAKY test-http2-ping-flood : PASS, FLAKY test-http2-settings-flood : PASS, FLAKY +test-inspector-stop-profile-after-done: PASS, FLAKY [$system==linux] diff --git a/test/sequential/test-async-wrap-getasyncid.js b/test/sequential/test-async-wrap-getasyncid.js index 918e76ac498..86a4065fb2b 100644 --- a/test/sequential/test-async-wrap-getasyncid.js +++ b/test/sequential/test-async-wrap-getasyncid.js @@ -6,6 +6,7 @@ const fs = require('fs'); const net = require('net'); const providers = Object.assign({}, process.binding('async_wrap').Providers); const fixtures = require('../common/fixtures'); +const tmpdir = require('../common/tmpdir'); const { getSystemErrorName } = require('util'); // Make sure that all Providers are tested. 
@@ -139,7 +140,7 @@ if (common.hasCrypto) { // eslint-disable-line crypto-check } { - common.refreshTmpDir(); + tmpdir.refresh(); const server = net.createServer(common.mustCall((socket) => { server.close(); diff --git a/test/sequential/test-fs-readfile-tostring-fail.js b/test/sequential/test-fs-readfile-tostring-fail.js index 8431ebab098..28d52c1c38c 100644 --- a/test/sequential/test-fs-readfile-tostring-fail.js +++ b/test/sequential/test-fs-readfile-tostring-fail.js @@ -13,9 +13,10 @@ const kStringMaxLength = process.binding('buffer').kStringMaxLength; if (common.isAIX && (Number(cp.execSync('ulimit -f')) * 512) < kStringMaxLength) common.skip('intensive toString tests due to file size confinements'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const file = path.join(common.tmpDir, 'toobig.txt'); +const file = path.join(tmpdir.path, 'toobig.txt'); const stream = fs.createWriteStream(file, { flags: 'a' }); diff --git a/test/sequential/test-fs-watch.js b/test/sequential/test-fs-watch.js index 9f1e95e8c0f..31708ee6144 100644 --- a/test/sequential/test-fs-watch.js +++ b/test/sequential/test-fs-watch.js @@ -26,14 +26,16 @@ const assert = require('assert'); const fs = require('fs'); const path = require('path'); +const tmpdir = require('../common/tmpdir'); + const expectFilePath = common.isWindows || common.isLinux || common.isOSX || common.isAIX; -const testDir = common.tmpDir; +const testDir = tmpdir.path; -common.refreshTmpDir(); +tmpdir.refresh(); { const filepath = path.join(testDir, 'watch.txt'); diff --git a/test/sequential/test-http2-timeout-large-write-file.js b/test/sequential/test-http2-timeout-large-write-file.js index e32f6037eef..910e7a0fc49 100644 --- a/test/sequential/test-http2-timeout-large-write-file.js +++ b/test/sequential/test-http2-timeout-large-write-file.js @@ -8,7 +8,8 @@ const fs = require('fs'); const http2 = require('http2'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = 
require('../common/tmpdir'); +tmpdir.refresh(); // This test assesses whether long-running writes can complete // or timeout because the session or stream are not aware that the @@ -29,7 +30,7 @@ let offsetTimeout = common.platformTimeout(100); let didReceiveData = false; const content = Buffer.alloc(writeSize, 0x44); -const filepath = path.join(common.tmpDir, 'http2-large-write.tmp'); +const filepath = path.join(tmpdir.path, 'http2-large-write.tmp'); fs.writeFileSync(filepath, content, 'binary'); const fd = fs.openSync(filepath, 'r'); diff --git a/test/sequential/test-module-loading.js b/test/sequential/test-module-loading.js index f0fa933a8ba..6fa789f1986 100644 --- a/test/sequential/test-module-loading.js +++ b/test/sequential/test-module-loading.js @@ -242,7 +242,8 @@ try { assert.deepStrictEqual(children, { 'common/index.js': { - 'common/fixtures.js': {} + 'common/fixtures.js': {}, + 'common/tmpdir.js': {} }, 'fixtures/not-main-module.js': {}, 'fixtures/a.js': { diff --git a/test/sequential/test-regress-GH-4027.js b/test/sequential/test-regress-GH-4027.js index 6ab6afcfd6b..89365373235 100644 --- a/test/sequential/test-regress-GH-4027.js +++ b/test/sequential/test-regress-GH-4027.js @@ -25,9 +25,10 @@ const assert = require('assert'); const path = require('path'); const fs = require('fs'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const filename = path.join(common.tmpDir, 'watched'); +const filename = path.join(tmpdir.path, 'watched'); fs.writeFileSync(filename, 'quis custodiet ipsos custodes'); fs.watchFile(filename, { interval: 50 }, common.mustCall(function(curr, prev) { diff --git a/test/tick-processor/tick-processor-base.js b/test/tick-processor/tick-processor-base.js index 3017dc6bb47..33944655258 100644 --- a/test/tick-processor/tick-processor-base.js +++ b/test/tick-processor/tick-processor-base.js @@ -1,12 +1,13 @@ 'use strict'; -const common = require('../common'); +require('../common'); const fs = 
require('fs'); const cp = require('child_process'); const path = require('path'); -common.refreshTmpDir(); +const tmpdir = require('../common/tmpdir'); +tmpdir.refresh(); -const LOG_FILE = path.join(common.tmpDir, 'tick-processor.log'); +const LOG_FILE = path.join(tmpdir.path, 'tick-processor.log'); const RETRY_TIMEOUT = 150; function runTest(test) { diff --git a/tools/doc/type-parser.js b/tools/doc/type-parser.js index 4ef0a6d04cb..0ab73162dd5 100644 --- a/tools/doc/type-parser.js +++ b/tools/doc/type-parser.js @@ -13,13 +13,13 @@ const jsPrimitives = { 'undefined': 'Undefined' }; const jsGlobalTypes = [ - 'Error', 'Object', 'Function', 'Array', 'TypedArray', 'Uint8Array', - 'Uint16Array', 'Uint32Array', 'Int8Array', 'Int16Array', 'Int32Array', - 'Uint8ClampedArray', 'Float32Array', 'Float64Array', 'Date', 'RegExp', - 'ArrayBuffer', 'DataView', 'Promise', 'EvalError', 'RangeError', - 'ReferenceError', 'SyntaxError', 'TypeError', 'URIError', 'Proxy', 'Map', - 'Set', 'WeakMap', 'WeakSet', 'Generator', 'GeneratorFunction', - 'AsyncFunction', 'SharedArrayBuffer' + 'Array', 'ArrayBuffer', 'AsyncFunction', 'DataView', 'Date', 'Error', + 'EvalError', 'Float32Array', 'Float64Array', 'Function', 'Generator', + 'GeneratorFunction', 'Int16Array', 'Int32Array', 'Int8Array', 'Map', 'Object', + 'Promise', 'Proxy', 'RangeError', 'ReferenceError', 'RegExp', 'Set', + 'SharedArrayBuffer', 'SyntaxError', 'TypeError', 'TypedArray', 'URIError', + 'Uint16Array', 'Uint32Array', 'Uint8Array', 'Uint8ClampedArray', 'WeakMap', + 'WeakSet' ]; const typeMap = { 'Iterable': @@ -27,39 +27,68 @@ const typeMap = { 'Iterator': `${jsDocPrefix}Reference/Iteration_protocols#The_iterator_protocol`, + 'this': `${jsDocPrefix}Reference/Operators/this`, + + 'AsyncHook': 'async_hooks.html#async_hooks_async_hooks_createhook_callbacks', + 'Buffer': 'buffer.html#buffer_class_buffer', 'ChildProcess': 'child_process.html#child_process_class_childprocess', 'cluster.Worker': 'cluster.html#cluster_class_worker', + 
'crypto.constants': 'crypto.html#crypto_crypto_constants_1', + 'dgram.Socket': 'dgram.html#dgram_class_dgram_socket', + 'Domain': 'domain.html#domain_class_domain', + 'EventEmitter': 'events.html#events_class_eventemitter', + 'fs.Stats': 'fs.html#fs_class_fs_stats', + 'http.Agent': 'http.html#http_class_http_agent', 'http.ClientRequest': 'http.html#http_class_http_clientrequest', 'http.IncomingMessage': 'http.html#http_class_http_incomingmessage', 'http.Server': 'http.html#http_class_http_server', 'http.ServerResponse': 'http.html#http_class_http_serverresponse', + 'ClientHttp2Stream': 'http2.html#http2_class_clienthttp2stream', 'HTTP2 Headers Object': 'http2.html#http2_headers_object', 'HTTP2 Settings Object': 'http2.html#http2_settings_object', + 'http2.Http2ServerRequest': 'http2.html#http2_class_http2_http2serverrequest', + 'http2.Http2ServerResponse': + 'http2.html#http2_class_http2_http2serverresponse', + 'Http2Server': 'http2.html#http2_class_http2server', + 'Http2Session': 'http2.html#http2_class_http2session', + 'Http2Stream': 'http2.html#http2_class_http2stream', + 'ServerHttp2Stream': 'http2.html#http2_class_serverhttp2stream', 'Handle': 'net.html#net_server_listen_handle_backlog_callback', + 'net.Server': 'net.html#net_class_net_server', 'net.Socket': 'net.html#net_class_net_socket', - 'ServerHttp2Stream': 'http2.html#http2_class_serverhttp2stream', + 'os.constants.dlopen': 'os.html#os_dlopen_constants', + + 'PerformanceObserver': + 'perf_hooks.html#perf_hooks_class_performanceobserver_callback', + 'PerformanceObserverEntryList': + 'perf_hooks.html#perf_hooks_class_performanceobserverentrylist', + + 'readline.Interface': 'readline.html#readline_class_interface', 'Stream': 'stream.html#stream_stream', + 'stream.Duplex': 'stream.html#stream_class_stream_duplex', 'stream.Readable': 'stream.html#stream_class_stream_readable', 'stream.Writable': 'stream.html#stream_class_stream_writable', - 'stream.Duplex': 'stream.html#stream_class_stream_duplex', - - 
'tls.TLSSocket': 'tls.html#tls_class_tls_tlssocket', + 'Immediate': 'timers.html#timers_class_immediate', + 'Timeout': 'timers.html#timers_class_timeout', 'Timer': 'timers.html#timers_timers', + 'tls.Server': 'tls.html#tls_class_tls_server', + 'tls.TLSSocket': 'tls.html#tls_class_tls_tlssocket', + 'URL': 'url.html#url_the_whatwg_url_api', 'URLSearchParams': 'url.html#url_class_urlsearchparams' }; diff --git a/tools/eslint-rules/prefer-assert-methods.js b/tools/eslint-rules/prefer-assert-methods.js index 0604fd3ed99..2917d40de40 100644 --- a/tools/eslint-rules/prefer-assert-methods.js +++ b/tools/eslint-rules/prefer-assert-methods.js @@ -1,3 +1,7 @@ +/** + * @fileoverview Prohibit the use of assert operators ( ===, !==, ==, != ) + */ + 'use strict'; const astSelector = 'ExpressionStatement[expression.type="CallExpression"]' + @@ -21,7 +25,19 @@ module.exports = function(context) { const arg = node.expression.arguments[0]; const assertMethod = preferedAssertMethod[arg.operator]; if (assertMethod) { - context.report(node, parseError(assertMethod, arg.operator)); + context.report({ + node, + message: parseError(assertMethod, arg.operator), + fix: (fixer) => { + const sourceCode = context.getSourceCode(); + const left = sourceCode.getText(arg.left); + const right = sourceCode.getText(arg.right); + return fixer.replaceText( + node, + `assert.${assertMethod}(${left}, ${right});` + ); + } + }); } } };