From 25e967a90b2d918b962f2723f97f2f4f812a026a Mon Sep 17 00:00:00 2001 From: Raz Luvaton <16746759+rluvaton@users.noreply.github.com> Date: Wed, 26 Jul 2023 20:16:31 +0300 Subject: [PATCH 001/125] test: reorder test files fixtures for better understanding PR-URL: https://github.com/nodejs/node/pull/48787 Backport-PR-URL: https://github.com/nodejs/node/pull/49225 Reviewed-By: Moshe Atlow Reviewed-By: Chemi Atlow --- .../{ => default-behavior}/index.test.js | 0 .../node_modules/test-nm.js | 0 .../{ => default-behavior}/random.test.mjs | 0 .../subdir/subdir_test.js | 0 .../{ => default-behavior}/test/random.cjs | 0 .../test/skip_by_name.cjs | 0 test/parallel/test-runner-cli.js | 24 ++++++------- test/parallel/test-runner-exit-code.js | 5 ++- test/parallel/test-runner-inspect.mjs | 6 +++- test/parallel/test-runner-run.mjs | 35 ++++++++++++++----- 10 files changed, 45 insertions(+), 25 deletions(-) rename test/fixtures/test-runner/{ => default-behavior}/index.test.js (100%) rename test/fixtures/test-runner/{ => default-behavior}/node_modules/test-nm.js (100%) rename test/fixtures/test-runner/{ => default-behavior}/random.test.mjs (100%) rename test/fixtures/test-runner/{ => default-behavior}/subdir/subdir_test.js (100%) rename test/fixtures/test-runner/{ => default-behavior}/test/random.cjs (100%) rename test/fixtures/test-runner/{ => default-behavior}/test/skip_by_name.cjs (100%) diff --git a/test/fixtures/test-runner/index.test.js b/test/fixtures/test-runner/default-behavior/index.test.js similarity index 100% rename from test/fixtures/test-runner/index.test.js rename to test/fixtures/test-runner/default-behavior/index.test.js diff --git a/test/fixtures/test-runner/node_modules/test-nm.js b/test/fixtures/test-runner/default-behavior/node_modules/test-nm.js similarity index 100% rename from test/fixtures/test-runner/node_modules/test-nm.js rename to test/fixtures/test-runner/default-behavior/node_modules/test-nm.js diff --git a/test/fixtures/test-runner/random.test.mjs b/test/fixtures/test-runner/default-behavior/random.test.mjs similarity index 100% rename from test/fixtures/test-runner/random.test.mjs rename to test/fixtures/test-runner/default-behavior/random.test.mjs diff --git a/test/fixtures/test-runner/subdir/subdir_test.js b/test/fixtures/test-runner/default-behavior/subdir/subdir_test.js similarity index 100% rename from test/fixtures/test-runner/subdir/subdir_test.js rename to test/fixtures/test-runner/default-behavior/subdir/subdir_test.js diff --git a/test/fixtures/test-runner/test/random.cjs b/test/fixtures/test-runner/default-behavior/test/random.cjs similarity index 100% rename from test/fixtures/test-runner/test/random.cjs rename to test/fixtures/test-runner/default-behavior/test/random.cjs diff --git a/test/fixtures/test-runner/test/skip_by_name.cjs b/test/fixtures/test-runner/default-behavior/test/skip_by_name.cjs similarity index 100% rename from test/fixtures/test-runner/test/skip_by_name.cjs rename to test/fixtures/test-runner/default-behavior/test/skip_by_name.cjs diff --git a/test/parallel/test-runner-cli.js b/test/parallel/test-runner-cli.js index 496f53e23ea3a6..81e20045e33bdd 100644 --- a/test/parallel/test-runner-cli.js +++ b/test/parallel/test-runner-cli.js @@ -22,8 +22,8 @@ const testFixtures = fixtures.path('test-runner'); { // Default behavior. node_modules is ignored. Files that don't match the // pattern are ignored except in test/ directories. 
- const args = ['--test', testFixtures]; - const child = spawnSync(process.execPath, args); + const args = ['--test']; + const child = spawnSync(process.execPath, args, { cwd: join(testFixtures, 'default-behavior') }); assert.strictEqual(child.status, 1); assert.strictEqual(child.signal, null); @@ -39,8 +39,8 @@ const testFixtures = fixtures.path('test-runner'); { // Same but with a prototype mutation in require scripts. - const args = ['--require', join(testFixtures, 'protoMutation.js'), '--test', testFixtures]; - const child = spawnSync(process.execPath, args); + const args = ['--require', join(testFixtures, 'protoMutation.js'), '--test']; + const child = spawnSync(process.execPath, args, { cwd: join(testFixtures, 'default-behavior') }); const stdout = child.stdout.toString(); assert.match(stdout, /ok 1 - this should pass/); @@ -56,23 +56,19 @@ const testFixtures = fixtures.path('test-runner'); { // User specified files that don't match the pattern are still run. - const args = ['--test', testFixtures, join(testFixtures, 'index.js')]; - const child = spawnSync(process.execPath, args); + const args = ['--test', join(testFixtures, 'index.js')]; + const child = spawnSync(process.execPath, args, { cwd: testFixtures }); assert.strictEqual(child.status, 1); assert.strictEqual(child.signal, null); assert.strictEqual(child.stderr.toString(), ''); const stdout = child.stdout.toString(); assert.match(stdout, /not ok 1 - .+index\.js/); - assert.match(stdout, /ok 2 - this should pass/); - assert.match(stdout, /not ok 3 - this should fail/); - assert.match(stdout, /ok 4 - .+subdir.+subdir_test\.js/); - assert.match(stdout, /ok 5 - this should pass/); } { // Searches node_modules if specified. - const args = ['--test', join(testFixtures, 'node_modules')]; + const args = ['--test', join(testFixtures, 'default-behavior/node_modules')]; const child = spawnSync(process.execPath, args); assert.strictEqual(child.status, 1); @@ -85,7 +81,7 @@ const testFixtures = fixtures.path('test-runner'); { // The current directory is used by default. 
const args = ['--test']; - const options = { cwd: testFixtures }; + const options = { cwd: join(testFixtures, 'default-behavior') }; const child = spawnSync(process.execPath, args, options); assert.strictEqual(child.status, 1); @@ -124,7 +120,7 @@ const testFixtures = fixtures.path('test-runner'); // Test combined stream outputs const args = [ '--test', - 'test/fixtures/test-runner/index.test.js', + 'test/fixtures/test-runner/default-behavior/index.test.js', 'test/fixtures/test-runner/nested.js', 'test/fixtures/test-runner/invalid-tap.js', ]; @@ -202,7 +198,7 @@ const testFixtures = fixtures.path('test-runner'); const args = ['--no-warnings', '--experimental-loader', 'data:text/javascript,', '--require', fixtures.path('empty.js'), - '--test', join(testFixtures, 'index.test.js')]; + '--test', join(testFixtures, 'default-behavior', 'index.test.js')]; const child = spawnSync(process.execPath, args); assert.strictEqual(child.stderr.toString(), ''); diff --git a/test/parallel/test-runner-exit-code.js b/test/parallel/test-runner-exit-code.js index c0892055aea7fb..700480386d5b4a 100644 --- a/test/parallel/test-runner-exit-code.js +++ b/test/parallel/test-runner-exit-code.js @@ -43,7 +43,10 @@ if (process.argv[2] === 'child') { assert.strictEqual(child.status, 0); assert.strictEqual(child.signal, null); - child = spawnSync(process.execPath, ['--test', fixtures.path('test-runner', 'subdir', 'subdir_test.js')]); + child = spawnSync(process.execPath, [ + '--test', + fixtures.path('test-runner', 'default-behavior', 'subdir', 'subdir_test.js'), + ]); assert.strictEqual(child.status, 0); assert.strictEqual(child.signal, null); diff --git a/test/parallel/test-runner-inspect.mjs b/test/parallel/test-runner-inspect.mjs index a8fd9770948c7a..4cfc6bea54b964 100644 --- a/test/parallel/test-runner-inspect.mjs +++ b/test/parallel/test-runner-inspect.mjs @@ -11,7 +11,11 @@ common.skipIfInspectorDisabled(); tmpdir.refresh(); { - const child = new NodeInstance(['--test', '--inspect-brk=0'], undefined, fixtures.path('test-runner/index.test.js')); + const child = new NodeInstance( + ['--test', '--inspect-brk=0'], + undefined, + fixtures.path('test-runner/default-behavior/index.test.js') + ); let stdout = ''; let stderr = ''; diff --git a/test/parallel/test-runner-run.mjs b/test/parallel/test-runner-run.mjs index 7e4a8fbe76753a..62fab0af146f4d 100644 --- a/test/parallel/test-runner-run.mjs +++ b/test/parallel/test-runner-run.mjs @@ -26,7 +26,7 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { }); it('should succeed with a file', async () => { - const stream = run({ files: [join(testFixtures, 'test/random.cjs')] }); + const stream = run({ files: [join(testFixtures, 'default-behavior/test/random.cjs')] }); stream.on('test:fail', common.mustNotCall()); stream.on('test:pass', common.mustCall(1)); // eslint-disable-next-line no-unused-vars @@ -34,7 +34,12 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { }); it('should run same file twice', async () => { - const stream = run({ files: [join(testFixtures, 'test/random.cjs'), join(testFixtures, 'test/random.cjs')] }); + const stream = run({ + files: [ + join(testFixtures, 'default-behavior/test/random.cjs'), + join(testFixtures, 'default-behavior/test/random.cjs'), + ] + }); stream.on('test:fail', common.mustNotCall()); stream.on('test:pass', common.mustCall(2)); // eslint-disable-next-line no-unused-vars @@ -68,7 +73,9 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { }); it('should be piped with dot', 
async () => { - const result = await run({ files: [join(testFixtures, 'test/random.cjs')] }).compose(dot).toArray(); + const result = await run({ + files: [join(testFixtures, 'default-behavior/test/random.cjs')] + }).compose(dot).toArray(); assert.deepStrictEqual(result, [ '.', '\n', @@ -77,7 +84,9 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { it('should be piped with spec', async () => { const specReporter = new spec(); - const result = await run({ files: [join(testFixtures, 'test/random.cjs')] }).compose(specReporter).toArray(); + const result = await run({ + files: [join(testFixtures, 'default-behavior/test/random.cjs')] + }).compose(specReporter).toArray(); const stringResults = result.map((bfr) => bfr.toString()); assert.match(stringResults[0], /this should pass/); assert.match(stringResults[1], /tests 1/); @@ -85,7 +94,9 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { }); it('should be piped with tap', async () => { - const result = await run({ files: [join(testFixtures, 'test/random.cjs')] }).compose(tap).toArray(); + const result = await run({ + files: [join(testFixtures, 'default-behavior/test/random.cjs')] + }).compose(tap).toArray(); assert.strictEqual(result.length, 13); assert.strictEqual(result[0], 'TAP version 13\n'); assert.strictEqual(result[1], '# Subtest: this should pass\n'); @@ -103,7 +114,10 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { }); it('should skip tests not matching testNamePatterns - RegExp', async () => { - const result = await run({ files: [join(testFixtures, 'test/skip_by_name.cjs')], testNamePatterns: [/executed/] }) + const result = await run({ + files: [join(testFixtures, 'default-behavior/test/skip_by_name.cjs')], + testNamePatterns: [/executed/] + }) .compose(tap) .toArray(); assert.strictEqual(result[2], 'ok 1 - this should be skipped # SKIP test name does not match pattern\n'); @@ -111,7 +125,10 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { }); it('should skip tests not matching testNamePatterns - string', async () => { - const result = await run({ files: [join(testFixtures, 'test/skip_by_name.cjs')], testNamePatterns: ['executed'] }) + const result = await run({ + files: [join(testFixtures, 'default-behavior/test/skip_by_name.cjs')], + testNamePatterns: ['executed'] + }) .compose(tap) .toArray(); assert.strictEqual(result[2], 'ok 1 - this should be skipped # SKIP test name does not match pattern\n'); @@ -121,7 +138,7 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { it('should emit "test:watch:drained" event on watch mode', async () => { const controller = new AbortController(); await run({ - files: [join(testFixtures, 'test/random.cjs')], + files: [join(testFixtures, 'default-behavior/test/random.cjs')], watch: true, signal: controller.signal, }).on('data', function({ type }) { @@ -135,7 +152,7 @@ describe('require(\'node:test\').run', { concurrency: true }, () => { it('should stop watch mode when abortSignal aborts', async () => { const controller = new AbortController(); const result = await run({ - files: [join(testFixtures, 'test/random.cjs')], + files: [join(testFixtures, 'default-behavior/test/random.cjs')], watch: true, signal: controller.signal, }) From 75333f38b2d4ea846694e8f89c4e9246613eadea Mon Sep 17 00:00:00 2001 From: Raz Luvaton <16746759+rluvaton@users.noreply.github.com> Date: Mon, 24 Jul 2023 14:38:23 +0300 Subject: [PATCH 002/125] test_runner: fix global before not called when no global test exists 
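Previously, a top-level before() hook only ran when at least one test
was registered at the top level; a file containing only describe()
suites skipped it entirely. A minimal sketch of the affected pattern
(adapted from the hooks-with-no-global-test.js fixture added below;
the names and log messages are illustrative only):

    'use strict';
    const { describe, it, before, after } = require('node:test');

    // No top-level test() or it() calls -- only a suite.
    before(() => console.log('global before')); // was never called
    after(() => console.log('global after'));

    describe('suite', () => {
      it('runs', () => {});
    });

With this change, Suite.run() also invokes its parent's before hooks,
so the global hook fires even when no global test exists.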
PR-URL: https://github.com/nodejs/node/pull/48877 Backport-PR-URL: https://github.com/nodejs/node/pull/49225 Reviewed-By: Chemi Atlow Reviewed-By: Moshe Atlow --- lib/internal/test_runner/test.js | 4 + .../output/hooks-with-no-global-test.js | 84 +++++++++++++++++++ .../output/hooks-with-no-global-test.snapshot | 44 ++++++++++ test/parallel/test-runner-output.mjs | 1 + 4 files changed, 133 insertions(+) create mode 100644 test/fixtures/test-runner/output/hooks-with-no-global-test.js create mode 100644 test/fixtures/test-runner/output/hooks-with-no-global-test.snapshot diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index eb2ccf9c9a22c3..0d8e3dbd6e8d3c 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -828,6 +828,10 @@ class Suite extends Test { return; } + if (this.parent.hooks.before.length > 0) { + await this.parent.runHook('before', this.parent.getRunArgs()); + } + await this.runHook('before', hookArgs); const stopPromise = stopTest(this.timeout, this.signal); diff --git a/test/fixtures/test-runner/output/hooks-with-no-global-test.js b/test/fixtures/test-runner/output/hooks-with-no-global-test.js new file mode 100644 index 00000000000000..844aa6ff3c2d59 --- /dev/null +++ b/test/fixtures/test-runner/output/hooks-with-no-global-test.js @@ -0,0 +1,84 @@ +'use strict'; +const { test, describe, it, before, after, beforeEach, afterEach } = require('node:test'); +const assert = require("assert"); + +// This file should not have any global tests to reproduce bug #48844 +const testArr = []; + +before(() => testArr.push('global before')); +after(() => { + testArr.push('global after'); + + try { + assert.deepStrictEqual(testArr, [ + 'global before', + 'describe before', + + 'describe beforeEach', + 'describe it 1', + 'describe afterEach', + + 'describe beforeEach', + 'describe test 2', + 'describe afterEach', + + 'describe nested before', + + 'describe beforeEach', + 'describe nested beforeEach', + 'describe nested it 1', + 'describe afterEach', + 'describe nested afterEach', + + 'describe beforeEach', + 'describe nested beforeEach', + 'describe nested test 2', + 'describe afterEach', + 'describe nested afterEach', + + 'describe nested after', + 'describe after', + 'global after', + ]); + } catch (e) { + // TODO(rluvaton): remove the try catch after #48867 is fixed + console.error(e); + process.exit(1); + } +}); + +describe('describe hooks with no global tests', () => { + before(() => { + testArr.push('describe before'); + }); + after(()=> { + testArr.push('describe after'); + }); + beforeEach(() => { + testArr.push('describe beforeEach'); + }); + afterEach(() => { + testArr.push('describe afterEach'); + }); + + it('1', () => testArr.push('describe it 1')); + test('2', () => testArr.push('describe test 2')); + + describe('nested', () => { + before(() => { + testArr.push('describe nested before') + }); + after(() => { + testArr.push('describe nested after') + }); + beforeEach(() => { + testArr.push('describe nested beforeEach') + }); + afterEach(() => { + testArr.push('describe nested afterEach') + }); + + it('nested 1', () => testArr.push('describe nested it 1')); + test('nested 2', () => testArr.push('describe nested test 2')); + }); +}); diff --git a/test/fixtures/test-runner/output/hooks-with-no-global-test.snapshot b/test/fixtures/test-runner/output/hooks-with-no-global-test.snapshot new file mode 100644 index 00000000000000..722a3a4ca2ceac --- /dev/null +++ 
b/test/fixtures/test-runner/output/hooks-with-no-global-test.snapshot @@ -0,0 +1,44 @@ +TAP version 13 +# Subtest: describe hooks with no global tests + # Subtest: 1 + ok 1 - 1 + --- + duration_ms: * + ... + # Subtest: 2 + ok 2 - 2 + --- + duration_ms: * + ... + # Subtest: nested + # Subtest: nested 1 + ok 1 - nested 1 + --- + duration_ms: * + ... + # Subtest: nested 2 + ok 2 - nested 2 + --- + duration_ms: * + ... + 1..2 + ok 3 - nested + --- + duration_ms: * + type: 'suite' + ... + 1..3 +ok 1 - describe hooks with no global tests + --- + duration_ms: * + type: 'suite' + ... +1..1 +# tests 4 +# suites 2 +# pass 4 +# fail 0 +# cancelled 0 +# skipped 0 +# todo 0 +# duration_ms * diff --git a/test/parallel/test-runner-output.mjs b/test/parallel/test-runner-output.mjs index 0d670c37bc9319..76c511117ea091 100644 --- a/test/parallel/test-runner-output.mjs +++ b/test/parallel/test-runner-output.mjs @@ -36,6 +36,7 @@ const tests = [ { name: 'test-runner/output/describe_it.js' }, { name: 'test-runner/output/describe_nested.js' }, { name: 'test-runner/output/hooks.js' }, + { name: 'test-runner/output/hooks-with-no-global-test.js' }, { name: 'test-runner/output/no_refs.js' }, { name: 'test-runner/output/no_tests.js' }, { name: 'test-runner/output/only_tests.js' }, From c2f1830f662258300450abf79d0d6cdbbaabd56e Mon Sep 17 00:00:00 2001 From: Raz Luvaton <16746759+rluvaton@users.noreply.github.com> Date: Mon, 31 Jul 2023 11:10:36 +0300 Subject: [PATCH 003/125] test_runner: cleanup test timeout abort listener fix #48475 PR-URL: https://github.com/nodejs/node/pull/48915 Backport-PR-URL: https://github.com/nodejs/node/pull/49225 Reviewed-By: Moshe Atlow Reviewed-By: Benjamin Gruenbaum Reviewed-By: Chemi Atlow --- lib/internal/test_runner/test.js | 56 +++++++++++++--- ...efore-and-after-each-too-many-listeners.js | 8 +++ ...and-after-each-too-many-listeners.snapshot | 65 +++++++++++++++++++ ...er-each-with-timeout-too-many-listeners.js | 8 +++ ...h-with-timeout-too-many-listeners.snapshot | 65 +++++++++++++++++++ test/parallel/test-runner-output.mjs | 2 + 6 files changed, 194 insertions(+), 10 deletions(-) create mode 100644 test/fixtures/test-runner/output/before-and-after-each-too-many-listeners.js create mode 100644 test/fixtures/test-runner/output/before-and-after-each-too-many-listeners.snapshot create mode 100644 test/fixtures/test-runner/output/before-and-after-each-with-timeout-too-many-listeners.js create mode 100644 test/fixtures/test-runner/output/before-and-after-each-with-timeout-too-many-listeners.snapshot diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index 0d8e3dbd6e8d3c..3d4df848cca293 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -20,10 +20,12 @@ const { SafeSet, SafePromiseAll, SafePromiseRace, + SymbolDispose, + ObjectDefineProperty, Symbol, } = primordials; +const { addAbortListener } = require('events'); const { AsyncResource } = require('async_hooks'); -const { once } = require('events'); const { AbortController } = require('internal/abort_controller'); const { codes: { @@ -52,7 +54,7 @@ const { validateOneOf, validateUint32, } = require('internal/validators'); -const { setTimeout } = require('timers/promises'); +const { setTimeout } = require('timers'); const { TIMEOUT_MAX } = require('internal/timers'); const { availableParallelism } = require('os'); const { bigint: hrtime } = process.hrtime; @@ -76,15 +78,42 @@ const { testNamePatterns, testOnlyFlag } = parseCommandLine(); let kResistStopPropagation; 
function stopTest(timeout, signal) { + const deferred = createDeferredPromise(); + const abortListener = addAbortListener(signal, deferred.resolve); + let timer; + let disposeFunction; + if (timeout === kDefaultTimeout) { - return once(signal, 'abort'); + disposeFunction = abortListener[SymbolDispose]; + } if (timeout !== kDefaultTimeout) { + timer = setTimeout(() => deferred.resolve(), timeout); + timer.unref(); + + ObjectDefineProperty(deferred, 'promise', { + __proto__: null, + configurable: true, + writable: true, + value: PromisePrototypeThen(deferred.promise, () => { + throw new ERR_TEST_FAILURE( + `test timed out after ${timeout}ms`, + kTestTimeoutFailure, + ); + }), + }); + + disposeFunction = () => { + abortListener[SymbolDispose](); + timer[SymbolDispose](); + }; } - return PromisePrototypeThen(setTimeout(timeout, null, { __proto__: null, ref: false, signal }), () => { - throw new ERR_TEST_FAILURE( - `test timed out after ${timeout}ms`, - kTestTimeoutFailure, - ); + + ObjectDefineProperty(deferred.promise, SymbolDispose, { + __proto__: null, + configurable: true, + writable: true, + value: disposeFunction, }); + return deferred.promise; } class TestContext { @@ -549,6 +578,8 @@ class Test extends AsyncResource { } }); + let stopPromise; + try { if (this.parent?.hooks.before.length > 0) { await this.parent.runHook('before', this.parent.getRunArgs()); @@ -556,7 +587,7 @@ class Test extends AsyncResource { if (this.parent?.hooks.beforeEach.length > 0) { await this.parent.runHook('beforeEach', { __proto__: null, args, ctx }); } - const stopPromise = stopTest(this.timeout, this.signal); + stopPromise = stopTest(this.timeout, this.signal); const runArgs = ArrayPrototypeSlice(args); ArrayPrototypeUnshift(runArgs, this.fn, ctx); @@ -603,6 +634,8 @@ class Test extends AsyncResource { this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure)); } } finally { + stopPromise?.[SymbolDispose](); + // Do not abort hooks and the root test as hooks instance are shared between tests suite so aborting them will // cause them to not run for further tests. if (this.parent !== null) { @@ -817,6 +850,7 @@ class Suite extends Test { async run() { const hookArgs = this.getRunArgs(); + let stopPromise; try { this.parent.activeSubtests++; await this.buildSuite; @@ -834,7 +868,7 @@ class Suite extends Test { await this.runHook('before', hookArgs); - const stopPromise = stopTest(this.timeout, this.signal); + stopPromise = stopTest(this.timeout, this.signal); const subtests = this.skipped || this.error ? 
[] : this.subtests; const promise = SafePromiseAll(subtests, (subtests) => subtests.start()); @@ -848,6 +882,8 @@ class Suite extends Test { } else { this.fail(new ERR_TEST_FAILURE(err, kTestCodeFailure)); } + } finally { + stopPromise?.[SymbolDispose](); } this.postRun(); diff --git a/test/fixtures/test-runner/output/before-and-after-each-too-many-listeners.js b/test/fixtures/test-runner/output/before-and-after-each-too-many-listeners.js new file mode 100644 index 00000000000000..73857096068f9a --- /dev/null +++ b/test/fixtures/test-runner/output/before-and-after-each-too-many-listeners.js @@ -0,0 +1,8 @@ +'use strict'; +const { beforeEach, afterEach, test} = require("node:test"); +beforeEach(() => {}); +afterEach(() => {}); + +for (let i = 1; i <= 11; ++i) { + test(`${i}`, () => {}); +} diff --git a/test/fixtures/test-runner/output/before-and-after-each-too-many-listeners.snapshot b/test/fixtures/test-runner/output/before-and-after-each-too-many-listeners.snapshot new file mode 100644 index 00000000000000..4300e21a26403f --- /dev/null +++ b/test/fixtures/test-runner/output/before-and-after-each-too-many-listeners.snapshot @@ -0,0 +1,65 @@ +TAP version 13 +# Subtest: 1 +ok 1 - 1 + --- + duration_ms: * + ... +# Subtest: 2 +ok 2 - 2 + --- + duration_ms: * + ... +# Subtest: 3 +ok 3 - 3 + --- + duration_ms: * + ... +# Subtest: 4 +ok 4 - 4 + --- + duration_ms: * + ... +# Subtest: 5 +ok 5 - 5 + --- + duration_ms: * + ... +# Subtest: 6 +ok 6 - 6 + --- + duration_ms: * + ... +# Subtest: 7 +ok 7 - 7 + --- + duration_ms: * + ... +# Subtest: 8 +ok 8 - 8 + --- + duration_ms: * + ... +# Subtest: 9 +ok 9 - 9 + --- + duration_ms: * + ... +# Subtest: 10 +ok 10 - 10 + --- + duration_ms: * + ... +# Subtest: 11 +ok 11 - 11 + --- + duration_ms: * + ... +1..11 +# tests 11 +# suites 0 +# pass 11 +# fail 0 +# cancelled 0 +# skipped 0 +# todo 0 +# duration_ms * diff --git a/test/fixtures/test-runner/output/before-and-after-each-with-timeout-too-many-listeners.js b/test/fixtures/test-runner/output/before-and-after-each-with-timeout-too-many-listeners.js new file mode 100644 index 00000000000000..87d645d6b0fa82 --- /dev/null +++ b/test/fixtures/test-runner/output/before-and-after-each-with-timeout-too-many-listeners.js @@ -0,0 +1,8 @@ +'use strict'; +const { beforeEach, afterEach, test} = require("node:test"); +beforeEach(() => {}, {timeout: 10000}); +afterEach(() => {}, {timeout: 10000}); + +for (let i = 1; i <= 11; ++i) { + test(`${i}`, () => {}); +} diff --git a/test/fixtures/test-runner/output/before-and-after-each-with-timeout-too-many-listeners.snapshot b/test/fixtures/test-runner/output/before-and-after-each-with-timeout-too-many-listeners.snapshot new file mode 100644 index 00000000000000..4300e21a26403f --- /dev/null +++ b/test/fixtures/test-runner/output/before-and-after-each-with-timeout-too-many-listeners.snapshot @@ -0,0 +1,65 @@ +TAP version 13 +# Subtest: 1 +ok 1 - 1 + --- + duration_ms: * + ... +# Subtest: 2 +ok 2 - 2 + --- + duration_ms: * + ... +# Subtest: 3 +ok 3 - 3 + --- + duration_ms: * + ... +# Subtest: 4 +ok 4 - 4 + --- + duration_ms: * + ... +# Subtest: 5 +ok 5 - 5 + --- + duration_ms: * + ... +# Subtest: 6 +ok 6 - 6 + --- + duration_ms: * + ... +# Subtest: 7 +ok 7 - 7 + --- + duration_ms: * + ... +# Subtest: 8 +ok 8 - 8 + --- + duration_ms: * + ... +# Subtest: 9 +ok 9 - 9 + --- + duration_ms: * + ... +# Subtest: 10 +ok 10 - 10 + --- + duration_ms: * + ... +# Subtest: 11 +ok 11 - 11 + --- + duration_ms: * + ... 
+1..11 +# tests 11 +# suites 0 +# pass 11 +# fail 0 +# cancelled 0 +# skipped 0 +# todo 0 +# duration_ms * diff --git a/test/parallel/test-runner-output.mjs b/test/parallel/test-runner-output.mjs index 76c511117ea091..c4f7ce1d536f73 100644 --- a/test/parallel/test-runner-output.mjs +++ b/test/parallel/test-runner-output.mjs @@ -37,6 +37,8 @@ const tests = [ { name: 'test-runner/output/describe_nested.js' }, { name: 'test-runner/output/hooks.js' }, { name: 'test-runner/output/hooks-with-no-global-test.js' }, + { name: 'test-runner/output/before-and-after-each-too-many-listeners.js' }, + { name: 'test-runner/output/before-and-after-each-with-timeout-too-many-listeners.js' }, { name: 'test-runner/output/no_refs.js' }, { name: 'test-runner/output/no_tests.js' }, { name: 'test-runner/output/only_tests.js' }, From 08738b2664f4a64de9fe65e1f37476b0695d2657 Mon Sep 17 00:00:00 2001 From: Raz Luvaton <16746759+rluvaton@users.noreply.github.com> Date: Tue, 1 Aug 2023 01:13:02 +0300 Subject: [PATCH 004/125] test_runner: fix timeout in *Each hook failing further tests PR-URL: https://github.com/nodejs/node/pull/48925 Backport-PR-URL: https://github.com/nodejs/node/pull/49225 Reviewed-By: Moshe Atlow Reviewed-By: Chemi Atlow --- lib/internal/test_runner/test.js | 30 ++- .../test-runner/output/abort_hooks.js | 63 ++++++ .../test-runner/output/abort_hooks.snapshot | 188 ++++++++++++++++++ .../test-runner/output/hooks.snapshot | 4 - ...re_each_should_not_affect_further_tests.js | 46 +++++ ...h_should_not_affect_further_tests.snapshot | 67 +++++++ test/parallel/test-runner-output.mjs | 2 + 7 files changed, 385 insertions(+), 15 deletions(-) create mode 100644 test/fixtures/test-runner/output/abort_hooks.js create mode 100644 test/fixtures/test-runner/output/abort_hooks.snapshot create mode 100644 test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js create mode 100644 test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.snapshot diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index 3d4df848cca293..c3fa0c2ff39721 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -193,8 +193,8 @@ class SuiteContext { } class Test extends AsyncResource { - #abortController; - #outerSignal; + abortController; + outerSignal; #reportedSubtest; constructor(options) { @@ -292,16 +292,16 @@ class Test extends AsyncResource { fn = noop; } - this.#abortController = new AbortController(); - this.#outerSignal = signal; - this.signal = this.#abortController.signal; + this.abortController = new AbortController(); + this.outerSignal = signal; + this.signal = this.abortController.signal; validateAbortSignal(signal, 'options.signal'); if (signal) { kResistStopPropagation ??= require('internal/event_target').kResistStopPropagation; } - this.#outerSignal?.addEventListener( + this.outerSignal?.addEventListener( 'abort', this.#abortHandler, { __proto__: null, [kResistStopPropagation]: true }, @@ -441,7 +441,7 @@ class Test extends AsyncResource { } #abortHandler = () => { - const error = this.#outerSignal?.reason || new AbortError('The test was aborted'); + const error = this.outerSignal?.reason || new AbortError('The test was aborted'); error.failureType = kAborted; this.#cancel(error); }; @@ -459,7 +459,7 @@ class Test extends AsyncResource { ); this.startTime = this.startTime || this.endTime; // If a test was canceled before it was started, e.g inside a hook this.cancelled = true; - 
this.#abortController.abort(); + this.abortController.abort(); } createHook(name, fn, options) { @@ -527,7 +527,7 @@ class Test extends AsyncResource { if (this.signal.aborted) { return true; } - if (this.#outerSignal?.aborted) { + if (this.outerSignal?.aborted) { this.#abortHandler(); return true; } @@ -639,7 +639,7 @@ class Test extends AsyncResource { // Do not abort hooks and the root test as hooks instance are shared between tests suite so aborting them will // cause them to not run for further tests. if (this.parent !== null) { - this.#abortController.abort(); + this.abortController.abort(); } } @@ -679,7 +679,7 @@ class Test extends AsyncResource { this.fail(new ERR_TEST_FAILURE(msg, kSubtestsFailed)); } - this.#outerSignal?.removeEventListener('abort', this.#abortHandler); + this.outerSignal?.removeEventListener('abort', this.#abortHandler); this.mock?.reset(); if (this.parent !== null) { @@ -795,6 +795,14 @@ class TestHook extends Test { super({ __proto__: null, fn, timeout, signal }); } run(args) { + if (this.error && !this.outerSignal?.aborted) { + this.passed = false; + this.error = null; + this.abortController.abort(); + this.abortController = new AbortController(); + this.signal = this.abortController.signal; + } + this.#args = args; return super.run(); } diff --git a/test/fixtures/test-runner/output/abort_hooks.js b/test/fixtures/test-runner/output/abort_hooks.js new file mode 100644 index 00000000000000..b0f1da80d62719 --- /dev/null +++ b/test/fixtures/test-runner/output/abort_hooks.js @@ -0,0 +1,63 @@ +// Flags: --no-warnings +'use strict'; +const { before, beforeEach, describe, it, after, afterEach } = require('node:test'); + +describe('1 before describe', () => { + const ac = new AbortController(); + before(() => { + console.log('before'); + ac.abort() + }, {signal: ac.signal}); + + it('test 1', () => { + console.log('1.1'); + }); + it('test 2', () => { + console.log('1.2'); + }); +}); + +describe('2 after describe', () => { + const ac = new AbortController(); + after(() => { + console.log('after'); + ac.abort() + }, {signal: ac.signal}); + + it('test 1', () => { + console.log('2.1'); + }); + it('test 2', () => { + console.log('2.2'); + }); +}); + +describe('3 beforeEach describe', () => { + const ac = new AbortController(); + beforeEach(() => { + console.log('beforeEach'); + ac.abort() + }, {signal: ac.signal}); + + it('test 1', () => { + console.log('3.1'); + }); + it('test 2', () => { + console.log('3.2'); + }); +}); + +describe('4 afterEach describe', () => { + const ac = new AbortController(); + afterEach(() => { + console.log('afterEach'); + ac.abort() + }, {signal: ac.signal}); + + it('test 1', () => { + console.log('4.1'); + }); + it('test 2', () => { + console.log('4.2'); + }); +}); diff --git a/test/fixtures/test-runner/output/abort_hooks.snapshot b/test/fixtures/test-runner/output/abort_hooks.snapshot new file mode 100644 index 00000000000000..a1b5ddcd5f1908 --- /dev/null +++ b/test/fixtures/test-runner/output/abort_hooks.snapshot @@ -0,0 +1,188 @@ +before +2.1 +2.2 +after +beforeEach +4.1 +afterEach +4.2 +TAP version 13 +# Subtest: 1 before describe + # Subtest: test 1 + not ok 1 - test 1 + --- + duration_ms: ZERO + failureType: 'cancelledByParent' + error: 'test did not finish before its parent and was cancelled' + code: 'ERR_TEST_FAILURE' + ... + # Subtest: test 2 + not ok 2 - test 2 + --- + duration_ms: ZERO + failureType: 'cancelledByParent' + error: 'test did not finish before its parent and was cancelled' + code: 'ERR_TEST_FAILURE' + ... 
+ 1..2 +not ok 1 - 1 before describe + --- + duration_ms: * + type: 'suite' + failureType: 'hookFailed' + error: 'This operation was aborted' + code: 20 + name: 'AbortError' + stack: |- + * + * + * + * + * + * + * + * + * + * + ... +# Subtest: 2 after describe + # Subtest: test 1 + ok 1 - test 1 + --- + duration_ms: * + ... + # Subtest: test 2 + ok 2 - test 2 + --- + duration_ms: * + ... + 1..2 +not ok 2 - 2 after describe + --- + duration_ms: * + type: 'suite' + failureType: 'hookFailed' + error: 'This operation was aborted' + code: 20 + name: 'AbortError' + stack: |- + * + * + * + * + * + * + * + * + * + * + ... +# Subtest: 3 beforeEach describe + # Subtest: test 1 + not ok 1 - test 1 + --- + duration_ms: * + failureType: 'hookFailed' + error: 'This operation was aborted' + code: 20 + name: 'AbortError' + stack: |- + * + * + * + * + * + * + * + * + * + async Promise.all (index 0) + ... + # Subtest: test 2 + not ok 2 - test 2 + --- + duration_ms: * + failureType: 'hookFailed' + error: 'This operation was aborted' + code: 20 + name: 'AbortError' + stack: |- + * + * + * + * + * + * + * + * + * + async Promise.all (index 0) + ... + 1..2 +not ok 3 - 3 beforeEach describe + --- + duration_ms: * + type: 'suite' + failureType: 'subtestsFailed' + error: '2 subtests failed' + code: 'ERR_TEST_FAILURE' + ... +# Subtest: 4 afterEach describe + # Subtest: test 1 + not ok 1 - test 1 + --- + duration_ms: * + failureType: 'hookFailed' + error: 'This operation was aborted' + code: 20 + name: 'AbortError' + stack: |- + * + * + * + * + * + * + * + * + * + * + ... + # Subtest: test 2 + not ok 2 - test 2 + --- + duration_ms: * + failureType: 'hookFailed' + error: 'This operation was aborted' + code: 20 + name: 'AbortError' + stack: |- + * + * + * + * + * + * + * + * + * + * + ... + 1..2 +not ok 4 - 4 afterEach describe + --- + duration_ms: * + type: 'suite' + failureType: 'subtestsFailed' + error: '2 subtests failed' + code: 'ERR_TEST_FAILURE' + ... +1..4 +# tests 8 +# suites 4 +# pass 2 +# fail 4 +# cancelled 2 +# skipped 0 +# todo 0 +# duration_ms * diff --git a/test/fixtures/test-runner/output/hooks.snapshot b/test/fixtures/test-runner/output/hooks.snapshot index b9fd23640373de..676e1c7a3287e3 100644 --- a/test/fixtures/test-runner/output/hooks.snapshot +++ b/test/fixtures/test-runner/output/hooks.snapshot @@ -134,8 +134,6 @@ not ok 3 - after throws * * * - async Promise.all (index 0) - * * ... 1..2 @@ -183,7 +181,6 @@ not ok 4 - beforeEach throws * * * - async Promise.all (index 0) * ... 1..2 @@ -265,7 +262,6 @@ not ok 6 - afterEach when test fails * * * - async Promise.all (index 0) * ... 
1..2 diff --git a/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js b/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js new file mode 100644 index 00000000000000..6205e2c403fc86 --- /dev/null +++ b/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js @@ -0,0 +1,46 @@ +const {describe, test, beforeEach, afterEach} = require("node:test"); +const {setTimeout} = require("timers/promises"); + + +describe('before each timeout', () => { + let i = 0; + + beforeEach(async () => { + if (i++ === 0) { + console.log('gonna timeout'); + await setTimeout(700); + return; + } + console.log('not gonna timeout'); + }, {timeout: 500}); + + test('first describe first test', () => { + console.log('before each test first ' + i); + }); + + test('first describe second test', () => { + console.log('before each test second ' + i); + }); +}); + + +describe('after each timeout', () => { + let i = 0; + + afterEach(async function afterEach1() { + if (i++ === 0) { + console.log('gonna timeout'); + await setTimeout(700); + return; + } + console.log('not gonna timeout'); + }, {timeout: 500}); + + test('second describe first test', () => { + console.log('after each test first ' + i); + }); + + test('second describe second test', () => { + console.log('after each test second ' + i); + }); +}); diff --git a/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.snapshot b/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.snapshot new file mode 100644 index 00000000000000..cac7facf893309 --- /dev/null +++ b/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.snapshot @@ -0,0 +1,67 @@ +gonna timeout +TAP version 13 +not gonna timeout +before each test second 2 +after each test first 0 +gonna timeout +# Subtest: before each timeout + # Subtest: first describe first test + not ok 1 - first describe first test + --- + duration_ms: * + failureType: 'hookFailed' + error: 'failed running beforeEach hook' + code: 'ERR_TEST_FAILURE' + stack: |- + async Promise.all (index 0) + ... + # Subtest: first describe second test + ok 2 - first describe second test + --- + duration_ms: * + ... + 1..2 +not ok 1 - before each timeout + --- + duration_ms: * + type: 'suite' + failureType: 'subtestsFailed' + error: '1 subtest failed' + code: 'ERR_TEST_FAILURE' + ... +after each test second 1 +not gonna timeout +# Subtest: after each timeout + # Subtest: second describe first test + not ok 1 - second describe first test + --- + duration_ms: * + failureType: 'hookFailed' + error: 'failed running afterEach hook' + code: 'ERR_TEST_FAILURE' + stack: |- + async Promise.all (index 0) + ... + # Subtest: second describe second test + ok 2 - second describe second test + --- + duration_ms: * + ... + 1..2 +not ok 2 - after each timeout + --- + duration_ms: * + type: 'suite' + failureType: 'subtestsFailed' + error: '1 subtest failed' + code: 'ERR_TEST_FAILURE' + ... 
+1..2 +# tests 4 +# suites 2 +# pass 2 +# fail 2 +# cancelled 0 +# skipped 0 +# todo 0 +# duration_ms * diff --git a/test/parallel/test-runner-output.mjs b/test/parallel/test-runner-output.mjs index c4f7ce1d536f73..84fb4b1824dc34 100644 --- a/test/parallel/test-runner-output.mjs +++ b/test/parallel/test-runner-output.mjs @@ -33,9 +33,11 @@ const specTransform = snapshot const tests = [ { name: 'test-runner/output/abort.js' }, { name: 'test-runner/output/abort_suite.js' }, + { name: 'test-runner/output/abort_hooks.js' }, { name: 'test-runner/output/describe_it.js' }, { name: 'test-runner/output/describe_nested.js' }, { name: 'test-runner/output/hooks.js' }, + { name: 'test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js' }, { name: 'test-runner/output/hooks-with-no-global-test.js' }, { name: 'test-runner/output/before-and-after-each-too-many-listeners.js' }, { name: 'test-runner/output/before-and-after-each-with-timeout-too-many-listeners.js' }, From 4bc0a8fe9920f5cd4e2f2bceee13304bb045607a Mon Sep 17 00:00:00 2001 From: Raz Luvaton <16746759+rluvaton@users.noreply.github.com> Date: Wed, 2 Aug 2023 23:06:25 +0300 Subject: [PATCH 005/125] test_runner: fix global after not failing the tests PR-URL: https://github.com/nodejs/node/pull/48913 Backport-PR-URL: https://github.com/nodejs/node/pull/49225 Fixes: https://github.com/nodejs/node/issues/48867 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Moshe Atlow Reviewed-By: Chemi Atlow --- lib/internal/test_runner/test.js | 8 +++ test/common/assertSnapshot.js | 5 ++ .../global_after_should_fail_the_test.js | 10 ++++ ...global_after_should_fail_the_test.snapshot | 34 ++++++++++++ .../output/hooks-with-no-global-test.js | 54 +++++++++---------- test/parallel/test-runner-output.mjs | 26 +++++++-- 6 files changed, 103 insertions(+), 34 deletions(-) create mode 100644 test/fixtures/test-runner/output/global_after_should_fail_the_test.js create mode 100644 test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index c3fa0c2ff39721..cc7c81cad88c0d 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -688,6 +688,14 @@ class Test extends AsyncResource { this.parent.processReadySubtestRange(false); this.parent.processPendingSubtests(); } else if (!this.reported) { + if (!this.passed && failed === 0 && this.error) { + this.reporter.fail(0, kFilename, this.subtests.length + 1, kFilename, { + __proto__: null, + duration_ms: this.#duration(), + error: this.error, + }, undefined); + } + this.reported = true; this.reporter.plan(this.nesting, kFilename, this.root.harness.counters.topLevel); diff --git a/test/common/assertSnapshot.js b/test/common/assertSnapshot.js index da6952ba1204ee..0bd0fc18534f8b 100644 --- a/test/common/assertSnapshot.js +++ b/test/common/assertSnapshot.js @@ -20,6 +20,10 @@ function replaceWindowsPaths(str) { return common.isWindows ? str.replaceAll(path.win32.sep, path.posix.sep) : str; } +function replaceFullPaths(str) { + return str.replaceAll(process.cwd(), ''); +} + function transform(...args) { return (str) => args.reduce((acc, fn) => fn(acc), str); } @@ -79,6 +83,7 @@ async function spawnAndAssert(filename, transform = (x) => x, { tty = false, ... 
module.exports = { assertSnapshot, getSnapshotPath, + replaceFullPaths, replaceStackTrace, replaceWindowsLineEndings, replaceWindowsPaths, diff --git a/test/fixtures/test-runner/output/global_after_should_fail_the_test.js b/test/fixtures/test-runner/output/global_after_should_fail_the_test.js new file mode 100644 index 00000000000000..e2ad4c815b7fcd --- /dev/null +++ b/test/fixtures/test-runner/output/global_after_should_fail_the_test.js @@ -0,0 +1,10 @@ +'use strict'; +const { it, after } = require('node:test'); + +after(() => { + throw new Error('this should fail the test') +}); + +it('this is a test', () => { + console.log('this is a test') +}); diff --git a/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot b/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot new file mode 100644 index 00000000000000..16693c1a8a964b --- /dev/null +++ b/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot @@ -0,0 +1,34 @@ +this is a test +TAP version 13 +# Subtest: this is a test +ok 1 - this is a test + --- + duration_ms: * + ... +not ok 2 - /test/fixtures/test-runner/output/global_after_should_fail_the_test.js + --- + duration_ms: * + failureType: 'hookFailed' + error: 'this should fail the test' + code: 'ERR_TEST_FAILURE' + stack: |- + * + * + * + * + * + * + * + * + * + * + ... +1..1 +# tests 1 +# suites 0 +# pass 1 +# fail 0 +# cancelled 0 +# skipped 0 +# todo 0 +# duration_ms * diff --git a/test/fixtures/test-runner/output/hooks-with-no-global-test.js b/test/fixtures/test-runner/output/hooks-with-no-global-test.js index 844aa6ff3c2d59..ea01463fd6cc1f 100644 --- a/test/fixtures/test-runner/output/hooks-with-no-global-test.js +++ b/test/fixtures/test-runner/output/hooks-with-no-global-test.js @@ -9,42 +9,36 @@ before(() => testArr.push('global before')); after(() => { testArr.push('global after'); - try { - assert.deepStrictEqual(testArr, [ - 'global before', - 'describe before', + assert.deepStrictEqual(testArr, [ + 'global before', + 'describe before', - 'describe beforeEach', - 'describe it 1', - 'describe afterEach', + 'describe beforeEach', + 'describe it 1', + 'describe afterEach', - 'describe beforeEach', - 'describe test 2', - 'describe afterEach', + 'describe beforeEach', + 'describe test 2', + 'describe afterEach', - 'describe nested before', + 'describe nested before', - 'describe beforeEach', - 'describe nested beforeEach', - 'describe nested it 1', - 'describe afterEach', - 'describe nested afterEach', + 'describe beforeEach', + 'describe nested beforeEach', + 'describe nested it 1', + 'describe afterEach', + 'describe nested afterEach', - 'describe beforeEach', - 'describe nested beforeEach', - 'describe nested test 2', - 'describe afterEach', - 'describe nested afterEach', + 'describe beforeEach', + 'describe nested beforeEach', + 'describe nested test 2', + 'describe afterEach', + 'describe nested afterEach', - 'describe nested after', - 'describe after', - 'global after', - ]); - } catch (e) { - // TODO(rluvaton): remove the try catch after #48867 is fixed - console.error(e); - process.exit(1); - } + 'describe nested after', + 'describe after', + 'global after', + ]); }); describe('describe hooks with no global tests', () => { diff --git a/test/parallel/test-runner-output.mjs b/test/parallel/test-runner-output.mjs index 84fb4b1824dc34..8d5233d2de2441 100644 --- a/test/parallel/test-runner-output.mjs +++ b/test/parallel/test-runner-output.mjs @@ -24,10 +24,27 @@ function replaceSpecDuration(str) { 
.replaceAll(/duration_ms [0-9.]+/g, 'duration_ms *') .replace(stackTraceBasePath, '$3'); } -const defaultTransform = snapshot - .transform(snapshot.replaceWindowsLineEndings, snapshot.replaceStackTrace, replaceTestDuration); -const specTransform = snapshot - .transform(replaceSpecDuration, snapshot.replaceWindowsLineEndings, snapshot.replaceStackTrace); + +function removeWindowsPathEscaping(str) { + return common.isWindows ? str.replaceAll(/\\\\/g, '\\') : str; +} + +const defaultTransform = snapshot.transform( + snapshot.replaceWindowsLineEndings, + snapshot.replaceStackTrace, + replaceTestDuration, +); +const specTransform = snapshot.transform( + replaceSpecDuration, + snapshot.replaceWindowsLineEndings, + snapshot.replaceStackTrace, +); +const withFileNameTransform = snapshot.transform( + defaultTransform, + removeWindowsPathEscaping, + snapshot.replaceFullPaths, + snapshot.replaceWindowsPaths, +); const tests = [ @@ -41,6 +58,7 @@ const tests = [ { name: 'test-runner/output/hooks-with-no-global-test.js' }, { name: 'test-runner/output/before-and-after-each-too-many-listeners.js' }, { name: 'test-runner/output/before-and-after-each-with-timeout-too-many-listeners.js' }, + { name: 'test-runner/output/global_after_should_fail_the_test.js', transform: withFileNameTransform }, { name: 'test-runner/output/no_refs.js' }, { name: 'test-runner/output/no_tests.js' }, { name: 'test-runner/output/only_tests.js' }, From 4d184b52512539909bf6cbfaad0a41018dd81d81 Mon Sep 17 00:00:00 2001 From: Raz Luvaton <16746759+rluvaton@users.noreply.github.com> Date: Thu, 3 Aug 2023 22:41:20 +0300 Subject: [PATCH 006/125] test: remove --no-warnings flag in test_runner fixtures no longer needed after #48915 fix PR-URL: https://github.com/nodejs/node/pull/48989 Backport-PR-URL: https://github.com/nodejs/node/pull/49225 Reviewed-By: Moshe Atlow Reviewed-By: Chemi Atlow --- test/fixtures/test-runner/output/abort.js | 1 - test/fixtures/test-runner/output/abort_hooks.js | 1 - test/fixtures/test-runner/output/abort_suite.js | 1 - test/fixtures/test-runner/output/describe_it.js | 1 - test/fixtures/test-runner/output/describe_nested.js | 1 - test/fixtures/test-runner/output/dot_reporter.js | 1 - test/fixtures/test-runner/output/hooks.js | 1 - test/fixtures/test-runner/output/name_pattern.js | 2 +- test/fixtures/test-runner/output/name_pattern_with_only.js | 2 +- test/fixtures/test-runner/output/no_refs.js | 1 - test/fixtures/test-runner/output/no_tests.js | 1 - test/fixtures/test-runner/output/only_tests.js | 2 +- test/fixtures/test-runner/output/output_cli.js | 1 - test/fixtures/test-runner/output/single.js | 1 - test/fixtures/test-runner/output/spec_reporter.js | 1 - test/fixtures/test-runner/output/spec_reporter_cli.js | 1 - test/fixtures/test-runner/output/spec_reporter_successful.js | 2 +- test/fixtures/test-runner/output/unresolved_promise.js | 1 - 18 files changed, 4 insertions(+), 18 deletions(-) diff --git a/test/fixtures/test-runner/output/abort.js b/test/fixtures/test-runner/output/abort.js index 0cd9c9b9273503..eba48d9ec58718 100644 --- a/test/fixtures/test-runner/output/abort.js +++ b/test/fixtures/test-runner/output/abort.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const test = require('node:test'); diff --git a/test/fixtures/test-runner/output/abort_hooks.js b/test/fixtures/test-runner/output/abort_hooks.js index b0f1da80d62719..8395f70e86185e 100644 --- a/test/fixtures/test-runner/output/abort_hooks.js +++ b/test/fixtures/test-runner/output/abort_hooks.js @@ -1,4 
+1,3 @@ -// Flags: --no-warnings 'use strict'; const { before, beforeEach, describe, it, after, afterEach } = require('node:test'); diff --git a/test/fixtures/test-runner/output/abort_suite.js b/test/fixtures/test-runner/output/abort_suite.js index 419698320a5f7d..8a2a2c05458fd4 100644 --- a/test/fixtures/test-runner/output/abort_suite.js +++ b/test/fixtures/test-runner/output/abort_suite.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const { describe, it } = require('node:test'); diff --git a/test/fixtures/test-runner/output/describe_it.js b/test/fixtures/test-runner/output/describe_it.js index 6625747d026969..942b1c5317abab 100644 --- a/test/fixtures/test-runner/output/describe_it.js +++ b/test/fixtures/test-runner/output/describe_it.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const assert = require('node:assert'); diff --git a/test/fixtures/test-runner/output/describe_nested.js b/test/fixtures/test-runner/output/describe_nested.js index 40ea150a018f3d..3cd4dcbb06c849 100644 --- a/test/fixtures/test-runner/output/describe_nested.js +++ b/test/fixtures/test-runner/output/describe_nested.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const { describe, it } = require('node:test'); diff --git a/test/fixtures/test-runner/output/dot_reporter.js b/test/fixtures/test-runner/output/dot_reporter.js index 72a8aaa10e491b..e9b8f5cead88f0 100644 --- a/test/fixtures/test-runner/output/dot_reporter.js +++ b/test/fixtures/test-runner/output/dot_reporter.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const fixtures = require('../../../common/fixtures'); diff --git a/test/fixtures/test-runner/output/hooks.js b/test/fixtures/test-runner/output/hooks.js index 827da5d5646262..00a6c23499f52e 100644 --- a/test/fixtures/test-runner/output/hooks.js +++ b/test/fixtures/test-runner/output/hooks.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; const common = require('../../../common'); const assert = require('assert'); diff --git a/test/fixtures/test-runner/output/name_pattern.js b/test/fixtures/test-runner/output/name_pattern.js index f183c09057fa33..10e7619b9cfcb9 100644 --- a/test/fixtures/test-runner/output/name_pattern.js +++ b/test/fixtures/test-runner/output/name_pattern.js @@ -1,4 +1,4 @@ -// Flags: --no-warnings --test-name-pattern=enabled --test-name-pattern=yes --test-name-pattern=/pattern/i +// Flags: --test-name-pattern=enabled --test-name-pattern=yes --test-name-pattern=/pattern/i 'use strict'; const common = require('../../../common'); const { diff --git a/test/fixtures/test-runner/output/name_pattern_with_only.js b/test/fixtures/test-runner/output/name_pattern_with_only.js index a3e2f1be2ad42d..bc68b7a5fdf7e7 100644 --- a/test/fixtures/test-runner/output/name_pattern_with_only.js +++ b/test/fixtures/test-runner/output/name_pattern_with_only.js @@ -1,4 +1,4 @@ -// Flags: --no-warnings --test-only --test-name-pattern=enabled +// Flags: --test-only --test-name-pattern=enabled 'use strict'; const common = require('../../../common'); const { test } = require('node:test'); diff --git a/test/fixtures/test-runner/output/no_refs.js b/test/fixtures/test-runner/output/no_refs.js index 7b36e01133da6e..0d7cc6e01791d0 100644 --- a/test/fixtures/test-runner/output/no_refs.js +++ b/test/fixtures/test-runner/output/no_refs.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const test = require('node:test'); diff --git 
a/test/fixtures/test-runner/output/no_tests.js b/test/fixtures/test-runner/output/no_tests.js index f9a9506706180e..2644e29fe26f85 100644 --- a/test/fixtures/test-runner/output/no_tests.js +++ b/test/fixtures/test-runner/output/no_tests.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const test = require('node:test'); diff --git a/test/fixtures/test-runner/output/only_tests.js b/test/fixtures/test-runner/output/only_tests.js index 5ac4a90c2cf264..26266b524454b7 100644 --- a/test/fixtures/test-runner/output/only_tests.js +++ b/test/fixtures/test-runner/output/only_tests.js @@ -1,4 +1,4 @@ -// Flags: --no-warnings --test-only +// Flags: --test-only 'use strict'; require('../../../common'); const { test, describe, it } = require('node:test'); diff --git a/test/fixtures/test-runner/output/output_cli.js b/test/fixtures/test-runner/output/output_cli.js index 50ef07233314b1..4c6b029c6580c0 100644 --- a/test/fixtures/test-runner/output/output_cli.js +++ b/test/fixtures/test-runner/output/output_cli.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const fixtures = require('../../../common/fixtures'); diff --git a/test/fixtures/test-runner/output/single.js b/test/fixtures/test-runner/output/single.js index e099ec3c375bb7..568e5ba986ad49 100644 --- a/test/fixtures/test-runner/output/single.js +++ b/test/fixtures/test-runner/output/single.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; const test = require('node:test'); test('last test', () => {}); diff --git a/test/fixtures/test-runner/output/spec_reporter.js b/test/fixtures/test-runner/output/spec_reporter.js index 6a7c2d655f93b3..46e18b1ca8630d 100644 --- a/test/fixtures/test-runner/output/spec_reporter.js +++ b/test/fixtures/test-runner/output/spec_reporter.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const fixtures = require('../../../common/fixtures'); diff --git a/test/fixtures/test-runner/output/spec_reporter_cli.js b/test/fixtures/test-runner/output/spec_reporter_cli.js index e88a7221fb4e71..b0c72e51ab66b8 100644 --- a/test/fixtures/test-runner/output/spec_reporter_cli.js +++ b/test/fixtures/test-runner/output/spec_reporter_cli.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const fixtures = require('../../../common/fixtures'); diff --git a/test/fixtures/test-runner/output/spec_reporter_successful.js b/test/fixtures/test-runner/output/spec_reporter_successful.js index fb9800c0491722..a7ffeb59cc9d1e 100644 --- a/test/fixtures/test-runner/output/spec_reporter_successful.js +++ b/test/fixtures/test-runner/output/spec_reporter_successful.js @@ -1,4 +1,4 @@ -// Flags: --no-warnings --test-reporter=spec +// Flags: --test-reporter=spec 'use strict'; require('../../../common'); const { it } = require('node:test'); diff --git a/test/fixtures/test-runner/output/unresolved_promise.js b/test/fixtures/test-runner/output/unresolved_promise.js index a43f064d44de8e..daf2cee24b6cf9 100644 --- a/test/fixtures/test-runner/output/unresolved_promise.js +++ b/test/fixtures/test-runner/output/unresolved_promise.js @@ -1,4 +1,3 @@ -// Flags: --no-warnings 'use strict'; require('../../../common'); const test = require('node:test'); From efdc95fbc0b7bb149bc95062faf9524bd3c0f17c Mon Sep 17 00:00:00 2001 From: cjihrig Date: Sun, 30 Jul 2023 18:17:39 -0400 Subject: [PATCH 007/125] test_runner: expose location of tests This commit adds each test's line and column number to the reporter output. 
This will aid in debugging test suite failures when error stacks are not helpful, test suites are large, or tests have the same name. This data is also exposed on the spec reporter. This commit also replaces the filename that was previously being reported, with the filename where the test actually exists. These are normally correct, but could be wrong if tests were run from a file other than the user's entrypoint. PR-URL: https://github.com/nodejs/node/pull/48975 Backport-PR-URL: https://github.com/nodejs/node/pull/49225 Fixes: https://github.com/nodejs/node/issues/48457 Reviewed-By: Yagiz Nizipli Reviewed-By: Matteo Collina Reviewed-By: Chemi Atlow Reviewed-By: Moshe Atlow --- doc/api/test.md | 36 ++++++ lib/internal/test_runner/harness.js | 28 ++++- lib/internal/test_runner/reporter/spec.js | 12 +- lib/internal/test_runner/reporter/tap.js | 16 ++- lib/internal/test_runner/test.js | 110 +++++++++++------ lib/internal/test_runner/tests_stream.js | 94 ++++++++++---- src/node_util.cc | 23 ++++ .../test-runner/output/abort.snapshot | 14 +++ .../test-runner/output/abort_hooks.snapshot | 10 ++ .../test-runner/output/abort_suite.snapshot | 7 ++ .../output/default_output.snapshot | 3 + .../test-runner/output/describe_it.js | 9 -- .../test-runner/output/describe_it.snapshot | 102 ++++++++------- .../test-runner/output/dot_reporter.snapshot | 5 +- ...global_after_should_fail_the_test.snapshot | 1 + .../test-runner/output/hooks.snapshot | 30 +++++ .../test-runner/output/no_refs.snapshot | 2 + test/fixtures/test-runner/output/output.js | 14 --- .../test-runner/output/output.snapshot | 114 +++++++++-------- .../test-runner/output/output_cli.snapshot | 116 ++++++++++-------- .../test-runner/output/spec_reporter.snapshot | 42 +++++-- .../output/spec_reporter_cli.snapshot | 42 +++++-- .../fixtures/test-runner/output/tap_escape.js | 19 +++ .../test-runner/output/tap_escape.snapshot | 31 +++++ ...h_should_not_affect_further_tests.snapshot | 4 + .../output/unresolved_promise.snapshot | 2 + test/parallel/test-runner-output.mjs | 24 ++-- 27 files changed, 637 insertions(+), 273 deletions(-) create mode 100644 test/fixtures/test-runner/output/tap_escape.js create mode 100644 test/fixtures/test-runner/output/tap_escape.snapshot diff --git a/doc/api/test.md b/doc/api/test.md index 375cd41c949e8b..7da36a2080558b 100644 --- a/doc/api/test.md +++ b/doc/api/test.md @@ -2049,8 +2049,12 @@ Emitted when code coverage is enabled and all tests have completed. ### Event: `'test:dequeue'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `file` {string|undefined} The path of the test file, `undefined` if test was run through the REPL. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `name` {string} The test name. * `nesting` {number} The nesting level of the test. @@ -2059,8 +2063,12 @@ Emitted when a test is dequeued, right before it is executed. ### Event: `'test:diagnostic'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `file` {string|undefined} The path of the test file, `undefined` if test was run through the REPL. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `message` {string} The diagnostic message. 
* `nesting` {number} The nesting level of the test. @@ -2069,8 +2077,12 @@ Emitted when [`context.diagnostic`][] is called. ### Event: `'test:enqueue'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `file` {string|undefined} The path of the test file, `undefined` if test was run through the REPL. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `name` {string} The test name. * `nesting` {number} The nesting level of the test. @@ -2079,6 +2091,8 @@ Emitted when a test is enqueued for execution. ### Event: `'test:fail'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `details` {Object} Additional execution metadata. * `duration_ms` {number} The duration of the test in milliseconds. * `error` {Error} An error wrapping the error thrown by the test. @@ -2087,6 +2101,8 @@ Emitted when a test is enqueued for execution. this is a suite. * `file` {string|undefined} The path of the test file, `undefined` if test was run through the REPL. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `name` {string} The test name. * `nesting` {number} The nesting level of the test. * `testNumber` {number} The ordinal number of the test. @@ -2098,12 +2114,16 @@ Emitted when a test fails. ### Event: `'test:pass'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `details` {Object} Additional execution metadata. * `duration_ms` {number} The duration of the test in milliseconds. * `type` {string|undefined} The type of the test, used to denote whether this is a suite. * `file` {string|undefined} The path of the test file, `undefined` if test was run through the REPL. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `name` {string} The test name. * `nesting` {number} The nesting level of the test. * `testNumber` {number} The ordinal number of the test. @@ -2115,8 +2135,12 @@ Emitted when a test passes. ### Event: `'test:plan'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `file` {string|undefined} The path of the test file, `undefined` if test was run through the REPL. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `nesting` {number} The nesting level of the test. * `count` {number} The number of subtests that have ran. @@ -2125,8 +2149,12 @@ Emitted when all subtests have completed for a given test. ### Event: `'test:start'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `file` {string|undefined} The path of the test file, `undefined` if test was run through the REPL. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `name` {string} The test name. * `nesting` {number} The nesting level of the test. @@ -2137,7 +2165,11 @@ defined. 
### Event: `'test:stderr'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `file` {string} The path of the test file. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `message` {string} The message written to `stderr`. Emitted when a running test writes to `stderr`. @@ -2146,7 +2178,11 @@ This event is only emitted if `--test` flag is passed. ### Event: `'test:stdout'` * `data` {Object} + * `column` {number|undefined} The column number where the test is defined, or + `undefined` if the test was run through the REPL. * `file` {string} The path of the test file. + * `line` {number|undefined} The line number where the test is defined, or + `undefined` if the test was run through the REPL. * `message` {string} The message written to `stdout`. Emitted when a running test writes to `stdout`. diff --git a/lib/internal/test_runner/harness.js b/lib/internal/test_runner/harness.js index 36c36f2de14b04..4eb6458b23e47d 100644 --- a/lib/internal/test_runner/harness.js +++ b/lib/internal/test_runner/harness.js @@ -5,6 +5,7 @@ const { PromiseResolve, SafeMap, } = primordials; +const { getCallerLocation } = internalBinding('util'); const { createHook, executionAsyncId, @@ -217,9 +218,24 @@ function runInParentContext(Factory) { return PromiseResolve(); } - const test = (name, options, fn) => run(name, options, fn); + const test = (name, options, fn) => { + const overrides = { + __proto__: null, + loc: getCallerLocation(), + }; + + return run(name, options, fn, overrides); + }; ArrayPrototypeForEach(['skip', 'todo', 'only'], (keyword) => { - test[keyword] = (name, options, fn) => run(name, options, fn, { __proto__: null, [keyword]: true }); + test[keyword] = (name, options, fn) => { + const overrides = { + __proto__: null, + [keyword]: true, + loc: getCallerLocation(), + }; + + return run(name, options, fn, overrides); + }; }); return test; } @@ -227,7 +243,13 @@ function runInParentContext(Factory) { function hook(hook) { return (fn, options) => { const parent = testResources.get(executionAsyncId()) || getGlobalRoot(); - parent.createHook(hook, fn, options); + parent.createHook(hook, fn, { + __proto__: null, + ...options, + parent, + hookType: hook, + loc: getCallerLocation(), + }); }; } diff --git a/lib/internal/test_runner/reporter/spec.js b/lib/internal/test_runner/reporter/spec.js index f15b3eaa40aa27..0c5a3e95c7c75a 100644 --- a/lib/internal/test_runner/reporter/spec.js +++ b/lib/internal/test_runner/reporter/spec.js @@ -17,6 +17,7 @@ const { inspectWithNoCustomRetry } = require('internal/errors'); const { green, blue, red, white, gray, shouldColorize } = require('internal/util/colors'); const { kSubtestsFailed } = require('internal/test_runner/test'); const { getCoverageReport } = require('internal/test_runner/utils'); +const { relative } = require('path'); const inspectOptions = { __proto__: null, colors: shouldColorize(process.stdout), breakLength: Infinity }; @@ -40,6 +41,7 @@ class SpecReporter extends Transform { #reported = []; #indentMemo = new SafeMap(); #failedTests = []; + #cwd = process.cwd(); constructor() { super({ __proto__: null, writableObjectMode: true }); @@ -142,10 +144,12 @@ class SpecReporter extends Transform { } const results = [`\n${colors['test:fail']}${symbols['test:fail']}failing tests:${white}\n`]; for (let i = 0; i < this.#failedTests.length; i++) { - ArrayPrototypePush(results, 
this.#formatTestReport( - 'test:fail', - this.#failedTests[i], - )); + const test = this.#failedTests[i]; + const relPath = relative(this.#cwd, test.file); + const formattedErr = this.#formatTestReport('test:fail', test); + const location = `test at ${relPath}:${test.line}:${test.column}`; + + ArrayPrototypePush(results, location, formattedErr); } callback(null, ArrayPrototypeJoin(results, '\n')); } diff --git a/lib/internal/test_runner/reporter/tap.js b/lib/internal/test_runner/reporter/tap.js index de8188c58dd31e..1f60cfa619886e 100644 --- a/lib/internal/test_runner/reporter/tap.js +++ b/lib/internal/test_runner/reporter/tap.js @@ -31,13 +31,14 @@ async function * tapReporter(source) { yield `TAP version ${kDefaultTAPVersion}\n`; for await (const { type, data } of source) { switch (type) { - case 'test:fail': + case 'test:fail': { yield reportTest(data.nesting, data.testNumber, 'not ok', data.name, data.skip, data.todo); - yield reportDetails(data.nesting, data.details); + const location = `${data.file}:${data.line}:${data.column}`; + yield reportDetails(data.nesting, data.details, location); break; - case 'test:pass': + } case 'test:pass': yield reportTest(data.nesting, data.testNumber, 'ok', data.name, data.skip, data.todo); - yield reportDetails(data.nesting, data.details); + yield reportDetails(data.nesting, data.details, null); break; case 'test:plan': yield `${indent(data.nesting)}1..${data.count}\n`; @@ -81,13 +82,18 @@ function reportTest(nesting, testNumber, status, name, skip, todo) { return line; } -function reportDetails(nesting, data = kEmptyObject) { +function reportDetails(nesting, data = kEmptyObject, location) { const { error, duration_ms } = data; const _indent = indent(nesting); let details = `${_indent} ---\n`; details += jsToYaml(_indent, 'duration_ms', duration_ms); details += jsToYaml(_indent, 'type', data.type); + + if (location) { + details += jsToYaml(_indent, 'location', location); + } + details += jsToYaml(_indent, null, error, new SafeSet()); details += `${_indent} ...\n`; return details; diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index cc7c81cad88c0d..58f1de711f38f4 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -24,6 +24,7 @@ const { ObjectDefineProperty, Symbol, } = primordials; +const { getCallerLocation } = internalBinding('util'); const { addAbortListener } = require('events'); const { AsyncResource } = require('async_hooks'); const { AbortController } = require('internal/abort_controller'); @@ -153,8 +154,15 @@ class TestContext { } test(name, options, fn) { - // eslint-disable-next-line no-use-before-define - const subtest = this.#test.createSubtest(Test, name, options, fn); + const overrides = { + __proto__: null, + loc: getCallerLocation(), + }; + + const subtest = this.#test.createSubtest( + // eslint-disable-next-line no-use-before-define + Test, name, options, fn, overrides, + ); return subtest.start(); } @@ -201,7 +209,7 @@ class Test extends AsyncResource { super('Test'); let { fn, name, parent, skip } = options; - const { concurrency, only, timeout, todo, signal } = options; + const { concurrency, loc, only, timeout, todo, signal } = options; if (typeof fn !== 'function') { fn = noop; @@ -332,6 +340,17 @@ class Test extends AsyncResource { "'only' and 'runOnly' require the --test-only command-line option."; this.diagnostic(warning); } + + if (loc === undefined || kFilename === undefined) { + this.loc = undefined; + } else { + this.loc = { + __proto__: null, + line: 
loc[0], + column: loc[1], + file: loc[2], + }; + } } matchesTestNamePatterns() { @@ -351,7 +370,7 @@ class Test extends AsyncResource { while (this.pendingSubtests.length > 0 && this.hasConcurrency()) { const deferred = ArrayPrototypeShift(this.pendingSubtests); const test = deferred.test; - this.reporter.dequeue(test.nesting, kFilename, test.name); + this.reporter.dequeue(test.nesting, test.loc, test.name); await test.run(); deferred.resolve(); } @@ -510,7 +529,7 @@ class Test extends AsyncResource { // If there is enough available concurrency to run the test now, then do // it. Otherwise, return a Promise to the caller and mark the test as // pending for later execution. - this.reporter.enqueue(this.nesting, kFilename, this.name); + this.reporter.enqueue(this.nesting, this.loc, this.name); if (!this.parent.hasConcurrency()) { const deferred = createDeferredPromise(); @@ -519,7 +538,7 @@ class Test extends AsyncResource { return deferred.promise; } - this.reporter.dequeue(this.nesting, kFilename, this.name); + this.reporter.dequeue(this.nesting, this.loc, this.name); return this.run(); } @@ -688,37 +707,37 @@ class Test extends AsyncResource { this.parent.processReadySubtestRange(false); this.parent.processPendingSubtests(); } else if (!this.reported) { - if (!this.passed && failed === 0 && this.error) { - this.reporter.fail(0, kFilename, this.subtests.length + 1, kFilename, { - __proto__: null, - duration_ms: this.#duration(), - error: this.error, - }, undefined); - } + const { + diagnostics, + harness, + loc, + nesting, + reporter, + } = this; this.reported = true; - this.reporter.plan(this.nesting, kFilename, this.root.harness.counters.topLevel); + reporter.plan(nesting, loc, harness.counters.topLevel); - for (let i = 0; i < this.diagnostics.length; i++) { - this.reporter.diagnostic(this.nesting, kFilename, this.diagnostics[i]); + for (let i = 0; i < diagnostics.length; i++) { + reporter.diagnostic(nesting, loc, diagnostics[i]); } - this.reporter.diagnostic(this.nesting, kFilename, `tests ${this.root.harness.counters.all}`); - this.reporter.diagnostic(this.nesting, kFilename, `suites ${this.root.harness.counters.suites}`); - this.reporter.diagnostic(this.nesting, kFilename, `pass ${this.root.harness.counters.passed}`); - this.reporter.diagnostic(this.nesting, kFilename, `fail ${this.root.harness.counters.failed}`); - this.reporter.diagnostic(this.nesting, kFilename, `cancelled ${this.root.harness.counters.cancelled}`); - this.reporter.diagnostic(this.nesting, kFilename, `skipped ${this.root.harness.counters.skipped}`); - this.reporter.diagnostic(this.nesting, kFilename, `todo ${this.root.harness.counters.todo}`); - this.reporter.diagnostic(this.nesting, kFilename, `duration_ms ${this.#duration()}`); + reporter.diagnostic(nesting, loc, `tests ${harness.counters.all}`); + reporter.diagnostic(nesting, loc, `suites ${harness.counters.suites}`); + reporter.diagnostic(nesting, loc, `pass ${harness.counters.passed}`); + reporter.diagnostic(nesting, loc, `fail ${harness.counters.failed}`); + reporter.diagnostic(nesting, loc, `cancelled ${harness.counters.cancelled}`); + reporter.diagnostic(nesting, loc, `skipped ${harness.counters.skipped}`); + reporter.diagnostic(nesting, loc, `todo ${harness.counters.todo}`); + reporter.diagnostic(nesting, loc, `duration_ms ${this.duration()}`); - const coverage = this.harness.coverage(); + const coverage = harness.coverage(); if (coverage) { - this.reporter.coverage(this.nesting, kFilename, coverage); + reporter.coverage(nesting, loc, coverage); } - 
this.reporter.end(); + reporter.end(); } } @@ -746,7 +765,7 @@ class Test extends AsyncResource { this.finished = true; } - #duration() { + duration() { // Duration is recorded in BigInt nanoseconds. Convert to milliseconds. return Number(this.endTime - this.startTime) / 1_000_000; } @@ -754,12 +773,12 @@ class Test extends AsyncResource { report() { countCompletedTest(this); if (this.subtests.length > 0) { - this.reporter.plan(this.subtests[0].nesting, kFilename, this.subtests.length); + this.reporter.plan(this.subtests[0].nesting, this.loc, this.subtests.length); } else { this.reportStarted(); } let directive; - const details = { __proto__: null, duration_ms: this.#duration() }; + const details = { __proto__: null, duration_ms: this.duration() }; if (this.skipped) { directive = this.reporter.getSkip(this.message); @@ -772,14 +791,14 @@ class Test extends AsyncResource { } if (this.passed) { - this.reporter.ok(this.nesting, kFilename, this.testNumber, this.name, details, directive); + this.reporter.ok(this.nesting, this.loc, this.testNumber, this.name, details, directive); } else { details.error = this.error; - this.reporter.fail(this.nesting, kFilename, this.testNumber, this.name, details, directive); + this.reporter.fail(this.nesting, this.loc, this.testNumber, this.name, details, directive); } for (let i = 0; i < this.diagnostics.length; i++) { - this.reporter.diagnostic(this.nesting, kFilename, this.diagnostics[i]); + this.reporter.diagnostic(this.nesting, this.loc, this.diagnostics[i]); } } @@ -789,7 +808,7 @@ class Test extends AsyncResource { } this.#reportedSubtest = true; this.parent.reportStarted(); - this.reporter.start(this.nesting, kFilename, this.name); + this.reporter.start(this.nesting, this.loc, this.name); } } @@ -799,8 +818,11 @@ class TestHook extends Test { if (options === null || typeof options !== 'object') { options = kEmptyObject; } - const { timeout, signal } = options; - super({ __proto__: null, fn, timeout, signal }); + const { loc, timeout, signal } = options; + super({ __proto__: null, fn, loc, timeout, signal }); + + this.parentTest = options.parent ?? null; + this.hookType = options.hookType; } run(args) { if (this.error && !this.outerSignal?.aborted) { @@ -821,6 +843,22 @@ class TestHook extends Test { return true; } postRun() { + const { error, loc, parentTest: parent } = this; + + // Report failures in the root test's after() hook. 
+ if (error && parent !== null && + parent === parent.root && this.hookType === 'after') { + + if (isTestFailureError(error)) { + error.failureType = kHookFailure; + } + + parent.reporter.fail(0, loc, parent.subtests.length + 1, loc.file, { + __proto__: null, + duration_ms: this.duration(), + error, + }, undefined); + } } } diff --git a/lib/internal/test_runner/tests_stream.js b/lib/internal/test_runner/tests_stream.js index 901987681f319b..f7730caac00fa7 100644 --- a/lib/internal/test_runner/tests_stream.js +++ b/lib/internal/test_runner/tests_stream.js @@ -29,16 +29,37 @@ class TestsStream extends Readable { } } - fail(nesting, file, testNumber, name, details, directive) { - this[kEmitMessage]('test:fail', { __proto__: null, name, nesting, file, testNumber, details, ...directive }); - } - - ok(nesting, file, testNumber, name, details, directive) { - this[kEmitMessage]('test:pass', { __proto__: null, name, nesting, file, testNumber, details, ...directive }); - } - - plan(nesting, file, count) { - this[kEmitMessage]('test:plan', { __proto__: null, nesting, file, count }); + fail(nesting, loc, testNumber, name, details, directive) { + this[kEmitMessage]('test:fail', { + __proto__: null, + name, + nesting, + testNumber, + details, + ...loc, + ...directive, + }); + } + + ok(nesting, loc, testNumber, name, details, directive) { + this[kEmitMessage]('test:pass', { + __proto__: null, + name, + nesting, + testNumber, + details, + ...loc, + ...directive, + }); + } + + plan(nesting, loc, count) { + this[kEmitMessage]('test:plan', { + __proto__: null, + nesting, + count, + ...loc, + }); } getSkip(reason = undefined) { @@ -49,32 +70,57 @@ class TestsStream extends Readable { return { __proto__: null, todo: reason ?? true }; } - enqueue(nesting, file, name) { - this[kEmitMessage]('test:enqueue', { __proto__: null, nesting, file, name }); + enqueue(nesting, loc, name) { + this[kEmitMessage]('test:enqueue', { + __proto__: null, + nesting, + name, + ...loc, + }); } - dequeue(nesting, file, name) { - this[kEmitMessage]('test:dequeue', { __proto__: null, nesting, file, name }); + dequeue(nesting, loc, name) { + this[kEmitMessage]('test:dequeue', { + __proto__: null, + nesting, + name, + ...loc, + }); } - start(nesting, file, name) { - this[kEmitMessage]('test:start', { __proto__: null, nesting, file, name }); + start(nesting, loc, name) { + this[kEmitMessage]('test:start', { + __proto__: null, + nesting, + name, + ...loc, + }); } - diagnostic(nesting, file, message) { - this[kEmitMessage]('test:diagnostic', { __proto__: null, nesting, file, message }); + diagnostic(nesting, loc, message) { + this[kEmitMessage]('test:diagnostic', { + __proto__: null, + nesting, + message, + ...loc, + }); } - stderr(file, message) { - this[kEmitMessage]('test:stderr', { __proto__: null, file, message }); + stderr(loc, message) { + this[kEmitMessage]('test:stderr', { __proto__: null, message, ...loc }); } - stdout(file, message) { - this[kEmitMessage]('test:stdout', { __proto__: null, file, message }); + stdout(loc, message) { + this[kEmitMessage]('test:stdout', { __proto__: null, message, ...loc }); } - coverage(nesting, file, summary) { - this[kEmitMessage]('test:coverage', { __proto__: null, nesting, file, summary }); + coverage(nesting, loc, summary) { + this[kEmitMessage]('test:coverage', { + __proto__: null, + nesting, + summary, + ...loc, + }); } end() { diff --git a/src/node_util.cc b/src/node_util.cc index dc2c730fdf042c..e8cb28969621fb 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -34,6 +34,8 @@ using 
v8::PropertyFilter; using v8::Proxy; using v8::SKIP_STRINGS; using v8::SKIP_SYMBOLS; +using v8::StackFrame; +using v8::StackTrace; using v8::String; using v8::Uint32; using v8::Value; @@ -140,6 +142,24 @@ static void GetProxyDetails(const FunctionCallbackInfo<Value>& args) { } } +static void GetCallerLocation(const FunctionCallbackInfo<Value>& args) { + Isolate* isolate = args.GetIsolate(); + Local<StackTrace> trace = StackTrace::CurrentStackTrace(isolate, 2); + + // This function is frame zero. The caller is frame one. If there aren't two + // stack frames, return undefined. + if (trace->GetFrameCount() != 2) { + return; + } + + Local<StackFrame> frame = trace->GetFrame(isolate, 1); + Local<Value> ret[] = {Integer::New(isolate, frame->GetLineNumber()), + Integer::New(isolate, frame->GetColumn()), + frame->GetScriptNameOrSourceURL()}; + + args.GetReturnValue().Set(Array::New(args.GetIsolate(), ret, arraysize(ret))); +} + static void IsArrayBufferDetached(const FunctionCallbackInfo<Value>& args) { if (args[0]->IsArrayBuffer()) { auto buffer = args[0].As<ArrayBuffer>(); @@ -363,6 +383,7 @@ static void ToUSVString(const FunctionCallbackInfo<Value>& args) { void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(GetPromiseDetails); registry->Register(GetProxyDetails); + registry->Register(GetCallerLocation); registry->Register(IsArrayBufferDetached); registry->Register(PreviewEntries); registry->Register(GetOwnNonIndexProperties); @@ -450,6 +471,8 @@ void Initialize(Local<Object> target, SetMethodNoSideEffect( context, target, "getPromiseDetails", GetPromiseDetails); SetMethodNoSideEffect(context, target, "getProxyDetails", GetProxyDetails); + SetMethodNoSideEffect( + context, target, "getCallerLocation", GetCallerLocation); SetMethodNoSideEffect( context, target, "isArrayBufferDetached", IsArrayBufferDetached); SetMethodNoSideEffect(context, target, "previewEntries", PreviewEntries); diff --git a/test/fixtures/test-runner/output/abort.snapshot b/test/fixtures/test-runner/output/abort.snapshot index f756377172da65..ceca09da14bfb1 100644 --- a/test/fixtures/test-runner/output/abort.snapshot +++ b/test/fixtures/test-runner/output/abort.snapshot @@ -24,6 +24,7 @@ TAP version 13 not ok 5 - not ok 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort.js:(LINE):7' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -32,6 +33,7 @@ TAP version 13 not ok 6 - not ok 2 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort.js:(LINE):7' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -40,6 +42,7 @@ TAP version 13 not ok 7 - not ok 3 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort.js:(LINE):7' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -60,6 +63,7 @@ TAP version 13 not ok 8 - not ok 4 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort.js:(LINE):7' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -80,6 +84,7 @@ TAP version 13 not ok 9 - not ok 5 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort.js:(LINE):7' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -100,6 +105,7 @@ TAP version 13 not ok 1 - promise timeout signal --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort.js:(LINE):1' failureType: 'testAborted' error: 'The operation was aborted due to timeout' code: 23 @@ -114,6 +120,7 @@
not ok 1 - promise timeout signal not ok 2 - promise abort signal --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort.js:(LINE):1' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -154,6 +161,7 @@ not ok 2 - promise abort signal not ok 5 - not ok 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort.js:(LINE):5' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -162,6 +170,7 @@ not ok 2 - promise abort signal not ok 6 - not ok 2 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort.js:(LINE):5' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -170,6 +179,7 @@ not ok 2 - promise abort signal not ok 7 - not ok 3 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort.js:(LINE):5' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -190,6 +200,7 @@ not ok 2 - promise abort signal not ok 8 - not ok 4 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort.js:(LINE):5' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -210,6 +221,7 @@ not ok 2 - promise abort signal not ok 9 - not ok 5 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort.js:(LINE):5' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -230,6 +242,7 @@ not ok 2 - promise abort signal not ok 3 - callback timeout signal --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort.js:(LINE):1' failureType: 'testAborted' error: 'The operation was aborted due to timeout' code: 23 @@ -244,6 +257,7 @@ not ok 3 - callback timeout signal not ok 4 - callback abort signal --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort.js:(LINE):1' failureType: 'testAborted' error: 'This operation was aborted' code: 20 diff --git a/test/fixtures/test-runner/output/abort_hooks.snapshot b/test/fixtures/test-runner/output/abort_hooks.snapshot index a1b5ddcd5f1908..d0b567bb6a22cd 100644 --- a/test/fixtures/test-runner/output/abort_hooks.snapshot +++ b/test/fixtures/test-runner/output/abort_hooks.snapshot @@ -12,6 +12,7 @@ TAP version 13 not ok 1 - test 1 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):3' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -20,6 +21,7 @@ TAP version 13 not ok 2 - test 2 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):3' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -29,6 +31,7 @@ not ok 1 - 1 before describe --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):1' failureType: 'hookFailed' error: 'This operation was aborted' code: 20 @@ -61,6 +64,7 @@ not ok 2 - 2 after describe --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):1' failureType: 'hookFailed' error: 'This operation was aborted' code: 20 @@ -82,6 +86,7 @@ not ok 2 - 2 after describe not ok 1 - test 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):3' failureType: 'hookFailed' error: 'This operation was aborted' code: 20 @@ -102,6 +107,7 @@ not ok 2 - 2 after describe not ok 2 - test 2 --- 
duration_ms: * + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):3' failureType: 'hookFailed' error: 'This operation was aborted' code: 20 @@ -123,6 +129,7 @@ not ok 3 - 3 beforeEach describe --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -132,6 +139,7 @@ not ok 3 - 3 beforeEach describe not ok 1 - test 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):3' failureType: 'hookFailed' error: 'This operation was aborted' code: 20 @@ -152,6 +160,7 @@ not ok 3 - 3 beforeEach describe not ok 2 - test 2 --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):3' failureType: 'hookFailed' error: 'This operation was aborted' code: 20 @@ -173,6 +182,7 @@ not ok 4 - 4 afterEach describe --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' diff --git a/test/fixtures/test-runner/output/abort_suite.snapshot b/test/fixtures/test-runner/output/abort_suite.snapshot index e2abdadaf5a4b7..e7e8c4f4e2360f 100644 --- a/test/fixtures/test-runner/output/abort_suite.snapshot +++ b/test/fixtures/test-runner/output/abort_suite.snapshot @@ -24,6 +24,7 @@ TAP version 13 not ok 5 - not ok 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -32,6 +33,7 @@ TAP version 13 not ok 6 - not ok 2 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -40,6 +42,7 @@ TAP version 13 not ok 7 - not ok 3 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -60,6 +63,7 @@ TAP version 13 not ok 8 - not ok 4 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -80,6 +84,7 @@ TAP version 13 not ok 9 - not ok 5 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3' failureType: 'testAborted' error: 'This operation was aborted' code: 20 @@ -101,6 +106,7 @@ not ok 1 - describe timeout signal --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):1' failureType: 'testAborted' error: 'The operation was aborted due to timeout' code: 23 @@ -116,6 +122,7 @@ not ok 2 - describe abort signal --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):1' failureType: 'testAborted' error: 'This operation was aborted' code: 20 diff --git a/test/fixtures/test-runner/output/default_output.snapshot b/test/fixtures/test-runner/output/default_output.snapshot index dca844bb8402aa..b003f9299c4418 100644 --- a/test/fixtures/test-runner/output/default_output.snapshot +++ b/test/fixtures/test-runner/output/default_output.snapshot @@ -35,6 +35,7 @@ [31m✖ failing tests:[39m +* [31m✖ should fail [90m(*ms)[39m[39m Error: fail *[39m @@ -45,6 +46,7 @@ *[39m *[39m +* [31m✖ should fail [90m(*ms)[39m[39m Error: fail *[39m @@ -53,5 
+55,6 @@ *[39m *[39m +* [31m✖ should pass but parent fail [90m(*ms)[39m[39m [32m'test did not finish before its parent and was cancelled'[39m diff --git a/test/fixtures/test-runner/output/describe_it.js b/test/fixtures/test-runner/output/describe_it.js index 942b1c5317abab..ba6a1aed064614 100644 --- a/test/fixtures/test-runner/output/describe_it.js +++ b/test/fixtures/test-runner/output/describe_it.js @@ -196,15 +196,6 @@ it('test with a name and options provided', { skip: true }); // A test with only options and a function provided. it({ skip: true }, function functionAndOptions() {}); -// A test whose description needs to be escaped. -it('escaped description \\ # \\#\\'); - -// A test whose skip message needs to be escaped. -it('escaped skip message', { skip: '#skip' }); - -// A test whose todo message needs to be escaped. -it('escaped todo message', { todo: '#todo' }); - it('callback pass', (t, done) => { setImmediate(done); }); diff --git a/test/fixtures/test-runner/output/describe_it.snapshot b/test/fixtures/test-runner/output/describe_it.snapshot index 0d07851e2a1fa9..be345f11575c8d 100644 --- a/test/fixtures/test-runner/output/describe_it.snapshot +++ b/test/fixtures/test-runner/output/describe_it.snapshot @@ -13,6 +13,7 @@ ok 2 - sync pass todo with message # TODO this is a passing todo not ok 3 - sync todo # TODO --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):4' failureType: 'testCodeFailure' error: 'should not count as a failure' code: 'ERR_TEST_FAILURE' @@ -29,6 +30,7 @@ not ok 3 - sync todo # TODO not ok 4 - sync todo with message # TODO this is a failing todo --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'should not count as a failure' code: 'ERR_TEST_FAILURE' @@ -60,6 +62,7 @@ ok 7 - sync pass not ok 8 - sync throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from sync throw fail' code: 'ERR_TEST_FAILURE' @@ -91,6 +94,7 @@ ok 11 - mixing describe/it and test should work not ok 12 - async throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from async throw fail' code: 'ERR_TEST_FAILURE' @@ -107,6 +111,7 @@ not ok 12 - async throw fail not ok 13 - async skip fail # SKIP --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'callbackAndPromisePresent' error: 'passed a callback but also returned a Promise' code: 'ERR_TEST_FAILURE' @@ -115,6 +120,7 @@ not ok 13 - async skip fail # SKIP not ok 14 - async assertion fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: |- Expected values to be strictly equal: @@ -144,6 +150,7 @@ ok 15 - resolve pass not ok 16 - reject fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'rejected from reject fail' code: 'ERR_TEST_FAILURE' @@ -186,6 +193,7 @@ ok 21 - immediate resolve pass not ok 1 - +sync throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fail' code: 'ERR_TEST_FAILURE' @@ -211,6 +219,7 @@ not ok 22 - subtest sync throw fail --- duration_ms: * type: 'suite' + location: 
'/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' @@ -219,6 +228,7 @@ not ok 22 - subtest sync throw fail not ok 23 - sync throw non-error fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'Symbol(thrown symbol from sync throw non-error fail)' code: 'ERR_TEST_FAILURE' @@ -270,6 +280,7 @@ ok 27 - sync skip option with message # SKIP this is skipped not ok 28 - sync skip option is false fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'this should be executed' code: 'ERR_TEST_FAILURE' @@ -322,30 +333,16 @@ ok 36 - functionAndOptions # SKIP --- duration_ms: * ... -# Subtest: escaped description \\ \# \\\#\\ -ok 37 - escaped description \\ \# \\\#\\ - --- - duration_ms: * - ... -# Subtest: escaped skip message -ok 38 - escaped skip message # SKIP \#skip - --- - duration_ms: * - ... -# Subtest: escaped todo message -ok 39 - escaped todo message # TODO \#todo - --- - duration_ms: * - ... # Subtest: callback pass -ok 40 - callback pass +ok 37 - callback pass --- duration_ms: * ... # Subtest: callback fail -not ok 41 - callback fail +not ok 38 - callback fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'callback failure' code: 'ERR_TEST_FAILURE' @@ -354,32 +351,34 @@ not ok 41 - callback fail * ... # Subtest: sync t is this in test -ok 42 - sync t is this in test +ok 39 - sync t is this in test --- duration_ms: * ... # Subtest: async t is this in test -ok 43 - async t is this in test +ok 40 - async t is this in test --- duration_ms: * ... # Subtest: callback t is this in test -ok 44 - callback t is this in test +ok 41 - callback t is this in test --- duration_ms: * ... # Subtest: callback also returns a Promise -not ok 45 - callback also returns a Promise +not ok 42 - callback also returns a Promise --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'callbackAndPromisePresent' error: 'passed a callback but also returned a Promise' code: 'ERR_TEST_FAILURE' ... # Subtest: callback throw -not ok 46 - callback throw +not ok 43 - callback throw --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from callback throw' code: 'ERR_TEST_FAILURE' @@ -393,9 +392,10 @@ not ok 46 - callback throw * ... # Subtest: callback called twice -not ok 47 - callback called twice +not ok 44 - callback called twice --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'multipleCallbackInvocations' error: 'callback invoked multiple times' code: 'ERR_TEST_FAILURE' @@ -404,14 +404,15 @@ not ok 47 - callback called twice * ... # Subtest: callback called twice in different ticks -ok 48 - callback called twice in different ticks +ok 45 - callback called twice in different ticks --- duration_ms: * ... # Subtest: callback called twice in future tick -not ok 49 - callback called twice in future tick +not ok 46 - callback called twice in future tick --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'uncaughtException' error: 'callback invoked multiple times' code: 'ERR_TEST_FAILURE' @@ -419,9 +420,10 @@ not ok 49 - callback called twice in future tick * ... 
# Subtest: callback async throw -not ok 50 - callback async throw +not ok 47 - callback async throw --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'uncaughtException' error: 'thrown from callback async throw' code: 'ERR_TEST_FAILURE' @@ -430,22 +432,24 @@ not ok 50 - callback async throw * ... # Subtest: callback async throw after done -ok 51 - callback async throw after done +ok 48 - callback async throw after done --- duration_ms: * ... # Subtest: custom inspect symbol fail -not ok 52 - custom inspect symbol fail +not ok 49 - custom inspect symbol fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'customized' code: 'ERR_TEST_FAILURE' ... # Subtest: custom inspect symbol that throws fail -not ok 53 - custom inspect symbol that throws fail +not ok 50 - custom inspect symbol that throws fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: |- { @@ -459,6 +463,7 @@ not ok 53 - custom inspect symbol that throws fail not ok 1 - sync throw fails at first --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fails at first' code: 'ERR_TEST_FAILURE' @@ -478,6 +483,7 @@ not ok 53 - custom inspect symbol that throws fail not ok 2 - sync throw fails at second --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fails at second' code: 'ERR_TEST_FAILURE' @@ -494,10 +500,11 @@ not ok 53 - custom inspect symbol that throws fail async Promise.all (index 0) ... 1..2 -not ok 54 - subtest sync throw fails +not ok 51 - subtest sync throw fails --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -507,15 +514,17 @@ not ok 54 - subtest sync throw fails not ok 1 - should not run --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' ... 1..1 -not ok 55 - describe sync throw fails +not ok 52 - describe sync throw fails --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from describe' code: 'ERR_TEST_FAILURE' @@ -536,15 +545,17 @@ not ok 55 - describe sync throw fails not ok 1 - should not run --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' ... 
1..1 -not ok 56 - describe async throw fails +not ok 53 - describe async throw fails --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from describe' code: 'ERR_TEST_FAILURE' @@ -565,6 +576,7 @@ not ok 56 - describe async throw fails not ok 1 - timed out async test --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3' failureType: 'testTimeoutFailure' error: 'test timed out after 5ms' code: 'ERR_TEST_FAILURE' @@ -575,6 +587,7 @@ not ok 56 - describe async throw fails not ok 2 - timed out callback test --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3' failureType: 'testTimeoutFailure' error: 'test timed out after 5ms' code: 'ERR_TEST_FAILURE' @@ -590,10 +603,11 @@ not ok 56 - describe async throw fails duration_ms: * ... 1..4 -not ok 57 - timeouts +not ok 54 - timeouts --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -608,6 +622,7 @@ not ok 57 - timeouts not ok 2 - rejected thenable --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3' failureType: 'testCodeFailure' error: 'custom error' code: 'ERR_TEST_FAILURE' @@ -616,19 +631,21 @@ not ok 57 - timeouts * ... 1..2 -not ok 58 - successful thenable +not ok 55 - successful thenable --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' ... # Subtest: rejected thenable -not ok 59 - rejected thenable +not ok 56 - rejected thenable --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):1' failureType: 'testCodeFailure' error: 'custom error' code: 'ERR_TEST_FAILURE' @@ -659,33 +676,34 @@ not ok 59 - rejected thenable type: 'suite' ... 1..3 -ok 60 - async describe function +ok 57 - async describe function --- duration_ms: * type: 'suite' ... # Subtest: invalid subtest fail -not ok 61 - invalid subtest fail +not ok 58 - invalid subtest fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):5' failureType: 'parentAlreadyFinished' error: 'test could not be started because its parent finished' code: 'ERR_TEST_FAILURE' stack: |- * ... -1..61 +1..58 # Warning: Test "unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "async unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "immediate throw - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from immediate throw fail" and would have caused the test to fail, but instead triggered an uncaughtException event. # Warning: Test "immediate reject - passes but warns" generated asynchronous activity after the test ended. 
This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "callback called twice in different ticks" generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. # Warning: Test "callback async throw after done" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. -# tests 70 +# tests 67 # suites 11 -# pass 32 +# pass 31 # fail 19 # cancelled 4 -# skipped 10 -# todo 5 +# skipped 9 +# todo 4 # duration_ms * diff --git a/test/fixtures/test-runner/output/dot_reporter.snapshot b/test/fixtures/test-runner/output/dot_reporter.snapshot index 5a74119b3887e5..7c6b0ff2356b77 100644 --- a/test/fixtures/test-runner/output/dot_reporter.snapshot +++ b/test/fixtures/test-runner/output/dot_reporter.snapshot @@ -1,5 +1,4 @@ ..XX...X..XXX.X..... XXX.....X..X...X.... -.........X...XXX.XX. -.....XXXXXXX...XXXXX - +.....X...XXX.XX..... +.XXXXXXX...XXXXX diff --git a/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot b/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot index 16693c1a8a964b..845aba58eddd32 100644 --- a/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot +++ b/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot @@ -8,6 +8,7 @@ ok 1 - this is a test not ok 2 - /test/fixtures/test-runner/output/global_after_should_fail_the_test.js --- duration_ms: * + location: '/test/fixtures/test-runner/output/global_after_should_fail_the_test.js:(LINE):1' failureType: 'hookFailed' error: 'this should fail the test' code: 'ERR_TEST_FAILURE' diff --git a/test/fixtures/test-runner/output/hooks.snapshot b/test/fixtures/test-runner/output/hooks.snapshot index 676e1c7a3287e3..5afe398ed3d0ea 100644 --- a/test/fixtures/test-runner/output/hooks.snapshot +++ b/test/fixtures/test-runner/output/hooks.snapshot @@ -38,6 +38,7 @@ ok 1 - describe hooks not ok 1 - 1 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -46,6 +47,7 @@ ok 1 - describe hooks not ok 2 - 2 --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -55,6 +57,7 @@ not ok 2 - before throws --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'hookFailed' error: 'before' code: 'ERR_TEST_FAILURE' @@ -85,6 +88,7 @@ not ok 3 - after throws --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'hookFailed' error: 'after' code: 'ERR_TEST_FAILURE' @@ -104,6 +108,7 @@ not ok 3 - after throws not ok 1 - 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'hookFailed' error: 'beforeEach' code: 'ERR_TEST_FAILURE' @@ -123,6 +128,7 @@ not ok 3 - after throws not ok 2 - 2 --- duration_ms: * + location: 
'/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'hookFailed' error: 'beforeEach' code: 'ERR_TEST_FAILURE' @@ -141,6 +147,7 @@ not ok 4 - beforeEach throws --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -150,6 +157,7 @@ not ok 4 - beforeEach throws not ok 1 - 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'hookFailed' error: 'afterEach' code: 'ERR_TEST_FAILURE' @@ -169,6 +177,7 @@ not ok 4 - beforeEach throws not ok 2 - 2 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'hookFailed' error: 'afterEach' code: 'ERR_TEST_FAILURE' @@ -188,6 +197,7 @@ not ok 5 - afterEach throws --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -197,6 +207,7 @@ not ok 5 - afterEach throws not ok 1 - 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'testCodeFailure' error: 'test' code: 'ERR_TEST_FAILURE' @@ -222,6 +233,7 @@ not ok 6 - afterEach when test fails --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' @@ -231,6 +243,7 @@ not ok 6 - afterEach when test fails not ok 1 - 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'testCodeFailure' error: 'test' code: 'ERR_TEST_FAILURE' @@ -250,6 +263,7 @@ not ok 6 - afterEach when test fails not ok 2 - 2 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3' failureType: 'hookFailed' error: 'afterEach' code: 'ERR_TEST_FAILURE' @@ -269,6 +283,7 @@ not ok 7 - afterEach throws and test fails --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -310,6 +325,7 @@ ok 8 - test hooks not ok 1 - 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'hookFailed' error: 'before' code: 'ERR_TEST_FAILURE' @@ -329,6 +345,7 @@ ok 8 - test hooks not ok 2 - 2 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'hookFailed' error: 'before' code: 'ERR_TEST_FAILURE' @@ -348,6 +365,7 @@ ok 8 - test hooks not ok 9 - t.before throws --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -357,6 +375,7 @@ not ok 9 - t.before throws not ok 1 - 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'hookFailed' error: 'beforeEach' code: 'ERR_TEST_FAILURE' @@ -376,6 +395,7 @@ not ok 9 - t.before throws not ok 2 - 2 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'hookFailed' error: 'beforeEach' code: 'ERR_TEST_FAILURE' @@ -395,6 +415,7 @@ not ok 9 - t.before throws not ok 10 - t.beforeEach throws --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -404,6 +425,7 @@ not ok 10 - t.beforeEach throws not ok 1 - 1 --- 
duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'hookFailed' error: 'afterEach' code: 'ERR_TEST_FAILURE' @@ -423,6 +445,7 @@ not ok 10 - t.beforeEach throws not ok 2 - 2 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'hookFailed' error: 'afterEach' code: 'ERR_TEST_FAILURE' @@ -442,6 +465,7 @@ not ok 10 - t.beforeEach throws not ok 11 - t.afterEach throws --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -451,6 +475,7 @@ not ok 11 - t.afterEach throws not ok 1 - 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'testCodeFailure' error: 'test' code: 'ERR_TEST_FAILURE' @@ -474,6 +499,7 @@ not ok 11 - t.afterEach throws not ok 12 - afterEach when test fails --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' @@ -483,6 +509,7 @@ not ok 12 - afterEach when test fails not ok 1 - 1 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'testCodeFailure' error: 'test' code: 'ERR_TEST_FAILURE' @@ -501,6 +528,7 @@ not ok 12 - afterEach when test fails not ok 2 - 2 --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):11' failureType: 'hookFailed' error: 'afterEach' code: 'ERR_TEST_FAILURE' @@ -520,6 +548,7 @@ not ok 12 - afterEach when test fails not ok 13 - afterEach throws and test fails --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' @@ -528,6 +557,7 @@ not ok 13 - afterEach throws and test fails not ok 14 - t.after() is called if test body throws --- duration_ms: * + location: '/test/fixtures/test-runner/output/hooks.js:(LINE):1' failureType: 'testCodeFailure' error: 'bye' code: 'ERR_TEST_FAILURE' diff --git a/test/fixtures/test-runner/output/no_refs.snapshot b/test/fixtures/test-runner/output/no_refs.snapshot index 49c51af41caec3..5756f5ebf87a0a 100644 --- a/test/fixtures/test-runner/output/no_refs.snapshot +++ b/test/fixtures/test-runner/output/no_refs.snapshot @@ -4,6 +4,7 @@ TAP version 13 not ok 1 - +does not keep event loop alive --- duration_ms: * + location: '/test/fixtures/test-runner/output/no_refs.js:(LINE):11' failureType: 'cancelledByParent' error: 'Promise resolution is still pending but the event loop has already resolved' code: 'ERR_TEST_FAILURE' @@ -14,6 +15,7 @@ TAP version 13 not ok 1 - does not keep event loop alive --- duration_ms: * + location: '/test/fixtures/test-runner/output/no_refs.js:(LINE):1' failureType: 'cancelledByParent' error: 'Promise resolution is still pending but the event loop has already resolved' code: 'ERR_TEST_FAILURE' diff --git a/test/fixtures/test-runner/output/output.js b/test/fixtures/test-runner/output/output.js index 47d99d1c8d4984..f37d3495030950 100644 --- a/test/fixtures/test-runner/output/output.js +++ b/test/fixtures/test-runner/output/output.js @@ -212,20 +212,6 @@ test('test with a name and options provided', { skip: true }); // A test with only options and a function provided. test({ skip: true }, function functionAndOptions() {}); -// A test whose description needs to be escaped. 
-test('escaped description \\ # \\#\\ \n \t \f \v \b \r'); - -// A test whose skip message needs to be escaped. -test('escaped skip message', { skip: '#skip' }); - -// A test whose todo message needs to be escaped. -test('escaped todo message', { todo: '#todo' }); - -// A test with a diagnostic message that needs to be escaped. -test('escaped diagnostic', (t) => { - t.diagnostic('#diagnostic'); -}); - test('callback pass', (t, done) => { setImmediate(done); }); diff --git a/test/fixtures/test-runner/output/output.snapshot b/test/fixtures/test-runner/output/output.snapshot index db19d8ca549a38..18f030dab361ab 100644 --- a/test/fixtures/test-runner/output/output.snapshot +++ b/test/fixtures/test-runner/output/output.snapshot @@ -13,6 +13,7 @@ ok 2 - sync pass todo with message # TODO this is a passing todo not ok 3 - sync fail todo # TODO --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from sync fail todo' code: 'ERR_TEST_FAILURE' @@ -29,6 +30,7 @@ not ok 3 - sync fail todo # TODO not ok 4 - sync fail todo with message # TODO this is a failing todo --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from sync fail todo with message' code: 'ERR_TEST_FAILURE' @@ -61,6 +63,7 @@ ok 7 - sync pass not ok 8 - sync throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from sync throw fail' code: 'ERR_TEST_FAILURE' @@ -87,6 +90,7 @@ ok 10 - async pass not ok 11 - async throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from async throw fail' code: 'ERR_TEST_FAILURE' @@ -103,6 +107,7 @@ not ok 11 - async throw fail not ok 12 - async skip fail # SKIP --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from async throw fail' code: 'ERR_TEST_FAILURE' @@ -119,6 +124,7 @@ not ok 12 - async skip fail # SKIP not ok 13 - async assertion fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: |- Expected values to be strictly equal: @@ -148,6 +154,7 @@ ok 14 - resolve pass not ok 15 - reject fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'rejected from reject fail' code: 'ERR_TEST_FAILURE' @@ -190,6 +197,7 @@ ok 20 - immediate resolve pass not ok 1 - +sync throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):11' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fail' code: 'ERR_TEST_FAILURE' @@ -210,6 +218,7 @@ ok 20 - immediate resolve pass not ok 21 - subtest sync throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' @@ -218,6 +227,7 @@ not ok 21 - subtest sync throw fail not ok 22 - sync throw non-error fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'Symbol(thrown symbol from sync throw non-error fail)' code: 'ERR_TEST_FAILURE' @@ -253,6 +263,7 @@ ok 23 - level 0a not ok 1 - +long running --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):5' 
failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -272,6 +283,7 @@ ok 23 - level 0a not ok 24 - top level --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' @@ -295,6 +307,7 @@ ok 27 - sync skip option with message # SKIP this is skipped not ok 28 - sync skip option is false fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'this should be executed' code: 'ERR_TEST_FAILURE' @@ -347,36 +360,16 @@ ok 36 - functionAndOptions # SKIP --- duration_ms: * ... -# Subtest: escaped description \\ \# \\\#\\ \\n \\t \\f \\v \\b \\r -ok 37 - escaped description \\ \# \\\#\\ \\n \\t \\f \\v \\b \\r - --- - duration_ms: * - ... -# Subtest: escaped skip message -ok 38 - escaped skip message # SKIP \#skip - --- - duration_ms: * - ... -# Subtest: escaped todo message -ok 39 - escaped todo message # TODO \#todo - --- - duration_ms: * - ... -# Subtest: escaped diagnostic -ok 40 - escaped diagnostic - --- - duration_ms: * - ... -# \#diagnostic # Subtest: callback pass -ok 41 - callback pass +ok 37 - callback pass --- duration_ms: * ... # Subtest: callback fail -not ok 42 - callback fail +not ok 38 - callback fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'callback failure' code: 'ERR_TEST_FAILURE' @@ -385,32 +378,34 @@ not ok 42 - callback fail * ... # Subtest: sync t is this in test -ok 43 - sync t is this in test +ok 39 - sync t is this in test --- duration_ms: * ... # Subtest: async t is this in test -ok 44 - async t is this in test +ok 40 - async t is this in test --- duration_ms: * ... # Subtest: callback t is this in test -ok 45 - callback t is this in test +ok 41 - callback t is this in test --- duration_ms: * ... # Subtest: callback also returns a Promise -not ok 46 - callback also returns a Promise +not ok 42 - callback also returns a Promise --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'callbackAndPromisePresent' error: 'passed a callback but also returned a Promise' code: 'ERR_TEST_FAILURE' ... # Subtest: callback throw -not ok 47 - callback throw +not ok 43 - callback throw --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from callback throw' code: 'ERR_TEST_FAILURE' @@ -424,9 +419,10 @@ not ok 47 - callback throw * ... # Subtest: callback called twice -not ok 48 - callback called twice +not ok 44 - callback called twice --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'multipleCallbackInvocations' error: 'callback invoked multiple times' code: 'ERR_TEST_FAILURE' @@ -435,14 +431,15 @@ not ok 48 - callback called twice * ... # Subtest: callback called twice in different ticks -ok 49 - callback called twice in different ticks +ok 45 - callback called twice in different ticks --- duration_ms: * ... 
# Subtest: callback called twice in future tick -not ok 50 - callback called twice in future tick +not ok 46 - callback called twice in future tick --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'uncaughtException' error: 'callback invoked multiple times' code: 'ERR_TEST_FAILURE' @@ -450,9 +447,10 @@ not ok 50 - callback called twice in future tick * ... # Subtest: callback async throw -not ok 51 - callback async throw +not ok 47 - callback async throw --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'uncaughtException' error: 'thrown from callback async throw' code: 'ERR_TEST_FAILURE' @@ -461,7 +459,7 @@ not ok 51 - callback async throw * ... # Subtest: callback async throw after done -ok 52 - callback async throw after done +ok 48 - callback async throw after done --- duration_ms: * ... @@ -489,23 +487,25 @@ ok 52 - callback async throw after done duration_ms: * ... 1..4 -ok 53 - only is set but not in only mode +ok 49 - only is set but not in only mode --- duration_ms: * ... # 'only' and 'runOnly' require the --test-only command-line option. # Subtest: custom inspect symbol fail -not ok 54 - custom inspect symbol fail +not ok 50 - custom inspect symbol fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'customized' code: 'ERR_TEST_FAILURE' ... # Subtest: custom inspect symbol that throws fail -not ok 55 - custom inspect symbol that throws fail +not ok 51 - custom inspect symbol that throws fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: |- { @@ -519,6 +519,7 @@ not ok 55 - custom inspect symbol that throws fail not ok 1 - sync throw fails at first --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):11' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fails at first' code: 'ERR_TEST_FAILURE' @@ -538,6 +539,7 @@ not ok 55 - custom inspect symbol that throws fail not ok 2 - sync throw fails at second --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):11' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fails at second' code: 'ERR_TEST_FAILURE' @@ -554,56 +556,61 @@ not ok 55 - custom inspect symbol that throws fail * ... 1..2 -not ok 56 - subtest sync throw fails +not ok 52 - subtest sync throw fails --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' ... # Subtest: timed out async test -not ok 57 - timed out async test +not ok 53 - timed out async test --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testTimeoutFailure' error: 'test timed out after 5ms' code: 'ERR_TEST_FAILURE' ... # Subtest: timed out callback test -not ok 58 - timed out callback test +not ok 54 - timed out callback test --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testTimeoutFailure' error: 'test timed out after 5ms' code: 'ERR_TEST_FAILURE' ... # Subtest: large timeout async test is ok -ok 59 - large timeout async test is ok +ok 55 - large timeout async test is ok --- duration_ms: * ... # Subtest: large timeout callback test is ok -ok 60 - large timeout callback test is ok +ok 56 - large timeout callback test is ok --- duration_ms: * ... 
# Subtest: successful thenable -ok 61 - successful thenable +ok 57 - successful thenable --- duration_ms: * ... # Subtest: rejected thenable -not ok 62 - rejected thenable +not ok 58 - rejected thenable --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'custom error' code: 'ERR_TEST_FAILURE' ... # Subtest: unfinished test with uncaughtException -not ok 63 - unfinished test with uncaughtException +not ok 59 - unfinished test with uncaughtException --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'uncaughtException' error: 'foo' code: 'ERR_TEST_FAILURE' @@ -613,9 +620,10 @@ not ok 63 - unfinished test with uncaughtException * ... # Subtest: unfinished test with unhandledRejection -not ok 64 - unfinished test with unhandledRejection +not ok 60 - unfinished test with unhandledRejection --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'unhandledRejection' error: 'bar' code: 'ERR_TEST_FAILURE' @@ -625,9 +633,10 @@ not ok 64 - unfinished test with unhandledRejection * ... # Subtest: assertion errors display actual and expected properly -not ok 65 - assertion errors display actual and expected properly +not ok 61 - assertion errors display actual and expected properly --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: |- Expected values to be loosely deep-equal: @@ -656,16 +665,17 @@ not ok 65 - assertion errors display actual and expected properly * ... # Subtest: invalid subtest fail -not ok 66 - invalid subtest fail +not ok 62 - invalid subtest fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):7' failureType: 'parentAlreadyFinished' error: 'test could not be started because its parent finished' code: 'ERR_TEST_FAILURE' stack: |- * ... -1..66 +1..62 # Warning: Test "unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "async unhandled rejection - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from async unhandled rejection fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: A resource generated asynchronous activity after the test ended. This activity created the error "Error: uncaught from outside of a test" which triggered an uncaughtException event, caught by the test runner. @@ -673,11 +683,11 @@ not ok 66 - invalid subtest fail # Warning: Test "immediate reject - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. # Warning: Test "callback called twice in different ticks" generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. # Warning: Test "callback async throw after done" generated asynchronous activity after the test ended. 
This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. -# tests 80 +# tests 76 # suites 0 -# pass 37 +# pass 35 # fail 25 # cancelled 3 -# skipped 10 -# todo 5 +# skipped 9 +# todo 4 # duration_ms * diff --git a/test/fixtures/test-runner/output/output_cli.snapshot b/test/fixtures/test-runner/output/output_cli.snapshot index fe192625e1f8b6..3cef8f29b253b9 100644 --- a/test/fixtures/test-runner/output/output_cli.snapshot +++ b/test/fixtures/test-runner/output/output_cli.snapshot @@ -13,6 +13,7 @@ ok 2 - sync pass todo with message # TODO this is a passing todo not ok 3 - sync fail todo # TODO --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from sync fail todo' code: 'ERR_TEST_FAILURE' @@ -29,6 +30,7 @@ not ok 3 - sync fail todo # TODO not ok 4 - sync fail todo with message # TODO this is a failing todo --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from sync fail todo with message' code: 'ERR_TEST_FAILURE' @@ -61,6 +63,7 @@ ok 7 - sync pass not ok 8 - sync throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from sync throw fail' code: 'ERR_TEST_FAILURE' @@ -87,6 +90,7 @@ ok 10 - async pass not ok 11 - async throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from async throw fail' code: 'ERR_TEST_FAILURE' @@ -103,6 +107,7 @@ not ok 11 - async throw fail not ok 12 - async skip fail # SKIP --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from async throw fail' code: 'ERR_TEST_FAILURE' @@ -119,6 +124,7 @@ not ok 12 - async skip fail # SKIP not ok 13 - async assertion fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: |- Expected values to be strictly equal: @@ -148,6 +154,7 @@ ok 14 - resolve pass not ok 15 - reject fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'rejected from reject fail' code: 'ERR_TEST_FAILURE' @@ -190,6 +197,7 @@ ok 20 - immediate resolve pass not ok 1 - +sync throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):11' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fail' code: 'ERR_TEST_FAILURE' @@ -210,6 +218,7 @@ ok 20 - immediate resolve pass not ok 21 - subtest sync throw fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' @@ -218,6 +227,7 @@ not ok 21 - subtest sync throw fail not ok 22 - sync throw non-error fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'Symbol(thrown symbol from sync throw non-error fail)' code: 'ERR_TEST_FAILURE' @@ -253,6 +263,7 @@ ok 23 - level 0a not ok 1 - +long running --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):5' failureType: 'cancelledByParent' error: 'test did not finish before its parent and was cancelled' code: 'ERR_TEST_FAILURE' @@ -272,6 
+283,7 @@ ok 23 - level 0a not ok 24 - top level --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' @@ -295,6 +307,7 @@ ok 27 - sync skip option with message # SKIP this is skipped not ok 28 - sync skip option is false fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'this should be executed' code: 'ERR_TEST_FAILURE' @@ -347,36 +360,16 @@ ok 36 - functionAndOptions # SKIP --- duration_ms: * ... -# Subtest: escaped description \\ \# \\\#\\ \\n \\t \\f \\v \\b \\r -ok 37 - escaped description \\ \# \\\#\\ \\n \\t \\f \\v \\b \\r - --- - duration_ms: * - ... -# Subtest: escaped skip message -ok 38 - escaped skip message # SKIP \#skip - --- - duration_ms: * - ... -# Subtest: escaped todo message -ok 39 - escaped todo message # TODO \#todo - --- - duration_ms: * - ... -# Subtest: escaped diagnostic -ok 40 - escaped diagnostic - --- - duration_ms: * - ... -# \#diagnostic # Subtest: callback pass -ok 41 - callback pass +ok 37 - callback pass --- duration_ms: * ... # Subtest: callback fail -not ok 42 - callback fail +not ok 38 - callback fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'callback failure' code: 'ERR_TEST_FAILURE' @@ -385,32 +378,34 @@ not ok 42 - callback fail * ... # Subtest: sync t is this in test -ok 43 - sync t is this in test +ok 39 - sync t is this in test --- duration_ms: * ... # Subtest: async t is this in test -ok 44 - async t is this in test +ok 40 - async t is this in test --- duration_ms: * ... # Subtest: callback t is this in test -ok 45 - callback t is this in test +ok 41 - callback t is this in test --- duration_ms: * ... # Subtest: callback also returns a Promise -not ok 46 - callback also returns a Promise +not ok 42 - callback also returns a Promise --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'callbackAndPromisePresent' error: 'passed a callback but also returned a Promise' code: 'ERR_TEST_FAILURE' ... # Subtest: callback throw -not ok 47 - callback throw +not ok 43 - callback throw --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'thrown from callback throw' code: 'ERR_TEST_FAILURE' @@ -424,9 +419,10 @@ not ok 47 - callback throw * ... # Subtest: callback called twice -not ok 48 - callback called twice +not ok 44 - callback called twice --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'multipleCallbackInvocations' error: 'callback invoked multiple times' code: 'ERR_TEST_FAILURE' @@ -435,14 +431,15 @@ not ok 48 - callback called twice * ... # Subtest: callback called twice in different ticks -ok 49 - callback called twice in different ticks +ok 45 - callback called twice in different ticks --- duration_ms: * ... # Subtest: callback called twice in future tick -not ok 50 - callback called twice in future tick +not ok 46 - callback called twice in future tick --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'uncaughtException' error: 'callback invoked multiple times' code: 'ERR_TEST_FAILURE' @@ -450,9 +447,10 @@ not ok 50 - callback called twice in future tick * ... 
# Subtest: callback async throw -not ok 51 - callback async throw +not ok 47 - callback async throw --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'uncaughtException' error: 'thrown from callback async throw' code: 'ERR_TEST_FAILURE' @@ -461,7 +459,7 @@ not ok 51 - callback async throw * ... # Subtest: callback async throw after done -ok 52 - callback async throw after done +ok 48 - callback async throw after done --- duration_ms: * ... @@ -489,23 +487,25 @@ ok 52 - callback async throw after done duration_ms: * ... 1..4 -ok 53 - only is set but not in only mode +ok 49 - only is set but not in only mode --- duration_ms: * ... # 'only' and 'runOnly' require the --test-only command-line option. # Subtest: custom inspect symbol fail -not ok 54 - custom inspect symbol fail +not ok 50 - custom inspect symbol fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'customized' code: 'ERR_TEST_FAILURE' ... # Subtest: custom inspect symbol that throws fail -not ok 55 - custom inspect symbol that throws fail +not ok 51 - custom inspect symbol that throws fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: |- { @@ -519,6 +519,7 @@ not ok 55 - custom inspect symbol that throws fail not ok 1 - sync throw fails at first --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):11' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fails at first' code: 'ERR_TEST_FAILURE' @@ -538,6 +539,7 @@ not ok 55 - custom inspect symbol that throws fail not ok 2 - sync throw fails at second --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):11' failureType: 'testCodeFailure' error: 'thrown from subtest sync throw fails at second' code: 'ERR_TEST_FAILURE' @@ -554,56 +556,61 @@ not ok 55 - custom inspect symbol that throws fail * ... 1..2 -not ok 56 - subtest sync throw fails +not ok 52 - subtest sync throw fails --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'subtestsFailed' error: '2 subtests failed' code: 'ERR_TEST_FAILURE' ... # Subtest: timed out async test -not ok 57 - timed out async test +not ok 53 - timed out async test --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testTimeoutFailure' error: 'test timed out after 5ms' code: 'ERR_TEST_FAILURE' ... # Subtest: timed out callback test -not ok 58 - timed out callback test +not ok 54 - timed out callback test --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testTimeoutFailure' error: 'test timed out after 5ms' code: 'ERR_TEST_FAILURE' ... # Subtest: large timeout async test is ok -ok 59 - large timeout async test is ok +ok 55 - large timeout async test is ok --- duration_ms: * ... # Subtest: large timeout callback test is ok -ok 60 - large timeout callback test is ok +ok 56 - large timeout callback test is ok --- duration_ms: * ... # Subtest: successful thenable -ok 61 - successful thenable +ok 57 - successful thenable --- duration_ms: * ... # Subtest: rejected thenable -not ok 62 - rejected thenable +not ok 58 - rejected thenable --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: 'custom error' code: 'ERR_TEST_FAILURE' ... 
# Subtest: unfinished test with uncaughtException -not ok 63 - unfinished test with uncaughtException +not ok 59 - unfinished test with uncaughtException --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'uncaughtException' error: 'foo' code: 'ERR_TEST_FAILURE' @@ -613,9 +620,10 @@ not ok 63 - unfinished test with uncaughtException * ... # Subtest: unfinished test with unhandledRejection -not ok 64 - unfinished test with unhandledRejection +not ok 60 - unfinished test with unhandledRejection --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'unhandledRejection' error: 'bar' code: 'ERR_TEST_FAILURE' @@ -625,9 +633,10 @@ not ok 64 - unfinished test with unhandledRejection * ... # Subtest: assertion errors display actual and expected properly -not ok 65 - assertion errors display actual and expected properly +not ok 61 - assertion errors display actual and expected properly --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):1' failureType: 'testCodeFailure' error: |- Expected values to be loosely deep-equal: @@ -656,9 +665,10 @@ not ok 65 - assertion errors display actual and expected properly * ... # Subtest: invalid subtest fail -not ok 66 - invalid subtest fail +not ok 62 - invalid subtest fail --- duration_ms: * + location: '/test/fixtures/test-runner/output/output.js:(LINE):7' failureType: 'parentAlreadyFinished' error: 'test could not be started because its parent finished' code: 'ERR_TEST_FAILURE' @@ -673,16 +683,16 @@ not ok 66 - invalid subtest fail # Warning: Test "callback called twice in different ticks" generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. # Warning: Test "callback async throw after done" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. # Subtest: last test -ok 67 - last test +ok 63 - last test --- duration_ms: * ... -1..67 -# tests 81 +1..63 +# tests 77 # suites 0 -# pass 38 +# pass 36 # fail 25 # cancelled 3 -# skipped 10 -# todo 5 +# skipped 9 +# todo 4 # duration_ms * diff --git a/test/fixtures/test-runner/output/spec_reporter.snapshot b/test/fixtures/test-runner/output/spec_reporter.snapshot index 13f3618d38c28d..5dc05d5b43c12d 100644 --- a/test/fixtures/test-runner/output/spec_reporter.snapshot +++ b/test/fixtures/test-runner/output/spec_reporter.snapshot @@ -149,12 +149,6 @@ (*ms) # SKIP test with a name and options provided (*ms) # SKIP functionAndOptions (*ms) # SKIP - escaped description \ # \#\ -  (*ms) - escaped skip message (*ms) # #skip - escaped todo message (*ms) # #todo - escaped diagnostic (*ms) - #diagnostic callback pass (*ms) callback fail (*ms) Error: callback failure @@ -296,17 +290,18 @@ Warning: Test "immediate reject - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. Warning: Test "callback called twice in different ticks" generated asynchronous activity after the test ended. 
This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. Warning: Test "callback async throw after done" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. - tests 80 + tests 76 suites 0 - pass 37 + pass 35 fail 25 cancelled 3 - skipped 10 - todo 5 + skipped 9 + todo 4 duration_ms * failing tests: +* sync fail todo (*ms) # TODO Error: thrown from sync fail todo * @@ -317,6 +312,7 @@ * * +* sync fail todo with message (*ms) # this is a failing todo Error: thrown from sync fail todo with message * @@ -327,6 +323,7 @@ * * +* sync throw fail (*ms) Error: thrown from sync throw fail * @@ -337,6 +334,7 @@ * * +* async throw fail (*ms) Error: thrown from async throw fail * @@ -347,6 +345,7 @@ * * +* async skip fail (*ms) # SKIP Error: thrown from async throw fail * @@ -357,6 +356,7 @@ * * +* async assertion fail (*ms) AssertionError [ERR_ASSERTION]: Expected values to be strictly equal: @@ -376,6 +376,7 @@ operator: 'strictEqual' } +* reject fail (*ms) Error: rejected from reject fail * @@ -386,6 +387,7 @@ * * +* +sync throw fail (*ms) Error: thrown from subtest sync throw fail * @@ -399,12 +401,15 @@ * * +* sync throw non-error fail (*ms) Symbol(thrown symbol from sync throw non-error fail) +* +long running (*ms) 'test did not finish before its parent and was cancelled' +* sync skip option is false fail (*ms) Error: this should be executed * @@ -415,14 +420,17 @@ * * +* callback fail (*ms) Error: callback failure * * +* callback also returns a Promise (*ms) 'passed a callback but also returned a Promise' +* callback throw (*ms) Error: thrown from callback throw * @@ -433,9 +441,11 @@ * * +* callback called twice (*ms) 'callback invoked multiple times' +* callback called twice in future tick (*ms) Error [ERR_TEST_FAILURE]: callback invoked multiple times * { @@ -444,17 +454,21 @@ code: 'ERR_TEST_FAILURE' } +* callback async throw (*ms) Error: thrown from callback async throw * * +* custom inspect symbol fail (*ms) customized +* custom inspect symbol that throws fail (*ms) { foo: 1, [Symbol(nodejs.util.inspect.custom)]: [Function: [nodejs.util.inspect.custom]] } +* sync throw fails at first (*ms) Error: thrown from subtest sync throw fails at first * @@ -468,6 +482,7 @@ * * +* sync throw fails at second (*ms) Error: thrown from subtest sync throw fails at second * @@ -481,27 +496,33 @@ * * +* timed out async test (*ms) 'test timed out after *ms' +* timed out callback test (*ms) 'test timed out after *ms' +* rejected thenable (*ms) 'custom error' +* unfinished test with uncaughtException (*ms) Error: foo * * * +* unfinished test with unhandledRejection (*ms) Error: bar * * * +* assertion errors display actual and expected properly (*ms) AssertionError [ERR_ASSERTION]: Expected values to be loosely deep-equal: @@ -524,5 +545,6 @@ operator: 'deepEqual' } +* invalid subtest fail (*ms) 'test could not be started because its parent finished' diff --git a/test/fixtures/test-runner/output/spec_reporter_cli.snapshot b/test/fixtures/test-runner/output/spec_reporter_cli.snapshot index 22c9a9174574a1..25c22069c3b8e7 100644 --- a/test/fixtures/test-runner/output/spec_reporter_cli.snapshot +++ b/test/fixtures/test-runner/output/spec_reporter_cli.snapshot @@ -149,12 +149,6 @@ (*ms) # SKIP test with a name and 
options provided (*ms) # SKIP functionAndOptions (*ms) # SKIP - escaped description \ # \#\ -  (*ms) - escaped skip message (*ms) # #skip - escaped todo message (*ms) # #todo - escaped diagnostic (*ms) - #diagnostic callback pass (*ms) callback fail (*ms) Error: callback failure @@ -296,17 +290,18 @@ Warning: Test "immediate reject - passes but warns" generated asynchronous activity after the test ended. This activity created the error "Error: rejected from immediate reject fail" and would have caused the test to fail, but instead triggered an unhandledRejection event. Warning: Test "callback called twice in different ticks" generated asynchronous activity after the test ended. This activity created the error "Error [ERR_TEST_FAILURE]: callback invoked multiple times" and would have caused the test to fail, but instead triggered an uncaughtException event. Warning: Test "callback async throw after done" generated asynchronous activity after the test ended. This activity created the error "Error: thrown from callback async throw after done" and would have caused the test to fail, but instead triggered an uncaughtException event. - tests 80 + tests 76 suites 0 - pass 37 + pass 35 fail 25 cancelled 3 - skipped 10 - todo 5 + skipped 9 + todo 4 duration_ms * failing tests: +* sync fail todo (*ms) # TODO Error: thrown from sync fail todo * @@ -317,6 +312,7 @@ * * +* sync fail todo with message (*ms) # this is a failing todo Error: thrown from sync fail todo with message * @@ -327,6 +323,7 @@ * * +* sync throw fail (*ms) Error: thrown from sync throw fail * @@ -337,6 +334,7 @@ * * +* async throw fail (*ms) Error: thrown from async throw fail * @@ -347,6 +345,7 @@ * * +* async skip fail (*ms) # SKIP Error: thrown from async throw fail * @@ -357,6 +356,7 @@ * * +* async assertion fail (*ms) AssertionError [ERR_ASSERTION]: Expected values to be strictly equal: @@ -376,6 +376,7 @@ operator: 'strictEqual' } +* reject fail (*ms) Error: rejected from reject fail * @@ -386,6 +387,7 @@ * * +* +sync throw fail (*ms) Error: thrown from subtest sync throw fail * @@ -399,12 +401,15 @@ * * +* sync throw non-error fail (*ms) Symbol(thrown symbol from sync throw non-error fail) +* +long running (*ms) 'test did not finish before its parent and was cancelled' +* sync skip option is false fail (*ms) Error: this should be executed * @@ -415,14 +420,17 @@ * * +* callback fail (*ms) Error: callback failure * * +* callback also returns a Promise (*ms) 'passed a callback but also returned a Promise' +* callback throw (*ms) Error: thrown from callback throw * @@ -433,9 +441,11 @@ * * +* callback called twice (*ms) 'callback invoked multiple times' +* callback called twice in future tick (*ms) Error [ERR_TEST_FAILURE]: callback invoked multiple times * { @@ -444,17 +454,21 @@ code: 'ERR_TEST_FAILURE' } +* callback async throw (*ms) Error: thrown from callback async throw * * +* custom inspect symbol fail (*ms) customized +* custom inspect symbol that throws fail (*ms) { foo: 1 } +* sync throw fails at first (*ms) Error: thrown from subtest sync throw fails at first * @@ -468,6 +482,7 @@ * * +* sync throw fails at second (*ms) Error: thrown from subtest sync throw fails at second * @@ -481,27 +496,33 @@ * * +* timed out async test (*ms) 'test timed out after *ms' +* timed out callback test (*ms) 'test timed out after *ms' +* rejected thenable (*ms) 'custom error' +* unfinished test with uncaughtException (*ms) Error: foo * * * +* unfinished test with unhandledRejection (*ms) Error: bar * * * +* assertion errors display 
actual and expected properly (*ms) AssertionError [ERR_ASSERTION]: Expected values to be loosely deep-equal: @@ -524,5 +545,6 @@ operator: 'deepEqual' } +* invalid subtest fail (*ms) 'test could not be started because its parent finished' diff --git a/test/fixtures/test-runner/output/tap_escape.js b/test/fixtures/test-runner/output/tap_escape.js new file mode 100644 index 00000000000000..029ebea164e1ee --- /dev/null +++ b/test/fixtures/test-runner/output/tap_escape.js @@ -0,0 +1,19 @@ +'use strict'; +require('../../../common'); +const { test } = require('node:test'); + +// Do not include any failing tests in this file. + +// A test whose description needs to be escaped. +test('escaped description \\ # \\#\\ \n \t \f \v \b \r'); + +// A test whose skip message needs to be escaped. +test('escaped skip message', { skip: '#skip' }); + +// A test whose todo message needs to be escaped. +test('escaped todo message', { todo: '#todo' }); + +// A test with a diagnostic message that needs to be escaped. +test('escaped diagnostic', (t) => { + t.diagnostic('#diagnostic'); +}); diff --git a/test/fixtures/test-runner/output/tap_escape.snapshot b/test/fixtures/test-runner/output/tap_escape.snapshot new file mode 100644 index 00000000000000..722cd0ca427ec7 --- /dev/null +++ b/test/fixtures/test-runner/output/tap_escape.snapshot @@ -0,0 +1,31 @@ +TAP version 13 +# Subtest: escaped description \\ \# \\\#\\ \\n \\t \\f \\v \\b \\r +ok 1 - escaped description \\ \# \\\#\\ \\n \\t \\f \\v \\b \\r + --- + duration_ms: * + ... +# Subtest: escaped skip message +ok 2 - escaped skip message # SKIP \#skip + --- + duration_ms: * + ... +# Subtest: escaped todo message +ok 3 - escaped todo message # TODO \#todo + --- + duration_ms: * + ... +# Subtest: escaped diagnostic +ok 4 - escaped diagnostic + --- + duration_ms: * + ... 
+# \#diagnostic +1..4 +# tests 4 +# suites 0 +# pass 2 +# fail 0 +# cancelled 0 +# skipped 1 +# todo 1 +# duration_ms * diff --git a/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.snapshot b/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.snapshot index cac7facf893309..b3579da789470b 100644 --- a/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.snapshot +++ b/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.snapshot @@ -9,6 +9,7 @@ gonna timeout not ok 1 - first describe first test --- duration_ms: * + location: '/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js:(LINE):3' failureType: 'hookFailed' error: 'failed running beforeEach hook' code: 'ERR_TEST_FAILURE' @@ -25,6 +26,7 @@ not ok 1 - before each timeout --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' @@ -36,6 +38,7 @@ not gonna timeout not ok 1 - second describe first test --- duration_ms: * + location: '/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js:(LINE):3' failureType: 'hookFailed' error: 'failed running afterEach hook' code: 'ERR_TEST_FAILURE' @@ -52,6 +55,7 @@ not ok 2 - after each timeout --- duration_ms: * type: 'suite' + location: '/test/fixtures/test-runner/output/timeout_in_before_each_should_not_affect_further_tests.js:(LINE):1' failureType: 'subtestsFailed' error: '1 subtest failed' code: 'ERR_TEST_FAILURE' diff --git a/test/fixtures/test-runner/output/unresolved_promise.snapshot b/test/fixtures/test-runner/output/unresolved_promise.snapshot index 4b1593c3365798..839ec311a65e04 100644 --- a/test/fixtures/test-runner/output/unresolved_promise.snapshot +++ b/test/fixtures/test-runner/output/unresolved_promise.snapshot @@ -8,6 +8,7 @@ ok 1 - pass not ok 2 - never resolving promise --- duration_ms: * + location: '/test/fixtures/test-runner/output/unresolved_promise.js:(LINE):1' failureType: 'cancelledByParent' error: 'Promise resolution is still pending but the event loop has already resolved' code: 'ERR_TEST_FAILURE' @@ -18,6 +19,7 @@ not ok 2 - never resolving promise not ok 3 - fail --- duration_ms: ZERO + location: '/test/fixtures/test-runner/output/unresolved_promise.js:(LINE):1' failureType: 'cancelledByParent' error: 'Promise resolution is still pending but the event loop has already resolved' code: 'ERR_TEST_FAILURE' diff --git a/test/parallel/test-runner-output.mjs b/test/parallel/test-runner-output.mjs index 8d5233d2de2441..85d3131490a3db 100644 --- a/test/parallel/test-runner-output.mjs +++ b/test/parallel/test-runner-output.mjs @@ -29,23 +29,24 @@ function removeWindowsPathEscaping(str) { return common.isWindows ? 
str.replaceAll(/\\\\/g, '\\') : str; } +function replaceTestLocationLine(str) { + return str.replaceAll(/(js:)(\d+)(:\d+)/g, '$1(LINE)$3'); +} + const defaultTransform = snapshot.transform( snapshot.replaceWindowsLineEndings, snapshot.replaceStackTrace, + removeWindowsPathEscaping, + snapshot.replaceFullPaths, + snapshot.replaceWindowsPaths, replaceTestDuration, + replaceTestLocationLine, ); const specTransform = snapshot.transform( replaceSpecDuration, snapshot.replaceWindowsLineEndings, snapshot.replaceStackTrace, ); -const withFileNameTransform = snapshot.transform( - defaultTransform, - removeWindowsPathEscaping, - snapshot.replaceFullPaths, - snapshot.replaceWindowsPaths, -); - const tests = [ { name: 'test-runner/output/abort.js' }, @@ -58,7 +59,7 @@ const tests = [ { name: 'test-runner/output/hooks-with-no-global-test.js' }, { name: 'test-runner/output/before-and-after-each-too-many-listeners.js' }, { name: 'test-runner/output/before-and-after-each-with-timeout-too-many-listeners.js' }, - { name: 'test-runner/output/global_after_should_fail_the_test.js', transform: withFileNameTransform }, + { name: 'test-runner/output/global_after_should_fail_the_test.js' }, { name: 'test-runner/output/no_refs.js' }, { name: 'test-runner/output/no_tests.js' }, { name: 'test-runner/output/only_tests.js' }, @@ -78,6 +79,13 @@ const tests = [ transform: snapshot.transform(specTransform, replaceTestDuration), tty: true } : false, { name: 'test-runner/output/dot_output_custom_columns.js', transform: specTransform, tty: true }, + { + name: 'test-runner/output/tap_escape.js', + transform: snapshot.transform( + snapshot.replaceWindowsLineEndings, + replaceTestDuration, + ), + }, ] .filter(Boolean) .map(({ name, tty, transform }) => ({ From 546ad5f77082e2eb55eae0d440d973aec6f52b3f Mon Sep 17 00:00:00 2001 From: Colin Ihrig Date: Mon, 14 Aug 2023 10:29:29 -0400 Subject: [PATCH 008/125] test_runner: reland run global after() hook earlier This commit reverts the revert in bb52656fc627e4f48a0f706756873b593d81372a. It also includes the fix for the issue that required the revert (https://github.com/nodejs/node/pull/49059#issuecomment-1675171959) and an additional common.mustCall() in the added test. 
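For illustration, a minimal sketch of the pattern this reland supports, condensed from the regression test added below (the common.mustCall() harness helpers are dropped so it runs standalone with `node --test`): the listening server is a ref'ed handle that keeps the event loop alive, and the global after() hook now gets a chance to close it.

    'use strict';
    const { before, after, test } = require('node:test');
    const { createServer } = require('node:http');

    let server;

    // The listening server is a ref'ed handle that keeps the event loop alive.
    before(() => new Promise((resolve) => {
      server = createServer();
      server.listen(0, () => resolve());
    }));

    // Runs after all tests finish, even though the server handle is still open.
    after(() => {
      server.close();
    });

    test('runs while the server handle is open', () => {});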
Refs: https://github.com/nodejs/node/pull/49059 Refs: https://github.com/nodejs/node/pull/49110 PR-URL: https://github.com/nodejs/node/pull/49116 Backport-PR-URL: https://github.com/nodejs/node/pull/49225 Reviewed-By: Matteo Collina Reviewed-By: Chemi Atlow Reviewed-By: Moshe Atlow Reviewed-By: Benjamin Gruenbaum --- lib/internal/test_runner/harness.js | 8 ++-- lib/internal/test_runner/test.js | 27 ++++++++++++-- .../output/async-test-scheduling.mjs | 13 +++++++ .../output/async-test-scheduling.snapshot | 37 +++++++++++++++++++ ...global_after_should_fail_the_test.snapshot | 1 - test/parallel/test-runner-output.mjs | 1 + ...st-runner-root-after-with-refed-handles.js | 26 +++++++++++++ 7 files changed, 104 insertions(+), 9 deletions(-) create mode 100644 test/fixtures/test-runner/output/async-test-scheduling.mjs create mode 100644 test/fixtures/test-runner/output/async-test-scheduling.snapshot create mode 100644 test/parallel/test-runner-root-after-with-refed-handles.js diff --git a/lib/internal/test_runner/harness.js b/lib/internal/test_runner/harness.js index 4eb6458b23e47d..357347627fcc2b 100644 --- a/lib/internal/test_runner/harness.js +++ b/lib/internal/test_runner/harness.js @@ -142,8 +142,8 @@ function setup(root) { const rejectionHandler = createProcessEventHandler('unhandledRejection', root); const coverage = configureCoverage(root, globalOptions); - const exitHandler = async () => { - await root.run(new ERR_TEST_FAILURE( + const exitHandler = () => { + root.postRun(new ERR_TEST_FAILURE( 'Promise resolution is still pending but the event loop has already resolved', kCancelledByParent)); @@ -152,8 +152,8 @@ function setup(root) { process.removeListener('uncaughtException', exceptionHandler); }; - const terminationHandler = async () => { - await exitHandler(); + const terminationHandler = () => { + exitHandler(); process.exit(); }; diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index 58f1de711f38f4..975ad4ac08b41f 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -574,7 +574,7 @@ class Test extends AsyncResource { } } - async run(pendingSubtestsError) { + async run() { if (this.parent !== null) { this.parent.activeSubtests++; } @@ -662,9 +662,16 @@ class Test extends AsyncResource { } } - // Clean up the test. Then, try to report the results and execute any - // tests that were pending due to available concurrency. - this.postRun(pendingSubtestsError); + if (this.parent !== null || typeof this.hookType === 'string') { + // Clean up the test. Then, try to report the results and execute any + // tests that were pending due to available concurrency. + // + // The root test is skipped here because it is a special case. Its + // postRun() method is called when the process is getting ready to exit. + // This helps catch any asynchronous activity that occurs after the tests + // have finished executing. + this.postRun(); + } } postRun(pendingSubtestsError) { @@ -706,6 +713,18 @@ class Test extends AsyncResource { this.parent.addReadySubtest(this); this.parent.processReadySubtestRange(false); this.parent.processPendingSubtests(); + + if (this.parent === this.root && + this.root.activeSubtests === 0 && + this.root.pendingSubtests.length === 0 && + this.root.readySubtests.size === 0 && + this.root.hooks.after.length > 0) { + // This is done so that any global after() hooks are run. At this point + // all of the tests have finished running. However, there might be + // ref'ed handles keeping the event loop alive. 
This gives the global + // after() hook a chance to clean them up. + this.root.run(); + } } else if (!this.reported) { const { diagnostics, diff --git a/test/fixtures/test-runner/output/async-test-scheduling.mjs b/test/fixtures/test-runner/output/async-test-scheduling.mjs new file mode 100644 index 00000000000000..7c7a9f91208911 --- /dev/null +++ b/test/fixtures/test-runner/output/async-test-scheduling.mjs @@ -0,0 +1,13 @@ +import * as common from '../../../common/index.mjs'; +import { describe, test } from 'node:test'; +import { setTimeout } from 'node:timers/promises'; + +test('test', common.mustCall()); +describe('suite', common.mustCall(async () => { + test('test', common.mustCall()); + await setTimeout(10); + test('scheduled async', common.mustCall()); +})); + +await setTimeout(10); +test('scheduled async', common.mustCall()); diff --git a/test/fixtures/test-runner/output/async-test-scheduling.snapshot b/test/fixtures/test-runner/output/async-test-scheduling.snapshot new file mode 100644 index 00000000000000..64c3004d26881d --- /dev/null +++ b/test/fixtures/test-runner/output/async-test-scheduling.snapshot @@ -0,0 +1,37 @@ +TAP version 13 +# Subtest: test +ok 1 - test + --- + duration_ms: * + ... +# Subtest: suite + # Subtest: test + ok 1 - test + --- + duration_ms: * + ... + # Subtest: scheduled async + ok 2 - scheduled async + --- + duration_ms: * + ... + 1..2 +ok 2 - suite + --- + duration_ms: * + type: 'suite' + ... +# Subtest: scheduled async +ok 3 - scheduled async + --- + duration_ms: * + ... +1..3 +# tests 4 +# suites 1 +# pass 4 +# fail 0 +# cancelled 0 +# skipped 0 +# todo 0 +# duration_ms * diff --git a/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot b/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot index 845aba58eddd32..3196f377b3d4bf 100644 --- a/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot +++ b/test/fixtures/test-runner/output/global_after_should_fail_the_test.snapshot @@ -22,7 +22,6 @@ not ok 2 - /test/fixtures/test-runner/output/global_after_should_fail_the_test.j * * * - * ... 1..1 # tests 1 diff --git a/test/parallel/test-runner-output.mjs b/test/parallel/test-runner-output.mjs index 85d3131490a3db..8db41bff38a114 100644 --- a/test/parallel/test-runner-output.mjs +++ b/test/parallel/test-runner-output.mjs @@ -74,6 +74,7 @@ const tests = [ { name: 'test-runner/output/unresolved_promise.js' }, { name: 'test-runner/output/default_output.js', transform: specTransform, tty: true }, { name: 'test-runner/output/arbitrary-output.js' }, + { name: 'test-runner/output/async-test-scheduling.mjs' }, !skipForceColors ? 
{ name: 'test-runner/output/arbitrary-output-colored.js', transform: snapshot.transform(specTransform, replaceTestDuration), tty: true diff --git a/test/parallel/test-runner-root-after-with-refed-handles.js b/test/parallel/test-runner-root-after-with-refed-handles.js new file mode 100644 index 00000000000000..2149c2dba236cf --- /dev/null +++ b/test/parallel/test-runner-root-after-with-refed-handles.js @@ -0,0 +1,26 @@ +'use strict'; +const common = require('../common'); +const { before, after, test } = require('node:test'); +const { createServer } = require('node:http'); + +let server; + +before(common.mustCall(() => { + server = createServer(); + + return new Promise(common.mustCall((resolve, reject) => { + server.listen(0, common.mustCall((err) => { + if (err) { + reject(err); + } else { + resolve(); + } + })); + })); +})); + +after(common.mustCall(() => { + server.close(common.mustCall()); +})); + +test(); From 30f26a99f4074c888d0b57472a1f5070384673e6 Mon Sep 17 00:00:00 2001 From: RafaelGSS Date: Wed, 28 Jun 2023 16:18:00 -0300 Subject: [PATCH 009/125] permission: ensure to resolve path when calling mkdtemp MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs-private/node-private/pull/440 Refs: https://hackerone.com/bugs?subject=nodejs&report_id=2037887 Reviewed-By: Tobias Nießen From e9946885f9be4767168294333c3d128b8835166d Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 15 Aug 2023 20:32:14 +0200 Subject: [PATCH 010/125] src: serialize both BaseObject slots We previously only returned startup data for the first slot for BaseObjects because we can already serialize all the necessary information in one go, but slots that do not get special startup data would be serialized verbatim, which means that the pointer addresses are going to be part of the snapshot blob, resulting in indeterminism. This patch updates the serialization routines and captures information for both slots - the first slot with type information and memory management type (which we can use in the future for cppgc-managed objects) and the second slot with data about the object itself. This way the embedder slots can be serialized in a reproducible manner in the snapshot.
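For readability, a condensed sketch of the two-slot dispatch this commit introduces, simplified from the node_snapshotable.cc hunk below (debug logging, the ArrayBuffer early-out, and null checks are omitted; names such as info->length are taken from the surrounding source, so treat this as a reading aid rather than compilable code):

    StartupData SerializeNodeContextInternalFields(Local<Object> holder,
                                                   int index,
                                                   void* callback_data) {
      Environment* env = static_cast<Environment*>(callback_data);
      // Unknown objects keep the V8 convention: serialize verbatim.
      if (!BaseObject::IsBaseObject(env->isolate_data(), holder))
        return StartupData{nullptr, 0};

      SnapshotableObject* obj = static_cast<SnapshotableObject*>(
          holder->GetAlignedPointerFromInternalField(BaseObject::kSlot));

      // Slot 0 (kEmbedderType): record the type and memory-management mode,
      // so no pointer address ever lands in the snapshot blob.
      if (index == BaseObject::kEmbedderType) {
        char* data = new char[sizeof(EmbedderTypeInfo)];  // V8 calls delete[]
        new (data) EmbedderTypeInfo(obj->type(),
                                    EmbedderTypeInfo::MemoryMode::kBaseObject);
        return StartupData{data, static_cast<int>(sizeof(EmbedderTypeInfo))};
      }

      // Slot 1 (kSlot): the object's own payload via its Serialize() method.
      DCHECK_IS_SNAPSHOT_SLOT(index);
      InternalFieldInfoBase* info = obj->Serialize(index);
      return StartupData{reinterpret_cast<const char*>(info),
                         static_cast<int>(info->length)};
    }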
PR-URL: https://github.com/nodejs/node/pull/48996 Refs: https://github.com/nodejs/build/issues/3043 Reviewed-By: Rafael Gonzaga --- src/encoding_binding.cc | 4 +- src/env.cc | 3 +- src/node_blob.cc | 4 +- src/node_file.cc | 4 +- src/node_process_methods.cc | 4 +- src/node_snapshotable.cc | 80 ++++++++++++++++++++++++++----------- src/node_snapshotable.h | 10 +++++ src/node_url.cc | 4 +- src/node_util.cc | 4 +- src/node_v8.cc | 4 +- src/timers.cc | 4 +- 11 files changed, 84 insertions(+), 41 deletions(-) diff --git a/src/encoding_binding.cc b/src/encoding_binding.cc index b65a4f868e2b26..c67af5319c8ff5 100644 --- a/src/encoding_binding.cc +++ b/src/encoding_binding.cc @@ -62,7 +62,7 @@ bool BindingData::PrepareForSerialization(Local context, } InternalFieldInfoBase* BindingData::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = internal_field_info_; internal_field_info_ = nullptr; return info; @@ -72,7 +72,7 @@ void BindingData::Deserialize(Local context, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); v8::HandleScope scope(context->GetIsolate()); Realm* realm = Realm::GetCurrent(context); // Recreate the buffer in the constructor. diff --git a/src/env.cc b/src/env.cc index 7e3d3aca2d5f96..20b404996ccd0f 100644 --- a/src/env.cc +++ b/src/env.cc @@ -12,6 +12,7 @@ #include "node_options-inl.h" #include "node_process-inl.h" #include "node_shadow_realm.h" +#include "node_snapshotable.h" #include "node_v8_platform-inl.h" #include "node_worker.h" #include "req_wrap-inl.h" @@ -1760,7 +1761,7 @@ void Environment::EnqueueDeserializeRequest(DeserializeRequestCallback cb, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); DeserializeRequest request{cb, {isolate(), holder}, index, info}; deserialize_requests_.push_back(std::move(request)); } diff --git a/src/node_blob.cc b/src/node_blob.cc index e4a3b2fe8b0f98..8dc81a6f15e867 100644 --- a/src/node_blob.cc +++ b/src/node_blob.cc @@ -532,7 +532,7 @@ void BlobBindingData::Deserialize(Local context, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); HandleScope scope(context->GetIsolate()); Realm* realm = Realm::GetCurrent(context); BlobBindingData* binding = realm->AddBindingData(holder); @@ -548,7 +548,7 @@ bool BlobBindingData::PrepareForSerialization(Local context, } InternalFieldInfoBase* BlobBindingData::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = InternalFieldInfoBase::New(type()); return info; diff --git a/src/node_file.cc b/src/node_file.cc index 4c21cc7467ccf2..bde29316888042 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -3151,7 +3151,7 @@ void BindingData::Deserialize(Local context, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); HandleScope scope(context->GetIsolate()); Realm* realm = Realm::GetCurrent(context); InternalFieldInfo* casted_info = static_cast(info); @@ -3179,7 +3179,7 @@ bool BindingData::PrepareForSerialization(Local context, } InternalFieldInfoBase* BindingData::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = internal_field_info_; 
internal_field_info_ = nullptr; return info; diff --git a/src/node_process_methods.cc b/src/node_process_methods.cc index 1b68207f3e3ba6..34d3c3af4c3e10 100644 --- a/src/node_process_methods.cc +++ b/src/node_process_methods.cc @@ -552,7 +552,7 @@ bool BindingData::PrepareForSerialization(Local context, } InternalFieldInfoBase* BindingData::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = InternalFieldInfoBase::New(type()); return info; @@ -562,7 +562,7 @@ void BindingData::Deserialize(Local context, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); v8::HandleScope scope(context->GetIsolate()); Realm* realm = Realm::GetCurrent(context); // Recreate the buffer in the constructor. diff --git a/src/node_snapshotable.cc b/src/node_snapshotable.cc index 1f066c7d5bb9ff..af85ea10a94163 100644 --- a/src/node_snapshotable.cc +++ b/src/node_snapshotable.cc @@ -1146,25 +1146,33 @@ std::string SnapshotableObject::GetTypeName() const { void DeserializeNodeInternalFields(Local holder, int index, StartupData payload, - void* env) { + void* callback_data) { if (payload.raw_size == 0) { - holder->SetAlignedPointerInInternalField(index, nullptr); return; } + per_process::Debug(DebugCategory::MKSNAPSHOT, "Deserialize internal field %d of %p, size=%d\n", static_cast(index), (*holder), static_cast(payload.raw_size)); - if (payload.raw_size == 0) { - holder->SetAlignedPointerInInternalField(index, nullptr); + Environment* env = static_cast(callback_data); + + // To deserialize the first field, check the type and re-tag the object. + if (index == BaseObject::kEmbedderType) { + int size = sizeof(EmbedderTypeInfo); + DCHECK_EQ(payload.raw_size, size); + EmbedderTypeInfo read_data; + memcpy(&read_data, payload.data, size); + // For now we only support non-cppgc objects. + CHECK_EQ(read_data.mode, EmbedderTypeInfo::MemoryMode::kBaseObject); + BaseObject::TagBaseObject(env->isolate_data(), holder); return; } - DCHECK_EQ(index, BaseObject::kEmbedderType); - - Environment* env_ptr = static_cast(env); + // To deserialize the second field, enqueue a deserialize request. + DCHECK_IS_SNAPSHOT_SLOT(index); const InternalFieldInfoBase* info = reinterpret_cast(payload.data); // TODO(joyeecheung): we can add a constant kNodeEmbedderId to the @@ -1177,7 +1185,7 @@ void DeserializeNodeInternalFields(Local holder, "Object %p is %s\n", \ (*holder), \ #NativeTypeName); \ - env_ptr->EnqueueDeserializeRequest( \ + env->EnqueueDeserializeRequest( \ NativeTypeName::Deserialize, \ holder, \ index, \ @@ -1203,28 +1211,52 @@ void DeserializeNodeInternalFields(Local holder, StartupData SerializeNodeContextInternalFields(Local holder, int index, void* callback_data) { - // We only do one serialization for the kEmbedderType slot, the result - // contains everything necessary for deserializing the entire object, - // including the fields whose index is bigger than kEmbedderType - // (most importantly, BaseObject::kSlot). - // For Node.js this design is enough for all the native binding that are - // serializable. + // For the moment we do not set any internal fields in ArrayBuffer + // or ArrayBufferViews, so just return nullptr. + if (holder->IsArrayBuffer() || holder->IsArrayBufferView()) { + CHECK_NULL(holder->GetAlignedPointerFromInternalField(index)); + return StartupData{nullptr, 0}; + } + + // Use the V8 convention and serialize unknown objects verbatim. 
Environment* env = static_cast(callback_data); - if (index != BaseObject::kEmbedderType || - !BaseObject::IsBaseObject(env->isolate_data(), holder)) { + if (!BaseObject::IsBaseObject(env->isolate_data(), holder)) { + per_process::Debug(DebugCategory::MKSNAPSHOT, + "Serialize unknown object, index=%d, holder=%p\n", + static_cast(index), + *holder); return StartupData{nullptr, 0}; } per_process::Debug(DebugCategory::MKSNAPSHOT, - "Serialize internal field, index=%d, holder=%p\n", + "Serialize BaseObject, index=%d, holder=%p\n", static_cast(index), *holder); - void* native_ptr = - holder->GetAlignedPointerFromInternalField(BaseObject::kSlot); - per_process::Debug(DebugCategory::MKSNAPSHOT, "native = %p\n", native_ptr); - DCHECK(static_cast(native_ptr)->is_snapshotable()); - SnapshotableObject* obj = static_cast(native_ptr); + BaseObject* object_ptr = static_cast( + holder->GetAlignedPointerFromInternalField(BaseObject::kSlot)); + // If the native object is already set to null, ignore it. + if (object_ptr == nullptr) { + return StartupData{nullptr, 0}; + } + + DCHECK(object_ptr->is_snapshotable()); + SnapshotableObject* obj = static_cast(object_ptr); + + // To serialize the type field, save data in a EmbedderTypeInfo. + if (index == BaseObject::kEmbedderType) { + int size = sizeof(EmbedderTypeInfo); + char* data = new char[size]; + // We need to use placement new because V8 calls delete[] on the returned + // data. + // TODO(joyeecheung): support cppgc objects. + new (data) EmbedderTypeInfo(obj->type(), + EmbedderTypeInfo::MemoryMode::kBaseObject); + return StartupData{data, size}; + } + + // To serialize the slot field, invoke Serialize() method on the object. + DCHECK_IS_SNAPSHOT_SLOT(index); per_process::Debug(DebugCategory::MKSNAPSHOT, "Object %p is %s, ", @@ -1380,7 +1412,7 @@ bool BindingData::PrepareForSerialization(Local context, } InternalFieldInfoBase* BindingData::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = internal_field_info_; internal_field_info_ = nullptr; return info; @@ -1390,7 +1422,7 @@ void BindingData::Deserialize(Local context, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); v8::HandleScope scope(context->GetIsolate()); Realm* realm = Realm::GetCurrent(context); // Recreate the buffer in the constructor. diff --git a/src/node_snapshotable.h b/src/node_snapshotable.h index eed572beef3a0c..d1f28ecf154d9b 100644 --- a/src/node_snapshotable.h +++ b/src/node_snapshotable.h @@ -68,6 +68,14 @@ struct InternalFieldInfoBase { InternalFieldInfoBase() = default; }; +struct EmbedderTypeInfo { + enum class MemoryMode : uint8_t { kBaseObject, kCppGC }; + EmbedderTypeInfo(EmbedderObjectType t, MemoryMode m) : type(t), mode(m) {} + EmbedderTypeInfo() = default; + EmbedderObjectType type; + MemoryMode mode; +}; + // An interface for snapshotable native objects to inherit from. 
// Use the SERIALIZABLE_OBJECT_METHODS() macro in the class to define // the following methods to implement: @@ -123,6 +131,8 @@ void SerializeSnapshotableObjects(Realm* realm, v8::SnapshotCreator* creator, RealmSerializeInfo* info); +#define DCHECK_IS_SNAPSHOT_SLOT(index) DCHECK_EQ(index, BaseObject::kSlot) + namespace mksnapshot { class BindingData : public SnapshotableObject { public: diff --git a/src/node_url.cc b/src/node_url.cc index f055acd51c323c..da8790c1d1843e 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -54,7 +54,7 @@ bool BindingData::PrepareForSerialization(v8::Local context, } InternalFieldInfoBase* BindingData::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = InternalFieldInfoBase::New(type()); return info; @@ -64,7 +64,7 @@ void BindingData::Deserialize(v8::Local context, v8::Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); v8::HandleScope scope(context->GetIsolate()); Realm* realm = Realm::GetCurrent(context); BindingData* binding = realm->AddBindingData(holder); diff --git a/src/node_util.cc b/src/node_util.cc index e8cb28969621fb..ec637dcbd78861 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -246,7 +246,7 @@ bool WeakReference::PrepareForSerialization(Local context, } InternalFieldInfoBase* WeakReference::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = InternalFieldInfoBase::New(type()); info->target = target_index_; @@ -258,7 +258,7 @@ void WeakReference::Deserialize(Local context, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); HandleScope scope(context->GetIsolate()); InternalFieldInfo* weak_info = reinterpret_cast(info); diff --git a/src/node_v8.cc b/src/node_v8.cc index a5e91f5b8ca624..814efe3d69651c 100644 --- a/src/node_v8.cc +++ b/src/node_v8.cc @@ -152,7 +152,7 @@ void BindingData::Deserialize(Local context, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); HandleScope scope(context->GetIsolate()); Realm* realm = Realm::GetCurrent(context); // Recreate the buffer in the constructor. @@ -163,7 +163,7 @@ void BindingData::Deserialize(Local context, } InternalFieldInfoBase* BindingData::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = internal_field_info_; internal_field_info_ = nullptr; return info; diff --git a/src/timers.cc b/src/timers.cc index 27fa18ec4d3f86..127806fbcdfd3e 100644 --- a/src/timers.cc +++ b/src/timers.cc @@ -94,7 +94,7 @@ bool BindingData::PrepareForSerialization(Local context, } InternalFieldInfoBase* BindingData::Serialize(int index) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); InternalFieldInfo* info = InternalFieldInfoBase::New(type()); return info; @@ -104,7 +104,7 @@ void BindingData::Deserialize(Local context, Local holder, int index, InternalFieldInfoBase* info) { - DCHECK_EQ(index, BaseObject::kEmbedderType); + DCHECK_IS_SNAPSHOT_SLOT(index); v8::HandleScope scope(context->GetIsolate()); Realm* realm = Realm::GetCurrent(context); // Recreate the buffer in the constructor. 
From f637fd46ab0272beb7fb7f060e974174b23d8242 Mon Sep 17 00:00:00 2001
From: michalbiesek
Date: Tue, 15 Aug 2023 20:40:33 +0200
Subject: [PATCH 011/125] build: fix typo `libray` -> `library` (configure.py)

Signed-off-by: Michal Biesek
PR-URL: https://github.com/nodejs/node/pull/49106
Reviewed-By: Richard Lau
Reviewed-By: Deokjin Kim
Reviewed-By: Benjamin Gruenbaum
Reviewed-By: Luigi Pinca
---
 configure.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/configure.py b/configure.py
index ee08264e91d8a4..9a478c5f983a51 100755
--- a/configure.py
+++ b/configure.py
@@ -457,7 +457,7 @@
 static_optgroup.add_argument('--static-zoslib-gyp',
     action='store',
     dest='static_zoslib_gyp',
-    help='path to zoslib.gyp file for includes and to link to static zoslib libray')
+    help='path to zoslib.gyp file for includes and to link to static zoslib library')

 parser.add_argument_group(static_optgroup)

From 173aed47575a4e94823f7da7a043eecb819a2241 Mon Sep 17 00:00:00 2001
From: Michael Dawson
Date: Fri, 28 Jul 2023 16:14:33 -0400
Subject: [PATCH 012/125] report: fix recent coverity warning

Fix warning about dereferencing null env

Signed-off-by: Michael Dawson
PR-URL: https://github.com/nodejs/node/pull/48954
Reviewed-By: Yagiz Nizipli
Reviewed-By: Luigi Pinca
---
 src/node_report.cc | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/node_report.cc b/src/node_report.cc
index 76b5d4448267ff..88c9a97789e30b 100644
--- a/src/node_report.cc
+++ b/src/node_report.cc
@@ -857,9 +857,13 @@ std::string TriggerNodeReport(Isolate* isolate,
   // Determine the required report filename. In order of priority:
   //   1) supplied on API 2) configured on startup 3) default generated
   if (!name.empty()) {
-    THROW_IF_INSUFFICIENT_PERMISSIONS(
-        env, permission::PermissionScope::kFileSystemWrite, name, name);
-    // Filename was specified as API parameter.
+    // We may not always be in a great state when generating a node report;
+    // allow for the case where we don't have an env.
+    if (env != nullptr) {
+      THROW_IF_INSUFFICIENT_PERMISSIONS(
+          env, permission::PermissionScope::kFileSystemWrite, name, name);
+      // Filename was specified as API parameter.
+    }
     filename = name;
   } else {
     std::string report_filename;

From 08197aa010bf2afcc56e838c53e992c7cff9b739 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Tue, 15 Aug 2023 23:01:39 +0200
Subject: [PATCH 013/125] crypto: remove default encoding from sign/verify

getDefaultEncoding() always returns 'buffer' in Node.js 20. It requires
some careful justification but the default encoding can be eliminated
from sig.js entirely.

In Sign.prototype.update, we can safely remove the conditional
assignment of getDefaultEncoding() to encoding. This is because
SignUpdate() in crypto_sig.cc internally calls node::crypto::Decode,
which returns UTF8 for falsy encoding values. In other words, with the
conditional assignment, StringBytes::Write() ultimately receives the
encoding BUFFER, and without the conditional assignment, it receives
the encoding UTF8. However, StringBytes::Write() treats both encodings
identically, so there is no need to deviate from the internal default
encoding UTF8.

In Sign.prototype.sign, we can also safely remove the conditional
assignment of getDefaultEncoding() to encoding. Whether encoding is
falsy or 'buffer' makes no difference.

In Verify.prototype.verify, we can also safely remove the conditional
assignment of getDefaultEncoding() to sigEncoding. This is because the
function passes the sigEncoding to getArrayBufferOrView(), which passes
it to Buffer.from(). If sigEncoding is 'buffer', getArrayBufferOrView()
instead passes 'utf8' to Buffer.from(). Because the default encoding of
Buffer.from() is 'utf8', passing a falsy encoding to
getArrayBufferOrView() instead of 'buffer' results in the same
behavior.

Refs: https://github.com/nodejs/node/pull/47182
PR-URL: https://github.com/nodejs/node/pull/49145
Reviewed-By: Filip Skokan
Reviewed-By: Luigi Pinca
---
 lib/internal/crypto/sig.js | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/lib/internal/crypto/sig.js b/lib/internal/crypto/sig.js
index 71e8fbadaa84d7..9b3895646c7929 100644
--- a/lib/internal/crypto/sig.js
+++ b/lib/internal/crypto/sig.js
@@ -34,7 +34,6 @@ const {

 const {
   getArrayBufferOrView,
-  getDefaultEncoding,
   kHandle,
 } = require('internal/crypto/util');

@@ -70,8 +69,6 @@ Sign.prototype._write = function _write(chunk, encoding, callback) {
 };

 Sign.prototype.update = function update(data, encoding) {
-  encoding = encoding || getDefaultEncoding();
-
   if (typeof data === 'string') {
     validateEncoding(data, encoding);
   } else if (!isArrayBufferView(data)) {
@@ -131,7 +128,6 @@ Sign.prototype.sign = function sign(options, encoding) {
   const ret = this[kHandle].sign(data, format, type, passphrase, rsaPadding,
                                  pssSaltLength, dsaSigEnc);

-  encoding = encoding || getDefaultEncoding();
   if (encoding && encoding !== 'buffer')
     return ret.toString(encoding);

@@ -216,8 +212,6 @@ Verify.prototype.verify = function verify(options, signature, sigEncoding) {
     passphrase,
   } = preparePublicOrPrivateKey(options, true);

-  sigEncoding = sigEncoding || getDefaultEncoding();
-
   // Options specific to RSA
   const rsaPadding = getPadding(options);
   const pssSaltLength = getSaltLength(options);

From 184bbddcf530a7de0ac5389b17eed9180dd102f3 Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Thu, 3 Aug 2023 16:39:33 +0200
Subject: [PATCH 014/125] src: add per-realm GetBindingData() method

This version avoids the additional access to the embedder slot
when we already have a reference to the realm.
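The per-realm lookup matters most for realms that are not the principal realm. This becomes visible from JavaScript through the experimental ShadowRealm global; a rough sketch (assuming `--experimental-shadow-realm` and that the built-in in question is wired up for non-principal realms in the Node.js version at hand):

```js
// Run with: node --experimental-shadow-realm example.js
// URL parsing is backed by per-realm binding data, so it should work in a
// realm that is not the principal realm of the Environment.
const realm = new ShadowRealm();

// ShadowRealm.prototype.evaluate may only return primitives or callables,
// so return the href string rather than the URL object itself.
const href = realm.evaluate(`new URL('/a/b', 'https://example.org').href`);
console.log(href); // 'https://example.org/a/b'
```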
PR-URL: https://github.com/nodejs/node/pull/49007
Refs: https://github.com/nodejs/node/pull/48836
Reviewed-By: Chengzhong Wu
Reviewed-By: Matteo Collina
Reviewed-By: Rafael Gonzaga
Reviewed-By: Stephen Belanger
Reviewed-By: Yagiz Nizipli
---
 src/node_file.cc     | 34 +++++++++++++++++++---------------
 src/node_realm-inl.h | 11 ++++++-----
 2 files changed, 25 insertions(+), 20 deletions(-)

diff --git a/src/node_file.cc b/src/node_file.cc
index bde29316888042..a94792e7e96b1f 100644
--- a/src/node_file.cc
+++ b/src/node_file.cc
@@ -265,17 +265,17 @@ FileHandle* FileHandle::New(BindingData* binding_data,
 }

 void FileHandle::New(const FunctionCallbackInfo<Value>& args) {
-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = binding_data->env();
   CHECK(args.IsConstructCall());
   CHECK(args[0]->IsInt32());

+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
   std::optional<int64_t> maybeOffset = std::nullopt;
   std::optional<int64_t> maybeLength = std::nullopt;
   if (args[1]->IsNumber())
-    maybeOffset = args[1]->IntegerValue(env->context()).FromJust();
+    maybeOffset = args[1]->IntegerValue(realm->context()).FromJust();
   if (args[2]->IsNumber())
-    maybeLength = args[2]->IntegerValue(env->context()).FromJust();
+    maybeLength = args[2]->IntegerValue(realm->context()).FromJust();

   FileHandle::New(binding_data,
                   args[0].As<Int32>()->Value(),
@@ -1143,13 +1143,14 @@ static void InternalModuleStat(const FunctionCallbackInfo<Value>& args) {
 }

 static void Stat(const FunctionCallbackInfo<Value>& args) {
-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = binding_data->env();
+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
+  Environment* env = realm->env();

   const int argc = args.Length();
   CHECK_GE(argc, 2);

-  BufferValue path(env->isolate(), args[0]);
+  BufferValue path(realm->isolate(), args[0]);
   CHECK_NOT_NULL(*path);
   THROW_IF_INSUFFICIENT_PERMISSIONS(
       env, permission::PermissionScope::kFileSystemRead, path.ToStringView());
@@ -1178,13 +1179,14 @@ static void Stat(const FunctionCallbackInfo<Value>& args) {
 }

 static void LStat(const FunctionCallbackInfo<Value>& args) {
-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = binding_data->env();
+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
+  Environment* env = realm->env();

   const int argc = args.Length();
   CHECK_GE(argc, 3);

-  BufferValue path(env->isolate(), args[0]);
+  BufferValue path(realm->isolate(), args[0]);
   CHECK_NOT_NULL(*path);

   bool use_bigint = args[1]->IsTrue();
@@ -1212,8 +1214,9 @@ static void LStat(const FunctionCallbackInfo<Value>& args) {
 }

 static void FStat(const FunctionCallbackInfo<Value>& args) {
-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = binding_data->env();
+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
+  Environment* env = realm->env();

   const int argc = args.Length();
   CHECK_GE(argc, 2);
@@ -1244,13 +1247,14 @@ static void FStat(const FunctionCallbackInfo<Value>& args) {
 }

 static void StatFs(const FunctionCallbackInfo<Value>& args) {
-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = binding_data->env();
+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
+  Environment* env = realm->env();

   const int argc = args.Length();
   CHECK_GE(argc, 2);

-  BufferValue path(env->isolate(), args[0]);
+  BufferValue path(realm->isolate(), args[0]);
   CHECK_NOT_NULL(*path);
   THROW_IF_INSUFFICIENT_PERMISSIONS(
       env, permission::PermissionScope::kFileSystemRead, path.ToStringView());
diff --git a/src/node_realm-inl.h b/src/node_realm-inl.h
index 5ccd76fc56673c..fe20ac2b2fea3a 100644
--- a/src/node_realm-inl.h
+++ b/src/node_realm-inl.h
@@ -67,16 +67,17 @@ inline T* Realm::GetBindingData(
 template <typename T>
 inline T* Realm::GetBindingData(v8::Local<v8::Context> context) {
   Realm* realm = GetCurrent(context);
-  DCHECK_NOT_NULL(realm);
-  BindingDataStore* map = realm->binding_data_store();
-  DCHECK_NOT_NULL(map);
+  return realm->GetBindingData<T>();
+}
+
+template <typename T>
+inline T* Realm::GetBindingData() {
   constexpr size_t binding_index = static_cast<size_t>(T::binding_type_int);
   static_assert(binding_index < std::tuple_size_v<BindingDataStore>);
-  auto ptr = (*map)[binding_index];
+  auto ptr = binding_data_store_[binding_index];
   if (UNLIKELY(!ptr)) return nullptr;
   T* result = static_cast<T*>(ptr.get());
   DCHECK_NOT_NULL(result);
-  DCHECK_EQ(result->realm(), GetCurrent(context));
   return result;
 }

From 2a35383b3e4106818b870905763f2403c524150b Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Thu, 3 Aug 2023 16:50:23 +0200
Subject: [PATCH 015/125] src: use per-realm GetBindingData() wherever
 applicable

This reduces the number of embedder slot accesses and also removes
the assumption in a few binding methods that the current realm is
the principal realm of the current environment (which is not true
for shadow realms).

PR-URL: https://github.com/nodejs/node/pull/49007
Refs: https://github.com/nodejs/node/pull/48836
Reviewed-By: Chengzhong Wu
Reviewed-By: Matteo Collina
Reviewed-By: Rafael Gonzaga
Reviewed-By: Stephen Belanger
Reviewed-By: Yagiz Nizipli
---
 src/dataqueue/queue.cc  | 12 +++++-----
 src/encoding_binding.cc |  7 +++---
 src/node_blob.cc        | 52 ++++++++++++++++++++---------------------
 src/node_file-inl.h     |  7 +++---
 src/node_file.cc        |  8 +++----
 src/node_http2.cc       | 10 ++++----
 src/node_realm.h        |  2 ++
 src/node_url.cc         | 27 ++++++++++-----------
 src/quic/bindingdata.cc |  2 +-
 9 files changed, 63 insertions(+), 64 deletions(-)

diff --git a/src/dataqueue/queue.cc b/src/dataqueue/queue.cc
index 8ae28f9d0a791b..994b82a8751f6e 100644
--- a/src/dataqueue/queue.cc
+++ b/src/dataqueue/queue.cc
@@ -876,12 +876,12 @@ class FdEntry final : public EntryImpl {
     }
     Realm* realm = entry->env()->principal_realm();
     return std::make_shared(
-        BaseObjectPtr<fs::FileHandle>(fs::FileHandle::New(
-            realm->GetBindingData<fs::BindingData>(realm->context()),
-            file,
-            Local<Object>(),
-            entry->start_,
-            entry->end_ - entry->start_)),
+        BaseObjectPtr<fs::FileHandle>(
+            fs::FileHandle::New(realm->GetBindingData<fs::BindingData>(),
+                                file,
+                                Local<Object>(),
+                                entry->start_,
+                                entry->end_ - entry->start_)),
         entry);
   }

diff --git a/src/encoding_binding.cc b/src/encoding_binding.cc
index c67af5319c8ff5..97ddd59fb661c8 100644
--- a/src/encoding_binding.cc
+++ b/src/encoding_binding.cc
@@ -83,12 +83,13 @@ void BindingData::Deserialize(Local<Context> context,
 }

 void BindingData::EncodeInto(const FunctionCallbackInfo<Value>& args) {
-  Environment* env = Environment::GetCurrent(args);
-  Isolate* isolate = env->isolate();
   CHECK_GE(args.Length(), 2);
   CHECK(args[0]->IsString());
   CHECK(args[1]->IsUint8Array());
-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
+
+  Realm* realm = Realm::GetCurrent(args);
+  Isolate* isolate = realm->isolate();
+  BindingData* binding_data = realm->GetBindingData<BindingData>();

   Local<String> source = args[0].As<String>();

diff --git a/src/node_blob.cc b/src/node_blob.cc
index 8dc81a6f15e867..9ea37853ce1d46 100644
--- a/src/node_blob.cc
+++ b/src/node_blob.cc
@@ -400,20 +400,22 @@ std::unique_ptr<worker::TransferData> Blob::CloneForMessaging() const {
 }

 void Blob::StoreDataObject(const v8::FunctionCallbackInfo<Value>& args) {
-  Environment* env = Environment::GetCurrent(args);
-  BlobBindingData* binding_data = Realm::GetBindingData<BlobBindingData>(args);
+  Realm* realm = Realm::GetCurrent(args);

   CHECK(args[0]->IsString());  // ID key
-  CHECK(Blob::HasInstance(env, args[1]));  // Blob
+  CHECK(Blob::HasInstance(realm->env(), args[1]));  // Blob
   CHECK(args[2]->IsUint32());  // Length
   CHECK(args[3]->IsString());  // Type

-  Utf8Value key(env->isolate(), args[0]);
+  BlobBindingData* binding_data = realm->GetBindingData<BlobBindingData>();
+  Isolate* isolate = realm->isolate();
+
+  Utf8Value key(isolate, args[0]);
   Blob* blob;
   ASSIGN_OR_RETURN_UNWRAP(&blob, args[1]);

   size_t length = args[2].As<Uint32>()->Value();
-  Utf8Value type(env->isolate(), args[3]);
+  Utf8Value type(isolate, args[3]);

   binding_data->store_data_object(
       std::string(*key, key.length()),
@@ -427,9 +429,11 @@ void Blob::StoreDataObject(const v8::FunctionCallbackInfo<Value>& args) {
 void Blob::RevokeObjectURL(const FunctionCallbackInfo<Value>& args) {
   CHECK_GE(args.Length(), 1);
   CHECK(args[0]->IsString());
-  BlobBindingData* binding_data = Realm::GetBindingData<BlobBindingData>(args);
-  Environment* env = Environment::GetCurrent(args);
-  Utf8Value input(env->isolate(), args[0].As<String>());
+  Realm* realm = Realm::GetCurrent(args);
+  BlobBindingData* binding_data = realm->GetBindingData<BlobBindingData>();
+  Isolate* isolate = realm->isolate();
+
+  Utf8Value input(isolate, args[0].As<String>());
   auto out = ada::parse(input.ToStringView());

   if (!out) {
@@ -449,36 +453,30 @@ void Blob::RevokeObjectURL(const FunctionCallbackInfo<Value>& args) {
 }

 void Blob::GetDataObject(const v8::FunctionCallbackInfo<Value>& args) {
-  BlobBindingData* binding_data = Realm::GetBindingData<BlobBindingData>(args);
-
-  Environment* env = Environment::GetCurrent(args);
   CHECK(args[0]->IsString());
+  Realm* realm = Realm::GetCurrent(args);
+  BlobBindingData* binding_data = realm->GetBindingData<BlobBindingData>();
+  Isolate* isolate = realm->isolate();

-  Utf8Value key(env->isolate(), args[0]);
+  Utf8Value key(isolate, args[0]);

   BlobBindingData::StoredDataObject stored =
       binding_data->get_data_object(std::string(*key, key.length()));
   if (stored.blob) {
     Local<String> type;
-    if (!String::NewFromUtf8(
-            env->isolate(),
-            stored.type.c_str(),
-            v8::NewStringType::kNormal,
-            static_cast<int>(stored.type.length())).ToLocal(&type)) {
+    if (!String::NewFromUtf8(isolate,
+                             stored.type.c_str(),
+                             v8::NewStringType::kNormal,
+                             static_cast<int>(stored.type.length()))
+             .ToLocal(&type)) {
       return;
     }

-    Local<Value> values[] = {
-      stored.blob->object(),
-      Uint32::NewFromUnsigned(env->isolate(), stored.length),
-      type
-    };
+    Local<Value> values[] = {stored.blob->object(),
+                             Uint32::NewFromUnsigned(isolate, stored.length),
+                             type};

-    args.GetReturnValue().Set(
-        Array::New(
-            env->isolate(),
-            values,
-            arraysize(values)));
+    args.GetReturnValue().Set(Array::New(isolate, values, arraysize(values)));
   }
 }

diff --git a/src/node_file-inl.h b/src/node_file-inl.h
index 2ba5906d614f1c..cdf21a4b3a6c22 100644
--- a/src/node_file-inl.h
+++ b/src/node_file-inl.h
@@ -277,9 +277,10 @@ FSReqBase* GetReqWrap(const v8::FunctionCallbackInfo<v8::Value>& args,
     return Unwrap<FSReqBase>(value.As<v8::Object>());
   }

-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = binding_data->env();
-  if (value->StrictEquals(env->fs_use_promises_symbol())) {
+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
+
+  if (value->StrictEquals(realm->isolate_data()->fs_use_promises_symbol())) {
     if (use_bigint) {
       return FSReqPromise<AliasedBigUint64Array>::New(binding_data, use_bigint);
     } else {
diff --git a/src/node_file.cc b/src/node_file.cc
index a94792e7e96b1f..285e532f0078e1 100644
--- a/src/node_file.cc
+++ b/src/node_file.cc
@@ -2105,14 +2105,14 @@ static void Open(const FunctionCallbackInfo<Value>& args) {
 }

 static void OpenFileHandle(const FunctionCallbackInfo<Value>& args) {
-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = binding_data->env();
-  Isolate* isolate = env->isolate();
+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
+  Environment* env = realm->env();

   const int argc = args.Length();
   CHECK_GE(argc, 3);

-  BufferValue path(isolate, args[0]);
+  BufferValue path(realm->isolate(), args[0]);
   CHECK_NOT_NULL(*path);

   CHECK(args[1]->IsInt32());
diff --git a/src/node_http2.cc b/src/node_http2.cc
index 0a8f2271f25689..070b40ae0a6ad6 100644
--- a/src/node_http2.cc
+++ b/src/node_http2.cc
@@ -2650,12 +2650,12 @@ void Http2Session::RefreshState(const FunctionCallbackInfo<Value>& args) {

 // Constructor for new Http2Session instances.
 void Http2Session::New(const FunctionCallbackInfo<Value>& args) {
-  Http2State* state = Realm::GetBindingData<Http2State>(args);
-  Environment* env = state->env();
+  Realm* realm = Realm::GetCurrent(args);
+  Http2State* state = realm->GetBindingData<Http2State>();
+
   CHECK(args.IsConstructCall());
-  SessionType type =
-      static_cast<SessionType>(
-          args[0]->Int32Value(env->context()).ToChecked());
+  SessionType type = static_cast<SessionType>(
+      args[0]->Int32Value(realm->context()).ToChecked());
   Http2Session* session = new Http2Session(state, args.This(), type);
   Debug(session, "session created");
 }
diff --git a/src/node_realm.h b/src/node_realm.h
index a75cd610692183..51fbd502a10eb6 100644
--- a/src/node_realm.h
+++ b/src/node_realm.h
@@ -101,6 +101,8 @@ class Realm : public MemoryRetainer {
       const v8::FunctionCallbackInfo<v8::Value>& info);
   template <typename T>
   static inline T* GetBindingData(v8::Local<v8::Context> context);
+  template <typename T>
+  inline T* GetBindingData();
   inline BindingDataStore* binding_data_store();

   // The BaseObject count is a debugging helper that makes sure that there are
diff --git a/src/node_url.cc b/src/node_url.cc
index da8790c1d1843e..85147ccd1c0d59 100644
--- a/src/node_url.cc
+++ b/src/node_url.cc
@@ -229,17 +229,16 @@ void BindingData::Parse(const FunctionCallbackInfo<Value>& args) {
   CHECK(args[0]->IsString());  // input
   // args[1] // base url

-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = Environment::GetCurrent(args);
-  HandleScope handle_scope(env->isolate());
-  Context::Scope context_scope(env->context());
+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
+  Isolate* isolate = realm->isolate();

-  Utf8Value input(env->isolate(), args[0]);
+  Utf8Value input(isolate, args[0]);
   ada::result<ada::url_aggregator> base;
   ada::url_aggregator* base_pointer = nullptr;
   if (args[1]->IsString()) {
-    base = ada::parse<ada::url_aggregator>(
-        Utf8Value(env->isolate(), args[1]).ToString());
+    base =
+        ada::parse<ada::url_aggregator>(Utf8Value(isolate, args[1]).ToString());
     if (!base) {
       return args.GetReturnValue().Set(false);
     }
@@ -255,8 +254,7 @@ void BindingData::Parse(const FunctionCallbackInfo<Value>& args) {
   binding_data->UpdateComponents(out->get_components(), out->type);

   args.GetReturnValue().Set(
-      ToV8Value(env->context(), out->get_href(), env->isolate())
-          .ToLocalChecked());
+      ToV8Value(realm->context(), out->get_href(), isolate).ToLocalChecked());
 }

 void BindingData::Update(const FunctionCallbackInfo<Value>& args) {
@@ -264,12 +262,12 @@ void BindingData::Update(const FunctionCallbackInfo<Value>& args) {
   CHECK(args[1]->IsNumber());  // action type
   CHECK(args[2]->IsString());  // new value

-  BindingData* binding_data = Realm::GetBindingData<BindingData>(args);
-  Environment* env = Environment::GetCurrent(args);
-  Isolate* isolate = env->isolate();
+  Realm* realm = Realm::GetCurrent(args);
+  BindingData* binding_data = realm->GetBindingData<BindingData>();
+  Isolate* isolate = realm->isolate();

   enum url_update_action action = static_cast<url_update_action>(
-      args[1]->Uint32Value(env->context()).FromJust());
+      args[1]->Uint32Value(realm->context()).FromJust());
   Utf8Value input(isolate, args[0].As<String>());
   Utf8Value new_value(isolate, args[2].As<String>());

@@ -330,8 +328,7 @@ void BindingData::Update(const FunctionCallbackInfo<Value>& args) {
   binding_data->UpdateComponents(out->get_components(), out->type);

   args.GetReturnValue().Set(
-      ToV8Value(env->context(), out->get_href(), env->isolate())
-          .ToLocalChecked());
+      ToV8Value(realm->context(), out->get_href(), isolate).ToLocalChecked());
 }

 void BindingData::UpdateComponents(const ada::url_components& components,
diff --git a/src/quic/bindingdata.cc b/src/quic/bindingdata.cc
index af3642c1c16f7e..c97d781ca54ad9 100644
--- a/src/quic/bindingdata.cc
+++ b/src/quic/bindingdata.cc
@@ -25,7 +25,7 @@ using v8::Value;
 namespace quic {

 BindingData& BindingData::Get(Environment* env) {
-  return *Realm::GetBindingData<BindingData>(env->context());
+  return *(env->principal_realm()->GetBindingData<BindingData>());
 }

 BindingData::operator ngtcp2_mem() {

From c441f5a0970ea55bf11114d1efb65e83164e8da6 Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Fri, 4 Aug 2023 19:54:59 +0200
Subject: [PATCH 016/125] test: add expectSyncExitWithoutError() and
 expectSyncExit() utils

These can be used to check the state and the output of a child process
launched with `spawnSync()`. They log additional information about the
child process when the check fails to facilitate debugging test
failures.

PR-URL: https://github.com/nodejs/node/pull/49020
Reviewed-By: Luigi Pinca
---
 test/common/README.md        | 38 +++++++++++++++++
 test/common/child_process.js | 80 ++++++++++++++++++++++++++++++++++++
 2 files changed, 118 insertions(+)

diff --git a/test/common/README.md b/test/common/README.md
index db56e4744dd1b1..3d35edf5510186 100644
--- a/test/common/README.md
+++ b/test/common/README.md
@@ -6,6 +6,7 @@ This directory contains modules used to test the Node.js implementation.

 * [ArrayStream module](#arraystream-module)
 * [Benchmark module](#benchmark-module)
+* [Child process module](#child-process-module)
 * [Common module API](#common-module-api)
 * [Countdown module](#countdown-module)
 * [CPU Profiler module](#cpu-profiler-module)
@@ -35,6 +36,42 @@ The `benchmark` module is used by tests to run benchmarks.
 * `env` [\<Object>][] Environment variables to be applied during the run.

+## Child Process Module
+
+The `child_process` module is used by tests that launch child processes.
+
+### `expectSyncExit(child, options)`
+
+Checks if a _synchronous_ child process runs in the way expected. If it does
+not, print the stdout and stderr output from the child process and additional
+information about it to the stderr of the current process before throwing
+an error. This helps gather more information about test failures
+coming from child processes.
+
+* `child` [\<ChildProcess>][]: a `ChildProcess` instance
+  returned by `child_process.spawnSync()`.
+* `options` [\<Object>][]
+  * `status` [\<number>][] Expected `child.status`
+  * `signal` [\<string>][] | `null` Expected `child.signal`
+  * `stderr` [\<string>][] | [\<RegExp>][] |
+    [\<Function>][] Optional. If it's a string, check that the output
+    to the stderr of the child process is exactly the same as the string. If
+    it's a regular expression, check that the stderr matches it. If it's a
+    function, invoke it with the stderr output as a string and check
+    that it returns true. The function can just throw errors (e.g. assertion
+    errors) to provide more information if the check fails.
+  * `stdout` [\<string>][] | [\<RegExp>][] |
+    [\<Function>][] Optional. Similar to `stderr` but for the stdout.
+  * `trim` [\<boolean>][] Optional. Whether this method should trim
+    out the whitespace characters when checking `stderr` and `stdout` outputs.
+    Defaults to `false`.
+
+### `expectSyncExitWithoutError(child[, options])`
+
+Similar to `expectSyncExit()` with the `status` expected to be 0 and
+`signal` expected to be `null`. Any other optional options are passed
+into `expectSyncExit()`.
+
 ## Common Module API

 The `common` module is used by tests for consistency across repeated
@@ -1111,6 +1148,7 @@ See [the WPT tests README][] for details.
 [<ArrayBufferView>]: https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView
 [<Buffer>]: https://nodejs.org/api/buffer.html#buffer_class_buffer
 [<BufferSource>]: https://developer.mozilla.org/en-US/docs/Web/API/BufferSource
+[<ChildProcess>]: ../../doc/api/child_process.md#class-childprocess
 [<Error>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error
 [<Function>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function
 [<Object>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object
diff --git a/test/common/child_process.js b/test/common/child_process.js
index 799c963a7ed7dc..a53dddc19f3216 100644
--- a/test/common/child_process.js
+++ b/test/common/child_process.js
@@ -2,6 +2,7 @@

 const assert = require('assert');
 const common = require('./');
+const util = require('util');

 // Workaround for Windows Server 2008R2
 // When CMD is used to launch a process and CMD is killed too quickly, the
@@ -41,9 +42,88 @@ function logAfterTime(time) {
   }, time);
 }

+function checkOutput(str, check) {
+  if ((check instanceof RegExp && !check.test(str)) ||
+      (typeof check === 'string' && check !== str)) {
+    return { passed: false, reason: `did not match ${util.inspect(check)}` };
+  }
+  if (typeof check === 'function') {
+    try {
+      check(str);
+    } catch (error) {
+      return {
+        passed: false,
+        reason: `did not match expectation, checker throws:\n${util.inspect(error)}`,
+      };
+    }
+  }
+  return { passed: true };
+}
+
+function expectSyncExit(child, {
+  status,
+  signal,
+  stderr: stderrCheck,
+  stdout: stdoutCheck,
+  trim = false,
+}) {
+  const failures = [];
+  let stderrStr, stdoutStr;
+  if (status !== undefined && child.status !== status) {
+    failures.push(`- process terminated with status ${child.status}, expected ${status}`);
+  }
+  if (signal !== undefined && child.signal !== signal) {
+    failures.push(`- process terminated with signal ${child.signal}, expected ${signal}`);
+  }
+
+  function logAndThrow() {
+    const tag = `[process ${child.pid}]:`;
+    console.error(`${tag} --- stderr ---`);
+    console.error(stderrStr === undefined ? child.stderr.toString() : stderrStr);
+    console.error(`${tag} --- stdout ---`);
+    console.error(stdoutStr === undefined ? child.stdout.toString() : stdoutStr);
+    console.error(`${tag} status = ${child.status}, signal = ${child.signal}`);
+    throw new Error(`${failures.join('\n')}`);
+  }
+
+  // If status and signal are not matching expectations, fail early.
+  if (failures.length !== 0) {
+    logAndThrow();
+  }
+
+  if (stderrCheck !== undefined) {
+    stderrStr = child.stderr.toString();
+    const { passed, reason } = checkOutput(trim ? stderrStr.trim() : stderrStr, stderrCheck);
+    if (!passed) {
+      failures.push(`- stderr ${reason}`);
+    }
+  }
+  if (stdoutCheck !== undefined) {
+    stdoutStr = child.stdout.toString();
+    const { passed, reason } = checkOutput(trim ? stdoutStr.trim() : stdoutStr, stdoutCheck);
+    if (!passed) {
+      failures.push(`- stdout ${reason}`);
+    }
+  }
+  if (failures.length !== 0) {
+    logAndThrow();
+  }
+  return { child, stderr: stderrStr, stdout: stdoutStr };
+}
+
+function expectSyncExitWithoutError(child, options) {
+  return expectSyncExit(child, {
+    status: 0,
+    signal: null,
+    ...options,
+  });
+}
+
 module.exports = {
   cleanupStaleProcess,
   logAfterTime,
   kExpiringChildRunTime,
   kExpiringParentTimer,
+  expectSyncExit,
+  expectSyncExitWithoutError,
 };

From 47d24f144b372b20b7f9a4c4fdb2bd3df151f79c Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Fri, 4 Aug 2023 19:58:32 +0200
Subject: [PATCH 017/125] test: use expectSyncExit{WithErrors} in snapshot
 tests

...and replace the similar code added for logging.

PR-URL: https://github.com/nodejs/node/pull/49020
Reviewed-By: Luigi Pinca
---
 test/parallel/test-snapshot-api.js     | 18 +++--
 test/parallel/test-snapshot-basic.js   | 42 ++++-------
 test/parallel/test-snapshot-warning.js | 96 ++++++++++++--------------
 3 files changed, 69 insertions(+), 87 deletions(-)

diff --git a/test/parallel/test-snapshot-api.js b/test/parallel/test-snapshot-api.js
index 38b17add3db9c1..1068ae3b4c7b46 100644
--- a/test/parallel/test-snapshot-api.js
+++ b/test/parallel/test-snapshot-api.js
@@ -7,6 +7,7 @@ const assert = require('assert');
 const { spawnSync } = require('child_process');
 const tmpdir = require('../common/tmpdir');
 const fixtures = require('../common/fixtures');
+const { expectSyncExitWithoutError } = require('../common/child_process');
 const fs = require('fs');
 const v8 = require('v8');

@@ -36,11 +37,8 @@ const entry = fixtures.path('snapshot', 'v8-startup-snapshot-api.js');
   ], {
     cwd: tmpdir.path
   });
-  if (child.status !== 0) {
-    console.log(child.stderr.toString());
-    console.log(child.stdout.toString());
-    assert.strictEqual(child.status, 0);
-  }
+
+  expectSyncExitWithoutError(child);
   const stats = fs.statSync(tmpdir.resolve('snapshot.blob'));
   assert(stats.isFile());
 }
@@ -58,9 +56,9 @@ const entry = fixtures.path('snapshot', 'v8-startup-snapshot-api.js');
     }
   });

-  const stdout = child.stdout.toString().trim();
-  const stderr = child.stderr.toString().trim();
-  assert.strictEqual(stderr, 'Reading book1.en_US.txt');
-  assert.strictEqual(stdout, 'This is book1.en_US.txt');
-  assert.strictEqual(child.status, 0);
+  expectSyncExitWithoutError(child, {
+    stderr: 'Reading book1.en_US.txt',
+    stdout: 'This is book1.en_US.txt',
+    trim: true
+  });
 }
diff --git a/test/parallel/test-snapshot-basic.js b/test/parallel/test-snapshot-basic.js
index 6f1d3c21ae1772..cd87caa3fcbce3 100644
--- a/test/parallel/test-snapshot-basic.js
+++ b/test/parallel/test-snapshot-basic.js
@@ -8,6 +8,7 @@ const assert = require('assert');
 const { spawnSync } = require('child_process');
 const tmpdir = require('../common/tmpdir');
 const fixtures = require('../common/fixtures');
+const { expectSyncExitWithoutError, expectSyncExit } = require('../common/child_process');
 const fs = require('fs');

 tmpdir.refresh();
@@ -15,7 +16,7 @@ tmpdir.refresh();
 let snapshotScript = 'node:embedded_snapshot_main';
 if (!process.config.variables.node_use_node_snapshot) {
   // Check that Node.js built without an embedded snapshot
-  // exits with 1 when node:embedded_snapshot_main is specified
+  // exits with 9 when node:embedded_snapshot_main is specified
   // as snapshot entry point.
   const child = spawnSync(process.execPath, [
     '--build-snapshot',
@@ -24,10 +25,11 @@ if (!process.config.variables.node_use_node_snapshot) {
     cwd: tmpdir.path
   });

-  assert.match(
-    child.stderr.toString(),
-    /Node\.js was built without embedded snapshot/);
-  assert.strictEqual(child.status, 9);
+  expectSyncExit(child, {
+    status: 9,
+    signal: null,
+    stderr: /Node\.js was built without embedded snapshot/
+  });

   snapshotScript = fixtures.path('empty.js');
 }
@@ -41,12 +43,7 @@ if (!process.config.variables.node_use_node_snapshot) {
   ], {
     cwd: tmpdir.path
   });
-  if (child.status !== 0) {
-    console.log(child.stderr.toString());
-    console.log(child.stdout.toString());
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
+  expectSyncExitWithoutError(child);
   const stats = fs.statSync(tmpdir.resolve('snapshot.blob'));
   assert(stats.isFile());
 }
@@ -63,12 +60,7 @@ const blobPath = tmpdir.resolve('my-snapshot.blob');
   ], {
     cwd: tmpdir.path
   });
-  if (child.status !== 0) {
-    console.log(child.stderr.toString());
-    console.log(child.stdout.toString());
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
+  expectSyncExitWithoutError(child);
   const stats = fs.statSync(blobPath);
   assert(stats.isFile());
 }
@@ -82,13 +74,7 @@ const blobPath = tmpdir.resolve('my-snapshot.blob');
   ], {
     cwd: tmpdir.path
   });
-
-  if (child.status !== 0) {
-    console.log(child.stderr.toString());
-    console.log(child.stdout.toString());
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
+  expectSyncExitWithoutError(child);
   assert(child.stdout.toString().includes('--help'));
 }

@@ -105,7 +91,9 @@ const blobPath = tmpdir.resolve('my-snapshot.blob');
   });

   // Check that it is a noop.
-  assert.strictEqual(child.stdout.toString().trim(), '');
-  assert.strictEqual(child.stderr.toString().trim(), '');
-  assert.strictEqual(child.status, 0);
+  expectSyncExitWithoutError(child, {
+    stderr: '',
+    stdout: '',
+    trim: true
+  });
 }
diff --git a/test/parallel/test-snapshot-warning.js b/test/parallel/test-snapshot-warning.js
index 2ca87f1ef5f055..444f65af0b8b35 100644
--- a/test/parallel/test-snapshot-warning.js
+++ b/test/parallel/test-snapshot-warning.js
@@ -10,6 +10,7 @@ const assert = require('assert');
 const { spawnSync } = require('child_process');
 const tmpdir = require('../common/tmpdir');
 const fixtures = require('../common/fixtures');
+const { expectSyncExitWithoutError } = require('../common/child_process');
 const fs = require('fs');

 const warningScript = fixtures.path('snapshot', 'warning.js');
@@ -27,12 +28,7 @@ tmpdir.refresh();
   ], {
     cwd: tmpdir.path
   });
-  console.log('[stderr]:', child.stderr.toString());
-  console.log('[stdout]:', child.stdout.toString());
-  if (child.status !== 0) {
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
+  expectSyncExitWithoutError(child);
   const stats = fs.statSync(blobPath);
   assert(stats.isFile());

@@ -43,14 +39,14 @@ tmpdir.refresh();
   ], {
     cwd: tmpdir.path
   });
-  console.log('[stderr]:', child.stderr.toString());
-  console.log('[stdout]:', child.stdout.toString());
-  if (child.status !== 0) {
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
-  const match = child.stderr.toString().match(/Warning: test warning/g);
-  assert.strictEqual(match.length, 1);
+  expectSyncExitWithoutError(child, {
+    stderr(output) {
+      const match = output.match(/Warning: test warning/g);
+      assert.strictEqual(match.length, 1);
+      return true;
+    }
+  });
+
 }

 tmpdir.refresh();
@@ -65,18 +61,17 @@ tmpdir.refresh();
   ], {
     cwd: tmpdir.path
   });
-  console.log('[stderr]:', child.stderr.toString());
-  console.log('[stdout]:', child.stdout.toString());
-  if (child.status !== 0) {
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
+  expectSyncExitWithoutError(child, {
+    stderr(output) {
+      let match = output.match(/Warning: test warning/g);
+      assert.strictEqual(match.length, 1);
+      match = output.match(/Use `node --trace-warnings/g);
+      assert.strictEqual(match.length, 1);
+      return true;
+    }
+  });
   const stats = fs.statSync(blobPath);
   assert(stats.isFile());
-  let match = child.stderr.toString().match(/Warning: test warning/g);
-  assert.strictEqual(match.length, 1);
-  match = child.stderr.toString().match(/Use `node --trace-warnings/g);
-  assert.strictEqual(match.length, 1);

   child = spawnSync(process.execPath, [
     '--snapshot-blob',
@@ -85,17 +80,17 @@ tmpdir.refresh();
   ], {
     cwd: tmpdir.path
   });
-  console.log('[stderr]:', child.stderr.toString());
-  console.log('[stdout]:', child.stdout.toString());
-  if (child.status !== 0) {
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
-  // Warnings should not be handled more than once.
-  match = child.stderr.toString().match(/Warning: test warning/g);
-  assert.strictEqual(match.length, 1);
-  match = child.stderr.toString().match(/Use `node --trace-warnings/g);
-  assert.strictEqual(match.length, 1);
+
+  expectSyncExitWithoutError(child, {
+    stderr(output) {
+      // Warnings should not be handled more than once.
+      let match = output.match(/Warning: test warning/g);
+      assert.strictEqual(match.length, 1);
+      match = output.match(/Use `node --trace-warnings/g);
+      assert.strictEqual(match.length, 1);
+      return true;
+    }
+  });
 }

 tmpdir.refresh();
@@ -114,25 +109,26 @@ tmpdir.refresh();
   ], {
     cwd: tmpdir.path
   });
-  console.log('[stderr]:', child.stderr.toString());
-  console.log('[stdout]:', child.stdout.toString());
-  if (child.status !== 0) {
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
+
+  expectSyncExitWithoutError(child, {
+    stderr(output) {
+      assert.doesNotMatch(output, /Warning: test warning/);
+    }
+  });
+
   const stats = fs.statSync(blobPath);
   assert(stats.isFile());
+
   const warnings1 = fs.readFileSync(warningFile1, 'utf8');
   console.log(warningFile1, ':', warnings1);
   let match = warnings1.match(/Warning: test warning/g);
   assert.strictEqual(match.length, 1);
   match = warnings1.match(/Use `node --trace-warnings/g);
   assert.strictEqual(match.length, 1);
-  assert.doesNotMatch(child.stderr.toString(), /Warning: test warning/);
-
   fs.rmSync(warningFile1, { maxRetries: 3, recursive: false, force: true });
+
   child = spawnSync(process.execPath, [
     '--snapshot-blob',
     blobPath,
@@ -142,12 +138,13 @@ tmpdir.refresh();
   ], {
     cwd: tmpdir.path
   });
-  console.log('[stderr]:', child.stderr.toString());
-  console.log('[stdout]:', child.stdout.toString());
-  if (child.status !== 0) {
-    console.log(child.signal);
-    assert.strictEqual(child.status, 0);
-  }
+
+  expectSyncExitWithoutError(child, {
+    stderr(output) {
+      assert.doesNotMatch(output, /Warning: test warning/);
+      return true;
+    }
+  });

   assert(!fs.existsSync(warningFile1));
   const warnings2 = fs.readFileSync(warningFile2, 'utf8');
@@ -156,5 +153,4 @@ tmpdir.refresh();
   assert.strictEqual(match.length, 1);
   match = warnings2.match(/Use `node --trace-warnings/g);
   assert.strictEqual(match.length, 1);
-  assert.doesNotMatch(child.stderr.toString(), /Warning: test warning/);
 }

From de103a4686c827999e96c20824f23be1d110941d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Wed, 16 Aug 2023 15:49:44 +0200
Subject: [PATCH 018/125] test: add test for effect of UV_THREADPOOL_SIZE

This (not particularly elegant) native addon tests the effect of
UV_THREADPOOL_SIZE on node-api. The test fails if Node.js allows
more than UV_THREADPOOL_SIZE async tasks to run concurrently, or
if it limits the number of concurrent async tasks to anything less
than UV_THREADPOOL_SIZE.
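A rough user-land analogue of what the addon verifies, using `crypto.pbkdf2` (which runs on the libuv threadpool); the file name `saturate-pool.js` and the iteration count are illustrative:

```js
// Run with e.g.: UV_THREADPOOL_SIZE=2 node saturate-pool.js
const { pbkdf2 } = require('crypto');

const size = Number(process.env.UV_THREADPOOL_SIZE) || 4;
const start = Date.now();

// Queue one more task than there are threadpool threads.
for (let i = 0; i <= size; i++) {
  pbkdf2('secret', 'salt', 2 ** 21, 64, 'sha512', (err) => {
    if (err) throw err;
    // The first `size` callbacks complete in one wave; the extra task
    // only finishes after a thread frees up, roughly doubling its latency.
    console.log(`task ${i} done after ${Date.now() - start} ms`);
  });
}
```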
PR-URL: https://github.com/nodejs/node/pull/49165
Reviewed-By: Yagiz Nizipli
Reviewed-By: Michael Dawson
---
 .../test_uv_threadpool_size/binding.gyp       |   8 +
 test/node-api/test_uv_threadpool_size/test.js |   7 +
 .../test_uv_threadpool_size.c                 | 189 ++++++++++++++++++
 3 files changed, 204 insertions(+)
 create mode 100644 test/node-api/test_uv_threadpool_size/binding.gyp
 create mode 100644 test/node-api/test_uv_threadpool_size/test.js
 create mode 100644 test/node-api/test_uv_threadpool_size/test_uv_threadpool_size.c

diff --git a/test/node-api/test_uv_threadpool_size/binding.gyp b/test/node-api/test_uv_threadpool_size/binding.gyp
new file mode 100644
index 00000000000000..55eff885e1ca1f
--- /dev/null
+++ b/test/node-api/test_uv_threadpool_size/binding.gyp
@@ -0,0 +1,8 @@
+{
+  "targets": [
+    {
+      "target_name": "test_uv_threadpool_size",
+      "sources": [ "test_uv_threadpool_size.c" ]
+    }
+  ]
+}
diff --git a/test/node-api/test_uv_threadpool_size/test.js b/test/node-api/test_uv_threadpool_size/test.js
new file mode 100644
index 00000000000000..9f219e2e87aead
--- /dev/null
+++ b/test/node-api/test_uv_threadpool_size/test.js
@@ -0,0 +1,7 @@
+'use strict';
+const common = require('../../common');
+const { test } = require(`./build/${common.buildType}/test_uv_threadpool_size`);
+
+const uvThreadpoolSize = parseInt(process.env.EXPECTED_UV_THREADPOOL_SIZE ||
+                                  process.env.UV_THREADPOOL_SIZE, 10) || 4;
+test(uvThreadpoolSize);
diff --git a/test/node-api/test_uv_threadpool_size/test_uv_threadpool_size.c b/test/node-api/test_uv_threadpool_size/test_uv_threadpool_size.c
new file mode 100644
index 00000000000000..5e9e633d25b769
--- /dev/null
+++ b/test/node-api/test_uv_threadpool_size/test_uv_threadpool_size.c
@@ -0,0 +1,189 @@
+#undef NDEBUG
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <uv.h>
+#include "../../js-native-api/common.h"
+
+typedef struct {
+  uv_mutex_t mutex;
+  uint32_t threadpool_size;
+  uint32_t n_tasks_started;
+  uint32_t n_tasks_exited;
+  uint32_t n_tasks_finalized;
+  bool observed_saturation;
+} async_shared_data;
+
+typedef struct {
+  uint32_t task_id;
+  async_shared_data* shared_data;
+  napi_async_work request;
+} async_carrier;
+
+static inline bool all_tasks_started(async_shared_data* d) {
+  assert(d->n_tasks_started <= d->threadpool_size + 1);
+  return d->n_tasks_started == d->threadpool_size + 1;
+}
+
+static inline bool all_tasks_exited(async_shared_data* d) {
+  assert(d->n_tasks_exited <= d->n_tasks_started);
+  return all_tasks_started(d) && d->n_tasks_exited == d->n_tasks_started;
+}
+
+static inline bool all_tasks_finalized(async_shared_data* d) {
+  assert(d->n_tasks_finalized <= d->n_tasks_exited);
+  return all_tasks_exited(d) && d->n_tasks_finalized == d->n_tasks_exited;
+}
+
+static inline bool still_saturating(async_shared_data* d) {
+  return d->n_tasks_started < d->threadpool_size;
+}
+
+static inline bool threadpool_saturated(async_shared_data* d) {
+  return d->n_tasks_started == d->threadpool_size && d->n_tasks_exited == 0;
+}
+
+static inline bool threadpool_desaturating(async_shared_data* d) {
+  return d->n_tasks_started >= d->threadpool_size && d->n_tasks_exited != 0;
+}
+
+static inline void print_info(const char* label, async_carrier* c) {
+  async_shared_data* d = c->shared_data;
+  printf("%s task_id=%u n_tasks_started=%u n_tasks_exited=%u "
+         "n_tasks_finalized=%u observed_saturation=%d\n",
+         label,
+         c->task_id,
+         d->n_tasks_started,
+         d->n_tasks_exited,
+         d->n_tasks_finalized,
+         d->observed_saturation);
+}
+
+static void Execute(napi_env env, void* data) {
+  async_carrier* c = (async_carrier*)data;
+  async_shared_data* d = c->shared_data;
+
+  // As long as fewer than threadpool_size async tasks have been started, more
+  // should be started (eventually). Only once that happens should scheduled
+  // async tasks remain queued.
+  uv_mutex_lock(&d->mutex);
+  bool should_be_concurrent = still_saturating(d);
+  d->n_tasks_started++;
+  assert(d->n_tasks_started <= d->threadpool_size + 1);
+
+  print_info("start", c);
+
+  if (should_be_concurrent) {
+    // Wait for the thread pool to be saturated. This is not an elegant way of
+    // doing so, but it really does not matter much here.
+    while (still_saturating(d)) {
+      print_info("waiting", c);
+      uv_mutex_unlock(&d->mutex);
+      uv_sleep(100);
+      uv_mutex_lock(&d->mutex);
+    }
+
+    // One async task will observe that the threadpool is saturated, that is,
+    // that threadpool_size tasks have been started and none have exited yet.
+    // That task will be the first to exit.
+    if (!d->observed_saturation) {
+      assert(threadpool_saturated(d));
+      d->observed_saturation = true;
+    } else {
+      assert(threadpool_saturated(d) || threadpool_desaturating(d));
+    }
+  } else {
+    // If this task is not among the first threadpool_size tasks, it should not
+    // have been started unless other tasks have already finished.
+    assert(threadpool_desaturating(d));
+  }
+
+  print_info("exit", c);
+
+  // Allow other tasks to access the shared data. If the thread pool is actually
+  // larger than threadpool_size, this allows an extraneous task to start, which
+  // will lead to an assertion error.
+  uv_mutex_unlock(&d->mutex);
+  uv_sleep(1000);
+  uv_mutex_lock(&d->mutex);
+
+  d->n_tasks_exited++;
+  uv_mutex_unlock(&d->mutex);
+}
+
+static void Complete(napi_env env, napi_status status, void* data) {
+  async_carrier* c = (async_carrier*)data;
+  async_shared_data* d = c->shared_data;
+
+  if (status != napi_ok) {
+    napi_throw_type_error(env, NULL, "Execute callback failed.");
+    return;
+  }
+
+  uv_mutex_lock(&d->mutex);
+  assert(threadpool_desaturating(d));
+  d->n_tasks_finalized++;
+  print_info("finalize", c);
+  if (all_tasks_finalized(d)) {
+    uv_mutex_unlock(&d->mutex);
+    uv_mutex_destroy(&d->mutex);
+    free(d);
+  } else {
+    uv_mutex_unlock(&d->mutex);
+  }
+
+  NODE_API_CALL_RETURN_VOID(env, napi_delete_async_work(env, c->request));
+  free(c);
+}
+
+static napi_value Test(napi_env env, napi_callback_info info) {
+  size_t argc = 1;
+  napi_value argv[1];
+  napi_value this;
+  void* data;
+  NODE_API_CALL(env, napi_get_cb_info(env, info, &argc, argv, &this, &data));
+  NODE_API_ASSERT(env, argc >= 1, "Not enough arguments, expected 1.");
+
+  async_shared_data* shared_data = calloc(1, sizeof(async_shared_data));
+  assert(shared_data != NULL);
+  int ret = uv_mutex_init(&shared_data->mutex);
+  assert(ret == 0);
+
+  napi_valuetype t;
+  NODE_API_CALL(env, napi_typeof(env, argv[0], &t));
+  NODE_API_ASSERT(
+      env, t == napi_number, "Wrong first argument, integer expected.");
+  NODE_API_CALL(
+      env, napi_get_value_uint32(env, argv[0], &shared_data->threadpool_size));
+
+  napi_value resource_name;
+  NODE_API_CALL(env,
+                napi_create_string_utf8(
+                    env, "TestResource", NAPI_AUTO_LENGTH, &resource_name));
+
+  for (uint32_t i = 0; i <= shared_data->threadpool_size; i++) {
+    async_carrier* carrier = malloc(sizeof(async_carrier));
+    assert(carrier != NULL);
+    carrier->task_id = i;
+    carrier->shared_data = shared_data;
+    NODE_API_CALL(env,
                  napi_create_async_work(env,
+                                         NULL,
+                                         resource_name,
+                                         Execute,
+                                         Complete,
+                                         carrier,
+                                         &carrier->request));
+    NODE_API_CALL(env, napi_queue_async_work(env, carrier->request));
+  }
+
+  return NULL;
+}
+
+static napi_value Init(napi_env env, napi_value exports) {
+  napi_property_descriptor desc = DECLARE_NODE_API_PROPERTY("test", Test);
+  NODE_API_CALL(env, napi_define_properties(env, exports, 1, &desc));
+  return exports;
+}
+
+NAPI_MODULE(NODE_GYP_MODULE_NAME, Init)

From c795083232948b7c5fd045eb13039ba293426ae8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Wed, 16 Aug 2023 16:54:05 +0200
Subject: [PATCH 019/125] crypto: remove default encoding from Hash/Hmac

getDefaultEncoding() always returns 'buffer' in Node.js 20. It requires
some careful justification but the default encoding can be eliminated
from hash.js entirely. The reasoning is almost identical to that in
https://github.com/nodejs/node/pull/49145 so I won't repeat it here.

Refs: https://github.com/nodejs/node/pull/47182
PR-URL: https://github.com/nodejs/node/pull/49167
Reviewed-By: Yagiz Nizipli
Reviewed-By: Filip Skokan
---
 lib/internal/crypto/hash.js | 17 +++++++----------
 1 file changed, 7 insertions(+), 10 deletions(-)

diff --git a/lib/internal/crypto/hash.js b/lib/internal/crypto/hash.js
index c8e9af003086c8..57fcb63518d52d 100644
--- a/lib/internal/crypto/hash.js
+++ b/lib/internal/crypto/hash.js
@@ -14,7 +14,6 @@ const {
 } = internalBinding('crypto');

 const {
-  getDefaultEncoding,
   getStringOption,
   jobPromise,
   normalizeHashName,
@@ -95,8 +94,6 @@ Hash.prototype._flush = function _flush(callback) {
 };

 Hash.prototype.update = function update(data, encoding) {
-  encoding = encoding || getDefaultEncoding();
-
   const state = this[kState];
   if (state[kFinalized])
     throw new ERR_CRYPTO_HASH_FINALIZED();
@@ -118,10 +115,9 @@ Hash.prototype.digest = function digest(outputEncoding) {
   const state = this[kState];
   if (state[kFinalized])
     throw new ERR_CRYPTO_HASH_FINALIZED();
-  outputEncoding = outputEncoding || getDefaultEncoding();

-  // Explicit conversion for backward compatibility.
-  const ret = this[kHandle].digest(`${outputEncoding}`);
+  // Explicit conversion of truthy values for backward compatibility.
+  const ret = this[kHandle].digest(outputEncoding && `${outputEncoding}`);
   state[kFinalized] = true;
   return ret;
 };
@@ -147,15 +143,16 @@ Hmac.prototype.update = Hash.prototype.update;

 Hmac.prototype.digest = function digest(outputEncoding) {
   const state = this[kState];
-  outputEncoding = outputEncoding || getDefaultEncoding();

   if (state[kFinalized]) {
     const buf = Buffer.from('');
-    return outputEncoding === 'buffer' ? buf : buf.toString(outputEncoding);
+    if (outputEncoding && outputEncoding !== 'buffer')
+      return buf.toString(outputEncoding);
+    return buf;
   }

-  // Explicit conversion for backward compatibility.
-  const ret = this[kHandle].digest(`${outputEncoding}`);
+  // Explicit conversion of truthy values for backward compatibility.
+  const ret = this[kHandle].digest(outputEncoding && `${outputEncoding}`);
   state[kFinalized] = true;
   return ret;
 };

From 772496c030fa2f7026d47dcbd59587c01d735e61 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?=
Date: Wed, 16 Aug 2023 17:23:06 +0200
Subject: [PATCH 020/125] crypto: remove default encoding from DiffieHellman

getDefaultEncoding() always returns 'buffer' in Node.js 20. In
diffiehellman.js, this value is always used as input to either toBuf(),
encode(), or getArrayBufferOrView(). All of these functions treat any
falsy encoding just like 'buffer', so we can safely remove the calls to
getDefaultEncoding().
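A sketch of the equivalence these changes rely on: with the default encoding pinned to 'buffer', omitting the encoding argument and passing 'buffer' explicitly yield the same results (the choice of the well-known `modp14` group here is illustrative):

```js
const { getDiffieHellman } = require('crypto');

const dh = getDiffieHellman('modp14');
dh.generateKeys();

const pub = dh.getPublicKey();         // no encoding -> Buffer
const pubHex = dh.getPublicKey('hex'); // explicit encoding -> string

console.log(Buffer.isBuffer(pub));           // true
console.log(pub.toString('hex') === pubHex); // true
```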
Refs: https://github.com/nodejs/node/pull/47182 PR-URL: https://github.com/nodejs/node/pull/49169 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Filip Skokan --- lib/internal/crypto/diffiehellman.js | 20 +------------------- 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/lib/internal/crypto/diffiehellman.js b/lib/internal/crypto/diffiehellman.js index 3f52e78d7a3036..59bbf8ff71233c 100644 --- a/lib/internal/crypto/diffiehellman.js +++ b/lib/internal/crypto/diffiehellman.js @@ -51,7 +51,6 @@ const { const { getArrayBufferOrView, - getDefaultEncoding, jobPromise, toBuf, kHandle, @@ -97,10 +96,6 @@ function DiffieHellman(sizeOrKey, keyEncoding, generator, genEncoding) { keyEncoding = false; } - const encoding = getDefaultEncoding(); - keyEncoding = keyEncoding || encoding; - genEncoding = genEncoding || encoding; - if (typeof sizeOrKey !== 'number') sizeOrKey = toBuf(sizeOrKey, keyEncoding); @@ -148,7 +143,6 @@ DiffieHellmanGroup.prototype.generateKeys = function dhGenerateKeys(encoding) { const keys = this[kHandle].generateKeys(); - encoding = encoding || getDefaultEncoding(); return encode(keys, encoding); } @@ -158,9 +152,6 @@ DiffieHellmanGroup.prototype.computeSecret = dhComputeSecret; function dhComputeSecret(key, inEnc, outEnc) { - const encoding = getDefaultEncoding(); - inEnc = inEnc || encoding; - outEnc = outEnc || encoding; key = getArrayBufferOrView(key, 'key', inEnc); const ret = this[kHandle].computeSecret(key); if (typeof ret === 'string') @@ -175,7 +166,6 @@ DiffieHellmanGroup.prototype.getPrime = function dhGetPrime(encoding) { const prime = this[kHandle].getPrime(); - encoding = encoding || getDefaultEncoding(); return encode(prime, encoding); } @@ -186,7 +176,6 @@ DiffieHellmanGroup.prototype.getGenerator = function dhGetGenerator(encoding) { const generator = this[kHandle].getGenerator(); - encoding = encoding || getDefaultEncoding(); return encode(generator, encoding); } @@ -197,7 +186,6 @@ DiffieHellmanGroup.prototype.getPublicKey = function dhGetPublicKey(encoding) { const key = this[kHandle].getPublicKey(); - encoding = encoding || getDefaultEncoding(); return encode(key, encoding); } @@ -208,13 +196,11 @@ DiffieHellmanGroup.prototype.getPrivateKey = function dhGetPrivateKey(encoding) { const key = this[kHandle].getPrivateKey(); - encoding = encoding || getDefaultEncoding(); return encode(key, encoding); } DiffieHellman.prototype.setPublicKey = function setPublicKey(key, encoding) { - encoding = encoding || getDefaultEncoding(); key = getArrayBufferOrView(key, 'key', encoding); this[kHandle].setPublicKey(key); return this; @@ -222,7 +208,6 @@ DiffieHellman.prototype.setPublicKey = function setPublicKey(key, encoding) { DiffieHellman.prototype.setPrivateKey = function setPrivateKey(key, encoding) { - encoding = encoding || getDefaultEncoding(); key = getArrayBufferOrView(key, 'key', encoding); this[kHandle].setPrivateKey(key); return this; @@ -251,15 +236,12 @@ ECDH.prototype.generateKeys = function generateKeys(encoding, format) { ECDH.prototype.getPublicKey = function getPublicKey(encoding, format) { const f = getFormat(format); const key = this[kHandle].getPublicKey(f); - encoding = encoding || getDefaultEncoding(); return encode(key, encoding); }; ECDH.convertKey = function convertKey(key, curve, inEnc, outEnc, format) { validateString(curve, 'curve'); - const encoding = inEnc || getDefaultEncoding(); - key = getArrayBufferOrView(key, 'key', encoding); - outEnc = outEnc || encoding; + key = getArrayBufferOrView(key, 'key', inEnc); const f = 
getFormat(format); const convertedKey = _ECDHConvertKey(key, curve, f); return encode(convertedKey, outEnc); From ca9f801332cd177bb06fecf55c2ec1abd5232520 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Mon, 7 Aug 2023 16:24:06 +0200 Subject: [PATCH 021/125] test: make WeakReference tests robust Previously we assume that the objects are GC'ed after one global.gc() returns, which is not necessarily always the case. Use gcUntil() to run GC multiple times if they are not GC'ed in the first time around. PR-URL: https://github.com/nodejs/node/pull/49053 Reviewed-By: Anna Henningsen Reviewed-By: Yagiz Nizipli Reviewed-By: Rafael Gonzaga Reviewed-By: Benjamin Gruenbaum --- test/fixtures/snapshot/weak-reference-gc.js | 21 ++++++++++++++----- .../parallel/test-domain-async-id-map-leak.js | 17 +++++++++++---- .../test-internal-util-weakreference.js | 12 ++++++----- 3 files changed, 36 insertions(+), 14 deletions(-) diff --git a/test/fixtures/snapshot/weak-reference-gc.js b/test/fixtures/snapshot/weak-reference-gc.js index d8bfdf95d1772a..8dada530e77c2a 100644 --- a/test/fixtures/snapshot/weak-reference-gc.js +++ b/test/fixtures/snapshot/weak-reference-gc.js @@ -5,16 +5,27 @@ const { WeakReference } = internalBinding('util'); const { setDeserializeMainFunction } = require('v8').startupSnapshot -const assert = require('assert'); let obj = { hello: 'world' }; const ref = new WeakReference(obj); +let gcCount = 0; +let maxGC = 10; -setDeserializeMainFunction(() => { - obj = null; +function run() { globalThis.gc(); - setImmediate(() => { - assert.strictEqual(ref.get(), undefined); + gcCount++; + if (ref.get() === undefined) { + return; + } else if (gcCount < maxGC) { + run(); + } else { + throw new Error(`Reference is still around after ${maxGC} GC`); + } }); +} + +setDeserializeMainFunction(() => { + obj = null; + run(); }); diff --git a/test/parallel/test-domain-async-id-map-leak.js b/test/parallel/test-domain-async-id-map-leak.js index 8c03aa9401259a..12e93ef3594eaa 100644 --- a/test/parallel/test-domain-async-id-map-leak.js +++ b/test/parallel/test-domain-async-id-map-leak.js @@ -13,6 +13,8 @@ const isEnumerable = Function.call.bind(Object.prototype.propertyIsEnumerable); // See: https://github.com/nodejs/node/issues/23862 let d = domain.create(); +let resourceGCed = false; let domainGCed = false; let + emitterGCed = false; d.run(() => { const resource = new async_hooks.AsyncResource('TestResource'); const emitter = new EventEmitter(); @@ -30,10 +32,17 @@ d.run(() => { // emitter → resource → async id ⇒ domain → emitter. 
// Make sure that all of these objects are released: - onGC(resource, { ongc: common.mustCall() }); - onGC(d, { ongc: common.mustCall() }); - onGC(emitter, { ongc: common.mustCall() }); + onGC(resource, { ongc: common.mustCall(() => { resourceGCed = true; }) }); + onGC(d, { ongc: common.mustCall(() => { domainGCed = true; }) }); + onGC(emitter, { ongc: common.mustCall(() => { emitterGCed = true; }) }); }); d = null; -global.gc(); + +async function main() { + await common.gcUntil( + 'All objects garbage collected', + () => resourceGCed && domainGCed && emitterGCed); +} + +main(); diff --git a/test/parallel/test-internal-util-weakreference.js b/test/parallel/test-internal-util-weakreference.js index b48b34fe2309ea..75a00176bb095b 100644 --- a/test/parallel/test-internal-util-weakreference.js +++ b/test/parallel/test-internal-util-weakreference.js @@ -1,6 +1,6 @@ // Flags: --expose-internals --expose-gc 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const { internalBinding } = require('internal/test/binding'); const { WeakReference } = internalBinding('util'); @@ -9,9 +9,11 @@ let obj = { hello: 'world' }; const ref = new WeakReference(obj); assert.strictEqual(ref.get(), obj); -setImmediate(() => { +async function main() { obj = null; - global.gc(); + await common.gcUntil( + 'Reference is garbage collected', + () => ref.get() === undefined); +} - assert.strictEqual(ref.get(), undefined); -}); +main(); From c12711ebfeaf482be1cbccc748f6ae428e097420 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Mon, 7 Aug 2023 16:28:56 +0200 Subject: [PATCH 022/125] lib: implement WeakReference on top of JS WeakRef The C++ implementation can now be done entirely in JS using WeakRef. Re-implement it in JS instead to simplify the code. PR-URL: https://github.com/nodejs/node/pull/49053 Reviewed-By: Anna Henningsen Reviewed-By: Yagiz Nizipli Reviewed-By: Rafael Gonzaga Reviewed-By: Benjamin Gruenbaum --- lib/diagnostics_channel.js | 2 +- lib/domain.js | 3 +- lib/internal/util.js | 34 +++++++++++++++++++ test/fixtures/snapshot/weak-reference-gc.js | 3 +- test/fixtures/snapshot/weak-reference.js | 3 +- .../test-internal-util-weakreference.js | 3 +- 6 files changed, 39 insertions(+), 9 deletions(-) diff --git a/lib/diagnostics_channel.js b/lib/diagnostics_channel.js index dae0e930a395e9..10d35054f56535 100644 --- a/lib/diagnostics_channel.js +++ b/lib/diagnostics_channel.js @@ -28,7 +28,7 @@ const { const { triggerUncaughtException } = internalBinding('errors'); -const { WeakReference } = internalBinding('util'); +const { WeakReference } = require('internal/util'); // Can't delete when weakref count reaches 0 as it could increment again. // Only GC can be used as a valid time to clean up the channels map. diff --git a/lib/domain.js b/lib/domain.js index 51565795d72010..7da672a3691560 100644 --- a/lib/domain.js +++ b/lib/domain.js @@ -52,9 +52,8 @@ const { const { createHook } = require('async_hooks'); const { useDomainTrampoline } = require('internal/async_hooks'); -// TODO(addaleax): Use a non-internal solution for this. 
const kWeak = Symbol('kWeak'); -const { WeakReference } = internalBinding('util'); +const { WeakReference } = require('internal/util'); // Overwrite process.domain with a getter/setter that will allow for more // effective optimizations diff --git a/lib/internal/util.js b/lib/internal/util.js index 1e1a647e693876..3586084ba7b8bd 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -33,6 +33,7 @@ const { SafeMap, SafeSet, SafeWeakMap, + SafeWeakRef, StringPrototypeReplace, StringPrototypeToLowerCase, StringPrototypeToUpperCase, @@ -797,6 +798,38 @@ function guessHandleType(fd) { return handleTypes[type]; } +class WeakReference { + #weak = null; + #strong = null; + #refCount = 0; + constructor(object) { + this.#weak = new SafeWeakRef(object); + } + + incRef() { + this.#refCount++; + if (this.#refCount === 1) { + const derefed = this.#weak.deref(); + if (derefed !== undefined) { + this.#strong = derefed; + } + } + return this.#refCount; + } + + decRef() { + this.#refCount--; + if (this.#refCount === 0) { + this.#strong = null; + } + return this.#refCount; + } + + get() { + return this.#weak.deref(); + } +} + module.exports = { getLazy, assertCrypto, @@ -855,4 +888,5 @@ module.exports = { kEnumerableProperty, setOwnProperty, pendingDeprecate, + WeakReference, }; diff --git a/test/fixtures/snapshot/weak-reference-gc.js b/test/fixtures/snapshot/weak-reference-gc.js index 8dada530e77c2a..b6af6c46e3829a 100644 --- a/test/fixtures/snapshot/weak-reference-gc.js +++ b/test/fixtures/snapshot/weak-reference-gc.js @@ -1,7 +1,6 @@ 'use strict'; -const { internalBinding } = require('internal/test/binding'); -const { WeakReference } = internalBinding('util'); +const { WeakReference } = require('internal/util'); const { setDeserializeMainFunction } = require('v8').startupSnapshot diff --git a/test/fixtures/snapshot/weak-reference.js b/test/fixtures/snapshot/weak-reference.js index 214d52fee185fe..1aefc6a1c07195 100644 --- a/test/fixtures/snapshot/weak-reference.js +++ b/test/fixtures/snapshot/weak-reference.js @@ -1,7 +1,6 @@ 'use strict'; -const { internalBinding } = require('internal/test/binding'); -const { WeakReference } = internalBinding('util'); +const { WeakReference } = require('internal/util'); const { setDeserializeMainFunction } = require('v8').startupSnapshot diff --git a/test/parallel/test-internal-util-weakreference.js b/test/parallel/test-internal-util-weakreference.js index 75a00176bb095b..ef3c0943b1f83e 100644 --- a/test/parallel/test-internal-util-weakreference.js +++ b/test/parallel/test-internal-util-weakreference.js @@ -2,8 +2,7 @@ 'use strict'; const common = require('../common'); const assert = require('assert'); -const { internalBinding } = require('internal/test/binding'); -const { WeakReference } = internalBinding('util'); +const { WeakReference } = require('internal/util'); let obj = { hello: 'world' }; const ref = new WeakReference(obj); From f460362cdf5dabae1f6727e51181305b8c9c3241 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Mon, 7 Aug 2023 17:04:56 +0200 Subject: [PATCH 023/125] src: remove C++ WeakReference implementation PR-URL: https://github.com/nodejs/node/pull/49053 Reviewed-By: Anna Henningsen Reviewed-By: Yagiz Nizipli Reviewed-By: Rafael Gonzaga Reviewed-By: Benjamin Gruenbaum --- node.gyp | 1 - src/base_object_types.h | 3 +- src/inspector/node_string.cc | 1 - src/node_snapshotable.cc | 1 - src/node_util.cc | 119 ----------------------------------- src/node_util.h | 52 --------------- src/util.cc | 1 - 7 files changed, 1 insertion(+), 177 deletions(-) 
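For readers following [PATCH 022/125] above: the refcounted keep-alive behavior of the new JS `WeakReference` can be reproduced outside of Node.js internals with the standard `WeakRef`. The sketch below is illustrative only; it substitutes the global `WeakRef` for the `SafeWeakRef` primordial used in `lib/internal/util.js`, and the name `WeakReferenceSketch` is invented for this example.

```js
// Simplified sketch of the WeakReference class added to lib/internal/util.js.
// Uses the standard WeakRef rather than the SafeWeakRef primordial, so this is
// an approximation, not the shipped implementation.
class WeakReferenceSketch {
  #weak = null;    // always-weak handle to the target
  #strong = null;  // pins the target while the reference count is above zero
  #refCount = 0;

  constructor(object) {
    this.#weak = new WeakRef(object);
  }

  incRef() {
    this.#refCount++;
    // On the 0 -> 1 transition, re-take a strong reference if the target
    // has not been collected yet.
    if (this.#refCount === 1) {
      const derefed = this.#weak.deref();
      if (derefed !== undefined) {
        this.#strong = derefed;
      }
    }
    return this.#refCount;
  }

  decRef() {
    this.#refCount--;
    // On the 1 -> 0 transition, drop the pin so the target becomes
    // collectable again.
    if (this.#refCount === 0) {
      this.#strong = null;
    }
    return this.#refCount;
  }

  get() {
    // Returns undefined once the target has been garbage collected.
    return this.#weak.deref();
  }
}

// While the count is above zero, get() is guaranteed to return the target;
// once it drops back to zero, a later GC may reclaim it.
const sketchRef = new WeakReferenceSketch({ hello: 'world' });
sketchRef.incRef();
console.log(sketchRef.get()); // { hello: 'world' }
sketchRef.decRef();
```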
delete mode 100644 src/node_util.h diff --git a/node.gyp b/node.gyp index 49e39c3ce8b2e2..22a45eb8fd3ac8 100644 --- a/node.gyp +++ b/node.gyp @@ -255,7 +255,6 @@ 'src/node_stat_watcher.h', 'src/node_union_bytes.h', 'src/node_url.h', - 'src/node_util.h', 'src/node_version.h', 'src/node_v8.h', 'src/node_v8_platform-inl.h', diff --git a/src/base_object_types.h b/src/base_object_types.h index bb7a0e064b0b72..cb034f1d62b681 100644 --- a/src/base_object_types.h +++ b/src/base_object_types.h @@ -28,8 +28,7 @@ namespace node { // The first argument should match what the type passes to // SET_OBJECT_ID(), the second argument should match the C++ class // name. -#define SERIALIZABLE_NON_BINDING_TYPES(V) \ - V(util_weak_reference, util::WeakReference) +#define SERIALIZABLE_NON_BINDING_TYPES(V) // Helper list of all binding data wrapper types. #define BINDING_TYPES(V) \ diff --git a/src/inspector/node_string.cc b/src/inspector/node_string.cc index 6b59cd73f9742d..0f780f46c8ebdd 100644 --- a/src/inspector/node_string.cc +++ b/src/inspector/node_string.cc @@ -1,6 +1,5 @@ #include "node_string.h" #include "node/inspector/protocol/Protocol.h" -#include "node_util.h" #include "simdutf.h" #include "util-inl.h" diff --git a/src/node_snapshotable.cc b/src/node_snapshotable.cc index af85ea10a94163..da66bab7ea3147 100644 --- a/src/node_snapshotable.cc +++ b/src/node_snapshotable.cc @@ -22,7 +22,6 @@ #include "node_process.h" #include "node_snapshot_builder.h" #include "node_url.h" -#include "node_util.h" #include "node_v8.h" #include "node_v8_platform-inl.h" #include "timers.h" diff --git a/src/node_util.cc b/src/node_util.cc index ec637dcbd78861..1f86e47a69aeac 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -1,4 +1,3 @@ -#include "node_util.h" #include "base_object-inl.h" #include "node_errors.h" #include "node_external_reference.h" @@ -17,8 +16,6 @@ using v8::CFunction; using v8::Context; using v8::External; using v8::FunctionCallbackInfo; -using v8::FunctionTemplate; -using v8::HandleScope; using v8::IndexFilter; using v8::Integer; using v8::Isolate; @@ -201,109 +198,6 @@ void ArrayBufferViewHasBuffer(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(args[0].As()->HasBuffer()); } -WeakReference::WeakReference(Realm* realm, - Local object, - Local target) - : WeakReference(realm, object, target, 0) {} - -WeakReference::WeakReference(Realm* realm, - Local object, - Local target, - uint64_t reference_count) - : SnapshotableObject(realm, object, type_int), - reference_count_(reference_count) { - MakeWeak(); - if (!target.IsEmpty()) { - target_.Reset(realm->isolate(), target); - if (reference_count_ == 0) { - target_.SetWeak(); - } - } -} - -bool WeakReference::PrepareForSerialization(Local context, - v8::SnapshotCreator* creator) { - if (target_.IsEmpty()) { - target_index_ = 0; - return true; - } - - // Users can still hold strong references to target in addition to the - // reference that we manage here, and they could expect that the referenced - // object remains the same as long as that external strong reference - // is alive. Since we have no way to know if there is any other reference - // keeping the target alive, the best we can do to maintain consistency is to - // simply save a reference to the target in the snapshot (effectively making - // it strong) during serialization, and restore it during deserialization. 
- // If there's no known counted reference from our side, we'll make the - // reference here weak upon deserialization so that it can be GC'ed if users - // do not hold additional references to it. - Local target = target_.Get(context->GetIsolate()); - target_index_ = creator->AddData(context, target); - DCHECK_NE(target_index_, 0); - target_.Reset(); - return true; -} - -InternalFieldInfoBase* WeakReference::Serialize(int index) { - DCHECK_IS_SNAPSHOT_SLOT(index); - InternalFieldInfo* info = - InternalFieldInfoBase::New(type()); - info->target = target_index_; - info->reference_count = reference_count_; - return info; -} - -void WeakReference::Deserialize(Local context, - Local holder, - int index, - InternalFieldInfoBase* info) { - DCHECK_IS_SNAPSHOT_SLOT(index); - HandleScope scope(context->GetIsolate()); - - InternalFieldInfo* weak_info = reinterpret_cast(info); - Local target; - if (weak_info->target != 0) { - target = context->GetDataFromSnapshotOnce(weak_info->target) - .ToLocalChecked(); - } - new WeakReference( - Realm::GetCurrent(context), holder, target, weak_info->reference_count); -} - -void WeakReference::New(const FunctionCallbackInfo& args) { - Realm* realm = Realm::GetCurrent(args); - CHECK(args.IsConstructCall()); - CHECK(args[0]->IsObject()); - new WeakReference(realm, args.This(), args[0].As()); -} - -void WeakReference::Get(const FunctionCallbackInfo& args) { - WeakReference* weak_ref = Unwrap(args.Holder()); - Isolate* isolate = args.GetIsolate(); - if (!weak_ref->target_.IsEmpty()) - args.GetReturnValue().Set(weak_ref->target_.Get(isolate)); -} - -void WeakReference::IncRef(const FunctionCallbackInfo& args) { - WeakReference* weak_ref = Unwrap(args.Holder()); - weak_ref->reference_count_++; - if (weak_ref->target_.IsEmpty()) return; - if (weak_ref->reference_count_ == 1) weak_ref->target_.ClearWeak(); - args.GetReturnValue().Set( - v8::Number::New(args.GetIsolate(), weak_ref->reference_count_)); -} - -void WeakReference::DecRef(const FunctionCallbackInfo& args) { - WeakReference* weak_ref = Unwrap(args.Holder()); - CHECK_GE(weak_ref->reference_count_, 1); - weak_ref->reference_count_--; - if (weak_ref->target_.IsEmpty()) return; - if (weak_ref->reference_count_ == 0) weak_ref->target_.SetWeak(); - args.GetReturnValue().Set( - v8::Number::New(args.GetIsolate(), weak_ref->reference_count_)); -} - static uint32_t GetUVHandleTypeCode(const uv_handle_type type) { // TODO(anonrig): We can use an enum here and then create the array in the // binding, which will remove the hard-coding in C++ and JS land. 
@@ -391,10 +285,6 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(GetExternalValue); registry->Register(Sleep); registry->Register(ArrayBufferViewHasBuffer); - registry->Register(WeakReference::New); - registry->Register(WeakReference::Get); - registry->Register(WeakReference::IncRef); - registry->Register(WeakReference::DecRef); registry->Register(GuessHandleType); registry->Register(FastGuessHandleType); registry->Register(fast_guess_handle_type_.GetTypeInfo()); @@ -494,15 +384,6 @@ void Initialize(Local target, env->should_abort_on_uncaught_toggle().GetJSArray()) .FromJust()); - Local weak_ref = - NewFunctionTemplate(isolate, WeakReference::New); - weak_ref->InstanceTemplate()->SetInternalFieldCount( - WeakReference::kInternalFieldCount); - SetProtoMethod(isolate, weak_ref, "get", WeakReference::Get); - SetProtoMethod(isolate, weak_ref, "incRef", WeakReference::IncRef); - SetProtoMethod(isolate, weak_ref, "decRef", WeakReference::DecRef); - SetConstructorFunction(context, target, "WeakReference", weak_ref); - SetFastMethodNoSideEffect(context, target, "guessHandleType", diff --git a/src/node_util.h b/src/node_util.h deleted file mode 100644 index 715686856db879..00000000000000 --- a/src/node_util.h +++ /dev/null @@ -1,52 +0,0 @@ - -#ifndef SRC_NODE_UTIL_H_ -#define SRC_NODE_UTIL_H_ - -#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS -#include "base_object.h" -#include "node_snapshotable.h" -#include "v8.h" - -namespace node { -namespace util { - -class WeakReference : public SnapshotableObject { - public: - SERIALIZABLE_OBJECT_METHODS() - - SET_OBJECT_ID(util_weak_reference) - - WeakReference(Realm* realm, - v8::Local object, - v8::Local target); - static void New(const v8::FunctionCallbackInfo& args); - static void Get(const v8::FunctionCallbackInfo& args); - static void IncRef(const v8::FunctionCallbackInfo& args); - static void DecRef(const v8::FunctionCallbackInfo& args); - - SET_MEMORY_INFO_NAME(WeakReference) - SET_SELF_SIZE(WeakReference) - SET_NO_MEMORY_INFO() - - struct InternalFieldInfo : public node::InternalFieldInfoBase { - SnapshotIndex target; - uint64_t reference_count; - }; - - private: - WeakReference(Realm* realm, - v8::Local object, - v8::Local target, - uint64_t reference_count); - v8::Global target_; - uint64_t reference_count_ = 0; - - SnapshotIndex target_index_ = 0; // 0 means target_ is not snapshotted -}; - -} // namespace util -} // namespace node - -#endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS - -#endif // SRC_NODE_UTIL_H_ diff --git a/src/util.cc b/src/util.cc index 8140c177490c33..76a61aef592641 100644 --- a/src/util.cc +++ b/src/util.cc @@ -27,7 +27,6 @@ #include "node_buffer.h" #include "node_errors.h" #include "node_internals.h" -#include "node_util.h" #include "node_v8_platform-inl.h" #include "string_bytes.h" #include "uv.h" From b51946ebdd81924e31dea36e5115046d24b4b5aa Mon Sep 17 00:00:00 2001 From: Moshe Atlow Date: Wed, 16 Aug 2023 21:22:45 +0300 Subject: [PATCH 024/125] tools: fix github reporter appended multiple times PR-URL: https://github.com/nodejs/node/pull/49199 Reviewed-By: Chemi Atlow Reviewed-By: Benjamin Gruenbaum --- tools/test.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tools/test.py b/tools/test.py index 1d0260ed2823a1..d35b45a669bccc 100755 --- a/tools/test.py +++ b/tools/test.py @@ -317,8 +317,7 @@ def HasRun(self, output): class ActionsAnnotationProgressIndicator(DotsProgressIndicator): def AboutToRun(self, case): - if not hasattr(case, 
'additional_flags'): - case.additional_flags = [] + case.additional_flags = case.additional_flags.copy() if hasattr(case, 'additional_flags') else [] case.additional_flags.append('--test-reporter=./tools/github_reporter/index.js') case.additional_flags.append('--test-reporter-destination=stdout') From 6ee74be87f03fdb8ab8cdebd2eb1d0d8006fdfdb Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Wed, 16 Aug 2023 22:18:46 +0200 Subject: [PATCH 025/125] vm: store MicrotaskQueue in ContextifyContext directly Previously the ContextifyContext holds a MicrotaskQueueWrap which in turns holds a MicrotaskQueue in a shared pointer. The indirection is actually unnecessary, we can directly hold the MicrotaskQueue via a unique pointer in ContextifyContext, the lifetime would still remain the same but the graph would be simpler, and this removes the additional JS -> C++ to create the wrapper object. PR-URL: https://github.com/nodejs/node/pull/48982 Reviewed-By: Stephen Belanger --- lib/vm.js | 5 +-- src/module_wrap.cc | 2 +- src/node_contextify.cc | 85 +++++++++++------------------------------- src/node_contextify.h | 39 ++++--------------- 4 files changed, 31 insertions(+), 100 deletions(-) diff --git a/lib/vm.js b/lib/vm.js index b48e79c282541b..515c7afb4aedb9 100644 --- a/lib/vm.js +++ b/lib/vm.js @@ -30,7 +30,6 @@ const { const { ContextifyScript, - MicrotaskQueue, makeContext, constants, measureMemory: _measureMemory, @@ -238,9 +237,7 @@ function createContext(contextObject = {}, options = kEmptyObject) { validateOneOf(microtaskMode, 'options.microtaskMode', ['afterEvaluate', undefined]); - const microtaskQueue = microtaskMode === 'afterEvaluate' ? - new MicrotaskQueue() : - null; + const microtaskQueue = (microtaskMode === 'afterEvaluate'); makeContext(contextObject, name, origin, strings, wasm, microtaskQueue); return contextObject; diff --git a/src/module_wrap.cc b/src/module_wrap.cc index 2dca349bd97089..0127a09167f851 100644 --- a/src/module_wrap.cc +++ b/src/module_wrap.cc @@ -361,7 +361,7 @@ void ModuleWrap::Evaluate(const FunctionCallbackInfo& args) { Local module = obj->module_.Get(isolate); ContextifyContext* contextify_context = obj->contextify_context_; - std::shared_ptr microtask_queue; + MicrotaskQueue* microtask_queue = nullptr; if (contextify_context != nullptr) microtask_queue = contextify_context->microtask_queue(); diff --git a/src/node_contextify.cc b/src/node_contextify.cc index ee68ed12795740..a557f5bd9f3b35 100644 --- a/src/node_contextify.cc +++ b/src/node_contextify.cc @@ -110,17 +110,15 @@ Local Uint32ToName(Local context, uint32_t index) { } // anonymous namespace BaseObjectPtr ContextifyContext::New( - Environment* env, - Local sandbox_obj, - const ContextOptions& options) { + Environment* env, Local sandbox_obj, ContextOptions* options) { HandleScope scope(env->isolate()); Local object_template = env->contextify_global_template(); DCHECK(!object_template.IsEmpty()); const SnapshotData* snapshot_data = env->isolate_data()->snapshot_data(); MicrotaskQueue* queue = - options.microtask_queue_wrap - ? options.microtask_queue_wrap->microtask_queue().get() + options->own_microtask_queue + ? 
options->own_microtask_queue.get() : env->isolate()->GetCurrentContext()->GetMicrotaskQueue(); Local v8_context; @@ -132,19 +130,16 @@ BaseObjectPtr ContextifyContext::New( return New(v8_context, env, sandbox_obj, options); } -void ContextifyContext::MemoryInfo(MemoryTracker* tracker) const { - if (microtask_queue_wrap_) { - tracker->TrackField("microtask_queue_wrap", - microtask_queue_wrap_->object()); - } -} +void ContextifyContext::MemoryInfo(MemoryTracker* tracker) const {} ContextifyContext::ContextifyContext(Environment* env, Local wrapper, Local v8_context, - const ContextOptions& options) + ContextOptions* options) : BaseObject(env, wrapper), - microtask_queue_wrap_(options.microtask_queue_wrap) { + microtask_queue_(options->own_microtask_queue + ? options->own_microtask_queue.release() + : nullptr) { context_.Reset(env->isolate(), v8_context); // This should only be done after the initial initializations of the context // global object is finished. @@ -240,7 +235,7 @@ BaseObjectPtr ContextifyContext::New( Local v8_context, Environment* env, Local sandbox_obj, - const ContextOptions& options) { + ContextOptions* options) { HandleScope scope(env->isolate()); // This only initializes part of the context. The primordials are // only initialized when needed because even deserializing them slows @@ -268,14 +263,14 @@ BaseObjectPtr ContextifyContext::New( v8_context->AllowCodeGenerationFromStrings(false); v8_context->SetEmbedderData( ContextEmbedderIndex::kAllowCodeGenerationFromStrings, - options.allow_code_gen_strings); + options->allow_code_gen_strings); v8_context->SetEmbedderData(ContextEmbedderIndex::kAllowWasmCodeGeneration, - options.allow_code_gen_wasm); + options->allow_code_gen_wasm); - Utf8Value name_val(env->isolate(), options.name); + Utf8Value name_val(env->isolate(), options->name); ContextInfo info(*name_val); - if (!options.origin.IsEmpty()) { - Utf8Value origin_val(env->isolate(), options.origin); + if (!options->origin.IsEmpty()) { + Utf8Value origin_val(env->isolate(), options->origin); info.origin = *origin_val; } @@ -374,16 +369,14 @@ void ContextifyContext::MakeContext(const FunctionCallbackInfo& args) { CHECK(args[4]->IsBoolean()); options.allow_code_gen_wasm = args[4].As(); - if (args[5]->IsObject() && - !env->microtask_queue_ctor_template().IsEmpty() && - env->microtask_queue_ctor_template()->HasInstance(args[5])) { - options.microtask_queue_wrap.reset( - Unwrap(args[5].As())); + if (args[5]->IsBoolean() && args[5]->BooleanValue(env->isolate())) { + options.own_microtask_queue = + MicrotaskQueue::New(env->isolate(), MicrotasksPolicy::kExplicit); } TryCatchScope try_catch(env); BaseObjectPtr context_ptr = - ContextifyContext::New(env, sandbox, options); + ContextifyContext::New(env, sandbox, &options); if (try_catch.HasCaught()) { if (!try_catch.HasTerminated()) @@ -987,7 +980,7 @@ void ContextifyScript::RunInContext(const FunctionCallbackInfo& args) { CHECK(args[0]->IsObject() || args[0]->IsNull()); Local context; - std::shared_ptr microtask_queue; + v8::MicrotaskQueue* microtask_queue = nullptr; if (args[0]->IsObject()) { Local sandbox = args[0].As(); @@ -1036,7 +1029,7 @@ bool ContextifyScript::EvalMachine(Local context, const bool display_errors, const bool break_on_sigint, const bool break_on_first_line, - std::shared_ptr mtask_queue, + MicrotaskQueue* mtask_queue, const FunctionCallbackInfo& args) { Context::Scope context_scope(context); @@ -1068,7 +1061,7 @@ bool ContextifyScript::EvalMachine(Local context, bool received_signal = false; auto run = [&]() { 
MaybeLocal result = script->Run(context); - if (!result.IsEmpty() && mtask_queue) + if (!result.IsEmpty() && mtask_queue != nullptr) mtask_queue->PerformCheckpoint(env->isolate()); return result; }; @@ -1122,7 +1115,6 @@ bool ContextifyScript::EvalMachine(Local context, return true; } - ContextifyScript::ContextifyScript(Environment* env, Local object) : BaseObject(env, object), id_(env->get_next_script_id()) { @@ -1376,46 +1368,12 @@ static void MeasureMemory(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(promise); } -MicrotaskQueueWrap::MicrotaskQueueWrap(Environment* env, Local obj) - : BaseObject(env, obj), - microtask_queue_( - MicrotaskQueue::New(env->isolate(), MicrotasksPolicy::kExplicit)) { - MakeWeak(); -} - -const std::shared_ptr& -MicrotaskQueueWrap::microtask_queue() const { - return microtask_queue_; -} - -void MicrotaskQueueWrap::New(const FunctionCallbackInfo& args) { - CHECK(args.IsConstructCall()); - new MicrotaskQueueWrap(Environment::GetCurrent(args), args.This()); -} - -void MicrotaskQueueWrap::CreatePerIsolateProperties( - IsolateData* isolate_data, Local target) { - Isolate* isolate = isolate_data->isolate(); - HandleScope scope(isolate); - Local tmpl = NewFunctionTemplate(isolate, New); - tmpl->InstanceTemplate()->SetInternalFieldCount( - ContextifyScript::kInternalFieldCount); - isolate_data->set_microtask_queue_ctor_template(tmpl); - SetConstructorFunction(isolate, target, "MicrotaskQueue", tmpl); -} - -void MicrotaskQueueWrap::RegisterExternalReferences( - ExternalReferenceRegistry* registry) { - registry->Register(New); -} - void CreatePerIsolateProperties(IsolateData* isolate_data, Local target) { Isolate* isolate = isolate_data->isolate(); ContextifyContext::CreatePerIsolateProperties(isolate_data, target); ContextifyScript::CreatePerIsolateProperties(isolate_data, target); - MicrotaskQueueWrap::CreatePerIsolateProperties(isolate_data, target); SetMethod(isolate, target, "startSigintWatchdog", StartSigintWatchdog); SetMethod(isolate, target, "stopSigintWatchdog", StopSigintWatchdog); @@ -1470,7 +1428,6 @@ static void CreatePerContextProperties(Local target, void RegisterExternalReferences(ExternalReferenceRegistry* registry) { ContextifyContext::RegisterExternalReferences(registry); ContextifyScript::RegisterExternalReferences(registry); - MicrotaskQueueWrap::RegisterExternalReferences(registry); registry->Register(StartSigintWatchdog); registry->Register(StopSigintWatchdog); diff --git a/src/node_contextify.h b/src/node_contextify.h index 9a0cbe07d6e660..2bcc15b5f55ad3 100644 --- a/src/node_contextify.h +++ b/src/node_contextify.h @@ -12,34 +12,12 @@ class ExternalReferenceRegistry; namespace contextify { -class MicrotaskQueueWrap : public BaseObject { - public: - MicrotaskQueueWrap(Environment* env, v8::Local obj); - - const std::shared_ptr& microtask_queue() const; - - static void CreatePerIsolateProperties(IsolateData* isolate_data, - v8::Local target); - static void RegisterExternalReferences(ExternalReferenceRegistry* registry); - static void New(const v8::FunctionCallbackInfo& args); - - // This could have methods for running the microtask queue, if we ever decide - // to make that fully customizable from userland. 
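The userland behavior this refactoring must preserve is observable through the public `vm` API. A minimal sketch, not part of the patch, of the `'afterEvaluate'` microtask mode whose queue now lives directly on `ContextifyContext`:

```js
// Sketch: with microtaskMode 'afterEvaluate', the context gets its own
// microtask queue, and it is checkpointed as part of runInContext() itself
// (mirroring the PerformCheckpoint() call after script->Run() above).
const vm = require('node:vm');
const assert = require('node:assert');

let drained = false;
const context = vm.createContext(
  { setDrained: () => { drained = true; } },
  { microtaskMode: 'afterEvaluate' },
);

vm.runInContext('Promise.resolve().then(() => setDrained());', context);

// The context-local queue was drained right after evaluation, so the
// promise callback has already run by the time runInContext() returns.
assert.strictEqual(drained, true);
```

Without `'afterEvaluate'`, the script would share the outer context's queue and the callback would run later, with the main microtask drain.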
- - SET_NO_MEMORY_INFO() - SET_MEMORY_INFO_NAME(MicrotaskQueueWrap) - SET_SELF_SIZE(MicrotaskQueueWrap) - - private: - std::shared_ptr microtask_queue_; -}; - struct ContextOptions { v8::Local name; v8::Local origin; v8::Local allow_code_gen_strings; v8::Local allow_code_gen_wasm; - BaseObjectPtr microtask_queue_wrap; + std::unique_ptr own_microtask_queue; }; class ContextifyContext : public BaseObject { @@ -47,7 +25,7 @@ class ContextifyContext : public BaseObject { ContextifyContext(Environment* env, v8::Local wrapper, v8::Local v8_context, - const ContextOptions& options); + ContextOptions* options); ~ContextifyContext(); void MemoryInfo(MemoryTracker* tracker) const override; @@ -80,9 +58,8 @@ class ContextifyContext : public BaseObject { .As(); } - inline std::shared_ptr microtask_queue() const { - if (!microtask_queue_wrap_) return {}; - return microtask_queue_wrap_->microtask_queue(); + inline v8::MicrotaskQueue* microtask_queue() const { + return microtask_queue_.get(); } template @@ -94,12 +71,12 @@ class ContextifyContext : public BaseObject { private: static BaseObjectPtr New(Environment* env, v8::Local sandbox_obj, - const ContextOptions& options); + ContextOptions* options); // Initialize a context created from CreateV8Context() static BaseObjectPtr New(v8::Local ctx, Environment* env, v8::Local sandbox_obj, - const ContextOptions& options); + ContextOptions* options); static bool IsStillInitializing(const ContextifyContext* ctx); static void MakeContext(const v8::FunctionCallbackInfo& args); @@ -146,7 +123,7 @@ class ContextifyContext : public BaseObject { const v8::PropertyCallbackInfo& args); v8::Global context_; - BaseObjectPtr microtask_queue_wrap_; + std::unique_ptr microtask_queue_; }; class ContextifyScript : public BaseObject { @@ -171,7 +148,7 @@ class ContextifyScript : public BaseObject { const bool display_errors, const bool break_on_sigint, const bool break_on_first_line, - std::shared_ptr microtask_queue, + v8::MicrotaskQueue* microtask_queue, const v8::FunctionCallbackInfo& args); inline uint32_t id() { return id_; } From 11c85ffa980af120f09ae617e45bbd6146b892f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=BF=A0=20/=20green?= Date: Thu, 17 Aug 2023 17:42:12 +0900 Subject: [PATCH 026/125] lib: add api to detect whether source-maps are enabled Add `process.sourceMapsEnabled` to detect whether source-maps are enabled. Fixes: https://github.com/nodejs/node/issues/46304 PR-URL: https://github.com/nodejs/node/pull/46391 Reviewed-By: Chemi Atlow --- doc/api/process.md | 13 +++++++++++++ lib/internal/bootstrap/node.js | 9 +++++++++ .../source-map/output/source_map_disabled_by_api.js | 4 ++++ .../source-map/output/source_map_enabled_by_api.js | 4 ++++ 4 files changed, 30 insertions(+) diff --git a/doc/api/process.md b/doc/api/process.md index 5b08ce8a4b8367..548f0fdcf44378 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -3517,6 +3517,19 @@ throw an error. Using this function is mutually exclusive with using the deprecated [`domain`][] built-in module. +## `process.sourceMapsEnabled` + + + +> Stability: 1 - Experimental + +* {boolean} + +The `process.sourceMapsEnabled` property returns whether the +[Source Map v3][Source Map] support for stack traces is enabled. 
+ ## `process.stderr` * {Stream} diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js index 36ff5bcd8c526a..7a773d5208e250 100644 --- a/lib/internal/bootstrap/node.js +++ b/lib/internal/bootstrap/node.js @@ -326,6 +326,7 @@ process.emitWarning = emitWarning; { const { + getSourceMapsEnabled, setSourceMapsEnabled, maybeCacheGeneratedSourceMap, } = require('internal/source_map/source_map_cache'); @@ -333,6 +334,14 @@ process.emitWarning = emitWarning; setMaybeCacheGeneratedSourceMap, } = internalBinding('errors'); + ObjectDefineProperty(process, 'sourceMapsEnabled', { + __proto__: null, + enumerable: true, + configurable: true, + get() { + return getSourceMapsEnabled(); + }, + }); process.setSourceMapsEnabled = setSourceMapsEnabled; // The C++ land calls back to maybeCacheGeneratedSourceMap() // when code is generated by user with eval() or new Function() diff --git a/test/fixtures/source-map/output/source_map_disabled_by_api.js b/test/fixtures/source-map/output/source_map_disabled_by_api.js index b1a28d0eae1c2e..d94a6310cff7ae 100644 --- a/test/fixtures/source-map/output/source_map_disabled_by_api.js +++ b/test/fixtures/source-map/output/source_map_disabled_by_api.js @@ -2,9 +2,12 @@ 'use strict'; require('../../../common'); +const assert = require('assert'); Error.stackTraceLimit = 5; +assert.strictEqual(process.sourceMapsEnabled, true); process.setSourceMapsEnabled(false); +assert.strictEqual(process.sourceMapsEnabled, false); try { require('../enclosing-call-site-min.js'); @@ -17,6 +20,7 @@ delete require.cache[require // Re-enable. process.setSourceMapsEnabled(true); +assert.strictEqual(process.sourceMapsEnabled, true); try { require('../enclosing-call-site-min.js'); diff --git a/test/fixtures/source-map/output/source_map_enabled_by_api.js b/test/fixtures/source-map/output/source_map_enabled_by_api.js index 4c70fa1cb2a240..1dd4f9530c68db 100644 --- a/test/fixtures/source-map/output/source_map_enabled_by_api.js +++ b/test/fixtures/source-map/output/source_map_enabled_by_api.js @@ -1,8 +1,11 @@ 'use strict'; require('../../../common'); +const assert = require('assert'); Error.stackTraceLimit = 5; +assert.strictEqual(process.sourceMapsEnabled, false); process.setSourceMapsEnabled(true); +assert.strictEqual(process.sourceMapsEnabled, true); try { require('../enclosing-call-site-min.js'); @@ -14,6 +17,7 @@ delete require.cache[require .resolve('../enclosing-call-site-min.js')]; process.setSourceMapsEnabled(false); +assert.strictEqual(process.sourceMapsEnabled, false); try { require('../enclosing-call-site-min.js'); From af6dc1754d4abe9656f7111bee464ac5465430e0 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Thu, 17 Aug 2023 19:24:46 +0200 Subject: [PATCH 027/125] bootstrap: do not generate code cache in an unfinalized isolate V8 now no longer supports serializing code cache in an isolate with unfinalized read-only space. So guard the code cache regeneration with the `is_building_snapshot()` flag. When the isolate is created for snapshot generation, the code cache is going to be serialized separately anyway, so there is no need to do it in the builtin loader. 
PR-URL: https://github.com/nodejs/node/pull/49108 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Yagiz Nizipli --- src/node_builtins.cc | 53 ++++++++++++++++++++++++++------------------ src/node_builtins.h | 3 ++- 2 files changed, 34 insertions(+), 22 deletions(-) diff --git a/src/node_builtins.cc b/src/node_builtins.cc index d78ad3dd811432..84815969b6d1fa 100644 --- a/src/node_builtins.cc +++ b/src/node_builtins.cc @@ -254,7 +254,7 @@ MaybeLocal BuiltinLoader::LookupAndCompileInternal( Local context, const char* id, std::vector>* parameters, - BuiltinLoader::Result* result) { + Realm* optional_realm) { Isolate* isolate = context->GetIsolate(); EscapableHandleScope scope(isolate); @@ -320,9 +320,13 @@ MaybeLocal BuiltinLoader::LookupAndCompileInternal( // will never be in any of these two sets, but the two sets are only for // testing anyway. - *result = (has_cache && !script_source.GetCachedData()->rejected) - ? Result::kWithCache - : Result::kWithoutCache; + Result result = (has_cache && !script_source.GetCachedData()->rejected) + ? Result::kWithCache + : Result::kWithoutCache; + if (optional_realm != nullptr) { + DCHECK_EQ(this, optional_realm->env()->builtin_loader()); + RecordResult(id, result, optional_realm); + } if (has_cache) { per_process::Debug(DebugCategory::CODE_CACHE, @@ -336,28 +340,35 @@ MaybeLocal BuiltinLoader::LookupAndCompileInternal( : "is accepted"); } - if (*result == Result::kWithoutCache) { + if (result == Result::kWithoutCache && optional_realm != nullptr && + !optional_realm->env()->isolate_data()->is_building_snapshot()) { // We failed to accept this cache, maybe because it was rejected, maybe // because it wasn't present. Either way, we'll attempt to replace this // code cache info with a new one. - std::shared_ptr new_cached_data( - ScriptCompiler::CreateCodeCacheForFunction(fun)); - CHECK_NOT_NULL(new_cached_data); - - { - RwLock::ScopedLock lock(code_cache_->mutex); - code_cache_->map.insert_or_assign( - id, BuiltinCodeCacheData(std::move(new_cached_data))); - } + // This is only done when the isolate is not being serialized because + // V8 does not support serializing code cache with an unfinalized read-only + // space (which is what isolates pending to be serialized have). + SaveCodeCache(id, fun); } return scope.Escape(fun); } +void BuiltinLoader::SaveCodeCache(const char* id, Local fun) { + std::shared_ptr new_cached_data( + ScriptCompiler::CreateCodeCacheForFunction(fun)); + CHECK_NOT_NULL(new_cached_data); + + { + RwLock::ScopedLock lock(code_cache_->mutex); + code_cache_->map.insert_or_assign( + id, BuiltinCodeCacheData(std::move(new_cached_data))); + } +} + MaybeLocal BuiltinLoader::LookupAndCompile(Local context, const char* id, Realm* optional_realm) { - Result result; std::vector> parameters; Isolate* isolate = context->GetIsolate(); // Detects parameters of the scripts based on module ids. 
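As context for what "code cache" means here: the same V8 mechanism that `SaveCodeCache()` wraps (via `ScriptCompiler::CreateCodeCacheForFunction`) is reachable from userland through `vm.Script`. A minimal sketch using only the public `node:vm` API; this is an analogy, not the builtin loader's actual code path:

```js
// Sketch: producing and reusing V8 code cache data with the public vm API.
const vm = require('node:vm');

const source = 'Math.sqrt(4)';

// First compilation: produce cache data for later runs.
const script = new vm.Script(source);
const cachedData = script.createCachedData();

// Later compilation: hand the cache back; V8 reports whether it accepted it,
// much like the rejected/accepted bookkeeping in the loader above.
const recompiled = new vm.Script(source, { cachedData });
console.log(recompiled.cachedDataRejected); // false if the cache was accepted
```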
@@ -403,11 +414,7 @@ MaybeLocal BuiltinLoader::LookupAndCompile(Local context,
   }
 
   MaybeLocal maybe =
-      LookupAndCompileInternal(context, id, &parameters, &result);
-  if (optional_realm != nullptr) {
-    DCHECK_EQ(this, optional_realm->env()->builtin_loader());
-    RecordResult(id, result, optional_realm);
-  }
+      LookupAndCompileInternal(context, id, &parameters, optional_realm);
   return maybe;
 }
@@ -483,13 +490,17 @@ bool BuiltinLoader::CompileAllBuiltins(Local context) {
       continue;
     }
     v8::TryCatch bootstrapCatch(context->GetIsolate());
-    USE(LookupAndCompile(context, id.data(), nullptr));
+    auto fn = LookupAndCompile(context, id.data(), nullptr);
     if (bootstrapCatch.HasCaught()) {
       per_process::Debug(DebugCategory::CODE_CACHE,
                          "Failed to compile code cache for %s\n",
                          id.data());
       all_succeeded = false;
       PrintCaughtException(context->GetIsolate(), context, bootstrapCatch);
+    } else {
+      // This is used by the snapshot builder, so save the code cache
+      // unconditionally.
+      SaveCodeCache(id.data(), fn.ToLocalChecked());
     }
   }
   return all_succeeded;
diff --git a/src/node_builtins.h b/src/node_builtins.h
index f91c2a8105bfe5..9f2fbc1e539374 100644
--- a/src/node_builtins.h
+++ b/src/node_builtins.h
@@ -147,7 +147,8 @@ class NODE_EXTERN_PRIVATE BuiltinLoader {
       v8::Local context,
       const char* id,
       std::vector>* parameters,
-      Result* result);
+      Realm* optional_realm);
+  void SaveCodeCache(const char* id, v8::Local fn);
   static void RecordResult(const char* id,
                            BuiltinLoader::Result result,

From 283f2806b1c428b54523f0ceaa03b2c35153bc5c Mon Sep 17 00:00:00 2001
From: Chemi Atlow
Date: Thu, 17 Aug 2023 21:21:14 +0300
Subject: [PATCH 028/125] test_runner: expose spec reporter as newable function

Fixes: https://github.com/nodejs/node/issues/48112
Ref: https://github.com/nodejs/node/pull/48208
PR-URL: https://github.com/nodejs/node/pull/49184
Refs: https://github.com/nodejs/node/pull/48208
Reviewed-By: Benjamin Gruenbaum
Reviewed-By: Moshe Atlow
---
 lib/test/reporters.js             |  6 +++---
 test/parallel/test-runner-run.mjs | 31 ++++++++++++++++++++++---------
 2 files changed, 25 insertions(+), 12 deletions(-)

diff --git a/lib/test/reporters.js b/lib/test/reporters.js
index 287c07510bc13a..86aea679b52a7a 100644
--- a/lib/test/reporters.js
+++ b/lib/test/reporters.js
@@ -1,6 +1,6 @@
 'use strict';
-const { ObjectDefineProperties } = primordials;
+const { ObjectDefineProperties, ReflectConstruct } = primordials;
 
 let dot;
 let spec;
@@ -21,9 +21,9 @@ ObjectDefineProperties(module.exports, {
     __proto__: null,
     configurable: true,
     enumerable: true,
-    get() {
+    value: function value() {
       spec ??= require('internal/test_runner/reporter/spec');
-      return spec;
+      return ReflectConstruct(spec, arguments);
     },
   },
   tap: {
diff --git a/test/parallel/test-runner-run.mjs b/test/parallel/test-runner-run.mjs
index 62fab0af146f4d..be15c42d465fca 100644
--- a/test/parallel/test-runner-run.mjs
+++ b/test/parallel/test-runner-run.mjs
@@ -82,15 +82,28 @@ describe('require(\'node:test\').run', { concurrency: true }, () => {
     ]);
   });
 
-  it('should be piped with spec', async () => {
-    const specReporter = new spec();
-    const result = await run({
-      files: [join(testFixtures, 'default-behavior/test/random.cjs')]
-    }).compose(specReporter).toArray();
-    const stringResults = result.map((bfr) => bfr.toString());
-    assert.match(stringResults[0], /this should pass/);
-    assert.match(stringResults[1], /tests 1/);
-    assert.match(stringResults[1], /pass 1/);
+  describe('should be piped with spec reporter', () => {
+    it('new spec', async () => {
+      const specReporter = new spec();
+      const result = await run({
+        files: [join(testFixtures, 'default-behavior/test/random.cjs')]
+      }).compose(specReporter).toArray();
+      const stringResults = result.map((bfr) => bfr.toString());
+      assert.match(stringResults[0], /this should pass/);
+      assert.match(stringResults[1], /tests 1/);
+      assert.match(stringResults[1], /pass 1/);
+    });
+
+    it('spec()', async () => {
+      const specReporter = spec();
+      const result = await run({
+        files: [join(testFixtures, 'default-behavior/test/random.cjs')]
+      }).compose(specReporter).toArray();
+      const stringResults = result.map((bfr) => bfr.toString());
+      assert.match(stringResults[0], /this should pass/);
+      assert.match(stringResults[1], /tests 1/);
+      assert.match(stringResults[1], /pass 1/);
+    });
   });
 
   it('should be piped with tap', async () => {

From ceaa5494de265b995cbd0471418cfe821b855b92 Mon Sep 17 00:00:00 2001
From: Chemi Atlow
Date: Thu, 17 Aug 2023 21:21:22 +0300
Subject: [PATCH 029/125] meta: add test/reporters to codeowners

PR-URL: https://github.com/nodejs/node/pull/49186
Reviewed-By: Yagiz Nizipli
Reviewed-By: Moshe Atlow
Reviewed-By: Trivikram Kamat
Reviewed-By: Benjamin Gruenbaum
Reviewed-By: Rafael Gonzaga
---
 .github/CODEOWNERS | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 47e420a9457bb8..f4615852c3cd17 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -146,6 +146,7 @@
 /lib/internal/main/test_runner.js @nodejs/test_runner
 /lib/internal/test_runner/* @nodejs/test_runner
 /lib/test.js @nodejs/test_runner
+/lib/test/reporters.js @nodejs/test_runner
 /test/parallel/test-runner-* @nodejs/test_runner
 
 # Single Executable Applications

From ec51e25ed714b9abdd8004559a1f1dcb240abfd3 Mon Sep 17 00:00:00 2001
From: Carlos Espa <43477095+Ceres6@users.noreply.github.com>
Date: Thu, 17 Aug 2023 20:39:04 +0200
Subject: [PATCH 030/125] src,permission: add multiple allow-fs-* flags

Support for a single comma-separated list for allow-fs-* flags is
removed. Instead, multiple flags can now be passed to allow multiple
paths.
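Concretely, the new usage looks like the following. This is a sketch modeled on the `test-cli-permission-multiple-allow.js` test added below; the paths are illustrative placeholders:

```js
// Sketch: repeating --allow-fs-read grants access to several paths.
// '/tmp/' and '/other-path/' are placeholders for this illustration.
const { spawnSync } = require('node:child_process');

const { stdout } = spawnSync(process.execPath, [
  '--experimental-permission',
  '--allow-fs-read', '/tmp/',
  '--allow-fs-read', '/other-path/',
  '-e',
  `console.log(process.permission.has('fs.read', '/tmp/'));
   console.log(process.permission.has('fs.read', '/other-path/'));`,
]);

console.log(stdout.toString()); // Expected: "true" on both lines
```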
Fixes: https://github.com/nodejs/security-wg/issues/1039
PR-URL: https://github.com/nodejs/node/pull/49047
Reviewed-By: Rafael Gonzaga
Reviewed-By: Marco Ippolito
---
 doc/api/cli.md                                | 22 ++++-
 doc/api/permissions.md                        |  2 +-
 lib/internal/process/pre_execution.js         | 19 ++++-
 src/env.cc                                    |  6 +-
 src/node_options.h                            |  4 +-
 src/permission/child_process_permission.cc    |  2 +-
 src/permission/child_process_permission.h     |  3 +-
 src/permission/fs_permission.cc               |  6 +-
 src/permission/fs_permission.h                |  3 +-
 src/permission/inspector_permission.cc        |  2 +-
 src/permission/inspector_permission.h         |  3 +-
 src/permission/permission.cc                  |  3 +-
 src/permission/permission.h                   |  2 +-
 src/permission/permission_base.h              |  3 +-
 src/permission/worker_permission.cc           |  3 +-
 src/permission/worker_permission.h            |  3 +-
 .../test-cjs-legacyMainResolve-permission.js  |  8 +-
 .../test-cli-permission-multiple-allow.js     | 83 +++++++++++++++++++
 test/parallel/test-permission-fs-read.js      |  2 +-
 ...test-permission-fs-symlink-target-write.js |  4 +-
 test/parallel/test-permission-fs-symlink.js   |  2 +-
 .../test-permission-fs-traversal-path.js      |  2 +-
 test/parallel/test-permission-fs-wildcard.js  |  6 +-
 test/parallel/test-permission-fs-write.js     |  2 +-
 24 files changed, 161 insertions(+), 34 deletions(-)
 create mode 100644 test/parallel/test-cli-permission-multiple-allow.js

diff --git a/doc/api/cli.md b/doc/api/cli.md
index e24ab1e143aec5..48d761ef5d8da6 100644
--- a/doc/api/cli.md
+++ b/doc/api/cli.md
@@ -145,6 +145,10 @@ Error: Access to this API has been restricted
 
 > Stability: 1 - Experimental
 
@@ -155,8 +159,11 @@ the [Permission Model][].
 The valid arguments for the `--allow-fs-read` flag are:
 
 * `*` - To allow all `FileSystemRead` operations.
-* Paths delimited by comma (`,`) to allow only matching `FileSystemRead`
-  operations.
+* Multiple paths can be allowed using multiple `--allow-fs-read` flags.
+  Example `--allow-fs-read=/folder1/ --allow-fs-read=/folder2/`
+
+Paths delimited by comma (`,`) are no longer allowed.
+When passing a single flag with a comma, a warning will be displayed.
 
 Examples can be found in the [File System Permissions][] documentation.
 
@@ -192,6 +199,10 @@ node --experimental-permission --allow-fs-read=/path/to/index.js index.js
 
 > Stability: 1 - Experimental
 
@@ -202,8 +213,11 @@ the [Permission Model][].
 The valid arguments for the `--allow-fs-write` flag are:
 
 * `*` - To allow all `FileSystemWrite` operations.
-* Paths delimited by comma (`,`) to allow only matching `FileSystemWrite`
-  operations.
+* Multiple paths can be allowed using multiple `--allow-fs-write` flags.
+  Example `--allow-fs-write=/folder1/ --allow-fs-write=/folder2/`
+
+Paths delimited by comma (`,`) are no longer allowed.
+When passing a single flag with a comma, a warning will be displayed.
 
 Examples can be found in the [File System Permissions][] documentation.
 
diff --git a/doc/api/permissions.md b/doc/api/permissions.md
index 2c50c4fbabfb55..0d1ec9e5d61a72 100644
--- a/doc/api/permissions.md
+++ b/doc/api/permissions.md
@@ -532,7 +532,7 @@ Example:
 
 * `--allow-fs-write=*` - It will allow all `FileSystemWrite` operations.
 * `--allow-fs-write=/tmp/` - It will allow `FileSystemWrite` access to the `/tmp/`
   folder.
-* `--allow-fs-read=/tmp/,/home/.gitignore` - It allows `FileSystemRead` access
+* `--allow-fs-read=/tmp/ --allow-fs-read=/home/.gitignore` - It allows `FileSystemRead` access
   to the `/tmp/` folder **and** the `/home/.gitignore` path.
Wildcards are supported too: diff --git a/lib/internal/process/pre_execution.js b/lib/internal/process/pre_execution.js index d79aa41c53e7b6..89b916a6ee66ae 100644 --- a/lib/internal/process/pre_execution.js +++ b/lib/internal/process/pre_execution.js @@ -554,6 +554,22 @@ function initializePermission() { 'It could invalidate the permission model.', 'SecurityWarning'); } } + const warnCommaFlags = [ + '--allow-fs-read', + '--allow-fs-write', + ]; + for (const flag of warnCommaFlags) { + const value = getOptionValue(flag); + if (value.length === 1 && value[0].includes(',')) { + process.emitWarning( + `The ${flag} CLI flag has changed. ` + + 'Passing a comma-separated list of paths is no longer valid. ' + + 'Documentation can be found at ' + + 'https://nodejs.org/api/permissions.html#file-system-permissions', + 'Warning', + ); + } + } ObjectDefineProperty(process, 'permission', { __proto__: null, @@ -572,7 +588,8 @@ function initializePermission() { '--allow-worker', ]; ArrayPrototypeForEach(availablePermissionFlags, (flag) => { - if (getOptionValue(flag)) { + const value = getOptionValue(flag); + if (value.length) { throw new ERR_MISSING_OPTION('--experimental-permission'); } }); diff --git a/src/env.cc b/src/env.cc index 20b404996ccd0f..1d8df40c3446ac 100644 --- a/src/env.cc +++ b/src/env.cc @@ -875,12 +875,12 @@ Environment::Environment(IsolateData* isolate_data, // unless explicitly allowed by the user options_->allow_native_addons = false; flags_ = flags_ | EnvironmentFlags::kNoCreateInspector; - permission()->Apply("*", permission::PermissionScope::kInspector); + permission()->Apply({"*"}, permission::PermissionScope::kInspector); if (!options_->allow_child_process) { - permission()->Apply("*", permission::PermissionScope::kChildProcess); + permission()->Apply({"*"}, permission::PermissionScope::kChildProcess); } if (!options_->allow_worker_threads) { - permission()->Apply("*", permission::PermissionScope::kWorkerThreads); + permission()->Apply({"*"}, permission::PermissionScope::kWorkerThreads); } if (!options_->allow_fs_read.empty()) { diff --git a/src/node_options.h b/src/node_options.h index 1cc575bb9c7e3c..bc18a45e681a3c 100644 --- a/src/node_options.h +++ b/src/node_options.h @@ -121,8 +121,8 @@ class EnvironmentOptions : public Options { std::string experimental_policy_integrity; bool has_policy_integrity_string = false; bool experimental_permission = false; - std::string allow_fs_read; - std::string allow_fs_write; + std::vector allow_fs_read; + std::vector allow_fs_write; bool allow_child_process = false; bool allow_worker_threads = false; bool experimental_repl_await = true; diff --git a/src/permission/child_process_permission.cc b/src/permission/child_process_permission.cc index 7151eb15f90da2..de078febf4bcd9 100644 --- a/src/permission/child_process_permission.cc +++ b/src/permission/child_process_permission.cc @@ -9,7 +9,7 @@ namespace permission { // Currently, ChildProcess manage a single state // Once denied, it's always denied -void ChildProcessPermission::Apply(const std::string& allow, +void ChildProcessPermission::Apply(const std::vector& allow, PermissionScope scope) { deny_all_ = true; } diff --git a/src/permission/child_process_permission.h b/src/permission/child_process_permission.h index b67169f1c4e180..cf0ec97d5021a3 100644 --- a/src/permission/child_process_permission.h +++ b/src/permission/child_process_permission.h @@ -12,7 +12,8 @@ namespace permission { class ChildProcessPermission final : public PermissionBase { public: - void Apply(const std::string& 
allow, PermissionScope scope) override; + void Apply(const std::vector& allow, + PermissionScope scope) override; bool is_granted(PermissionScope perm, const std::string_view& param = "") override; diff --git a/src/permission/fs_permission.cc b/src/permission/fs_permission.cc index 91c63dff6582a8..fadf75968c779d 100644 --- a/src/permission/fs_permission.cc +++ b/src/permission/fs_permission.cc @@ -116,9 +116,11 @@ namespace permission { // allow = '*' // allow = '/tmp/,/home/example.js' -void FSPermission::Apply(const std::string& allow, PermissionScope scope) { +void FSPermission::Apply(const std::vector& allow, + PermissionScope scope) { using std::string_view_literals::operator""sv; - for (const std::string_view res : SplitString(allow, ","sv)) { + + for (const std::string_view res : allow) { if (res == "*"sv) { if (scope == PermissionScope::kFileSystemRead) { deny_all_in_ = false; diff --git a/src/permission/fs_permission.h b/src/permission/fs_permission.h index 217d0a92d6ce71..244e95727ad487 100644 --- a/src/permission/fs_permission.h +++ b/src/permission/fs_permission.h @@ -15,7 +15,8 @@ namespace permission { class FSPermission final : public PermissionBase { public: - void Apply(const std::string& allow, PermissionScope scope) override; + void Apply(const std::vector& allow, + PermissionScope scope) override; bool is_granted(PermissionScope perm, const std::string_view& param) override; struct RadixTree { diff --git a/src/permission/inspector_permission.cc b/src/permission/inspector_permission.cc index 3cff03433b4225..401d801ac0adb5 100644 --- a/src/permission/inspector_permission.cc +++ b/src/permission/inspector_permission.cc @@ -8,7 +8,7 @@ namespace permission { // Currently, Inspector manage a single state // Once denied, it's always denied -void InspectorPermission::Apply(const std::string& allow, +void InspectorPermission::Apply(const std::vector& allow, PermissionScope scope) { deny_all_ = true; } diff --git a/src/permission/inspector_permission.h b/src/permission/inspector_permission.h index 33eb25732c0d4d..e5c6d1b81677f5 100644 --- a/src/permission/inspector_permission.h +++ b/src/permission/inspector_permission.h @@ -12,7 +12,8 @@ namespace permission { class InspectorPermission final : public PermissionBase { public: - void Apply(const std::string& allow, PermissionScope scope) override; + void Apply(const std::vector& allow, + PermissionScope scope) override; bool is_granted(PermissionScope perm, const std::string_view& param = "") override; diff --git a/src/permission/permission.cc b/src/permission/permission.cc index 38767e46093f0b..4392f49b66e9b7 100644 --- a/src/permission/permission.cc +++ b/src/permission/permission.cc @@ -130,7 +130,8 @@ void Permission::EnablePermissions() { } } -void Permission::Apply(const std::string& allow, PermissionScope scope) { +void Permission::Apply(const std::vector& allow, + PermissionScope scope) { auto permission = nodes_.find(scope); if (permission != nodes_.end()) { permission->second->Apply(allow, scope); diff --git a/src/permission/permission.h b/src/permission/permission.h index 3252e8d540d306..942937a80cae28 100644 --- a/src/permission/permission.h +++ b/src/permission/permission.h @@ -49,7 +49,7 @@ class Permission { const std::string_view& res); // CLI Call - void Apply(const std::string& allow, PermissionScope scope); + void Apply(const std::vector& allow, PermissionScope scope); void EnablePermissions(); private: diff --git a/src/permission/permission_base.h b/src/permission/permission_base.h index 
c4728e40ce8f2c..c2f377424f6fc5 100644 --- a/src/permission/permission_base.h +++ b/src/permission/permission_base.h @@ -39,7 +39,8 @@ enum class PermissionScope { class PermissionBase { public: - virtual void Apply(const std::string& allow, PermissionScope scope) = 0; + virtual void Apply(const std::vector& allow, + PermissionScope scope) = 0; virtual bool is_granted(PermissionScope perm, const std::string_view& param = "") = 0; }; diff --git a/src/permission/worker_permission.cc b/src/permission/worker_permission.cc index 69c89a4a4fea87..a18938e5fe1efd 100644 --- a/src/permission/worker_permission.cc +++ b/src/permission/worker_permission.cc @@ -9,7 +9,8 @@ namespace permission { // Currently, PolicyDenyWorker manage a single state // Once denied, it's always denied -void WorkerPermission::Apply(const std::string& allow, PermissionScope scope) { +void WorkerPermission::Apply(const std::vector& allow, + PermissionScope scope) { deny_all_ = true; } diff --git a/src/permission/worker_permission.h b/src/permission/worker_permission.h index 71681a4485a82f..cdc224925c2291 100644 --- a/src/permission/worker_permission.h +++ b/src/permission/worker_permission.h @@ -12,7 +12,8 @@ namespace permission { class WorkerPermission final : public PermissionBase { public: - void Apply(const std::string& allow, PermissionScope scope) override; + void Apply(const std::vector& allow, + PermissionScope scope) override; bool is_granted(PermissionScope perm, const std::string_view& param = "") override; diff --git a/test/es-module/test-cjs-legacyMainResolve-permission.js b/test/es-module/test-cjs-legacyMainResolve-permission.js index 9f78883eebb7cd..b45e3dee3bbfcd 100644 --- a/test/es-module/test-cjs-legacyMainResolve-permission.js +++ b/test/es-module/test-cjs-legacyMainResolve-permission.js @@ -31,7 +31,9 @@ describe('legacyMainResolve', () => { for (const [mainOrFolder, allowReads] of paths) { const allowReadFilePaths = allowReads.map((filepath) => path.resolve(fixtextureFolder, filepath)); - const allowReadFiles = allowReads?.length > 0 ? ['--allow-fs-read', allowReadFilePaths.join(',')] : []; + const allowReadFiles = allowReads?.length > 0 ? + allowReadFilePaths.flatMap((path) => ['--allow-fs-read', path]) : + []; const fixtextureFolderEscaped = escapeWhenSepIsBackSlash(fixtextureFolder); const { status, stderr } = spawnSync( @@ -85,7 +87,9 @@ describe('legacyMainResolve', () => { for (const [folder, expectedFile, allowReads] of paths) { const allowReadFilePaths = allowReads.map((filepath) => path.resolve(fixtextureFolder, folder, filepath)); - const allowReadFiles = allowReads?.length > 0 ? ['--allow-fs-read', allowReadFilePaths.join(',')] : []; + const allowReadFiles = allowReads?.length > 0 ? 
+ allowReadFilePaths.flatMap((path) => ['--allow-fs-read', path]) : + []; const fixtextureFolderEscaped = escapeWhenSepIsBackSlash(fixtextureFolder); const { status, stderr } = spawnSync( diff --git a/test/parallel/test-cli-permission-multiple-allow.js b/test/parallel/test-cli-permission-multiple-allow.js new file mode 100644 index 00000000000000..68e9029b1ddb45 --- /dev/null +++ b/test/parallel/test-cli-permission-multiple-allow.js @@ -0,0 +1,83 @@ +'use strict'; + +require('../common'); + +const { spawnSync } = require('child_process'); +const assert = require('assert'); +const path = require('path'); + +{ + const tmpPath = path.resolve('/tmp/'); + const otherPath = path.resolve('/other-path/'); + const { status, stdout } = spawnSync( + process.execPath, + [ + '--experimental-permission', + '--allow-fs-write', tmpPath, '--allow-fs-write', otherPath, '-e', + `console.log(process.permission.has("fs")); + console.log(process.permission.has("fs.read")); + console.log(process.permission.has("fs.write")); + console.log(process.permission.has("fs.write", "/tmp/")); + console.log(process.permission.has("fs.write", "/other-path/"));`, + ] + ); + const [fs, fsIn, fsOut, fsOutAllowed1, fsOutAllowed2] = stdout.toString().split('\n'); + assert.strictEqual(fs, 'false'); + assert.strictEqual(fsIn, 'false'); + assert.strictEqual(fsOut, 'false'); + assert.strictEqual(fsOutAllowed1, 'true'); + assert.strictEqual(fsOutAllowed2, 'true'); + assert.strictEqual(status, 0); +} + +{ + const tmpPath = path.resolve('/tmp/'); + const pathWithComma = path.resolve('/other,path/'); + const { status, stdout } = spawnSync( + process.execPath, + [ + '--experimental-permission', + '--allow-fs-write', + tmpPath, + '--allow-fs-write', + pathWithComma, + '-e', + `console.log(process.permission.has("fs")); + console.log(process.permission.has("fs.read")); + console.log(process.permission.has("fs.write")); + console.log(process.permission.has("fs.write", "/tmp/")); + console.log(process.permission.has("fs.write", "/other,path/"));`, + ] + ); + const [fs, fsIn, fsOut, fsOutAllowed1, fsOutAllowed2] = stdout.toString().split('\n'); + assert.strictEqual(fs, 'false'); + assert.strictEqual(fsIn, 'false'); + assert.strictEqual(fsOut, 'false'); + assert.strictEqual(fsOutAllowed1, 'true'); + assert.strictEqual(fsOutAllowed2, 'true'); + assert.strictEqual(status, 0); +} + +{ + const filePath = path.resolve('/tmp/file,with,comma.txt'); + const { status, stdout, stderr } = spawnSync( + process.execPath, + [ + '--experimental-permission', + '--allow-fs-read=*', + `--allow-fs-write=${filePath}`, + '-e', + `console.log(process.permission.has("fs")); + console.log(process.permission.has("fs.read")); + console.log(process.permission.has("fs.write")); + console.log(process.permission.has("fs.write", "/tmp/file,with,comma.txt"));`, + ] + ); + const [fs, fsIn, fsOut, fsOutAllowed] = stdout.toString().split('\n'); + assert.strictEqual(fs, 'false'); + assert.strictEqual(fsIn, 'true'); + assert.strictEqual(fsOut, 'false'); + assert.strictEqual(fsOutAllowed, 'true'); + assert.strictEqual(status, 0); + assert.ok(stderr.toString().includes('Warning: The --allow-fs-write CLI flag has changed.')); +} diff --git a/test/parallel/test-permission-fs-read.js b/test/parallel/test-permission-fs-read.js index 010a5932c4eae1..5be993c9df6be5 100644 --- a/test/parallel/test-permission-fs-read.js +++ b/test/parallel/test-permission-fs-read.js @@ -28,7 +28,7 @@ const commonPath = path.join(__filename, '../../common'); const { status, stderr } = spawnSync( 
process.execPath, [ - '--experimental-permission', `--allow-fs-read=${file},${commonPathWildcard}`, file, + '--experimental-permission', `--allow-fs-read=${file}`, `--allow-fs-read=${commonPathWildcard}`, file, ], { env: { diff --git a/test/parallel/test-permission-fs-symlink-target-write.js b/test/parallel/test-permission-fs-symlink-target-write.js index 55251cb163b126..e2b4aa2a657442 100644 --- a/test/parallel/test-permission-fs-symlink-target-write.js +++ b/test/parallel/test-permission-fs-symlink-target-write.js @@ -36,8 +36,8 @@ fs.writeFileSync(path.join(readWriteFolder, 'file'), 'NO evil file contents'); process.execPath, [ '--experimental-permission', - `--allow-fs-read=${file},${commonPathWildcard},${readOnlyFolder},${readWriteFolder}`, - `--allow-fs-write=${readWriteFolder},${writeOnlyFolder}`, + `--allow-fs-read=${file}`, `--allow-fs-read=${commonPathWildcard}`, `--allow-fs-read=${readOnlyFolder}`, `--allow-fs-read=${readWriteFolder}`, + `--allow-fs-write=${readWriteFolder}`, `--allow-fs-write=${writeOnlyFolder}`, file, ], { diff --git a/test/parallel/test-permission-fs-symlink.js b/test/parallel/test-permission-fs-symlink.js index 808965001accd5..c7d753c267c1e7 100644 --- a/test/parallel/test-permission-fs-symlink.js +++ b/test/parallel/test-permission-fs-symlink.js @@ -37,7 +37,7 @@ const symlinkFromBlockedFile = tmpdir.resolve('example-symlink.md'); process.execPath, [ '--experimental-permission', - `--allow-fs-read=${file},${commonPathWildcard},${symlinkFromBlockedFile}`, + `--allow-fs-read=${file}`, `--allow-fs-read=${commonPathWildcard}`, `--allow-fs-read=${symlinkFromBlockedFile}`, `--allow-fs-write=${symlinkFromBlockedFile}`, file, ], diff --git a/test/parallel/test-permission-fs-traversal-path.js b/test/parallel/test-permission-fs-traversal-path.js index b84bed65db3b0a..547cd81c77cf18 100644 --- a/test/parallel/test-permission-fs-traversal-path.js +++ b/test/parallel/test-permission-fs-traversal-path.js @@ -31,7 +31,7 @@ const commonPathWildcard = path.join(__filename, '../../common*'); process.execPath, [ '--experimental-permission', - `--allow-fs-read=${file},${commonPathWildcard},${allowedFolder}`, + `--allow-fs-read=${file}`, `--allow-fs-read=${commonPathWildcard}`, `--allow-fs-read=${allowedFolder}`, `--allow-fs-write=${allowedFolder}`, file, ], diff --git a/test/parallel/test-permission-fs-wildcard.js b/test/parallel/test-permission-fs-wildcard.js index 5b0dc411666013..0c81ff5da51b87 100644 --- a/test/parallel/test-permission-fs-wildcard.js +++ b/test/parallel/test-permission-fs-wildcard.js @@ -32,7 +32,7 @@ if (common.isWindows) { process.execPath, [ '--experimental-permission', - `--allow-fs-read=${allowList.join(',')}`, + ...allowList.flatMap((path) => ['--allow-fs-read', path]), '-e', ` const path = require('path'); @@ -67,7 +67,7 @@ if (common.isWindows) { process.execPath, [ '--experimental-permission', - `--allow-fs-read=${allowList.join(',')}`, + ...allowList.flatMap((path) => ['--allow-fs-read', path]), '-e', ` const assert = require('assert') @@ -92,7 +92,7 @@ if (common.isWindows) { process.execPath, [ '--experimental-permission', - `--allow-fs-read=${file},${commonPathWildcard},${allowList.join(',')}`, + `--allow-fs-read=${file}`, `--allow-fs-read=${commonPathWildcard}`, ...allowList.flatMap((path) => ['--allow-fs-read', path]), file, ], ); diff --git a/test/parallel/test-permission-fs-write.js b/test/parallel/test-permission-fs-write.js index 9f257df86f8672..626c00e5c007a2 100644 --- a/test/parallel/test-permission-fs-write.js +++ 
b/test/parallel/test-permission-fs-write.js @@ -26,7 +26,7 @@ const file = fixtures.path('permission', 'fs-write.js'); [ '--experimental-permission', '--allow-fs-read=*', - `--allow-fs-write=${regularFile},${commonPath}`, + `--allow-fs-write=${regularFile}`, `--allow-fs-write=${commonPath}`, file, ], { From 777931f499233828571c1ee34964d16ea5b98590 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Thu, 17 Aug 2023 21:05:44 +0200 Subject: [PATCH 031/125] doc: fix wording in napi_async_init Refs: https://github.com/nodejs/node/pull/32930 PR-URL: https://github.com/nodejs/node/pull/49180 Reviewed-By: Chengzhong Wu Reviewed-By: Michael Dawson Reviewed-By: Luigi Pinca --- doc/api/n-api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 786df00f9a7960..26ca5dc83fb34f 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -5633,7 +5633,7 @@ problems like loss of async context when using the `AsyncLocalStorage` API. In order to retain ABI compatibility with previous versions, passing `NULL` for `async_resource` does not result in an error. However, this is not -recommended as this will result poor results with `async_hooks` +recommended as this will result in undesirable behavior with `async_hooks` [`init` hooks][] and `async_hooks.executionAsyncResource()` as the resource is now required by the underlying `async_hooks` implementation in order to provide the linkage between async callbacks. From 9fcd99a7443533911905b338703044efe522fc2e Mon Sep 17 00:00:00 2001 From: Rafael Gonzaga Date: Thu, 17 Aug 2023 16:23:25 -0300 Subject: [PATCH 032/125] doc: update to semver-minor releases by default PR-URL: https://github.com/nodejs/node/pull/49175 Reviewed-By: Moshe Atlow Reviewed-By: Ruy Adorno Reviewed-By: Luigi Pinca --- doc/contributing/releases.md | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/doc/contributing/releases.md b/doc/contributing/releases.md index 27907bdaeb7492..0020ec59c8bd02 100644 --- a/doc/contributing/releases.md +++ b/doc/contributing/releases.md @@ -182,10 +182,10 @@ metadata, as well as the GitHub labels such as `semver-minor` and omitted from a commit, the commit will show up because it's unsure if it's a duplicate or not. -For a list of commits that could be landed in a patch release on v1.x: +For a list of commits that could be landed in a minor release on v1.x: ```bash -branch-diff v1.x-staging main --exclude-label=semver-major,semver-minor,dont-land-on-v1.x,backport-requested-v1.x,backport-blocked-v1.x,backport-open-v1.x,backported-to-v1.x --filter-release --format=simple +branch-diff v1.x-staging main --exclude-label=semver-major,dont-land-on-v1.x,backport-requested-v1.x,backport-blocked-v1.x,backport-open-v1.x,backported-to-v1.x --filter-release --format=simple ``` Previously released commits and version bumps do not need to be @@ -201,13 +201,15 @@ Carefully review the list of commits: `baking-for-lts` tag. When you are ready to cherry-pick commits, you can automate with the following -command. (For semver-minor releases, make sure to remove the `semver-minor` tag -from `exclude-label`.) +command. 
```bash -branch-diff v1.x-staging main --exclude-label=semver-major,semver-minor,dont-land-on-v1.x,backport-requested-v1.x,backport-blocked-v1.x,backport-open-v1.x,backported-to-v1.x --filter-release --format=sha --reverse | xargs git cherry-pick +branch-diff v1.x-staging main --exclude-label=semver-major,dont-land-on-v1.x,backport-requested-v1.x,backport-blocked-v1.x,backport-open-v1.x,backported-to-v1.x --filter-release --format=sha --reverse | xargs git cherry-pick ``` +For patch releases, make sure to add the `semver-minor` tag +to `exclude-label`. + When cherry-picking commits, if there are simple conflicts you can resolve them. Otherwise, add the `backport-requested-vN.x` label to the original PR and post a comment stating that it does not land cleanly and will require a From 3df2251a6ae734c6fc84aa27ad516ff77ec565e3 Mon Sep 17 00:00:00 2001 From: Rafael Gonzaga Date: Thu, 17 Aug 2023 16:40:20 -0300 Subject: [PATCH 033/125] doc: add h1 summary to security release process PR-URL: https://github.com/nodejs/node/pull/49112 Reviewed-By: Moshe Atlow Reviewed-By: Matteo Collina Reviewed-By: Michael Dawson --- doc/contributing/security-release-process.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/contributing/security-release-process.md b/doc/contributing/security-release-process.md index 557ff8b7a9ec2b..fd33f3ccbb5afd 100644 --- a/doc/contributing/security-release-process.md +++ b/doc/contributing/security-release-process.md @@ -56,6 +56,8 @@ The current security stewards are documented in the main Node.js * [ ] pre-release: _**LINK TO PR**_ * [ ] post-release: _**LINK TO PR**_ * List vulnerabilities in order of descending severity + * Use the "summary" feature in HackerOne to sync post-release content + and CVE requests. Example [2038134](https://hackerone.com/bugs?subject=nodejs\&report_id=2038134) * Ask the HackerOne reporter if they would like to be credited on the security release blog page: ```text @@ -79,6 +81,9 @@ The current security stewards are documented in the main Node.js between Security Releases. * Pass `make test` * Have CVEs + * Use the "summary" feature in HackerOne to create a description for the + CVE and the post-release announcement. + Example [2038134](https://hackerone.com/bugs?subject=nodejs\&report_id=2038134) * Make sure that dependent libraries have CVEs for their issues. We should only create CVEs for vulnerabilities in Node.js itself. This is to avoid having duplicate CVEs for the same vulnerability. From edf278d60d1dc8b9af1ee7fcb3dd4e43a9271160 Mon Sep 17 00:00:00 2001 From: Rafael Gonzaga Date: Thu, 17 Aug 2023 18:09:36 -0300 Subject: [PATCH 034/125] doc: add notable-change label mention to PR template PR-URL: https://github.com/nodejs/node/pull/49188 Reviewed-By: Michael Dawson Reviewed-By: Antoine du Hamel Reviewed-By: Darshan Sen --- .github/PULL_REQUEST_TEMPLATE.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 7e72cfbd77e972..936c2a06125795 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,6 +10,9 @@ For code changes: 2. Update documentation if relevant. 3. Ensure that `make -j4 test` (UNIX), or `vcbuild test` (Windows) passes. +If you believe this PR should be highlighted in the Node.js CHANGELOG, +please add the `notable-change` label.
+ Developer's Certificate of Origin 1.1 By making a contribution to this project, I certify that: From ab975233cca376165b091f784b6be4a64226e47d Mon Sep 17 00:00:00 2001 From: Rafael Gonzaga Date: Thu, 17 Aug 2023 18:19:23 -0300 Subject: [PATCH 035/125] meta: mention nodejs/tsc when changing GH templates MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/49189 Reviewed-By: Yagiz Nizipli Reviewed-By: Moshe Atlow Reviewed-By: Trivikram Kamat Reviewed-By: Michaël Zasso Reviewed-By: Gireesh Punathil Reviewed-By: Darshan Sen --- .github/CODEOWNERS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f4615852c3cd17..f803463dae15db 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -11,6 +11,8 @@ # tsc /.github/CODEOWNERS @nodejs/tsc +/.github/PULL_REQUEST_TEMPLATE.md @nodejs/tsc +/.github/ISSUE_TEMPLATE/* @nodejs/tsc /CODE_OF_CONDUCT.md @nodejs/tsc /CONTRIBUTING.md @nodejs/tsc /doc/contributing/*.md @nodejs/tsc From f37444e896b384b4bcaa7718220e04f250517672 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 18 Aug 2023 13:17:13 +0200 Subject: [PATCH 036/125] bootstrap: build code cache from deserialized isolate V8 now requires the code cache to be compiled with a finalized read-only space, so we need to serialize the snapshot to get a finalized read-only space first, then deserialize it to compile the code cache. PR-URL: https://github.com/nodejs/node/pull/49099 Refs: https://github.com/nodejs/node/issues/47636 Refs: https://bugs.chromium.org/p/v8/issues/detail?id=13789 Reviewed-By: Yagiz Nizipli --- src/node_snapshotable.cc | 91 ++++++++++++++++++++++++++++++++-------- 1 file changed, 73 insertions(+), 18 deletions(-) diff --git a/src/node_snapshotable.cc b/src/node_snapshotable.cc index da66bab7ea3147..1d93f846a1a981 100644 --- a/src/node_snapshotable.cc +++ b/src/node_snapshotable.cc @@ -911,7 +911,7 @@ void SnapshotBuilder::InitializeIsolateParams(const SnapshotData* data, const_cast(&(data->v8_snapshot_blob_data)); } -ExitCode SnapshotBuilder::Generate( +ExitCode BuildSnapshotWithoutCodeCache( SnapshotData* out, const std::vector& args, const std::vector& exec_args, @@ -933,8 +933,8 @@ ExitCode SnapshotBuilder::Generate( fprintf(stderr, "%s: %s\n", args[0].c_str(), err.c_str()); return ExitCode::kBootstrapFailure; } - Isolate* isolate = setup->isolate(); + Isolate* isolate = setup->isolate(); { HandleScope scope(isolate); TryCatch bootstrapCatch(isolate); @@ -968,7 +968,77 @@ ExitCode SnapshotBuilder::Generate( } } - return CreateSnapshot(out, setup.get(), static_cast(snapshot_type)); + return SnapshotBuilder::CreateSnapshot( + out, setup.get(), static_cast(snapshot_type)); +} + +ExitCode BuildCodeCacheFromSnapshot(SnapshotData* out, + const std::vector& args, + const std::vector& exec_args) { + std::vector errors; + auto data_wrapper = out->AsEmbedderWrapper(); + auto setup = CommonEnvironmentSetup::CreateFromSnapshot( + per_process::v8_platform.Platform(), + &errors, + data_wrapper.get(), + args, + exec_args); + if (!setup) { + for (const auto& err : errors) + fprintf(stderr, "%s: %s\n", args[0].c_str(), err.c_str()); + return ExitCode::kBootstrapFailure; + } + + Isolate* isolate = setup->isolate(); + v8::Locker locker(isolate); + Isolate::Scope isolate_scope(isolate); + HandleScope handle_scope(isolate); + TryCatch bootstrapCatch(isolate); + + auto print_Exception = OnScopeLeave([&]() { + if (bootstrapCatch.HasCaught()) { + PrintCaughtException( + 
isolate, isolate->GetCurrentContext(), bootstrapCatch); + } + }); + + Environment* env = setup->env(); + // Regenerate all the code cache. + if (!env->builtin_loader()->CompileAllBuiltins(setup->context())) { + return ExitCode::kGenericUserError; + } + env->builtin_loader()->CopyCodeCache(&(out->code_cache)); + if (per_process::enabled_debug_list.enabled(DebugCategory::MKSNAPSHOT)) { + for (const auto& item : out->code_cache) { + std::string size_str = FormatSize(item.data.length); + per_process::Debug(DebugCategory::MKSNAPSHOT, + "Generated code cache for %d: %s\n", + item.id.c_str(), + size_str.c_str()); + } + } + return ExitCode::kNoFailure; +} + +ExitCode SnapshotBuilder::Generate( + SnapshotData* out, + const std::vector& args, + const std::vector& exec_args, + std::optional main_script) { + ExitCode code = + BuildSnapshotWithoutCodeCache(out, args, exec_args, main_script); + if (code != ExitCode::kNoFailure) { + return code; + } + +#ifdef NODE_USE_NODE_CODE_CACHE + // Deserialize the snapshot to recompile code cache. We need to do this in the + // second pass because V8 requires the code cache to be compiled with a + // finalized read-only space. + return BuildCodeCacheFromSnapshot(out, args, exec_args); +#else + return ExitCode::kNoFailure; +#endif } ExitCode SnapshotBuilder::CreateSnapshot(SnapshotData* out, @@ -1021,21 +1091,6 @@ ExitCode SnapshotBuilder::CreateSnapshot(SnapshotData* out, out->isolate_data_info = setup->isolate_data()->Serialize(creator); out->env_info = env->Serialize(creator); -#ifdef NODE_USE_NODE_CODE_CACHE - // Regenerate all the code cache. - if (!env->builtin_loader()->CompileAllBuiltins(main_context)) { - return ExitCode::kGenericUserError; - } - env->builtin_loader()->CopyCodeCache(&(out->code_cache)); - for (const auto& item : out->code_cache) { - std::string size_str = FormatSize(item.data.length); - per_process::Debug(DebugCategory::MKSNAPSHOT, - "Generated code cache for %d: %s\n", - item.id.c_str(), - size_str.c_str()); - } -#endif - ResetContextSettingsBeforeSnapshot(main_context); } From dc8fff9a75a586d0f4f93e85975cbaad9ed762c9 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 18 Aug 2023 13:17:24 +0200 Subject: [PATCH 037/125] test: use gcUntil() in test-v8-serialize-leak Previously this test could be flaky because there could be a delay in the deallocation after gc() is invoked. Use gcUntil() to run the GC multiple times to make the test more robust.
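As a rough illustration of the pattern (a hedged sketch, not the exact `test/common` implementation — assuming a helper of the shape `gcUntil(name, condition)` and a process started with `--expose-gc` so that `global.gc()` exists):

```js
'use strict';
// Hypothetical, simplified gcUntil(): trigger GC repeatedly and re-check the
// condition, since deallocation may lag behind any single gc() call.
function gcUntil(name, condition) {
  return new Promise((resolve, reject) => {
    let count = 0;
    (function gcAndCheck() {
      setImmediate(() => {
        count++;
        global.gc(); // Requires running node with --expose-gc.
        if (condition()) {
          resolve();
        } else if (count < 10) { // The retry bound here is an arbitrary choice.
          gcAndCheck();
        } else {
          reject(new Error(`${name} did not hold after ${count} GC runs`));
        }
      });
    })();
  });
}
```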
PR-URL: https://github.com/nodejs/node/pull/49168 Reviewed-By: Yagiz Nizipli Reviewed-By: Luigi Pinca --- test/parallel/test-v8-serialize-leak.js | 26 ++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/test/parallel/test-v8-serialize-leak.js b/test/parallel/test-v8-serialize-leak.js index 696dbfea65ba95..89b36c4a248dd4 100644 --- a/test/parallel/test-v8-serialize-leak.js +++ b/test/parallel/test-v8-serialize-leak.js @@ -8,7 +8,6 @@ if (common.isIBMi) common.skip('On IBMi, the rss memory always returns zero'); const v8 = require('v8'); -const assert = require('assert'); const before = process.memoryUsage.rss(); @@ -16,14 +15,19 @@ for (let i = 0; i < 1000000; i++) { v8.serialize(''); } -global.gc(); - -const after = process.memoryUsage.rss(); - -if (process.config.variables.asan) { - assert(after < before * 10, `asan: before=${before} after=${after}`); -} else if (process.config.variables.node_builtin_modules_path) { - assert(after < before * 4, `node_builtin_modules_path: before=${before} after=${after}`); -} else { - assert(after < before * 2, `before=${before} after=${after}`); +async function main() { + await common.gcUntil('RSS should go down', () => { + const after = process.memoryUsage.rss(); + if (process.config.variables.asan) { + console.log(`asan: before=${before} after=${after}`); + return after < before * 10; + } else if (process.config.variables.node_builtin_modules_path) { + console.log(`node_builtin_modules_path: before=${before} after=${after}`); + return after < before * 10; + } + console.log(`before=${before} after=${after}`); + return after < before * 10; + }); } + +main(); From 9610008b793baab47081e329d7748534a2694613 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 18 Aug 2023 13:49:56 +0200 Subject: [PATCH 038/125] test: make test-perf-hooks more robust and work with workers Previously the test makes several assumptions about the absolute values of the nodeTiming fields, which can make the test flaky on slow machines. This patch rewrites the test to check the relative values instead. It also updates the test to make it work with workers instead of directly skipping in workers. PR-URL: https://github.com/nodejs/node/pull/49197 Refs: https://github.com/nodejs/reliability/issues/638 Reviewed-By: Debadree Chatterjee Reviewed-By: Antoine du Hamel --- test/sequential/test-perf-hooks.js | 205 ++++++++++++++++++++--------- 1 file changed, 143 insertions(+), 62 deletions(-) diff --git a/test/sequential/test-perf-hooks.js b/test/sequential/test-perf-hooks.js index d0ee0f5aad04c8..5ed9ff22ce2d38 100644 --- a/test/sequential/test-perf-hooks.js +++ b/test/sequential/test-perf-hooks.js @@ -1,84 +1,165 @@ 'use strict'; const common = require('../common'); -const assert = require('assert'); const { performance } = require('perf_hooks'); +// Get the start time as soon as possible. +const testStartTime = performance.now(); +const assert = require('assert'); +const { writeSync } = require('fs'); -if (!common.isMainThread) - common.skip('bootstrapping workers works differently'); +// Use writeSync to stdout to avoid disturbing the loop. +function log(str) { + writeSync(1, str + '\n'); +} assert(performance); assert(performance.nodeTiming); assert.strictEqual(typeof performance.timeOrigin, 'number'); + +assert(testStartTime > 0, `${testStartTime} <= 0`); // Use a fairly large epsilon value, since we can only guarantee that the node // process started up in 15 seconds. 
-assert(Math.abs(performance.timeOrigin - Date.now()) < 15000); +assert(testStartTime < 15000, `${testStartTime} >= 15000`); -const inited = performance.now(); -assert(inited < 15000); +// Use different ways to calculate process uptime to check that +// performance.timeOrigin and performance.now() are in reasonable range. +const epsilon = 50; +{ + const uptime1 = Date.now() - performance.timeOrigin; + const uptime2 = performance.now(); + const uptime3 = process.uptime() * 1000; + assert(Math.abs(uptime1 - uptime2) < epsilon, + `Date.now() - performance.timeOrigin (${uptime1}) - ` + + `performance.now() (${uptime2}) = ` + + `${uptime1 - uptime2} >= +- ${epsilon}`); + assert(Math.abs(uptime1 - uptime3) < epsilon, + `Date.now() - performance.timeOrigin (${uptime1}) - ` + + `process.uptime() * 1000 (${uptime3}) = ` + + `${uptime1 - uptime3} >= +- ${epsilon}`); +} assert.strictEqual(performance.nodeTiming.name, 'node'); assert.strictEqual(performance.nodeTiming.entryType, 'node'); -const delay = 250; -function checkNodeTiming(props) { - console.log(props); - - for (const prop of Object.keys(props)) { - if (props[prop].around !== undefined) { - assert.strictEqual(typeof performance.nodeTiming[prop], 'number'); - const delta = performance.nodeTiming[prop] - props[prop].around; - assert( - Math.abs(delta) < (props[prop].delay || delay), - `${prop}: ${Math.abs(delta)} >= ${props[prop].delay || delay}` - ); - } else { - assert.strictEqual(performance.nodeTiming[prop], props[prop], - `mismatch for performance property ${prop}: ` + - `${performance.nodeTiming[prop]} vs ${props[prop]}`); - } - } +// Copy all the values from the getters. +const initialTiming = { ...performance.nodeTiming }; + +{ + const { + startTime, + nodeStart, + v8Start, + environment, + bootstrapComplete, + } = initialTiming; + + assert.strictEqual(startTime, 0); + assert.strictEqual(typeof nodeStart, 'number'); + assert(nodeStart > 0, `nodeStart ${nodeStart} <= 0`); + // The whole process starts before this test starts. + assert(nodeStart < testStartTime, + `nodeStart ${nodeStart} >= ${testStartTime}`); + + assert.strictEqual(typeof v8Start, 'number'); + assert(v8Start > 0, `v8Start ${v8Start} <= 0`); + // V8 starts after the process starts. + assert(v8Start > nodeStart, `v8Start ${v8Start} <= ${nodeStart}`); + // V8 starts before this test starts. + assert(v8Start < testStartTime, + `v8Start ${v8Start} >= ${testStartTime}`); + + assert.strictEqual(typeof environment, 'number'); + assert(environment > 0, `environment ${environment} <= 0`); + // Environment starts after V8 starts. + assert(environment > v8Start, + `environment ${environment} <= ${v8Start}`); + // Environment starts before this test starts. + assert(environment < testStartTime, + `environment ${environment} >= ${testStartTime}`); + + assert.strictEqual(typeof bootstrapComplete, 'number'); + assert(bootstrapComplete > 0, `bootstrapComplete ${bootstrapComplete} <= 0`); + // Bootstrap completes after environment starts. + assert(bootstrapComplete > environment, + `bootstrapComplete ${bootstrapComplete} <= ${environment}`); + // Bootstrap completes before this test starts. 
+ assert(bootstrapComplete < testStartTime, + `bootstrapComplete ${bootstrapComplete} >= ${testStartTime}`); } -checkNodeTiming({ - name: 'node', - entryType: 'node', - startTime: 0, - duration: { around: performance.now() }, - nodeStart: { around: 0 }, - v8Start: { around: 0 }, - bootstrapComplete: { around: inited, delay: 2500 }, - environment: { around: 0 }, - loopStart: -1, - loopExit: -1 -}); +function checkNodeTiming(timing) { + // Calculate the difference between now() and duration as soon as possible. + const now = performance.now(); + const delta = Math.abs(now - timing.duration); + + log(JSON.stringify(timing, null, 2)); + // Check that the properties are still reasonable. + assert.strictEqual(timing.name, 'node'); + assert.strictEqual(timing.entryType, 'node'); + + // Check that duration is positive and practically the same as + // performance.now() i.e. measures Node.js instance up time. + assert.strictEqual(typeof timing.duration, 'number'); + assert(timing.duration > 0, `timing.duration ${timing.duration} <= 0`); + assert(delta < 10, + `now (${now}) - timing.duration (${timing.duration}) = ${delta} >= 10`); + + // Check that the following fields do not change. + assert.strictEqual(timing.startTime, initialTiming.startTime); + assert.strictEqual(timing.nodeStart, initialTiming.nodeStart); + assert.strictEqual(timing.v8Start, initialTiming.v8Start); + assert.strictEqual(timing.environment, initialTiming.environment); + assert.strictEqual(timing.bootstrapComplete, initialTiming.bootstrapComplete); + + assert.strictEqual(typeof timing.loopStart, 'number'); + assert.strictEqual(typeof timing.loopExit, 'number'); +} + +log('check initial nodeTiming'); +checkNodeTiming(initialTiming); +assert.strictEqual(initialTiming.loopExit, -1); -setTimeout(() => { - checkNodeTiming({ - name: 'node', - entryType: 'node', - startTime: 0, - duration: { around: performance.now() }, - nodeStart: { around: 0 }, - v8Start: { around: 0 }, - bootstrapComplete: { around: inited, delay: 2500 }, - environment: { around: 0 }, - loopStart: { around: inited, delay: 2500 }, - loopExit: -1 - }); -}, 1000); +function checkValue(timing, name, min, max) { + const value = timing[name]; + assert(value > 0, `${name} ${value} <= 0`); + // Loop starts after bootstrap completes. + assert(value > min, + `${name} ${value} <= ${min}`); + assert(value < max, `${name} ${value} >= ${max}`); +} + +let loopStart = initialTiming.loopStart; +if (common.isMainThread) { + // In the main thread, the loop does not start until we start an operation + // that requires it, e.g. setTimeout(). + assert.strictEqual(initialTiming.loopStart, -1); + log('Start timer'); + setTimeout(() => { + log('Check nodeTiming in timer'); + const timing = { ...performance.nodeTiming }; + checkNodeTiming(timing); + // Loop should start after we fire the timeout, and before we call + // performance.now() here. + loopStart = timing.loopStart; + checkValue(timing, 'loopStart', initialTiming.duration, performance.now()); + }, 1000); +} else { + // In the worker, the loop always starts before the user code is evaluated, + // and after bootstrap completes. 
+ checkValue(initialTiming, + 'loopStart', + initialTiming.bootstrapComplete, + testStartTime); +} process.on('exit', () => { - checkNodeTiming({ - name: 'node', - entryType: 'node', - startTime: 0, - duration: { around: performance.now() }, - nodeStart: { around: 0 }, - v8Start: { around: 0 }, - bootstrapComplete: { around: inited, delay: 2500 }, - environment: { around: 0 }, - loopStart: { around: inited, delay: 2500 }, - loopExit: { around: performance.now() } - }); + log('Check nodeTiming in process exit event'); + const timing = { ...performance.nodeTiming }; + checkNodeTiming(timing); + // Check that loopStart does not change. + assert.strictEqual(timing.loopStart, loopStart); + checkValue(timing, + 'loopExit', + loopStart, + performance.now()); }); From 4a85f7046297a5e209b80f1d63ffc1d54be4a42e Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 18 Aug 2023 18:54:19 +0200 Subject: [PATCH 039/125] test: add spawnSyncAndExit() and spawnSyncAndExitWithoutError() Replaces expectSyncExit() and expectSyncExitWithoutError(). Since we usually just check the child process right after it's spawned, these shorthands also take care of the spawning. This makes the tests more concise. PR-URL: https://github.com/nodejs/node/pull/49200 Reviewed-By: LiviaMedeiros Reviewed-By: Antoine du Hamel --- test/common/README.md | 25 ++++++++++-------- test/common/child_process.js | 34 ++++++++++++++++--------- test/parallel/test-snapshot-api.js | 13 +++------- test/parallel/test-snapshot-basic.js | 32 ++++++++++-------------- test/parallel/test-snapshot-warning.js | 35 +++++++++----------------- 5 files changed, 66 insertions(+), 73 deletions(-) diff --git a/test/common/README.md b/test/common/README.md index 3d35edf5510186..fa78b2792ef6ac 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -40,17 +40,16 @@ The `benchmark` module is used by tests to run benchmarks. The `child_process` module is used by tests that launch child processes. -### `expectSyncExit(child, options)` +### `spawnSyncAndExit(command[, args][, spawnOptions], expectations)` -Checks if a _synchronous_ child process runs in the way expected. If it does -not, print the stdout and stderr output from the child process and additional -information about it to the stderr of the current process before throwing -and error. This helps gathering more information about test failures -coming from child processes. +Spawns a child process synchronously using [`child_process.spawnSync()`][] and +checks if it runs in the way expected. If it does not, it prints the stdout and +stderr output from the child process and additional information about it to +the stderr of the current process before throwing an error. This helps +gather more information about test failures coming from child processes. -* `child` [\][]: a `ChildProcess` instance - returned by `child_process.spawnSync()`. -* `options` [\][] +* `command`, `args`, `spawnOptions` See [`child_process.spawnSync()`][] +* `expectations` [\][] * `status` [\][] Expected `child.status` * `signal` [\][] | `null` Expected `child.signal` * `stderr` [\][] | [\][] | @@ -65,8 +64,13 @@ coming from child processes. * `trim` [\][] Optional. Whether this method should trim out the whitespace characters when checking `stderr` and `stdout` outputs. Defaults to `false`. +* return [\][] + * `child` [\][] The child process returned by + [`child_process.spawnSync()`][]. + * `stderr` [\][] The output from the child process to stderr. + * `stdout` [\][] The output from the child process to stdout.
-### `expectSyncExitWithoutError(child[, options])` +### `spawnSyncAndExitWithoutError(command[, args][, spawnOptions], expectations)` Similar to `expectSyncExit()` with the `status` expected to be 0 and `signal` expected to be `null`. Any other optional options are passed @@ -1160,6 +1164,7 @@ See [the WPT tests README][] for details. []: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type []: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type [Web Platform Tests]: https://github.com/web-platform-tests/wpt +[`child_process.spawnSync()`]: ../../doc/api/child_process.md#child_processspawnsynccommand-args-options [`hijackstdio.hijackStdErr()`]: #hijackstderrlistener [`hijackstdio.hijackStdOut()`]: #hijackstdoutlistener [internationalization]: ../../doc/api/intl.md diff --git a/test/common/child_process.js b/test/common/child_process.js index a53dddc19f3216..35f06e95ae70a8 100644 --- a/test/common/child_process.js +++ b/test/common/child_process.js @@ -1,6 +1,7 @@ 'use strict'; const assert = require('assert'); +const { spawnSync, execFileSync } = require('child_process'); const common = require('./'); const util = require('util'); @@ -14,14 +15,13 @@ function cleanupStaleProcess(filename) { process.once('beforeExit', () => { const basename = filename.replace(/.*[/\\]/g, ''); try { - require('child_process') - .execFileSync(`${process.env.SystemRoot}\\System32\\wbem\\WMIC.exe`, [ - 'process', - 'where', - `commandline like '%${basename}%child'`, - 'delete', - '/nointeractive', - ]); + execFileSync(`${process.env.SystemRoot}\\System32\\wbem\\WMIC.exe`, [ + 'process', + 'where', + `commandline like '%${basename}%child'`, + 'delete', + '/nointeractive', + ]); } catch { // Ignore failures, there might not be any stale process to clean up. 
} @@ -111,11 +111,21 @@ function expectSyncExit(child, { return { child, stderr: stderrStr, stdout: stdoutStr }; } -function expectSyncExitWithoutError(child, options) { +function spawnSyncAndExit(...args) { + const spawnArgs = args.slice(0, args.length - 1); + const expectations = args[args.length - 1]; + const child = spawnSync(...spawnArgs); + return expectSyncExit(child, expectations); +} + +function spawnSyncAndExitWithoutError(...args) { + const spawnArgs = args.slice(0, args.length); + const expectations = args[args.length - 1]; + const child = spawnSync(...spawnArgs); return expectSyncExit(child, { status: 0, signal: null, - ...options, + ...expectations, }); } @@ -124,6 +134,6 @@ module.exports = { logAfterTime, kExpiringChildRunTime, kExpiringParentTimer, - expectSyncExit, - expectSyncExitWithoutError, + spawnSyncAndExit, + spawnSyncAndExitWithoutError, }; diff --git a/test/parallel/test-snapshot-api.js b/test/parallel/test-snapshot-api.js index 1068ae3b4c7b46..2396dd32c345c7 100644 --- a/test/parallel/test-snapshot-api.js +++ b/test/parallel/test-snapshot-api.js @@ -4,10 +4,9 @@ require('../common'); const assert = require('assert'); -const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); const fixtures = require('../common/fixtures'); -const { expectSyncExitWithoutError } = require('../common/child_process'); +const { spawnSyncAndExitWithoutError } = require('../common/child_process'); const fs = require('fs'); const v8 = require('v8'); @@ -29,7 +28,7 @@ const entry = fixtures.path('snapshot', 'v8-startup-snapshot-api.js'); fs.writeFileSync(tmpdir.resolve(book), content, 'utf8'); } fs.copyFileSync(entry, tmpdir.resolve('entry.js')); - const child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, '--build-snapshot', @@ -37,14 +36,12 @@ const entry = fixtures.path('snapshot', 'v8-startup-snapshot-api.js'); ], { cwd: tmpdir.path }); - - expectSyncExitWithoutError(child); const stats = fs.statSync(tmpdir.resolve('snapshot.blob')); assert(stats.isFile()); } { - const child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, 'book1', @@ -54,9 +51,7 @@ const entry = fixtures.path('snapshot', 'v8-startup-snapshot-api.js'); ...process.env, BOOK_LANG: 'en_US', } - }); - - expectSyncExitWithoutError(child, { + }, { stderr: 'Reading book1.en_US.txt', stdout: 'This is book1.en_US.txt', trim: true diff --git a/test/parallel/test-snapshot-basic.js b/test/parallel/test-snapshot-basic.js index cd87caa3fcbce3..760469ed5dc896 100644 --- a/test/parallel/test-snapshot-basic.js +++ b/test/parallel/test-snapshot-basic.js @@ -5,10 +5,12 @@ require('../common'); const assert = require('assert'); -const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); const fixtures = require('../common/fixtures'); -const { expectSyncExitWithoutError, expectSyncExit } = require('../common/child_process'); +const { + spawnSyncAndExitWithoutError, + spawnSyncAndExit, +} = require('../common/child_process'); const fs = require('fs'); tmpdir.refresh(); @@ -18,14 +20,12 @@ if (!process.config.variables.node_use_node_snapshot) { // Check that Node.js built without an embedded snapshot // exits with 9 when node:embedded_snapshot_main is specified // as snapshot entry point. 
- const child = spawnSync(process.execPath, [ + spawnSyncAndExit(process.execPath, [ '--build-snapshot', snapshotScript, ], { cwd: tmpdir.path - }); - - expectSyncExit(child, { + }, { status: 9, signal: null, stderr: /Node\.js was built without embedded snapshot/ @@ -37,13 +37,12 @@ if (!process.config.variables.node_use_node_snapshot) { // By default, the snapshot blob path is cwd/snapshot.blob. { // Create the snapshot. - const child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--build-snapshot', snapshotScript, ], { cwd: tmpdir.path }); - expectSyncExitWithoutError(child); const stats = fs.statSync(tmpdir.resolve('snapshot.blob')); assert(stats.isFile()); } @@ -52,7 +51,7 @@ tmpdir.refresh(); const blobPath = tmpdir.resolve('my-snapshot.blob'); { // Create the snapshot. - const child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, '--build-snapshot', @@ -60,38 +59,33 @@ const blobPath = tmpdir.resolve('my-snapshot.blob'); ], { cwd: tmpdir.path }); - expectSyncExitWithoutError(child); const stats = fs.statSync(blobPath); assert(stats.isFile()); } { // Check --help. - const child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, '--help', ], { cwd: tmpdir.path + }, { + stdout: /--help/ }); - expectSyncExitWithoutError(child); - - assert(child.stdout.toString().includes('--help')); } { // Check -c. - const child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, '-c', fixtures.path('snapshot', 'marked.js'), ], { cwd: tmpdir.path - }); - - // Check that it is a noop. - expectSyncExitWithoutError(child, { + }, { stderr: '', stdout: '', trim: true diff --git a/test/parallel/test-snapshot-warning.js b/test/parallel/test-snapshot-warning.js index 444f65af0b8b35..889fed59db54a9 100644 --- a/test/parallel/test-snapshot-warning.js +++ b/test/parallel/test-snapshot-warning.js @@ -7,10 +7,9 @@ require('../common'); const assert = require('assert'); -const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); const fixtures = require('../common/fixtures'); -const { expectSyncExitWithoutError } = require('../common/child_process'); +const { spawnSyncAndExitWithoutError } = require('../common/child_process'); const fs = require('fs'); const warningScript = fixtures.path('snapshot', 'warning.js'); @@ -20,7 +19,7 @@ const empty = fixtures.path('empty.js'); tmpdir.refresh(); { console.log('\n# Check snapshot scripts that do not emit warnings.'); - let child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, '--build-snapshot', @@ -28,40 +27,36 @@ tmpdir.refresh(); ], { cwd: tmpdir.path }); - expectSyncExitWithoutError(child); const stats = fs.statSync(blobPath); assert(stats.isFile()); - child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, warningScript, ], { cwd: tmpdir.path - }); - expectSyncExitWithoutError(child, { + }, { stderr(output) { const match = output.match(/Warning: test warning/g); assert.strictEqual(match.length, 1); return true; } }); - } tmpdir.refresh(); { console.log('\n# Check snapshot scripts that emit ' + 'warnings and --trace-warnings hint.'); - let child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, '--build-snapshot', warningScript, ], { 
cwd: tmpdir.path - }); - expectSyncExitWithoutError(child, { + }, { stderr(output) { let match = output.match(/Warning: test warning/g); assert.strictEqual(match.length, 1); @@ -73,15 +68,13 @@ tmpdir.refresh(); const stats = fs.statSync(blobPath); assert(stats.isFile()); - child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, warningScript, ], { cwd: tmpdir.path - }); - - expectSyncExitWithoutError(child, { + }, { stderr(output) { // Warnings should not be handled more than once. let match = output.match(/Warning: test warning/g); @@ -99,7 +92,7 @@ tmpdir.refresh(); const warningFile1 = tmpdir.resolve('warnings.txt'); const warningFile2 = tmpdir.resolve('warnings2.txt'); - let child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, '--redirect-warnings', @@ -108,9 +101,7 @@ tmpdir.refresh(); warningScript, ], { cwd: tmpdir.path - }); - - expectSyncExitWithoutError(child, { + }, { stderr(output) { assert.doesNotMatch(output, /Warning: test warning/); } @@ -129,7 +120,7 @@ tmpdir.refresh(); maxRetries: 3, recursive: false, force: true }); - child = spawnSync(process.execPath, [ + spawnSyncAndExitWithoutError(process.execPath, [ '--snapshot-blob', blobPath, '--redirect-warnings', @@ -137,9 +128,7 @@ tmpdir.refresh(); warningScript, ], { cwd: tmpdir.path - }); - - expectSyncExitWithoutError(child, { + }, { stderr(output) { assert.doesNotMatch(output, /Warning: test warning/); return true; From 7eb10a38ea3d8c0f132ab0817fa184a76961559f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Mon, 14 Aug 2023 15:23:37 +0000 Subject: [PATCH 040/125] crypto: remove getDefaultEncoding() Refs: https://github.com/nodejs/node/pull/47182 Refs: https://github.com/nodejs/node/pull/47869 Refs: https://github.com/nodejs/node/pull/47943 Refs: https://github.com/nodejs/node/pull/47998 Refs: https://github.com/nodejs/node/pull/49140 Refs: https://github.com/nodejs/node/pull/49145 Refs: https://github.com/nodejs/node/pull/49167 Refs: https://github.com/nodejs/node/pull/49169 PR-URL: https://github.com/nodejs/node/pull/49170 Reviewed-By: Benjamin Gruenbaum Reviewed-By: Filip Skokan Reviewed-By: Matteo Collina Reviewed-By: Luigi Pinca --- lib/internal/crypto/util.js | 6 ------ lib/internal/streams/lazy_transform.js | 6 +----- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/lib/internal/crypto/util.js b/lib/internal/crypto/util.js index cf044e804ad05a..51ca3f4c056fb9 100644 --- a/lib/internal/crypto/util.js +++ b/lib/internal/crypto/util.js @@ -75,11 +75,6 @@ const { const kHandle = Symbol('kHandle'); const kKeyObject = Symbol('kKeyObject'); -// TODO(tniessen): remove all call sites and this function -function getDefaultEncoding() { - return 'buffer'; -} - // This is here because many functions accepted binary strings without // any explicit encoding in older versions of node, and we don't want // to break them unnecessarily. 
@@ -555,7 +550,6 @@ module.exports = { getCiphers, getCurves, getDataViewOrTypedArrayBuffer, - getDefaultEncoding, getHashes, kHandle, kKeyObject, diff --git a/lib/internal/streams/lazy_transform.js b/lib/internal/streams/lazy_transform.js index d9d1407a819594..204ad456cd64b3 100644 --- a/lib/internal/streams/lazy_transform.js +++ b/lib/internal/streams/lazy_transform.js @@ -11,10 +11,6 @@ const { const stream = require('stream'); -const { - getDefaultEncoding, -} = require('internal/crypto/util'); - module.exports = LazyTransform; function LazyTransform(options) { @@ -29,7 +25,7 @@ function makeGetter(name) { this._writableState.decodeStrings = false; if (!this._options || !this._options.defaultEncoding) { - this._writableState.defaultEncoding = getDefaultEncoding(); + this._writableState.defaultEncoding = 'buffer'; // TODO(tniessen): remove } return this[name]; From 8101f2b2597ffb0b717aa413a93cd688ca6d07d0 Mon Sep 17 00:00:00 2001 From: Hyunjin Kim Date: Sun, 20 Aug 2023 01:15:22 +0900 Subject: [PATCH 041/125] doc: use same name in the doc as in the code Refs: https://streams.spec.whatwg.org/#bytelengthqueuingstrategy PR-URL: https://github.com/nodejs/node/pull/49216 Reviewed-By: Antoine du Hamel Reviewed-By: Deokjin Kim Reviewed-By: Luigi Pinca --- doc/api/webstreams.md | 8 ++++---- lib/internal/webstreams/queuingstrategies.js | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/api/webstreams.md b/doc/api/webstreams.md index ed8cddd2fdbfdd..a8a111caef6414 100644 --- a/doc/api/webstreams.md +++ b/doc/api/webstreams.md @@ -1219,13 +1219,13 @@ changes: description: This class is now exposed on the global object. --> -#### `new ByteLengthQueuingStrategy(options)` +#### `new ByteLengthQueuingStrategy(init)` -* `options` {Object} +* `init` {Object} * `highWaterMark` {number} #### `byteLengthQueuingStrategy.highWaterMark` @@ -1256,13 +1256,13 @@ changes: description: This class is now exposed on the global object. --> -#### `new CountQueuingStrategy(options)` +#### `new CountQueuingStrategy(init)` -* `options` {Object} +* `init` {Object} * `highWaterMark` {number} #### `countQueuingStrategy.highWaterMark` diff --git a/lib/internal/webstreams/queuingstrategies.js b/lib/internal/webstreams/queuingstrategies.js index df114a44cc8adc..ee4169106838c3 100644 --- a/lib/internal/webstreams/queuingstrategies.js +++ b/lib/internal/webstreams/queuingstrategies.js @@ -78,7 +78,7 @@ class ByteLengthQueuingStrategy { constructor(init) { validateObject(init, 'init'); if (init.highWaterMark === undefined) - throw new ERR_MISSING_OPTION('options.highWaterMark'); + throw new ERR_MISSING_OPTION('init.highWaterMark'); // The highWaterMark value is not checked until the strategy // is actually used, per the spec. @@ -133,7 +133,7 @@ class CountQueuingStrategy { constructor(init) { validateObject(init, 'init'); if (init.highWaterMark === undefined) - throw new ERR_MISSING_OPTION('options.highWaterMark'); + throw new ERR_MISSING_OPTION('init.highWaterMark'); // The highWaterMark value is not checked until the strategy // is actually used, per the spec. 
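As a quick usage sketch of the two strategies whose `init` argument is documented above (standard WHATWG Streams behavior; the particular `highWaterMark` values and chunks are arbitrary):

```js
'use strict';
const {
  ByteLengthQueuingStrategy,
  CountQueuingStrategy,
} = require('node:stream/web');

// Omitting `highWaterMark` from `init` throws ERR_MISSING_OPTION,
// per the validation shown in the diff above.
const byteStrategy = new ByteLengthQueuingStrategy({ highWaterMark: 1024 });
console.log(byteStrategy.highWaterMark); // 1024
console.log(byteStrategy.size(new ArrayBuffer(16))); // 16 (the chunk's byteLength)

const countStrategy = new CountQueuingStrategy({ highWaterMark: 4 });
console.log(countStrategy.size('any chunk')); // 1 (every chunk counts as one)
```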
From 5a363bb01b086d4dff7a524bf77c0190698c6067 Mon Sep 17 00:00:00 2001 From: Jacob Smith <3012099+JakobJingleheimer@users.noreply.github.com> Date: Sat, 19 Aug 2023 18:45:02 +0200 Subject: [PATCH 042/125] doc: caveat unavailability of `import.meta.resolve` in custom loaders PR-URL: https://github.com/nodejs/node/pull/49242 Reviewed-By: Antoine du Hamel Reviewed-By: Geoffrey Booth --- doc/api/esm.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/api/esm.md b/doc/api/esm.md index 48273e09a38746..b67ee922a54b5d 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -376,6 +376,9 @@ behind the `--experimental-import-meta-resolve` flag: * `parent` {string|URL} An optional absolute parent module URL to resolve from. +> **Caveat** This feature is not available within custom loaders (it would +> create a deadlock). + ## Interoperability with CommonJS ### `import` statements From 34a2590b054ee65b90add1c333f5c2c0361b73fe Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Sat, 19 Aug 2023 17:44:21 -0400 Subject: [PATCH 043/125] build: expand when we run internet tests Refs: https://github.com/nodejs/node/issues/49203 Changes that slipped into v18.x regressed test/internet/test-dns-ipv6; I assume the action did not run because no test under test/internet was changed. Add some of the common paths that include code that might introduce failures in the internet tests. Signed-off-by: Michael Dawson PR-URL: https://github.com/nodejs/node/pull/49218 Reviewed-By: Ruy Adorno Reviewed-By: Yagiz Nizipli Reviewed-By: Antoine du Hamel Reviewed-By: Moshe Atlow Reviewed-By: Luigi Pinca --- .github/workflows/test-internet.yml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 1c3113ab6acdd0..dbed086da7056c 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -7,14 +7,22 @@ on: pull_request: types: [opened, synchronize, reopened, ready_for_review] - paths: [test/internet/**] + paths: + - test/internet/** + - internal/dns/** + - lib/dns.js + - lib/net.js push: branches: - main - canary - v[0-9]+.x-staging - v[0-9]+.x - paths: [test/internet/**] + paths: + - test/internet/** + - internal/dns/** + - lib/dns.js + - lib/net.js concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} From 910378f93fe5f6fc45d95cf1e3967baa5943ea90 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 20 Aug 2023 06:50:06 +0200 Subject: [PATCH 044/125] test: reduce flakiness of `test-esm-loader-hooks` PR-URL: https://github.com/nodejs/node/pull/49248 Reviewed-By: Moshe Atlow Reviewed-By: Jacob Smith Reviewed-By: Geoffrey Booth Reviewed-By: Yagiz Nizipli --- test/es-module/test-esm-loader-hooks.mjs | 16 ++++++++++------ .../es-module-loaders/hooks-initialize.mjs | 5 +++-- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/test/es-module/test-esm-loader-hooks.mjs b/test/es-module/test-esm-loader-hooks.mjs index 445ceedd968aa2..2ea0128596e25b 100644 --- a/test/es-module/test-esm-loader-hooks.mjs +++ b/test/es-module/test-esm-loader-hooks.mjs @@ -599,7 +599,7 @@ describe('Loader hooks', {
concurrency: true }, () => { ); console.log('register', result); - await import('node:os'); - await setTimeout(99); // delay to limit flakiness + const timeout = setTimeout(() => {}, 2**31 - 1); // to keep the process alive. + await Promise.all([ + once(port1, 'message').then(() => once(port1, 'message')), + import('node:os'), + ]); + clearTimeout(timeout); port1.close(); `, ]); @@ -707,10 +711,10 @@ describe('Loader hooks', { concurrency: true }, () => { ]); assert.strictEqual(stderr, ''); - assert.deepStrictEqual(stdout.split('\n'), [ 'result 1', - 'result 2', - 'hooks initialize 1', + assert.deepStrictEqual(stdout.split('\n'), [ 'hooks initialize 1', + 'result 1', 'hooks initialize 2', + 'result 2', '' ]); assert.strictEqual(code, 0); assert.strictEqual(signal, null); diff --git a/test/fixtures/es-module-loaders/hooks-initialize.mjs b/test/fixtures/es-module-loaders/hooks-initialize.mjs index 646be145503134..ab6f2c50d146e3 100644 --- a/test/fixtures/es-module-loaders/hooks-initialize.mjs +++ b/test/fixtures/es-module-loaders/hooks-initialize.mjs @@ -1,7 +1,8 @@ +import { writeFileSync } from 'node:fs'; + let counter = 0; export async function initialize() { - counter += 1; - console.log('hooks initialize', counter); + writeFileSync(1, `hooks initialize ${++counter}\n`); return counter; } From a6cfea3f749ff2db9d5dc8968eb698f1281ac316 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 20 Aug 2023 07:08:22 +0200 Subject: [PATCH 045/125] esm: align sync and async load implementations Refs: https://github.com/nodejs/node/pull/48272 PR-URL: https://github.com/nodejs/node/pull/49152 Refs: https://github.com/nodejs/node/pull/47999 Reviewed-By: Geoffrey Booth Reviewed-By: Jacob Smith --- lib/internal/modules/esm/load.js | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/lib/internal/modules/esm/load.js b/lib/internal/modules/esm/load.js index 1998ed1dab67fb..d064296d11c463 100644 --- a/lib/internal/modules/esm/load.js +++ b/lib/internal/modules/esm/load.js @@ -70,25 +70,30 @@ async function getSource(url, context) { return { __proto__: null, responseURL, source }; } +/** + * @param {URL} url URL to the module + * @param {ESModuleContext} context used to decorate error messages + * @returns {{ responseURL: string, source: string | BufferView }} + */ function getSourceSync(url, context) { - const parsed = new URL(url); - const responseURL = url; + const { protocol, href } = url; + const responseURL = href; let source; - if (parsed.protocol === 'file:') { - source = readFileSync(parsed); - } else if (parsed.protocol === 'data:') { - const match = RegExpPrototypeExec(DATA_URL_PATTERN, parsed.pathname); + if (protocol === 'file:') { + source = readFileSync(url); + } else if (protocol === 'data:') { + const match = RegExpPrototypeExec(DATA_URL_PATTERN, url.pathname); if (!match) { - throw new ERR_INVALID_URL(url); + throw new ERR_INVALID_URL(responseURL); } const { 1: base64, 2: body } = match; source = BufferFrom(decodeURIComponent(body), base64 ? 
'base64' : 'utf8'); } else { const supportedSchemes = ['file', 'data']; - throw new ERR_UNSUPPORTED_ESM_URL_SCHEME(parsed, supportedSchemes); + throw new ERR_UNSUPPORTED_ESM_URL_SCHEME(url, supportedSchemes); } if (policy?.manifest) { - policy.manifest.assertIntegrity(parsed, source); + policy.manifest.assertIntegrity(url, source); } return { __proto__: null, responseURL, source }; } @@ -159,14 +164,18 @@ function defaultLoadSync(url, context = kEmptyObject) { source, } = context; - format ??= defaultGetFormat(new URL(url), context); + const urlInstance = new URL(url); + + throwIfUnsupportedURLScheme(urlInstance, false); + + format ??= defaultGetFormat(urlInstance, context); validateAssertions(url, format, importAssertions); if (format === 'builtin') { source = null; } else if (source == null) { - ({ responseURL, source } = getSourceSync(url, context)); + ({ responseURL, source } = getSourceSync(urlInstance, context)); } return { From 4a9ae315198d19b8289f764463a5f2379d09dce1 Mon Sep 17 00:00:00 2001 From: Jungku Lee Date: Sun, 20 Aug 2023 18:55:44 +0900 Subject: [PATCH 046/125] src: add a condition if the argument of `DomainToUnicode` is empty PR-URL: https://github.com/nodejs/node/pull/49097 Refs: https://github.com/nodejs/node/pull/46410 Reviewed-By: Yagiz Nizipli Reviewed-By: Deokjin Kim --- src/node_url.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/node_url.cc b/src/node_url.cc index 85147ccd1c0d59..60300d08730128 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -100,6 +100,11 @@ void BindingData::DomainToUnicode(const FunctionCallbackInfo& args) { CHECK(args[0]->IsString()); std::string input = Utf8Value(env->isolate(), args[0]).ToString(); + if (input.empty()) { + return args.GetReturnValue().Set( + String::NewFromUtf8(env->isolate(), "").ToLocalChecked()); + } + // It is important to have an initial value that contains a special scheme. // Since it will change the implementation of `set_hostname` according to URL // spec. 
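The JavaScript-facing counterpart of this binding is `url.domainToUnicode()`; a small sketch of the behavior the new empty-input guard preserves (the punycode sample is the usual one from the `node:url` documentation):

```js
'use strict';
const { domainToUnicode } = require('node:url');

console.log(domainToUnicode('xn--espaol-zwa.com')); // Prints: español.com
// With the check above, an empty argument short-circuits to an empty string
// instead of round-tripping through the URL parser.
console.log(domainToUnicode('')); // Prints an empty line.
```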
From f2552a410ef89579488a372a4e7a2b694c6464db Mon Sep 17 00:00:00 2001 From: Jungku Lee Date: Sun, 20 Aug 2023 19:04:34 +0900 Subject: [PATCH 047/125] src: use ARES_SUCCESS instead of 0 PR-URL: https://github.com/nodejs/node/pull/49048 Refs: https://github.com/nodejs/node/pull/48834 Reviewed-By: Paolo Insogna Reviewed-By: Luigi Pinca Reviewed-By: Matteo Collina Reviewed-By: Daeyeon Jeong Reviewed-By: Deokjin Kim --- src/cares_wrap.cc | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/cares_wrap.cc b/src/cares_wrap.cc index 433c5822953071..8b037356360729 100644 --- a/src/cares_wrap.cc +++ b/src/cares_wrap.cc @@ -830,62 +830,62 @@ void ChannelWrap::EnsureServers() { int AnyTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_any); - return 0; + return ARES_SUCCESS; } int ATraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_a); - return 0; + return ARES_SUCCESS; } int AaaaTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_aaaa); - return 0; + return ARES_SUCCESS; } int CaaTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, T_CAA); - return 0; + return ARES_SUCCESS; } int CnameTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_cname); - return 0; + return ARES_SUCCESS; } int MxTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_mx); - return 0; + return ARES_SUCCESS; } int NsTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_ns); - return 0; + return ARES_SUCCESS; } int TxtTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_txt); - return 0; + return ARES_SUCCESS; } int SrvTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_srv); - return 0; + return ARES_SUCCESS; } int PtrTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_ptr); - return 0; + return ARES_SUCCESS; } int NaptrTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_naptr); - return 0; + return ARES_SUCCESS; } int SoaTraits::Send(QueryWrap* wrap, const char* name) { wrap->AresQuery(name, ns_c_in, ns_t_soa); - return 0; + return ARES_SUCCESS; } int AnyTraits::Parse( @@ -1381,7 +1381,7 @@ int ReverseTraits::Send(GetHostByAddrWrap* wrap, const char* name) { family, GetHostByAddrWrap::Callback, wrap->MakeCallbackPointer()); - return 0; + return ARES_SUCCESS; } int ReverseTraits::Parse( From 1704f24cb9117f8a2269d75b20f83d451f0c5e67 Mon Sep 17 00:00:00 2001 From: Geoffrey Booth Date: Sun, 20 Aug 2023 13:52:41 -0700 Subject: [PATCH 048/125] doc: add signature for `module.register` PR-URL: https://github.com/nodejs/node/pull/49251 Reviewed-By: Yagiz Nizipli Reviewed-By: Jacob Smith Reviewed-By: Antoine du Hamel --- doc/api/esm.md | 4 ++-- doc/api/module.md | 23 +++++++++++++++++++---- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/doc/api/esm.md b/doc/api/esm.md index b67ee922a54b5d..9437c41ea50f8c 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -1715,14 +1715,14 @@ for ESM specifiers is [commonjs-extension-resolution-loader][]. 
[`import`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import [`initialize`]: #initialize [`module.createRequire()`]: module.md#modulecreaterequirefilename -[`module.register()`]: module.md#moduleregister +[`module.register()`]: module.md#moduleregisterspecifier-parenturl-options [`module.syncBuiltinESMExports()`]: module.md#modulesyncbuiltinesmexports [`package.json`]: packages.md#nodejs-packagejson-field-definitions [`port.postMessage`]: worker_threads.md#portpostmessagevalue-transferlist [`port.ref()`]: https://nodejs.org/dist/latest-v17.x/docs/api/worker_threads.html#portref [`port.unref()`]: https://nodejs.org/dist/latest-v17.x/docs/api/worker_threads.html#portunref [`process.dlopen`]: process.md#processdlopenmodule-filename-flags -[`register`]: module.md#moduleregister +[`register`]: module.md#moduleregisterspecifier-parenturl-options [`string`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String [`util.TextDecoder`]: util.md#class-utiltextdecoder [cjs-module-lexer]: https://github.com/nodejs/cjs-module-lexer/tree/1.2.2 diff --git a/doc/api/module.md b/doc/api/module.md index 5531aedda0b5ee..e29ffc3cc0ba80 100644 --- a/doc/api/module.md +++ b/doc/api/module.md @@ -80,15 +80,29 @@ isBuiltin('fs'); // true isBuiltin('wss'); // false ``` -### `module.register()` +### `module.register(specifier[, parentURL][, options])` -In addition to using the `--experimental-loader` option in the CLI, -loaders can be registered programmatically using the -`module.register()` method. +> Stability: 1.1 - Active development + +* `specifier` {string} Customization hooks to be registered; this should be the + same string that would be passed to `import()`, except that if it is relative, + it is resolved relative to `parentURL`. +* `parentURL` {string} If you want to resolve `specifier` relative to a base + URL, such as `import.meta.url`, you can pass that URL here. **Default:** + `'data:'` +* `options` {Object} + * `data` {any} Any arbitrary, cloneable JavaScript value to pass into the + [`initialize`][] hook. + * `transferList` {Object\[]} [transferrable objects][] to be passed into the + `initialize` hook. +* Returns: {any} returns whatever was returned by the `initialize` hook. + +Register a module that exports hooks that customize Node.js module resolution +and loading behavior. 
```mjs import { register } from 'node:module'; @@ -390,3 +404,4 @@ returned object contains the following keys: [`module`]: modules.md#the-module-object [module wrapper]: modules.md#the-module-wrapper [source map include directives]: https://sourcemaps.info/spec.html#h.lmz475t4mvbx +[transferrable objects]: worker_threads.md#portpostmessagevalue-transferlist From e391f4b1976054ddca522d381db2a866bdcb76c4 Mon Sep 17 00:00:00 2001 From: Livia Medeiros Date: Tue, 22 Aug 2023 01:41:53 +0900 Subject: [PATCH 049/125] test: use `tmpdir.resolve()` PR-URL: https://github.com/nodejs/node/pull/49136 Reviewed-By: Luigi Pinca --- test/addons/symlinked-module/test.js | 2 +- test/async-hooks/test-statwatcher.js | 5 ++--- test/common/snapshot.js | 7 +++---- test/doctool/test-apilinks.mjs | 2 +- test/doctool/test-doctool-versions.mjs | 3 +-- test/embedding/test-embedding.js | 4 ++-- .../test-esm-extension-lookup-deprecation.mjs | 12 ++++++------ test/es-module/test-esm-resolve-type.mjs | 2 +- test/es-module/test-esm-symlink-main.js | 2 +- test/es-module/test-esm-windows.js | 2 +- test/fixtures/test-runner/concurrency/a.mjs | 4 ++-- test/fixtures/test-runner/concurrency/b.mjs | 4 ++-- test/fixtures/watch-mode/ipc.js | 2 +- test/internet/test-corepack-yarn-install.js | 6 +++--- test/internet/test-trace-events-dns.js | 3 +-- test/node-api/test_general/test.js | 2 +- test/node-api/test_policy/test_policy.js | 3 +-- test/pummel/test-fs-largefile.js | 3 +-- test/pummel/test-fs-readfile-tostring-fail.js | 3 +-- test/pummel/test-fs-watch-file-slow.js | 3 +-- test/pummel/test-policy-integrity-dep.js | 2 +- .../test-policy-integrity-parent-commonjs.js | 2 +- test/pummel/test-policy-integrity-parent-module.js | 2 +- ...test-policy-integrity-parent-no-package-json.js | 2 +- .../test-policy-integrity-worker-commonjs.js | 2 +- test/pummel/test-policy-integrity-worker-module.js | 2 +- ...test-policy-integrity-worker-no-package-json.js | 2 +- test/pummel/test-watch-file.js | 3 +-- test/report/test-report-writereport.js | 6 +++--- test/sequential/test-cpu-prof-dir-absolute.js | 3 +-- test/sequential/test-cpu-prof-dir-and-name.js | 2 +- test/sequential/test-cpu-prof-dir-relative.js | 3 +-- test/sequential/test-cpu-prof-dir-worker.js | 3 +-- test/sequential/test-cpu-prof-name.js | 3 +-- test/sequential/test-diagnostic-dir-cpu-prof.js | 7 +++---- test/sequential/test-diagnostic-dir-heap-prof.js | 6 +++--- .../test-http2-timeout-large-write-file.js | 3 +-- ...application-disable-experimental-sea-warning.js | 10 +++++----- .../test-single-executable-application-empty.js | 9 ++++----- .../test-single-executable-application-snapshot.js | 11 +++++------ ...single-executable-application-use-code-cache.js | 10 +++++----- .../test-single-executable-application.js | 10 +++++----- test/sequential/test-tls-session-timeout.js | 3 +-- test/sequential/test-watch-mode.mjs | 14 +++++++------- test/sequential/test-worker-prof.js | 3 +-- .../test-tick-processor-polyfill-brokenfile.js | 3 +-- test/tick-processor/tick-processor-base.js | 3 +-- test/wasi/test-wasi-stdio.js | 6 +++--- test/wasi/test-wasi-symlinks.js | 6 +++--- 49 files changed, 97 insertions(+), 118 deletions(-) diff --git a/test/addons/symlinked-module/test.js b/test/addons/symlinked-module/test.js index d47a84b98d1ed8..5a98db77771b5a 100644 --- a/test/addons/symlinked-module/test.js +++ b/test/addons/symlinked-module/test.js @@ -16,7 +16,7 @@ const tmpdir = require('../../common/tmpdir'); tmpdir.refresh(); const addonPath = path.join(__dirname, 'build', common.buildType); -const 
addonLink = path.join(tmpdir.path, 'addon'); +const addonLink = tmpdir.resolve('addon'); try { fs.symlinkSync(addonPath, addonLink, 'dir'); diff --git a/test/async-hooks/test-statwatcher.js b/test/async-hooks/test-statwatcher.js index b8651ab8e0431e..f3c0e74355eeba 100644 --- a/test/async-hooks/test-statwatcher.js +++ b/test/async-hooks/test-statwatcher.js @@ -6,15 +6,14 @@ const assert = require('assert'); const initHooks = require('./init-hooks'); const { checkInvocations } = require('./hook-checks'); const fs = require('fs'); -const path = require('path'); if (!common.isMainThread) common.skip('Worker bootstrapping works differently -> different async IDs'); tmpdir.refresh(); -const file1 = path.join(tmpdir.path, 'file1'); -const file2 = path.join(tmpdir.path, 'file2'); +const file1 = tmpdir.resolve('file1'); +const file2 = tmpdir.resolve('file2'); const onchangex = (x) => (curr, prev) => { console.log(`Watcher: ${x}`); diff --git a/test/common/snapshot.js b/test/common/snapshot.js index 3037ce45639eb9..4a46533facb6fa 100644 --- a/test/common/snapshot.js +++ b/test/common/snapshot.js @@ -2,14 +2,13 @@ const tmpdir = require('../common/tmpdir'); const { spawnSync } = require('child_process'); -const path = require('path'); const fs = require('fs'); const assert = require('assert'); function buildSnapshot(entry, env) { const child = spawnSync(process.execPath, [ '--snapshot-blob', - path.join(tmpdir.path, 'snapshot.blob'), + tmpdir.resolve('snapshot.blob'), '--build-snapshot', entry, ], { @@ -29,14 +28,14 @@ function buildSnapshot(entry, env) { assert.strictEqual(child.status, 0); - const stats = fs.statSync(path.join(tmpdir.path, 'snapshot.blob')); + const stats = fs.statSync(tmpdir.resolve('snapshot.blob')); assert(stats.isFile()); return { child, stderr, stdout }; } function runWithSnapshot(entry, env) { - const args = ['--snapshot-blob', path.join(tmpdir.path, 'snapshot.blob')]; + const args = ['--snapshot-blob', tmpdir.resolve('snapshot.blob')]; if (entry !== undefined) { args.push(entry); } diff --git a/test/doctool/test-apilinks.mjs b/test/doctool/test-apilinks.mjs index fbbfafc139b711..70b7b4ef8e21c4 100644 --- a/test/doctool/test-apilinks.mjs +++ b/test/doctool/test-apilinks.mjs @@ -19,7 +19,7 @@ fs.readdirSync(apilinks).forEach((fixture) => { const input = path.join(apilinks, fixture); const expectedContent = fs.readFileSync(`${input}on`, 'utf8'); - const outputPath = path.join(tmpdir.path, `${fixture}on`); + const outputPath = tmpdir.resolve(`${fixture}on`); execFileSync( process.execPath, [script, outputPath, input], diff --git a/test/doctool/test-doctool-versions.mjs b/test/doctool/test-doctool-versions.mjs index 10eb8467bb110c..ba5d7291064685 100644 --- a/test/doctool/test-doctool-versions.mjs +++ b/test/doctool/test-doctool-versions.mjs @@ -4,7 +4,6 @@ import tmpdir from '../common/tmpdir.js'; import assert from 'assert'; import { spawnSync } from 'child_process'; import fs from 'fs'; -import path from 'path'; import { fileURLToPath } from 'url'; import util from 'util'; @@ -29,7 +28,7 @@ const expected = [ ]; tmpdir.refresh(); -const versionsFile = path.join(tmpdir.path, 'versions.json'); +const versionsFile = tmpdir.resolve('versions.json'); debuglog(`${process.execPath} ${versionsTool} ${versionsFile}`); const opts = { cwd: tmpdir.path, encoding: 'utf8' }; const cp = spawnSync(process.execPath, [ versionsTool, versionsFile ], opts); diff --git a/test/embedding/test-embedding.js b/test/embedding/test-embedding.js index a0ac4834b566eb..5d448b78a433e8 100644 --- 
a/test/embedding/test-embedding.js +++ b/test/embedding/test-embedding.js @@ -63,7 +63,7 @@ function getReadFileCodeForPath(path) { for (const extraSnapshotArgs of [[], ['--embedder-snapshot-as-file']]) { // readSync + eval since snapshots don't support userland require() (yet) const snapshotFixture = fixtures.path('snapshot', 'echo-args.js'); - const blobPath = path.join(tmpdir.path, 'embedder-snapshot.blob'); + const blobPath = tmpdir.resolve('embedder-snapshot.blob'); const buildSnapshotArgs = [ `eval(${getReadFileCodeForPath(snapshotFixture)})`, 'arg1', 'arg2', '--embedder-snapshot-blob', blobPath, '--embedder-snapshot-create', @@ -94,7 +94,7 @@ for (const extraSnapshotArgs of [[], ['--embedder-snapshot-as-file']]) { // Create workers and vm contexts after deserialization { const snapshotFixture = fixtures.path('snapshot', 'create-worker-and-vm.js'); - const blobPath = path.join(tmpdir.path, 'embedder-snapshot.blob'); + const blobPath = tmpdir.resolve('embedder-snapshot.blob'); const buildSnapshotArgs = [ `eval(${getReadFileCodeForPath(snapshotFixture)})`, '--embedder-snapshot-blob', blobPath, '--embedder-snapshot-create', diff --git a/test/es-module/test-esm-extension-lookup-deprecation.mjs b/test/es-module/test-esm-extension-lookup-deprecation.mjs index dc391486f7edc2..393b554b3e47b2 100644 --- a/test/es-module/test-esm-extension-lookup-deprecation.mjs +++ b/test/es-module/test-esm-extension-lookup-deprecation.mjs @@ -11,7 +11,7 @@ describe('ESM in main field', { concurrency: true }, () => { before(() => tmpdir.refresh()); it('should handle fully-specified relative path without any warning', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -29,7 +29,7 @@ describe('ESM in main field', { concurrency: true }, () => { assert.strictEqual(code, 0); }); it('should handle fully-specified absolute path without any warning', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -48,7 +48,7 @@ describe('ESM in main field', { concurrency: true }, () => { }); it('should emit warning when "main" and "exports" are missing', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -65,7 +65,7 @@ describe('ESM in main field', { concurrency: true }, () => { assert.strictEqual(code, 0); }); it('should emit warning when "main" is falsy', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -83,7 +83,7 @@ describe('ESM in main field', { concurrency: true }, () => { assert.strictEqual(code, 0); }); it('should emit warning when "main" is a relative path without 
extension', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); @@ -101,7 +101,7 @@ describe('ESM in main field', { concurrency: true }, () => { assert.strictEqual(code, 0); }); it('should emit warning when "main" is an absolute path without extension', async () => { - const cwd = path.join(tmpdir.path, Math.random().toString()); + const cwd = tmpdir.resolve(Math.random().toString()); const pkgPath = path.join(cwd, './node_modules/pkg/'); await mkdir(pkgPath, { recursive: true }); await writeFile(path.join(pkgPath, './index.js'), 'console.log("Hello World!")'); diff --git a/test/es-module/test-esm-resolve-type.mjs b/test/es-module/test-esm-resolve-type.mjs index 7a0527ff59e554..0f442ed569f848 100644 --- a/test/es-module/test-esm-resolve-type.mjs +++ b/test/es-module/test-esm-resolve-type.mjs @@ -26,7 +26,7 @@ const { defaultResolve: resolve } = internalResolve; -const rel = (file) => path.join(tmpdir.path, file); +const rel = (file) => tmpdir.resolve(file); const previousCwd = process.cwd(); const nmDir = rel('node_modules'); diff --git a/test/es-module/test-esm-symlink-main.js b/test/es-module/test-esm-symlink-main.js index 48b4d8bbe65daf..2be495ad7dcfb5 100644 --- a/test/es-module/test-esm-symlink-main.js +++ b/test/es-module/test-esm-symlink-main.js @@ -9,7 +9,7 @@ const fs = require('fs'); tmpdir.refresh(); const realPath = path.resolve(__dirname, '../fixtures/es-modules/symlink.mjs'); -const symlinkPath = path.resolve(tmpdir.path, 'symlink.mjs'); +const symlinkPath = tmpdir.resolve('symlink.mjs'); try { fs.symlinkSync(realPath, symlinkPath); diff --git a/test/es-module/test-esm-windows.js b/test/es-module/test-esm-windows.js index 76e016217b3ef8..e5c52226ab001d 100644 --- a/test/es-module/test-esm-windows.js +++ b/test/es-module/test-esm-windows.js @@ -15,7 +15,7 @@ const imp = (file) => { (async () => { tmpdir.refresh(); - const rel = (file) => path.join(tmpdir.path, file); + const rel = (file) => tmpdir.resolve(file); { // Load a single script const file = rel('con.mjs'); diff --git a/test/fixtures/test-runner/concurrency/a.mjs b/test/fixtures/test-runner/concurrency/a.mjs index 69954461bfbae0..a34b87e82055ad 100644 --- a/test/fixtures/test-runner/concurrency/a.mjs +++ b/test/fixtures/test-runner/concurrency/a.mjs @@ -3,9 +3,9 @@ import { setTimeout } from 'node:timers/promises'; import fs from 'node:fs/promises'; import path from 'node:path'; -await fs.writeFile(path.resolve(tmpdir.path, 'test-runner-concurrency'), 'a.mjs'); +await fs.writeFile(tmpdir.resolve('test-runner-concurrency'), 'a.mjs'); while (true) { - const file = await fs.readFile(path.resolve(tmpdir.path, 'test-runner-concurrency'), 'utf8'); + const file = await fs.readFile(tmpdir.resolve('test-runner-concurrency'), 'utf8'); if (file === 'b.mjs') { break; } diff --git a/test/fixtures/test-runner/concurrency/b.mjs b/test/fixtures/test-runner/concurrency/b.mjs index 09af543a2551eb..395cea1df47b68 100644 --- a/test/fixtures/test-runner/concurrency/b.mjs +++ b/test/fixtures/test-runner/concurrency/b.mjs @@ -4,9 +4,9 @@ import fs from 'node:fs/promises'; import path from 'node:path'; while (true) { - const file = await fs.readFile(path.resolve(tmpdir.path, 'test-runner-concurrency'), 'utf8'); + const file = await 
fs.readFile(tmpdir.resolve('test-runner-concurrency'), 'utf8'); if (file === 'a.mjs') { - await fs.writeFile(path.resolve(tmpdir.path, 'test-runner-concurrency'), 'b.mjs'); + await fs.writeFile(tmpdir.resolve('test-runner-concurrency'), 'b.mjs'); break; } await setTimeout(10); diff --git a/test/fixtures/watch-mode/ipc.js b/test/fixtures/watch-mode/ipc.js index 5881299387e5b4..d2a5a63854f8f9 100644 --- a/test/fixtures/watch-mode/ipc.js +++ b/test/fixtures/watch-mode/ipc.js @@ -3,7 +3,7 @@ const url = require('node:url'); const fs = require('node:fs'); const tmpdir = require('../../common/tmpdir'); -const tmpfile = path.join(tmpdir.path, 'file'); +const tmpfile = tmpdir.resolve('file'); fs.writeFileSync(tmpfile, ''); process.send({ 'watch:require': [path.resolve(__filename)] }); diff --git a/test/internet/test-corepack-yarn-install.js b/test/internet/test-corepack-yarn-install.js index 48a9bdb44cd75f..80c2285cc23c6c 100644 --- a/test/internet/test-corepack-yarn-install.js +++ b/test/internet/test-corepack-yarn-install.js @@ -11,11 +11,11 @@ const fixtures = require('../common/fixtures'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -const npmSandbox = path.join(tmpdir.path, 'npm-sandbox'); +const npmSandbox = tmpdir.resolve('npm-sandbox'); fs.mkdirSync(npmSandbox); -const homeDir = path.join(tmpdir.path, 'home'); +const homeDir = tmpdir.resolve('home'); fs.mkdirSync(homeDir); -const installDir = path.join(tmpdir.path, 'install-dir'); +const installDir = tmpdir.resolve('install-dir'); fs.mkdirSync(installDir); const corepackYarnPath = path.join( diff --git a/test/internet/test-trace-events-dns.js b/test/internet/test-trace-events-dns.js index 64efd541fd9d2c..c18a49bc9496c8 100644 --- a/test/internet/test-trace-events-dns.js +++ b/test/internet/test-trace-events-dns.js @@ -2,7 +2,6 @@ const common = require('../common'); const assert = require('assert'); const cp = require('child_process'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); const fs = require('fs'); const util = require('util'); @@ -57,7 +56,7 @@ for (const tr in tests) { throw new Error(`${tr}:\n${util.inspect(proc)}`); } - const file = path.join(tmpdir.path, traceFile); + const file = tmpdir.resolve(traceFile); const data = fs.readFileSync(file); const traces = JSON.parse(data.toString()).traceEvents diff --git a/test/node-api/test_general/test.js b/test/node-api/test_general/test.js index 397bb3c91f629b..c7dd70f2da5f17 100644 --- a/test/node-api/test_general/test.js +++ b/test/node-api/test_general/test.js @@ -19,7 +19,7 @@ tmpdir.refresh(); } { - const urlTestDir = path.join(tmpdir.path, 'foo%#bar'); + const urlTestDir = tmpdir.resolve('foo%#bar'); const urlTestFile = path.join(urlTestDir, path.basename(filename)); fs.mkdirSync(urlTestDir, { recursive: true }); fs.copyFileSync(filename, urlTestFile); diff --git a/test/node-api/test_policy/test_policy.js b/test/node-api/test_policy/test_policy.js index d6cb12b56cb683..428dd905c2e365 100644 --- a/test/node-api/test_policy/test_policy.js +++ b/test/node-api/test_policy/test_policy.js @@ -8,7 +8,6 @@ const tmpdir = require('../../common/tmpdir'); const { spawnSync } = require('child_process'); const crypto = require('crypto'); const fs = require('fs'); -const path = require('path'); const { pathToFileURL } = require('url'); tmpdir.refresh(); @@ -19,7 +18,7 @@ function hash(algo, body) { return h.digest('base64'); } -const policyFilepath = path.join(tmpdir.path, 'policy'); +const policyFilepath = tmpdir.resolve('policy'); const 
depFilepath = require.resolve(`./build/${common.buildType}/binding.node`); const depURL = pathToFileURL(depFilepath); diff --git a/test/pummel/test-fs-largefile.js b/test/pummel/test-fs-largefile.js index 7f2630f497b817..486f23106f21df 100644 --- a/test/pummel/test-fs-largefile.js +++ b/test/pummel/test-fs-largefile.js @@ -24,14 +24,13 @@ const common = require('../common'); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); try { - const filepath = path.join(tmpdir.path, 'large.txt'); + const filepath = tmpdir.resolve('large.txt'); const fd = fs.openSync(filepath, 'w+'); const offset = 5 * 1024 * 1024 * 1024; // 5GB const message = 'Large File'; diff --git a/test/pummel/test-fs-readfile-tostring-fail.js b/test/pummel/test-fs-readfile-tostring-fail.js index 8428f1f15a0c22..8ffe630076a52d 100644 --- a/test/pummel/test-fs-readfile-tostring-fail.js +++ b/test/pummel/test-fs-readfile-tostring-fail.js @@ -7,7 +7,6 @@ if (!common.enoughTestMem) const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const cp = require('child_process'); const kStringMaxLength = require('buffer').constants.MAX_STRING_LENGTH; if (common.isAIX && (Number(cp.execSync('ulimit -f')) * 512) < kStringMaxLength) @@ -20,7 +19,7 @@ if (!tmpdir.hasEnoughSpace(kStringMaxLength)) { common.skip(`Not enough space in ${tmpdir.path}`); } -const file = path.join(tmpdir.path, 'toobig.txt'); +const file = tmpdir.resolve('toobig.txt'); const stream = fs.createWriteStream(file, { flags: 'a', }); diff --git a/test/pummel/test-fs-watch-file-slow.js b/test/pummel/test-fs-watch-file-slow.js index c7513a18e6fa3e..c6d148df05db47 100644 --- a/test/pummel/test-fs-watch-file-slow.js +++ b/test/pummel/test-fs-watch-file-slow.js @@ -22,13 +22,12 @@ 'use strict'; require('../common'); const assert = require('assert'); -const path = require('path'); const fs = require('fs'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -const FILENAME = path.join(tmpdir.path, 'watch-me'); +const FILENAME = tmpdir.resolve('watch-me'); const TIMEOUT = 1300; let nevents = 0; diff --git a/test/pummel/test-policy-integrity-dep.js b/test/pummel/test-policy-integrity-dep.js index 4611dec65007ee..d5a23d96bc2593 100644 --- a/test/pummel/test-policy-integrity-dep.js +++ b/test/pummel/test-policy-integrity-dep.js @@ -194,7 +194,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-parent-commonjs.js b/test/pummel/test-policy-integrity-parent-commonjs.js index d19a28bea5b5ad..07eee598117ba1 100644 --- a/test/pummel/test-policy-integrity-parent-commonjs.js +++ b/test/pummel/test-policy-integrity-parent-commonjs.js @@ -194,7 +194,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-parent-module.js b/test/pummel/test-policy-integrity-parent-module.js index 42f06d83ef0326..a09243ea10f529 100644 --- a/test/pummel/test-policy-integrity-parent-module.js +++ b/test/pummel/test-policy-integrity-parent-module.js @@ -194,7 +194,7 @@ 
function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-parent-no-package-json.js b/test/pummel/test-policy-integrity-parent-no-package-json.js index dd447c9fa843e4..a6461a9a5835c3 100644 --- a/test/pummel/test-policy-integrity-parent-no-package-json.js +++ b/test/pummel/test-policy-integrity-parent-no-package-json.js @@ -194,7 +194,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-worker-commonjs.js b/test/pummel/test-policy-integrity-worker-commonjs.js index 415e33664413cc..acc4298eb7b23b 100644 --- a/test/pummel/test-policy-integrity-worker-commonjs.js +++ b/test/pummel/test-policy-integrity-worker-commonjs.js @@ -211,7 +211,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-worker-module.js b/test/pummel/test-policy-integrity-worker-module.js index 813d167844104e..65a04841415da9 100644 --- a/test/pummel/test-policy-integrity-worker-module.js +++ b/test/pummel/test-policy-integrity-worker-module.js @@ -211,7 +211,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-policy-integrity-worker-no-package-json.js b/test/pummel/test-policy-integrity-worker-no-package-json.js index 108af1b78e0697..fc90f73a03cf31 100644 --- a/test/pummel/test-policy-integrity-worker-no-package-json.js +++ b/test/pummel/test-policy-integrity-worker-no-package-json.js @@ -211,7 +211,7 @@ function drainQueue() { assert.notStrictEqual(status, 0, 'Should not allow multiple policies'); } { - const enoentFilepath = path.join(tmpdir.path, 'enoent'); + const enoentFilepath = tmpdir.resolve('enoent'); try { fs.unlinkSync(enoentFilepath); } catch { diff --git a/test/pummel/test-watch-file.js b/test/pummel/test-watch-file.js index bbbbf396d72227..6d55f08160a23b 100644 --- a/test/pummel/test-watch-file.js +++ b/test/pummel/test-watch-file.js @@ -24,11 +24,10 @@ require('../common'); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -const f = path.join(tmpdir.path, 'x.txt'); +const f = tmpdir.resolve('x.txt'); fs.closeSync(fs.openSync(f, 'w')); let changes = 0; diff --git a/test/report/test-report-writereport.js b/test/report/test-report-writereport.js index 971afd84c22281..fd5430a14008e0 100644 --- a/test/report/test-report-writereport.js +++ b/test/report/test-report-writereport.js @@ -50,7 +50,7 @@ function validate() { { // Test with a file argument. 
const file = process.report.writeReport('custom-name-1.json'); - const absolutePath = path.join(tmpdir.path, file); + const absolutePath = tmpdir.resolve(file); assert.strictEqual(helper.findReports(process.pid, tmpdir.path).length, 0); assert.strictEqual(file, 'custom-name-1.json'); helper.validate(absolutePath); @@ -61,7 +61,7 @@ function validate() { // Test with file and error arguments. const file = process.report.writeReport('custom-name-2.json', new Error('test error')); - const absolutePath = path.join(tmpdir.path, file); + const absolutePath = tmpdir.resolve(file); assert.strictEqual(helper.findReports(process.pid, tmpdir.path).length, 0); assert.strictEqual(file, 'custom-name-2.json'); helper.validate(absolutePath); @@ -117,7 +117,7 @@ function validate() { { // Test the case where the report file cannot be opened. - const reportDir = path.join(tmpdir.path, 'does', 'not', 'exist'); + const reportDir = tmpdir.resolve('does', 'not', 'exist'); const args = [`--report-directory=${reportDir}`, '-e', 'process.report.writeReport()']; diff --git a/test/sequential/test-cpu-prof-dir-absolute.js b/test/sequential/test-cpu-prof-dir-absolute.js index ad0842dbc4c4fc..03d7f50865b650 100644 --- a/test/sequential/test-cpu-prof-dir-absolute.js +++ b/test/sequential/test-cpu-prof-dir-absolute.js @@ -8,7 +8,6 @@ common.skipIfInspectorDisabled(); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -22,7 +21,7 @@ const { // relative --cpu-prof-dir { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--cpu-prof', '--cpu-prof-interval', diff --git a/test/sequential/test-cpu-prof-dir-and-name.js b/test/sequential/test-cpu-prof-dir-and-name.js index 7ce775ebc16973..84af5d8212065d 100644 --- a/test/sequential/test-cpu-prof-dir-and-name.js +++ b/test/sequential/test-cpu-prof-dir-and-name.js @@ -21,7 +21,7 @@ const { { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); const file = path.join(dir, 'test.cpuprofile'); const output = spawnSync(process.execPath, [ '--cpu-prof', diff --git a/test/sequential/test-cpu-prof-dir-relative.js b/test/sequential/test-cpu-prof-dir-relative.js index 2d679959efdebd..ac8c46486feae5 100644 --- a/test/sequential/test-cpu-prof-dir-relative.js +++ b/test/sequential/test-cpu-prof-dir-relative.js @@ -8,7 +8,6 @@ common.skipIfInspectorDisabled(); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -37,7 +36,7 @@ const { console.log(output.stderr.toString()); } assert.strictEqual(output.status, 0); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); assert(fs.existsSync(dir)); const profiles = getCpuProfiles(dir); assert.strictEqual(profiles.length, 1); diff --git a/test/sequential/test-cpu-prof-dir-worker.js b/test/sequential/test-cpu-prof-dir-worker.js index fe72af7416d813..22c7f79deb2fca 100644 --- a/test/sequential/test-cpu-prof-dir-worker.js +++ b/test/sequential/test-cpu-prof-dir-worker.js @@ -8,7 +8,6 @@ common.skipIfInspectorDisabled(); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -37,7 +36,7 @@ 
const { console.log(output.stderr.toString()); } assert.strictEqual(output.status, 0); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); assert(fs.existsSync(dir)); const profiles = getCpuProfiles(dir); assert.strictEqual(profiles.length, 2); diff --git a/test/sequential/test-cpu-prof-name.js b/test/sequential/test-cpu-prof-name.js index 58d9a0ec15862f..3f1c6945c5436f 100644 --- a/test/sequential/test-cpu-prof-name.js +++ b/test/sequential/test-cpu-prof-name.js @@ -8,7 +8,6 @@ const fixtures = require('../common/fixtures'); common.skipIfInspectorDisabled(); const assert = require('assert'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -22,7 +21,7 @@ const { // --cpu-prof-name { tmpdir.refresh(); - const file = path.join(tmpdir.path, 'test.cpuprofile'); + const file = tmpdir.resolve('test.cpuprofile'); const output = spawnSync(process.execPath, [ '--cpu-prof', '--cpu-prof-interval', diff --git a/test/sequential/test-diagnostic-dir-cpu-prof.js b/test/sequential/test-diagnostic-dir-cpu-prof.js index 396a6ca7de0595..75f1d86ef4b2b5 100644 --- a/test/sequential/test-diagnostic-dir-cpu-prof.js +++ b/test/sequential/test-diagnostic-dir-cpu-prof.js @@ -9,7 +9,6 @@ common.skipIfInspectorDisabled(); const assert = require('assert'); const fs = require('fs'); -const path = require('path'); const { spawnSync } = require('child_process'); const tmpdir = require('../common/tmpdir'); @@ -24,7 +23,7 @@ const { { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--cpu-prof', '--cpu-prof-interval', @@ -50,8 +49,8 @@ const { { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'diag'); - const dir2 = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('diag'); + const dir2 = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--cpu-prof', '--cpu-prof-interval', diff --git a/test/sequential/test-diagnostic-dir-heap-prof.js b/test/sequential/test-diagnostic-dir-heap-prof.js index 0ec68ab49efdf7..c74c075724185d 100644 --- a/test/sequential/test-diagnostic-dir-heap-prof.js +++ b/test/sequential/test-diagnostic-dir-heap-prof.js @@ -66,7 +66,7 @@ function getHeapProfiles(dir) { // Test --diagnostic-dir changes the default for --cpu-prof { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--heap-prof', '--diagnostic-dir', @@ -91,8 +91,8 @@ function getHeapProfiles(dir) { // Test --heap-prof-dir overwrites --diagnostic-dir { tmpdir.refresh(); - const dir = path.join(tmpdir.path, 'diag'); - const dir2 = path.join(tmpdir.path, 'prof'); + const dir = tmpdir.resolve('diag'); + const dir2 = tmpdir.resolve('prof'); const output = spawnSync(process.execPath, [ '--heap-prof', '--heap-prof-interval', diff --git a/test/sequential/test-http2-timeout-large-write-file.js b/test/sequential/test-http2-timeout-large-write-file.js index 520958bd57f6d4..a35268b6127bae 100644 --- a/test/sequential/test-http2-timeout-large-write-file.js +++ b/test/sequential/test-http2-timeout-large-write-file.js @@ -6,7 +6,6 @@ const assert = require('assert'); const fixtures = require('../common/fixtures'); const fs = require('fs'); const http2 = require('http2'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); @@ -30,7 +29,7 @@ let offsetTimeout = 
common.platformTimeout(100); let didReceiveData = false; const content = Buffer.alloc(writeSize, 0x44); -const filepath = path.join(tmpdir.path, 'http2-large-write.tmp'); +const filepath = tmpdir.resolve('http2-large-write.tmp'); fs.writeFileSync(filepath, content, 'binary'); const fd = fs.openSync(filepath, 'r'); process.on('beforeExit', () => fs.closeSync(fd)); diff --git a/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js b/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js index a20dce83988228..0b4701b07e1c54 100644 --- a/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js +++ b/test/sequential/test-single-executable-application-disable-experimental-sea-warning.js @@ -21,10 +21,10 @@ const { strictEqual } = require('assert'); const assert = require('assert'); const inputFile = fixtures.path('sea.js'); -const requirableFile = join(tmpdir.path, 'requirable.js'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const requirableFile = tmpdir.resolve('requirable.js'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 'sea.exe' : 'sea'); tmpdir.refresh(); @@ -43,7 +43,7 @@ writeFileSync(configFile, ` `); // Copy input to working directory -copyFileSync(inputFile, join(tmpdir.path, 'sea.js')); +copyFileSync(inputFile, tmpdir.resolve('sea.js')); execFileSync(process.execPath, ['--experimental-sea-config', 'sea-config.json'], { cwd: tmpdir.path }); diff --git a/test/sequential/test-single-executable-application-empty.js b/test/sequential/test-single-executable-application-empty.js index 961ae0018368cf..13dc2e834b7caa 100644 --- a/test/sequential/test-single-executable-application-empty.js +++ b/test/sequential/test-single-executable-application-empty.js @@ -15,16 +15,15 @@ skipIfSingleExecutableIsNotSupported(); const tmpdir = require('../common/tmpdir'); const { copyFileSync, writeFileSync, existsSync } = require('fs'); const { execFileSync } = require('child_process'); -const { join } = require('path'); const assert = require('assert'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 
'sea.exe' : 'sea'); tmpdir.refresh(); -writeFileSync(join(tmpdir.path, 'empty.js'), '', 'utf-8'); +writeFileSync(tmpdir.resolve('empty.js'), '', 'utf-8'); writeFileSync(configFile, ` { "main": "empty.js", diff --git a/test/sequential/test-single-executable-application-snapshot.js b/test/sequential/test-single-executable-application-snapshot.js index d1c44b6dbab3b7..51b09cea662adf 100644 --- a/test/sequential/test-single-executable-application-snapshot.js +++ b/test/sequential/test-single-executable-application-snapshot.js @@ -14,17 +14,16 @@ skipIfSingleExecutableIsNotSupported(); const tmpdir = require('../common/tmpdir'); const { copyFileSync, writeFileSync, existsSync } = require('fs'); const { spawnSync } = require('child_process'); -const { join } = require('path'); const assert = require('assert'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 'sea.exe' : 'sea'); { tmpdir.refresh(); - writeFileSync(join(tmpdir.path, 'snapshot.js'), '', 'utf-8'); + writeFileSync(tmpdir.resolve('snapshot.js'), '', 'utf-8'); writeFileSync(configFile, ` { "main": "snapshot.js", @@ -57,7 +56,7 @@ const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : }); `; - writeFileSync(join(tmpdir.path, 'snapshot.js'), code, 'utf-8'); + writeFileSync(tmpdir.resolve('snapshot.js'), code, 'utf-8'); writeFileSync(configFile, ` { "main": "snapshot.js", diff --git a/test/sequential/test-single-executable-application-use-code-cache.js b/test/sequential/test-single-executable-application-use-code-cache.js index 6d45fcf289a772..96de5769b1fe6b 100644 --- a/test/sequential/test-single-executable-application-use-code-cache.js +++ b/test/sequential/test-single-executable-application-use-code-cache.js @@ -21,10 +21,10 @@ const { strictEqual } = require('assert'); const assert = require('assert'); const inputFile = fixtures.path('sea.js'); -const requirableFile = join(tmpdir.path, 'requirable.js'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const requirableFile = tmpdir.resolve('requirable.js'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 
'sea.exe' : 'sea'); tmpdir.refresh(); @@ -43,7 +43,7 @@ writeFileSync(configFile, ` `); // Copy input to working directory -copyFileSync(inputFile, join(tmpdir.path, 'sea.js')); +copyFileSync(inputFile, tmpdir.resolve('sea.js')); execFileSync(process.execPath, ['--experimental-sea-config', 'sea-config.json'], { cwd: tmpdir.path }); diff --git a/test/sequential/test-single-executable-application.js b/test/sequential/test-single-executable-application.js index 99d0c0d6e352dc..e930254cb0a7ae 100644 --- a/test/sequential/test-single-executable-application.js +++ b/test/sequential/test-single-executable-application.js @@ -20,10 +20,10 @@ const { strictEqual } = require('assert'); const assert = require('assert'); const inputFile = fixtures.path('sea.js'); -const requirableFile = join(tmpdir.path, 'requirable.js'); -const configFile = join(tmpdir.path, 'sea-config.json'); -const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); -const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); +const requirableFile = tmpdir.resolve('requirable.js'); +const configFile = tmpdir.resolve('sea-config.json'); +const seaPrepBlob = tmpdir.resolve('sea-prep.blob'); +const outputFile = tmpdir.resolve(process.platform === 'win32' ? 'sea.exe' : 'sea'); tmpdir.refresh(); @@ -42,7 +42,7 @@ writeFileSync(configFile, ` `); // Copy input to working directory -copyFileSync(inputFile, join(tmpdir.path, 'sea.js')); +copyFileSync(inputFile, tmpdir.resolve('sea.js')); execFileSync(process.execPath, ['--experimental-sea-config', 'sea-config.json'], { cwd: tmpdir.path }); diff --git a/test/sequential/test-tls-session-timeout.js b/test/sequential/test-tls-session-timeout.js index 86a29eed46fe73..f0ec612b449867 100644 --- a/test/sequential/test-tls-session-timeout.js +++ b/test/sequential/test-tls-session-timeout.js @@ -45,7 +45,6 @@ function doTest() { const assert = require('assert'); const tls = require('tls'); const fs = require('fs'); - const join = require('path').join; const fixtures = require('../common/fixtures'); const spawn = require('child_process').spawn; @@ -69,7 +68,7 @@ function doTest() { const sessionFileName = (function() { const ticketFileName = 'tls-session-ticket.txt'; - const tmpPath = join(tmpdir.path, ticketFileName); + const tmpPath = tmpdir.resolve(ticketFileName); fs.writeFileSync(tmpPath, fixtures.readSync(ticketFileName)); return tmpPath; }()); diff --git a/test/sequential/test-watch-mode.mjs b/test/sequential/test-watch-mode.mjs index 38654a78a1dc7f..dbe486f5bb2991 100644 --- a/test/sequential/test-watch-mode.mjs +++ b/test/sequential/test-watch-mode.mjs @@ -117,7 +117,7 @@ describe('watch mode', { concurrency: true, timeout: 60_000 }, () => { it('should watch changes to a file with watch-path', { skip: !supportsRecursive, }, async () => { - const dir = path.join(tmpdir.path, 'subdir1'); + const dir = tmpdir.resolve('subdir1'); mkdirSync(dir); const file = createTmpFile(); const watchedFile = createTmpFile('', '.js', dir); @@ -138,7 +138,7 @@ describe('watch mode', { concurrency: true, timeout: 60_000 }, () => { it('should watch when running an non-existing file - when specified under --watch-path', { skip: !supportsRecursive }, async () => { - const dir = path.join(tmpdir.path, 'subdir2'); + const dir = tmpdir.resolve('subdir2'); mkdirSync(dir); const file = path.join(dir, 'non-existing.js'); const watchedFile = createTmpFile('', '.js', dir); @@ -156,7 +156,7 @@ describe('watch mode', { concurrency: true, timeout: 60_000 }, () => { it('should watch when running an 
non-existing file - when specified under --watch-path with equals', { skip: !supportsRecursive }, async () => { - const dir = path.join(tmpdir.path, 'subdir3'); + const dir = tmpdir.resolve('subdir3'); mkdirSync(dir); const file = path.join(dir, 'non-existing.js'); const watchedFile = createTmpFile('', '.js', dir); @@ -295,21 +295,21 @@ console.log(values.random); it('should not watch when running an missing file', { skip: !supportsRecursive }, async () => { - const nonExistingfile = path.join(tmpdir.path, `${tmpFiles++}.js`); + const nonExistingfile = tmpdir.resolve(`${tmpFiles++}.js`); await failWriteSucceed({ file: nonExistingfile, watchedFile: nonExistingfile }); }); it('should not watch when running an missing mjs file', { skip: !supportsRecursive }, async () => { - const nonExistingfile = path.join(tmpdir.path, `${tmpFiles++}.mjs`); + const nonExistingfile = tmpdir.resolve(`${tmpFiles++}.mjs`); await failWriteSucceed({ file: nonExistingfile, watchedFile: nonExistingfile }); }); it('should watch changes to previously missing dependency', { skip: !supportsRecursive }, async () => { - const dependency = path.join(tmpdir.path, `${tmpFiles++}.js`); + const dependency = tmpdir.resolve(`${tmpFiles++}.js`); const relativeDependencyPath = `./${path.basename(dependency)}`; const dependant = createTmpFile(`console.log(require('${relativeDependencyPath}'))`); @@ -320,7 +320,7 @@ console.log(values.random); skip: !supportsRecursive }, async () => { const relativeDependencyPath = `./${tmpFiles++}.mjs`; - const dependency = path.join(tmpdir.path, relativeDependencyPath); + const dependency = tmpdir.resolve(relativeDependencyPath); const dependant = createTmpFile(`import ${JSON.stringify(relativeDependencyPath)}`, '.mjs'); await failWriteSucceed({ file: dependant, watchedFile: dependency }); diff --git a/test/sequential/test-worker-prof.js b/test/sequential/test-worker-prof.js index c2df47a8e8a121..bcb5a477497d73 100644 --- a/test/sequential/test-worker-prof.js +++ b/test/sequential/test-worker-prof.js @@ -4,7 +4,6 @@ const tmpdir = require('../common/tmpdir'); const fs = require('fs'); const assert = require('assert'); const util = require('util'); -const { join } = require('path'); const { spawnSync } = require('child_process'); // Test that --prof also tracks Worker threads. 
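The mechanical pattern throughout this series is replacing `path.join(tmpdir.path, ...)` with `tmpdir.resolve(...)`, dropping the `path` import wherever it becomes unused. As a minimal sketch, the shared helper these tests rely on presumably looks like the module below; the helper names come from the diffs, but the body is an illustrative assumption rather than the actual `test/common/tmpdir.js` (which also creates and refreshes the directory). `fileURL()` is sketched on the same assumption, since the next patch in the series migrates tests to it.

```js
'use strict';
// Illustrative sketch of the assumed shape of test/common/tmpdir.js.
const path = require('node:path');
const { pathToFileURL } = require('node:url');

const tmpdirPath = path.resolve(__dirname, 'tmp'); // placeholder location

module.exports = {
  path: tmpdirPath,
  // Resolves segments against the temp directory, replacing the
  // repeated path.join(tmpdir.path, ...) pattern in individual tests.
  resolve(...paths) {
    return path.resolve(tmpdirPath, ...paths);
  },
  // Builds a file: URL pointing inside the temp directory.
  fileURL(...paths) {
    return pathToFileURL(path.join(tmpdirPath, ...paths));
  },
};
```

Centralizing the join in one helper is what lets so many of these tests drop their `path` require entirely.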
@@ -67,7 +66,7 @@ if (process.argv[2] === 'child') { for (const logfile of logfiles) { const lines = fs.readFileSync( - join(tmpdir.path, logfile), 'utf8').split('\n'); + tmpdir.resolve(logfile), 'utf8').split('\n'); const ticks = lines.filter((line) => /^tick,/.test(line)).length; // Test that at least 15 ticks have been recorded for both parent and child diff --git a/test/tick-processor/test-tick-processor-polyfill-brokenfile.js b/test/tick-processor/test-tick-processor-polyfill-brokenfile.js index 2089325dff3e99..ac3cb8692b0215 100644 --- a/test/tick-processor/test-tick-processor-polyfill-brokenfile.js +++ b/test/tick-processor/test-tick-processor-polyfill-brokenfile.js @@ -15,10 +15,9 @@ if (isCPPSymbolsNotMapped) { const assert = require('assert'); const { spawn, spawnSync } = require('child_process'); -const path = require('path'); const { writeFileSync } = require('fs'); -const LOG_FILE = path.join(tmpdir.path, 'tick-processor.log'); +const LOG_FILE = tmpdir.resolve('tick-processor.log'); const RETRY_TIMEOUT = 150; const BROKEN_PART = 'tick,'; const WARN_REG_EXP = /\(node:\d+\) \[BROKEN_PROFILE_FILE] Warning: Profile file .* is broken/; diff --git a/test/tick-processor/tick-processor-base.js b/test/tick-processor/tick-processor-base.js index 91307d16928889..a9fd939495091b 100644 --- a/test/tick-processor/tick-processor-base.js +++ b/test/tick-processor/tick-processor-base.js @@ -2,12 +2,11 @@ require('../common'); const fs = require('fs'); const cp = require('child_process'); -const path = require('path'); const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); -const LOG_FILE = path.join(tmpdir.path, 'tick-processor.log'); +const LOG_FILE = tmpdir.resolve('tick-processor.log'); const RETRY_TIMEOUT = 150; function runTest(test) { diff --git a/test/wasi/test-wasi-stdio.js b/test/wasi/test-wasi-stdio.js index 29e91281553817..d4c65f238df890 100644 --- a/test/wasi/test-wasi-stdio.js +++ b/test/wasi/test-wasi-stdio.js @@ -7,9 +7,9 @@ const { join } = require('path'); const { WASI } = require('wasi'); const modulePath = join(__dirname, 'wasm', 'stdin.wasm'); const buffer = readFileSync(modulePath); -const stdinFile = join(tmpdir.path, 'stdin.txt'); -const stdoutFile = join(tmpdir.path, 'stdout.txt'); -const stderrFile = join(tmpdir.path, 'stderr.txt'); +const stdinFile = tmpdir.resolve('stdin.txt'); +const stdoutFile = tmpdir.resolve('stdout.txt'); +const stderrFile = tmpdir.resolve('stderr.txt'); tmpdir.refresh(); // Write 33 x's. The test's buffer only holds 31 x's + a terminator. diff --git a/test/wasi/test-wasi-symlinks.js b/test/wasi/test-wasi-symlinks.js index 79369fd4c18247..9c95a0e55757d0 100644 --- a/test/wasi/test-wasi-symlinks.js +++ b/test/wasi/test-wasi-symlinks.js @@ -38,15 +38,15 @@ if (process.argv[2] === 'wasi-child') { // Setup the sandbox environment. 
tmpdir.refresh(); - const sandbox = path.join(tmpdir.path, 'sandbox'); + const sandbox = tmpdir.resolve('sandbox'); const sandboxedFile = path.join(sandbox, 'input.txt'); - const externalFile = path.join(tmpdir.path, 'outside.txt'); + const externalFile = tmpdir.resolve('outside.txt'); const sandboxedDir = path.join(sandbox, 'subdir'); const sandboxedSymlink = path.join(sandboxedDir, 'input_link.txt'); const escapingSymlink = path.join(sandboxedDir, 'outside.txt'); const loopSymlink1 = path.join(sandboxedDir, 'loop1'); const loopSymlink2 = path.join(sandboxedDir, 'loop2'); - const sandboxedTmp = path.join(tmpdir.path, 'tmp'); + const sandboxedTmp = tmpdir.resolve('tmp'); fs.mkdirSync(sandbox); fs.mkdirSync(sandboxedDir); From fac56dbcc0b28fe3fbde1d40582a5ebda73720ab Mon Sep 17 00:00:00 2001 From: Livia Medeiros Date: Tue, 22 Aug 2023 01:42:03 +0900 Subject: [PATCH 050/125] test,benchmark: use `tmpdir.fileURL()` PR-URL: https://github.com/nodejs/node/pull/49138 Refs: https://github.com/nodejs/node/pull/49040 Reviewed-By: Luigi Pinca --- benchmark/esm/esm-loader-import.js | 4 +--- test/es-module/test-esm-dynamic-import-mutating-fs.js | 5 +---- test/es-module/test-esm-dynamic-import-mutating-fs.mjs | 5 +---- test/node-api/test_policy/test_policy.js | 5 ----- 4 files changed, 3 insertions(+), 16 deletions(-) diff --git a/benchmark/esm/esm-loader-import.js b/benchmark/esm/esm-loader-import.js index 9967cd95275469..025afbf616b570 100644 --- a/benchmark/esm/esm-loader-import.js +++ b/benchmark/esm/esm-loader-import.js @@ -2,13 +2,11 @@ // general startup, does not test lazy operations 'use strict'; const fs = require('node:fs'); -const path = require('node:path'); const common = require('../common.js'); const tmpdir = require('../../test/common/tmpdir.js'); -const { pathToFileURL } = require('node:url'); -const benchmarkDirectory = pathToFileURL(path.resolve(tmpdir.path, 'benchmark-import')); +const benchmarkDirectory = tmpdir.fileURL('benchmark-import'); const configs = { n: [1e3], diff --git a/test/es-module/test-esm-dynamic-import-mutating-fs.js b/test/es-module/test-esm-dynamic-import-mutating-fs.js index 09cbffe487959e..b3e3bd899a93e8 100644 --- a/test/es-module/test-esm-dynamic-import-mutating-fs.js +++ b/test/es-module/test-esm-dynamic-import-mutating-fs.js @@ -4,12 +4,9 @@ const tmpdir = require('../common/tmpdir'); const assert = require('node:assert'); const fs = require('node:fs/promises'); -const { pathToFileURL } = require('node:url'); tmpdir.refresh(); -const tmpDir = pathToFileURL(tmpdir.path); - -const target = new URL(`./${Math.random()}.mjs`, tmpDir); +const target = tmpdir.fileURL(`${Math.random()}.mjs`); (async () => { diff --git a/test/es-module/test-esm-dynamic-import-mutating-fs.mjs b/test/es-module/test-esm-dynamic-import-mutating-fs.mjs index 7eb79337065765..74a75ddd1c4824 100644 --- a/test/es-module/test-esm-dynamic-import-mutating-fs.mjs +++ b/test/es-module/test-esm-dynamic-import-mutating-fs.mjs @@ -4,12 +4,9 @@ import tmpdir from '../common/tmpdir.js'; import assert from 'node:assert'; import fs from 'node:fs/promises'; import { execPath } from 'node:process'; -import { pathToFileURL } from 'node:url'; tmpdir.refresh(); -const tmpDir = pathToFileURL(tmpdir.path); - -const target = new URL(`./${Math.random()}.mjs`, tmpDir); +const target = tmpdir.fileURL(`${Math.random()}.mjs`); await assert.rejects(import(target), { code: 'ERR_MODULE_NOT_FOUND' }); diff --git a/test/node-api/test_policy/test_policy.js b/test/node-api/test_policy/test_policy.js index 
428dd905c2e365..f14ceff3c4537b 100644 --- a/test/node-api/test_policy/test_policy.js +++ b/test/node-api/test_policy/test_policy.js @@ -23,11 +23,6 @@ const policyFilepath = tmpdir.resolve('policy'); const depFilepath = require.resolve(`./build/${common.buildType}/binding.node`); const depURL = pathToFileURL(depFilepath); -const tmpdirURL = pathToFileURL(tmpdir.path); -if (!tmpdirURL.pathname.endsWith('/')) { - tmpdirURL.pathname += '/'; -} - const depBody = fs.readFileSync(depURL); function writePolicy(...resources) { const manifest = { resources: {} }; From dc092864efba8820c39448ca84ab5b70d66377d4 Mon Sep 17 00:00:00 2001 From: Jungku Lee Date: Tue, 22 Aug 2023 01:42:12 +0900 Subject: [PATCH 051/125] src: remove unused function `GetName()` in node_perf PR-URL: https://github.com/nodejs/node/pull/49244 Reviewed-By: Antoine du Hamel Reviewed-By: Deokjin Kim --- src/node_perf.cc | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/src/node_perf.cc b/src/node_perf.cc index 1acaa9dfe47145..360cc8bf673073 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -236,18 +236,6 @@ static void RemoveGarbageCollectionTracking( GarbageCollectionCleanupHook(env); } -// Gets the name of a function -inline Local<Value> GetName(Local<Function> fn) { - Local<Value> val = fn->GetDebugName(); - if (val.IsEmpty() || val->IsUndefined()) { - Local<Value> boundFunction = fn->GetBoundFunction(); - if (!boundFunction.IsEmpty() && !boundFunction->IsUndefined()) { - val = GetName(boundFunction.As<Function>()); - } - } - return val; -} - // Notify a custom PerformanceEntry to observers void Notify(const FunctionCallbackInfo<Value>& args) { Environment* env = Environment::GetCurrent(args); From a22e0d9696ec4d797b6a099729afcbfa056f0313 Mon Sep 17 00:00:00 2001 From: Fedor Indutny <238531+indutny@users.noreply.github.com> Date: Mon, 21 Aug 2023 11:12:09 -0700 Subject: [PATCH 052/125] doc: clarify use of Uint8Array for n-api `napi_get_buffer_info` always supported receiving `Uint8Array` as a `value` argument because `node::Buffer` is a subclass of `Uint8Array` and the underlying V8 APIs don't distinguish between the two. With this change we mark both types as supported by the API so that user code doesn't have to unknowingly use an officially unsupported type for the `value` argument. PR-URL: https://github.com/nodejs/node/pull/48742 Reviewed-By: Luigi Pinca Reviewed-By: Gabriel Schulhof Reviewed-By: Chengzhong Wu --- doc/api/n-api.md | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 26ca5dc83fb34f..8b8f77f9d6a1f8 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3070,13 +3070,18 @@ napi_status napi_get_buffer_info(napi_env env, ``` * `[in] env`: The environment that the API is invoked under. -* `[in] value`: `napi_value` representing the `node::Buffer` being queried. -* `[out] data`: The underlying data buffer of the `node::Buffer`. - If length is `0`, this may be `NULL` or any other pointer value. +* `[in] value`: `napi_value` representing the `node::Buffer` or `Uint8Array` + being queried. +* `[out] data`: The underlying data buffer of the `node::Buffer` or + `Uint8Array`. If length is `0`, this may be `NULL` or any other pointer value. +* `[out] length`: Length in bytes of the underlying data buffer. Returns `napi_ok` if the API succeeded. +This method returns the identical `data` and `byte_length` as +[`napi_get_typedarray_info`][]. And `napi_get_typedarray_info` accepts a +`node::Buffer` (a Uint8Array) as the value too.
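The premise of this change, that every `node::Buffer` already is a `Uint8Array` and so the typed-array query works on either, can be verified from plain JavaScript:

```js
// Buffer subclasses Uint8Array, so native code can treat the two uniformly.
const buf = Buffer.from('hello');
console.log(buf instanceof Uint8Array); // true
console.log(Object.getPrototypeOf(Buffer.prototype) === Uint8Array.prototype); // true
```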
+ +This API is used to retrieve the underlying data buffer of a `node::Buffer` and its length. @@ -3827,12 +3832,14 @@ napi_status napi_is_buffer(napi_env env, napi_value value, bool* result) * `[in] env`: The environment that the API is invoked under. * `[in] value`: The JavaScript value to check. -* `[out] result`: Whether the given `napi_value` represents a `node::Buffer` - object. +* `[out] result`: Whether the given `napi_value` represents a `node::Buffer` or + `Uint8Array` object. Returns `napi_ok` if the API succeeded. -This API checks if the `Object` passed in is a buffer. +This API checks if the `Object` passed in is a buffer or Uint8Array. +[`napi_is_typedarray`][] should be preferred if the caller needs to check if the +value is a Uint8Array. ### `napi_is_date` @@ -6502,11 +6509,13 @@ the add-on's file name during loading. [`napi_get_last_error_info`]: #napi_get_last_error_info [`napi_get_property`]: #napi_get_property [`napi_get_reference_value`]: #napi_get_reference_value +[`napi_get_typedarray_info`]: #napi_get_typedarray_info [`napi_get_value_external`]: #napi_get_value_external [`napi_has_property`]: #napi_has_property [`napi_instanceof`]: #napi_instanceof [`napi_is_error`]: #napi_is_error [`napi_is_exception_pending`]: #napi_is_exception_pending +[`napi_is_typedarray`]: #napi_is_typedarray [`napi_make_callback`]: #napi_make_callback [`napi_open_callback_scope`]: #napi_open_callback_scope [`napi_open_escapable_handle_scope`]: #napi_open_escapable_handle_scope From 55d664917582388169bb2551996b712422e02109 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 18 Aug 2023 14:33:54 +0200 Subject: [PATCH 053/125] src: support snapshot deserialization in RAIIIsolate PR-URL: https://github.com/nodejs/node/pull/49226 Refs: https://github.com/nodejs/node-v8/issues/252 Reviewed-By: Darshan Sen Reviewed-By: Yagiz Nizipli --- src/util.cc | 6 +++++- src/util.h | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/util.cc b/src/util.cc index 76a61aef592641..19fb91c959a205 100644 --- a/src/util.cc +++ b/src/util.cc @@ -27,6 +27,7 @@ #include "node_buffer.h" #include "node_errors.h" #include "node_internals.h" +#include "node_snapshot_builder.h" #include "node_v8_platform-inl.h" #include "string_bytes.h" #include "uv.h" @@ -677,13 +678,16 @@ Local<String> UnionBytes::ToStringChecked(Isolate* isolate) const { } } -RAIIIsolate::RAIIIsolate() +RAIIIsolate::RAIIIsolate(const SnapshotData* data) : allocator_{ArrayBuffer::Allocator::NewDefaultAllocator()} { isolate_ = Isolate::Allocate(); CHECK_NOT_NULL(isolate_); per_process::v8_platform.Platform()->RegisterIsolate(isolate_, uv_default_loop()); Isolate::CreateParams params; + if (data != nullptr) { + SnapshotBuilder::InitializeIsolateParams(data, &params); + } params.array_buffer_allocator = allocator_.get(); Isolate::Initialize(isolate_, params); } diff --git a/src/util.h b/src/util.h index b9369867eed316..344f7753dab2b1 100644 --- a/src/util.h +++ b/src/util.h @@ -971,7 +971,7 @@ void SetConstructorFunction(v8::Isolate* isolate, // Simple RAII class to spin up a v8::Isolate instance. 
class RAIIIsolate { public: - RAIIIsolate(); + explicit RAIIIsolate(const SnapshotData* data = nullptr); ~RAIIIsolate(); v8::Isolate* get() const { return isolate_; } From d7ff78b442d067e76159c8ad78747bebe138e764 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 18 Aug 2023 14:16:55 +0200 Subject: [PATCH 054/125] sea: generate code cache with deserialized isolate V8 now requires the code cache to be compiled from an isolate with the same read-only (RO) space layout as the one that's going to deserialize the cache, so for a binary built with a snapshot, we need to compile the code cache using a deserialized isolate. Drive-by: ignore "useCodeCache" when "useSnapshot" is true, since in that case the compilation would have been done at build time anyway, and print a warning for it. PR-URL: https://github.com/nodejs/node/pull/49226 Refs: https://github.com/nodejs/node-v8/issues/252 Reviewed-By: Darshan Sen Reviewed-By: Yagiz Nizipli --- src/node_sea.cc | 21 ++++--- ...ble-application-snapshot-and-code-cache.js | 63 +++++++++++++++++++ 2 files changed, 76 insertions(+), 8 deletions(-) create mode 100644 test/sequential/test-single-executable-application-snapshot-and-code-cache.js diff --git a/src/node_sea.cc b/src/node_sea.cc index a8dbfeaa424943..521f2f670b28c8 100644 --- a/src/node_sea.cc +++ b/src/node_sea.cc @@ -411,7 +411,7 @@ ExitCode GenerateSnapshotForSEA(const SeaConfig& config, std::optional<std::string> GenerateCodeCache(std::string_view main_path, std::string_view main_script) { - RAIIIsolate raii_isolate; + RAIIIsolate raii_isolate(SnapshotBuilder::GetEmbeddedSnapshotData()); Isolate* isolate = raii_isolate.get(); HandleScope handle_scope(isolate); @@ -489,14 +489,19 @@ ExitCode GenerateSingleExecutableBlob( std::optional<std::string_view> optional_sv_code_cache; std::string code_cache; if (static_cast<bool>(config.flags & SeaFlags::kUseCodeCache)) { - std::optional<std::string> optional_code_cache = - GenerateCodeCache(config.main_path, main_script); - if (!optional_code_cache.has_value()) { - FPrintF(stderr, "Cannot generate V8 code cache\n"); - return ExitCode::kGenericUserError; + if (builds_snapshot_from_main) { + FPrintF(stderr, + "\"useCodeCache\" is redundant when \"useSnapshot\" is true\n"); + } else { + std::optional<std::string> optional_code_cache = + GenerateCodeCache(config.main_path, main_script); + if (!optional_code_cache.has_value()) { + FPrintF(stderr, "Cannot generate V8 code cache\n"); + return ExitCode::kGenericUserError; + } + code_cache = optional_code_cache.value(); + optional_sv_code_cache = code_cache; } - code_cache = optional_code_cache.value(); - optional_sv_code_cache = code_cache; } SeaResource sea{ diff --git a/test/sequential/test-single-executable-application-snapshot-and-code-cache.js b/test/sequential/test-single-executable-application-snapshot-and-code-cache.js new file mode 100644 index 00000000000000..66012e38a4faa6 --- /dev/null +++ b/test/sequential/test-single-executable-application-snapshot-and-code-cache.js @@ -0,0 +1,63 @@ +'use strict'; + +require('../common'); + +const { + injectAndCodeSign, + skipIfSingleExecutableIsNotSupported, +} = require('../common/sea'); + +skipIfSingleExecutableIsNotSupported(); + +// This tests "useCodeCache" is ignored when "useSnapshot" is true. 
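For orientation, the "code cache" involved here is V8's serialized compilation output. The same mechanism is exposed to JavaScript through `node:vm`, which gives a compact public-API illustration of what `GenerateCodeCache()` produces natively; this sketch is independent of the SEA machinery above:

```js
// Producing and consuming a V8 code cache via the public vm API.
const vm = require('node:vm');

const source = 'globalThis.answer = 40 + 2;';
const script = new vm.Script(source);
const cache = script.createCachedData(); // Buffer of serialized compile state

// A later compilation of the same source can start from the cache.
const script2 = new vm.Script(source, { cachedData: cache });
console.log(script2.cachedDataRejected); // false: the cache was accepted
```

When `useSnapshot` is set, the main script is already compiled into the snapshot blob at build time, which is why a separate code cache adds nothing and the builder now only warns.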
+ +const tmpdir = require('../common/tmpdir'); +const { copyFileSync, writeFileSync, existsSync } = require('fs'); +const { spawnSync } = require('child_process'); +const { join } = require('path'); +const assert = require('assert'); + +const configFile = join(tmpdir.path, 'sea-config.json'); +const seaPrepBlob = join(tmpdir.path, 'sea-prep.blob'); +const outputFile = join(tmpdir.path, process.platform === 'win32' ? 'sea.exe' : 'sea'); + +{ + tmpdir.refresh(); + const code = ` + const { + setDeserializeMainFunction, + } = require('v8').startupSnapshot; + + setDeserializeMainFunction(() => { + console.log('Hello from snapshot'); + }); + `; + + writeFileSync(join(tmpdir.path, 'snapshot.js'), code, 'utf-8'); + writeFileSync(configFile, ` + { + "main": "snapshot.js", + "output": "sea-prep.blob", + "useSnapshot": true, + "useCodeCache": true + } + `); + + let child = spawnSync( + process.execPath, + ['--experimental-sea-config', 'sea-config.json'], + { + cwd: tmpdir.path + }); + assert.match( + child.stderr.toString(), + /"useCodeCache" is redundant when "useSnapshot" is true/); + + assert(existsSync(seaPrepBlob)); + + copyFileSync(process.execPath, outputFile); + injectAndCodeSign(outputFile, seaPrepBlob); + + child = spawnSync(outputFile); + assert.strictEqual(child.stdout.toString().trim(), 'Hello from snapshot'); +} From bef900e56b381b86d6f61713108ba1e9e9c41ce9 Mon Sep 17 00:00:00 2001 From: Geoffrey Booth Date: Tue, 22 Aug 2023 14:53:40 -0700 Subject: [PATCH 055/125] doc: move and rename loaders section PR-URL: https://github.com/nodejs/node/pull/49261 Reviewed-By: Matteo Collina Reviewed-By: Jacob Smith Reviewed-By: Rafael Gonzaga Reviewed-By: Benjamin Gruenbaum --- doc/api/esm.md | 686 +--------------------------------------------- doc/api/module.md | 673 ++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 682 insertions(+), 677 deletions(-) diff --git a/doc/api/esm.md b/doc/api/esm.md index 9437c41ea50f8c..4004d5f03f0fcc 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -9,12 +9,12 @@ added: v8.5.0 changes: - version: v20.0.0 pr-url: https://github.com/nodejs/node/pull/44710 - description: Loader hooks are executed off the main thread. + description: Module customization hooks are executed off the main thread. - version: - v18.6.0 - v16.17.0 pr-url: https://github.com/nodejs/node/pull/42623 - description: Add support for chaining loaders. + description: Add support for chaining module customization hooks. - version: - v17.1.0 - v16.14.0 @@ -25,7 +25,7 @@ changes: - v16.12.0 pr-url: https://github.com/nodejs/node/pull/37468 description: - Consolidate loader hooks, removed `getFormat`, `getSource`, + Consolidate customization hooks, removed `getFormat`, `getSource`, `transformSource`, and `getGlobalPreloadCode` hooks added `load` and `globalPreload` hooks allowed returning `format` from either `resolve` or `load` hooks. @@ -376,8 +376,8 @@ behind the `--experimental-import-meta-resolve` flag: * `parent` {string|URL} An optional absolute parent module URL to resolve from. -> **Caveat** This feature is not available within custom loaders (it would -> create a deadlock). +> **Caveat** This feature is not available within module customization hooks (it +> would create a deadlock). ## Interoperability with CommonJS @@ -525,8 +525,8 @@ if this behavior is desired. #### No `require.extensions` -`require.extensions` is not used by `import`. The expectation is that loader -hooks can provide this workflow in the future. +`require.extensions` is not used by `import`. 
Module customization hooks can +provide a replacement. #### No `require.cache` @@ -694,651 +694,8 @@ of Node.js applications. ## Loaders - - -> Stability: 1 - Experimental - -> This API is currently being redesigned and will still change. - - - -To customize the default module resolution, loader hooks can optionally be -provided via a `--experimental-loader ./loader-name.mjs` argument to Node.js. - -When hooks are used they apply to each subsequent loader, the entry point, and -all `import` calls. They won't apply to `require` calls; those still follow -[CommonJS][] rules. - -Loaders follow the pattern of `--require`: - -```bash -node \ - --experimental-loader unpkg \ - --experimental-loader http-to-https \ - --experimental-loader cache-buster -``` - -These are called in the following sequence: `cache-buster` calls -`http-to-https` which calls `unpkg`. - -### Hooks - -Hooks are part of a chain, even if that chain consists of only one custom -(user-provided) hook and the default hook, which is always present. Hook -functions nest: each one must always return a plain object, and chaining happens -as a result of each function calling `next()`, which is a reference -to the subsequent loader's hook. - -A hook that returns a value lacking a required property triggers an exception. -A hook that returns without calling `next()` _and_ without returning -`shortCircuit: true` also triggers an exception. These errors are to help -prevent unintentional breaks in the chain. - -Hooks are run in a separate thread, isolated from the main. That means it is a -different [realm](https://tc39.es/ecma262/#realm). The hooks thread may be -terminated by the main thread at any time, so do not depend on asynchronous -operations (like `console.log`) to complete. - -#### `initialize()` - - - -> The loaders API is being redesigned. This hook may disappear or its -> signature may change. Do not rely on the API described below. - -* `data` {any} The data from `register(loader, import.meta.url, { data })`. -* Returns: {any} The data to be returned to the caller of `register`. - -The `initialize` hook provides a way to define a custom function that runs -in the loader's thread when the loader is initialized. Initialization happens -when the loader is registered via [`register`][] or registered via the -`--experimental-loader` command line option. - -This hook can send and receive data from a [`register`][] invocation, including -ports and other transferrable objects. The return value of `initialize` must be -either: - -* `undefined`, -* something that can be posted as a message between threads (e.g. the input to - [`port.postMessage`][]), -* a `Promise` resolving to one of the aforementioned values. - -Loader code: - -```js -// In the below example this file is referenced as -// '/path-to-my-loader.js' - -export async function initialize({ number, port }) { - port.postMessage(`increment: ${number + 1}`); - return 'ok'; -} -``` - -Caller code: - -```js -import assert from 'node:assert'; -import { register } from 'node:module'; -import { MessageChannel } from 'node:worker_threads'; - -// This example showcases how a message channel can be used to -// communicate between the main (application) thread and the loader -// running on the loaders thread, by sending `port2` to the loader. 
-const { port1, port2 } = new MessageChannel();
-
-port1.on('message', (msg) => {
-  assert.strictEqual(msg, 'increment: 2');
-});
-
-const result = register('/path-to-my-loader.js', {
-  parentURL: import.meta.url,
-  data: { number: 1, port: port2 },
-  transferList: [port2],
-});
-
-assert.strictEqual(result, 'ok');
-```
-
-#### `resolve(specifier, context, nextResolve)`
-
-
-> The loaders API is being redesigned. This hook may disappear or its
-> signature may change. Do not rely on the API described below.
-
-* `specifier` {string}
-* `context` {Object}
-  * `conditions` {string\[]} Export conditions of the relevant `package.json`
-  * `importAssertions` {Object} An object whose key-value pairs represent the
-    assertions for the module to import
-  * `parentURL` {string|undefined} The module importing this one, or undefined
-    if this is the Node.js entry point
-* `nextResolve` {Function} The subsequent `resolve` hook in the chain, or the
-  Node.js default `resolve` hook after the last user-supplied `resolve` hook
-  * `specifier` {string}
-  * `context` {Object}
-* Returns: {Object|Promise}
-  * `format` {string|null|undefined} A hint to the load hook (it might be
-    ignored)
-    `'builtin' | 'commonjs' | 'json' | 'module' | 'wasm'`
-  * `importAssertions` {Object|undefined} The import assertions to use when
-    caching the module (optional; if excluded the input will be used)
-  * `shortCircuit` {undefined|boolean} A signal that this hook intends to
-    terminate the chain of `resolve` hooks. **Default:** `false`
-  * `url` {string} The absolute URL to which this input resolves
-
-> **Caveat** Despite support for returning promises and async functions, calls
-> to `resolve` may block the main thread which can impact performance.
-
-The `resolve` hook chain is responsible for telling Node.js where to find and
-how to cache a given `import` statement or expression. It can optionally return
-its format (such as `'module'`) as a hint to the `load` hook. If a format is
-specified, the `load` hook is ultimately responsible for providing the final
-`format` value (and it is free to ignore the hint provided by `resolve`); if
-`resolve` provides a `format`, a custom `load` hook is required even if only to
-pass the value to the Node.js default `load` hook.
-
-Import type assertions are part of the cache key for saving loaded modules into
-the internal module cache. The `resolve` hook is responsible for
-returning an `importAssertions` object if the module should be cached with
-different assertions than were present in the source code.
-
-The `conditions` property in `context` is an array of conditions for
-[package exports conditions][Conditional Exports] that apply to this resolution
-request. They can be used for looking up conditional mappings elsewhere or to
-modify the list when calling the default resolution logic.
-
-The current [package exports conditions][Conditional Exports] are always in
-the `context.conditions` array passed into the hook. To guarantee _default
-Node.js module specifier resolution behavior_ when calling `defaultResolve`, the
-`context.conditions` array passed to it _must_ include _all_ elements of the
-`context.conditions` array originally passed into the `resolve` hook.
-
-```js
-export function resolve(specifier, context, nextResolve) {
-  const { parentURL = null } = context;
-
-  if (Math.random() > 0.5) { // Some condition.
-    // For some or all specifiers, do some custom logic for resolving.
-    // Always return an object of the form {url: <url>}.
- return { - shortCircuit: true, - url: parentURL ? - new URL(specifier, parentURL).href : - new URL(specifier).href, - }; - } - - if (Math.random() < 0.5) { // Another condition. - // When calling `defaultResolve`, the arguments can be modified. In this - // case it's adding another value for matching conditional exports. - return nextResolve(specifier, { - ...context, - conditions: [...context.conditions, 'another-condition'], - }); - } - - // Defer to the next hook in the chain, which would be the - // Node.js default resolve if this is the last user-specified loader. - return nextResolve(specifier); -} -``` - -#### `load(url, context, nextLoad)` - - - -> The loaders API is being redesigned. This hook may disappear or its -> signature may change. Do not rely on the API described below. - -> In a previous version of this API, this was split across 3 separate, now -> deprecated, hooks (`getFormat`, `getSource`, and `transformSource`). - -* `url` {string} The URL returned by the `resolve` chain -* `context` {Object} - * `conditions` {string\[]} Export conditions of the relevant `package.json` - * `format` {string|null|undefined} The format optionally supplied by the - `resolve` hook chain - * `importAssertions` {Object} -* `nextLoad` {Function} The subsequent `load` hook in the chain, or the - Node.js default `load` hook after the last user-supplied `load` hook - * `specifier` {string} - * `context` {Object} -* Returns: {Object} - * `format` {string} - * `shortCircuit` {undefined|boolean} A signal that this hook intends to - terminate the chain of `resolve` hooks. **Default:** `false` - * `source` {string|ArrayBuffer|TypedArray} The source for Node.js to evaluate - -The `load` hook provides a way to define a custom method of determining how -a URL should be interpreted, retrieved, and parsed. It is also in charge of -validating the import assertion. - -The final value of `format` must be one of the following: - -| `format` | Description | Acceptable types for `source` returned by `load` | -| ------------ | ------------------------------ | -------------------------------------------------------------------------- | -| `'builtin'` | Load a Node.js builtin module | Not applicable | -| `'commonjs'` | Load a Node.js CommonJS module | { [`string`][], [`ArrayBuffer`][], [`TypedArray`][], `null`, `undefined` } | -| `'json'` | Load a JSON file | { [`string`][], [`ArrayBuffer`][], [`TypedArray`][] } | -| `'module'` | Load an ES module | { [`string`][], [`ArrayBuffer`][], [`TypedArray`][] } | -| `'wasm'` | Load a WebAssembly module | { [`ArrayBuffer`][], [`TypedArray`][] } | - -The value of `source` is ignored for type `'builtin'` because currently it is -not possible to replace the value of a Node.js builtin (core) module. - -The value of `source` can be omitted for type `'commonjs'`. When a `source` is -provided, all `require` calls from this module will be processed by the ESM -loader with registered `resolve` and `load` hooks; all `require.resolve` calls -from this module will be processed by the ESM loader with registered `resolve` -hooks; `require.extensions` and monkey-patching on the CommonJS module loader -will not apply. If `source` is undefined or `null`, it will be handled by the -CommonJS module loader and `require`/`require.resolve` calls will not go through -the registered hooks. This behavior for nullish `source` is temporary — in the -future, nullish `source` will not be supported. 
-
-The Node.js own `load` implementation, which is the value of `next` for the
-last loader in the `load` chain, returns `null` for `source` when `format` is
-`'commonjs'` for backward compatibility. Here is an example loader that would
-opt-in to using the non-default behavior:
-
-```js
-import { readFile } from 'node:fs/promises';
-
-export async function load(url, context, nextLoad) {
-  const result = await nextLoad(url, context);
-  if (result.format === 'commonjs') {
-    result.source ??= await readFile(new URL(result.responseURL ?? url));
-  }
-  return result;
-}
-```
-
-> **Caveat**: The ESM `load` hook and namespaced exports from CommonJS modules
-> are incompatible. Attempting to use them together will result in an empty
-> object from the import. This may be addressed in the future.
-
-> These types all correspond to classes defined in ECMAScript.
-
-* The specific [`ArrayBuffer`][] object is a [`SharedArrayBuffer`][].
-* The specific [`TypedArray`][] object is a [`Uint8Array`][].
-
-If the source value of a text-based format (i.e., `'json'`, `'module'`)
-is not a string, it is converted to a string using [`util.TextDecoder`][].
-
-The `load` hook provides a way to define a custom method for retrieving the
-source code of an ES module specifier. This would allow a loader to potentially
-avoid reading files from disk. It could also be used to map an unrecognized
-format to a supported one, for example `yaml` to `module`.
-
-```js
-export async function load(url, context, nextLoad) {
-  const { format } = context;
-
-  if (Math.random() > 0.5) { // Some condition
-    /*
-      For some or all URLs, do some custom logic for retrieving the source.
-      Always return an object of the form {
-        format: <format>,
-        source: <source>,
-      }.
-    */
-    return {
-      format,
-      shortCircuit: true,
-      source: '...',
-    };
-  }
-
-  // Defer to the next hook in the chain.
-  return nextLoad(url);
-}
-```
-
-In a more advanced scenario, this can also be used to transform an unsupported
-source to a supported one (see [Examples](#examples) below).
-
-#### `globalPreload()`
-
-
-> This hook will be removed in a future version. Use [`initialize`][] instead.
-> When a loader has an `initialize` export, `globalPreload` will be ignored.
-
-> In a previous version of this API, this hook was named
-> `getGlobalPreloadCode`.
-
-* `context` {Object} Information to assist the preload code
-  * `port` {MessagePort}
-* Returns: {string} Code to run before application startup
-
-Sometimes it might be necessary to run some code inside of the same global
-scope that the application runs in. This hook allows the return of a string
-that is run as a sloppy-mode script on startup.
-
-Similar to how CommonJS wrappers work, the code runs in an implicit function
-scope. The only argument is a `require`-like function that can be used to load
-builtins like "fs": `getBuiltin(request: string)`.
-
-If the code needs more advanced `require` features, it has to construct
-its own `require` using `module.createRequire()`.
-
-```js
-export function globalPreload(context) {
-  return `\
-globalThis.someInjectedProperty = 42;
-console.log('I just set some globals!');
-
-const { createRequire } = getBuiltin('module');
-const { cwd } = getBuiltin('process');
-
-const require = createRequire(cwd() + '/');
-// [...]
-`;
-}
-```
-
-In order to allow communication between the application and the loader, another
-argument is provided to the preload code: `port`. This is available as a
-parameter to the loader hook and inside of the source text returned by the hook.
-Some care must be taken in order to properly call [`port.ref()`][] and -[`port.unref()`][] to prevent a process from being in a state where it won't -close normally. - -```js -/** - * This example has the application context send a message to the loader - * and sends the message back to the application context - */ -export function globalPreload({ port }) { - port.onmessage = (evt) => { - port.postMessage(evt.data); - }; - return `\ - port.postMessage('console.log("I went to the Loader and back");'); - port.onmessage = (evt) => { - eval(evt.data); - }; - `; -} -``` - -### Examples - -The various loader hooks can be used together to accomplish wide-ranging -customizations of the Node.js code loading and evaluation behaviors. - -#### HTTPS loader - -In current Node.js, specifiers starting with `https://` are experimental (see -[HTTPS and HTTP imports][]). - -The loader below registers hooks to enable rudimentary support for such -specifiers. While this may seem like a significant improvement to Node.js core -functionality, there are substantial downsides to actually using this loader: -performance is much slower than loading files from disk, there is no caching, -and there is no security. - -```js -// https-loader.mjs -import { get } from 'node:https'; - -export function load(url, context, nextLoad) { - // For JavaScript to be loaded over the network, we need to fetch and - // return it. - if (url.startsWith('https://')) { - return new Promise((resolve, reject) => { - get(url, (res) => { - let data = ''; - res.setEncoding('utf8'); - res.on('data', (chunk) => data += chunk); - res.on('end', () => resolve({ - // This example assumes all network-provided JavaScript is ES module - // code. - format: 'module', - shortCircuit: true, - source: data, - })); - }).on('error', (err) => reject(err)); - }); - } - - // Let Node.js handle all other URLs. - return nextLoad(url); -} -``` - -```js -// main.mjs -import { VERSION } from 'https://coffeescript.org/browser-compiler-modern/coffeescript.js'; - -console.log(VERSION); -``` - -With the preceding loader, running -`node --experimental-loader ./https-loader.mjs ./main.mjs` -prints the current version of CoffeeScript per the module at the URL in -`main.mjs`. - -#### Transpiler loader - -Sources that are in formats Node.js doesn't understand can be converted into -JavaScript using the [`load` hook][load hook]. - -This is less performant than transpiling source files before running -Node.js; a transpiler loader should only be used for development and testing -purposes. - -```js -// coffeescript-loader.mjs -import { readFile } from 'node:fs/promises'; -import { dirname, extname, resolve as resolvePath } from 'node:path'; -import { cwd } from 'node:process'; -import { fileURLToPath, pathToFileURL } from 'node:url'; -import CoffeeScript from 'coffeescript'; - -const baseURL = pathToFileURL(`${cwd()}/`).href; - -export async function load(url, context, nextLoad) { - if (extensionsRegex.test(url)) { - // Now that we patched resolve to let CoffeeScript URLs through, we need to - // tell Node.js what format such URLs should be interpreted as. Because - // CoffeeScript transpiles into JavaScript, it should be one of the two - // JavaScript formats: 'commonjs' or 'module'. - - // CoffeeScript files can be either CommonJS or ES modules, so we want any - // CoffeeScript file to be treated by Node.js the same as a .js file at the - // same location. 
To determine how Node.js would interpret an arbitrary .js
-  // file, search up the file system for the nearest parent package.json file
-  // and read its "type" field.
-  const format = await getPackageType(url);
-  // When a hook returns a format of 'commonjs', `source` is ignored.
-  // To handle CommonJS files, a handler needs to be registered with
-  // `require.extensions` in order to process the files with the CommonJS
-  // loader. Avoiding the need for a separate CommonJS handler is a future
-  // enhancement planned for ES module loaders.
-  if (format === 'commonjs') {
-    return {
-      format,
-      shortCircuit: true,
-    };
-  }
-
-  const { source: rawSource } = await nextLoad(url, { ...context, format });
-  // This hook converts CoffeeScript source code into JavaScript source code
-  // for all imported CoffeeScript files.
-  const transformedSource = coffeeCompile(rawSource.toString(), url);
-
-  return {
-    format,
-    shortCircuit: true,
-    source: transformedSource,
-  };
-  }
-
-  // Let Node.js handle all other URLs.
-  return nextLoad(url);
-}
-
-async function getPackageType(url) {
-  // `url` is only a file path during the first iteration when passed the
-  // resolved url from the load() hook
-  // an actual file path from load() will contain a file extension as it's
-  // required by the spec
-  // this simple truthy check for whether `url` contains a file extension will
-  // work for most projects but does not cover some edge-cases (such as
-  // extensionless files or a url ending in a trailing space)
-  const isFilePath = !!extname(url);
-  // If it is a file path, get the directory it's in
-  const dir = isFilePath ?
-    dirname(fileURLToPath(url)) :
-    url;
-  // Compose a file path to a package.json in the same directory,
-  // which may or may not exist
-  const packagePath = resolvePath(dir, 'package.json');
-  // Try to read the possibly nonexistent package.json
-  const type = await readFile(packagePath, { encoding: 'utf8' })
-    .then((filestring) => JSON.parse(filestring).type)
-    .catch((err) => {
-      if (err?.code !== 'ENOENT') console.error(err);
-    });
-  // If package.json existed and contained a `type` field with a value, voila
-  if (type) return type;
-  // Otherwise, (if not at the root) continue checking the next directory up
-  // If at the root, stop and return false
-  return dir.length > 1 && getPackageType(resolvePath(dir, '..'));
-}
-```
-
-```coffee
-# main.coffee
-import { scream } from './scream.coffee'
-console.log scream 'hello, world'
-
-import { version } from 'node:process'
-console.log "Brought to you by Node.js version #{version}"
-```
-
-```coffee
-# scream.coffee
-export scream = (str) -> str.toUpperCase()
-```
-
-With the preceding loader, running
-`node --experimental-loader ./coffeescript-loader.mjs main.coffee`
-causes `main.coffee` to be turned into JavaScript after its source code is
-loaded from disk but before Node.js executes it; and so on for any `.coffee`,
-`.litcoffee` or `.coffee.md` files referenced via `import` statements of any
-loaded file.
-
-#### "import map" loader
-
-The previous two loaders defined `load` hooks. This is an example of a loader
-that does its work via the `resolve` hook. This loader reads an
-`import-map.json` file that specifies which specifiers to override to another
-URL (this is a very simplistic implementation of a small subset of the
-"import maps" specification).
- -```js -// import-map-loader.js -import fs from 'node:fs/promises'; - -const { imports } = JSON.parse(await fs.readFile('import-map.json')); - -export async function resolve(specifier, context, nextResolve) { - if (Object.hasOwn(imports, specifier)) { - return nextResolve(imports[specifier], context); - } - - return nextResolve(specifier, context); -} -``` - -Let's assume we have these files: - -```js -// main.js -import 'a-module'; -``` - -```json -// import-map.json -{ - "imports": { - "a-module": "./some-module.js" - } -} -``` - -```js -// some-module.js -console.log('some module!'); -``` - -If you run `node --experimental-loader ./import-map-loader.js main.js` -the output will be `some module!`. - -### Register loaders programmatically - - - -In addition to using the `--experimental-loader` option in the CLI, -loaders can also be registered programmatically. You can find -detailed information about this process in the documentation page -for [`module.register()`][]. +The former Loaders documentation is now at +[Modules: Customization hooks][Module customization hooks]. ## Resolution and loading algorithm @@ -1678,58 +1035,43 @@ _isImports_, _conditions_) ### Customizing ESM specifier resolution algorithm -The [Loaders API][] provides a mechanism for customizing the ESM specifier -resolution algorithm. An example loader that provides CommonJS-style resolution -for ESM specifiers is [commonjs-extension-resolution-loader][]. +[Module customization hooks][] provide a mechanism for customizing the ESM +specifier resolution algorithm. An example that provides CommonJS-style +resolution for ESM specifiers is [commonjs-extension-resolution-loader][]. [6.1.7 Array Index]: https://tc39.es/ecma262/#integer-index [Addons]: addons.md [CommonJS]: modules.md -[Conditional exports]: packages.md#conditional-exports [Core modules]: modules.md#core-modules [Determining module system]: packages.md#determining-module-system [Dynamic `import()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/import [ES Module Integration Proposal for WebAssembly]: https://github.com/webassembly/esm-integration -[HTTPS and HTTP imports]: #https-and-http-imports [Import Assertions]: #import-assertions [Import Assertions proposal]: https://github.com/tc39/proposal-import-assertions [JSON modules]: #json-modules -[Loaders API]: #loaders +[Module customization hooks]: module.md#customization-hooks [Node.js Module Resolution And Loading Algorithm]: #resolution-algorithm-specification [Terminology]: #terminology [URL]: https://url.spec.whatwg.org/ [`"exports"`]: packages.md#exports [`"type"`]: packages.md#type [`--input-type`]: cli.md#--input-typetype -[`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer -[`SharedArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer -[`TypedArray`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray -[`Uint8Array`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array [`data:` URLs]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs [`export`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/export [`import()`]: #import-expressions [`import.meta.resolve`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/import.meta/resolve [`import.meta.url`]: #importmetaurl [`import`]: 
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import -[`initialize`]: #initialize [`module.createRequire()`]: module.md#modulecreaterequirefilename -[`module.register()`]: module.md#moduleregisterspecifier-parenturl-options [`module.syncBuiltinESMExports()`]: module.md#modulesyncbuiltinesmexports [`package.json`]: packages.md#nodejs-packagejson-field-definitions -[`port.postMessage`]: worker_threads.md#portpostmessagevalue-transferlist -[`port.ref()`]: https://nodejs.org/dist/latest-v17.x/docs/api/worker_threads.html#portref -[`port.unref()`]: https://nodejs.org/dist/latest-v17.x/docs/api/worker_threads.html#portunref [`process.dlopen`]: process.md#processdlopenmodule-filename-flags -[`register`]: module.md#moduleregisterspecifier-parenturl-options -[`string`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String -[`util.TextDecoder`]: util.md#class-utiltextdecoder [cjs-module-lexer]: https://github.com/nodejs/cjs-module-lexer/tree/1.2.2 [commonjs-extension-resolution-loader]: https://github.com/nodejs/loaders-test/tree/main/commonjs-extension-resolution-loader -[custom https loader]: #https-loader +[custom https loader]: module.md#https-loader [import.meta.resolve]: #importmetaresolvespecifier -[load hook]: #loadurl-context-nextload [percent-encoded]: url.md#percent-encoding-in-urls [special scheme]: https://url.spec.whatwg.org/#special-scheme [status code]: process.md#exit-codes diff --git a/doc/api/module.md b/doc/api/module.md index e29ffc3cc0ba80..81b89ae0a3752b 100644 --- a/doc/api/module.md +++ b/doc/api/module.md @@ -101,8 +101,8 @@ added: v20.6.0 `initialize` hook. * Returns: {any} returns whatever was returned by the `initialize` hook. -Register a module that exports hooks that customize Node.js module resolution -and loading behavior. +Register a module that exports [hooks][] that customize Node.js module +resolution and loading behavior. ```mjs import { register } from 'node:module'; @@ -126,8 +126,8 @@ will not be evaluated until `my-app.mjs` is when it's dynamically imported. The `--experimental-loader` flag of the CLI can be used together -with the `register` function; the loaders registered with the -function will follow the same evaluation chain of loaders registered +with the `register` function; the hooks registered with the +function will follow the same evaluation chain of hooks registered within the CLI: ```console @@ -250,6 +250,654 @@ import('node:fs').then((esmFS) => { }); ``` +## Customization Hooks + + + +> Stability: 1 - Experimental + +> This API is currently being redesigned and will still change. + + + +To customize the default module resolution, loader hooks can optionally be +provided via a `--experimental-loader ./loader-name.mjs` argument to Node.js. + +When hooks are used they apply to each subsequent loader, the entry point, and +all `import` calls. They won't apply to `require` calls; those still follow +[CommonJS][] rules. + +Loaders follow the pattern of `--require`: + +```bash +node \ + --experimental-loader unpkg \ + --experimental-loader http-to-https \ + --experimental-loader cache-buster +``` + +These are called in the following sequence: `cache-buster` calls +`http-to-https` which calls `unpkg`. + +### Hooks + +Hooks are part of a chain, even if that chain consists of only one custom +(user-provided) hook and the default hook, which is always present. 
Hook +functions nest: each one must always return a plain object, and chaining happens +as a result of each function calling `next()`, which is a reference +to the subsequent loader's hook. + +A hook that returns a value lacking a required property triggers an exception. +A hook that returns without calling `next()` _and_ without returning +`shortCircuit: true` also triggers an exception. These errors are to help +prevent unintentional breaks in the chain. + +Hooks are run in a separate thread, isolated from the main. That means it is a +different [realm](https://tc39.es/ecma262/#realm). The hooks thread may be +terminated by the main thread at any time, so do not depend on asynchronous +operations (like `console.log`) to complete. + +#### `initialize()` + + + +> The loaders API is being redesigned. This hook may disappear or its +> signature may change. Do not rely on the API described below. + +* `data` {any} The data from `register(loader, import.meta.url, { data })`. +* Returns: {any} The data to be returned to the caller of `register`. + +The `initialize` hook provides a way to define a custom function that runs +in the loader's thread when the loader is initialized. Initialization happens +when the loader is registered via [`register`][] or registered via the +`--experimental-loader` command line option. + +This hook can send and receive data from a [`register`][] invocation, including +ports and other transferrable objects. The return value of `initialize` must be +either: + +* `undefined`, +* something that can be posted as a message between threads (e.g. the input to + [`port.postMessage`][]), +* a `Promise` resolving to one of the aforementioned values. + +Loader code: + +```mjs +// In the below example this file is referenced as +// '/path-to-my-loader.js' + +export async function initialize({ number, port }) { + port.postMessage(`increment: ${number + 1}`); + return 'ok'; +} +``` + +Caller code: + +```mjs +import assert from 'node:assert'; +import { register } from 'node:module'; +import { MessageChannel } from 'node:worker_threads'; + +// This example showcases how a message channel can be used to +// communicate between the main (application) thread and the loader +// running on the loaders thread, by sending `port2` to the loader. +const { port1, port2 } = new MessageChannel(); + +port1.on('message', (msg) => { + assert.strictEqual(msg, 'increment: 2'); +}); + +const result = register('/path-to-my-loader.js', { + parentURL: import.meta.url, + data: { number: 1, port: port2 }, + transferList: [port2], +}); + +assert.strictEqual(result, 'ok'); +``` + +#### `resolve(specifier, context, nextResolve)` + + + +> The loaders API is being redesigned. This hook may disappear or its +> signature may change. Do not rely on the API described below. 
+
+* `specifier` {string}
+* `context` {Object}
+  * `conditions` {string\[]} Export conditions of the relevant `package.json`
+  * `importAssertions` {Object} An object whose key-value pairs represent the
+    assertions for the module to import
+  * `parentURL` {string|undefined} The module importing this one, or undefined
+    if this is the Node.js entry point
+* `nextResolve` {Function} The subsequent `resolve` hook in the chain, or the
+  Node.js default `resolve` hook after the last user-supplied `resolve` hook
+  * `specifier` {string}
+  * `context` {Object}
+* Returns: {Object|Promise}
+  * `format` {string|null|undefined} A hint to the load hook (it might be
+    ignored)
+    `'builtin' | 'commonjs' | 'json' | 'module' | 'wasm'`
+  * `importAssertions` {Object|undefined} The import assertions to use when
+    caching the module (optional; if excluded the input will be used)
+  * `shortCircuit` {undefined|boolean} A signal that this hook intends to
+    terminate the chain of `resolve` hooks. **Default:** `false`
+  * `url` {string} The absolute URL to which this input resolves
+
+> **Caveat** Despite support for returning promises and async functions, calls
+> to `resolve` may block the main thread which can impact performance.
+
+The `resolve` hook chain is responsible for telling Node.js where to find and
+how to cache a given `import` statement or expression. It can optionally return
+its format (such as `'module'`) as a hint to the `load` hook. If a format is
+specified, the `load` hook is ultimately responsible for providing the final
+`format` value (and it is free to ignore the hint provided by `resolve`); if
+`resolve` provides a `format`, a custom `load` hook is required even if only to
+pass the value to the Node.js default `load` hook.
+
+Import type assertions are part of the cache key for saving loaded modules into
+the internal module cache. The `resolve` hook is responsible for
+returning an `importAssertions` object if the module should be cached with
+different assertions than were present in the source code.
+
+The `conditions` property in `context` is an array of conditions for
+[package exports conditions][Conditional exports] that apply to this resolution
+request. They can be used for looking up conditional mappings elsewhere or to
+modify the list when calling the default resolution logic.
+
+The current [package exports conditions][Conditional exports] are always in
+the `context.conditions` array passed into the hook. To guarantee _default
+Node.js module specifier resolution behavior_ when calling `defaultResolve`, the
+`context.conditions` array passed to it _must_ include _all_ elements of the
+`context.conditions` array originally passed into the `resolve` hook.
+
+```mjs
+export function resolve(specifier, context, nextResolve) {
+  const { parentURL = null } = context;
+
+  if (Math.random() > 0.5) { // Some condition.
+    // For some or all specifiers, do some custom logic for resolving.
+    // Always return an object of the form {url: <url>}.
+    return {
+      shortCircuit: true,
+      url: parentURL ?
+        new URL(specifier, parentURL).href :
+        new URL(specifier).href,
+    };
+  }
+
+  if (Math.random() < 0.5) { // Another condition.
+    // When calling `defaultResolve`, the arguments can be modified. In this
+    // case it's adding another value for matching conditional exports.
+ return nextResolve(specifier, { + ...context, + conditions: [...context.conditions, 'another-condition'], + }); + } + + // Defer to the next hook in the chain, which would be the + // Node.js default resolve if this is the last user-specified loader. + return nextResolve(specifier); +} +``` + +#### `load(url, context, nextLoad)` + + + +> The loaders API is being redesigned. This hook may disappear or its +> signature may change. Do not rely on the API described below. + +> In a previous version of this API, this was split across 3 separate, now +> deprecated, hooks (`getFormat`, `getSource`, and `transformSource`). + +* `url` {string} The URL returned by the `resolve` chain +* `context` {Object} + * `conditions` {string\[]} Export conditions of the relevant `package.json` + * `format` {string|null|undefined} The format optionally supplied by the + `resolve` hook chain + * `importAssertions` {Object} +* `nextLoad` {Function} The subsequent `load` hook in the chain, or the + Node.js default `load` hook after the last user-supplied `load` hook + * `specifier` {string} + * `context` {Object} +* Returns: {Object} + * `format` {string} + * `shortCircuit` {undefined|boolean} A signal that this hook intends to + terminate the chain of `resolve` hooks. **Default:** `false` + * `source` {string|ArrayBuffer|TypedArray} The source for Node.js to evaluate + +The `load` hook provides a way to define a custom method of determining how +a URL should be interpreted, retrieved, and parsed. It is also in charge of +validating the import assertion. + +The final value of `format` must be one of the following: + +| `format` | Description | Acceptable types for `source` returned by `load` | +| ------------ | ------------------------------ | -------------------------------------------------------------------------- | +| `'builtin'` | Load a Node.js builtin module | Not applicable | +| `'commonjs'` | Load a Node.js CommonJS module | { [`string`][], [`ArrayBuffer`][], [`TypedArray`][], `null`, `undefined` } | +| `'json'` | Load a JSON file | { [`string`][], [`ArrayBuffer`][], [`TypedArray`][] } | +| `'module'` | Load an ES module | { [`string`][], [`ArrayBuffer`][], [`TypedArray`][] } | +| `'wasm'` | Load a WebAssembly module | { [`ArrayBuffer`][], [`TypedArray`][] } | + +The value of `source` is ignored for type `'builtin'` because currently it is +not possible to replace the value of a Node.js builtin (core) module. + +The value of `source` can be omitted for type `'commonjs'`. When a `source` is +provided, all `require` calls from this module will be processed by the ESM +loader with registered `resolve` and `load` hooks; all `require.resolve` calls +from this module will be processed by the ESM loader with registered `resolve` +hooks; `require.extensions` and monkey-patching on the CommonJS module loader +will not apply. If `source` is undefined or `null`, it will be handled by the +CommonJS module loader and `require`/`require.resolve` calls will not go through +the registered hooks. This behavior for nullish `source` is temporary — in the +future, nullish `source` will not be supported. + +The Node.js own `load` implementation, which is the value of `next` for the last +loader in the `load` chain, returns `null` for `source` when `format` is +`'commonjs'` for backward compatibility. 
Here is an example loader that would
+opt-in to using the non-default behavior:
+
+```mjs
+import { readFile } from 'node:fs/promises';
+
+export async function load(url, context, nextLoad) {
+  const result = await nextLoad(url, context);
+  if (result.format === 'commonjs') {
+    result.source ??= await readFile(new URL(result.responseURL ?? url));
+  }
+  return result;
+}
+```
+
+> **Caveat**: The ESM `load` hook and namespaced exports from CommonJS modules
+> are incompatible. Attempting to use them together will result in an empty
+> object from the import. This may be addressed in the future.
+
+> These types all correspond to classes defined in ECMAScript.
+
+* The specific [`ArrayBuffer`][] object is a [`SharedArrayBuffer`][].
+* The specific [`TypedArray`][] object is a [`Uint8Array`][].
+
+If the source value of a text-based format (i.e., `'json'`, `'module'`)
+is not a string, it is converted to a string using [`util.TextDecoder`][].
+
+The `load` hook provides a way to define a custom method for retrieving the
+source code of an ES module specifier. This would allow a loader to potentially
+avoid reading files from disk. It could also be used to map an unrecognized
+format to a supported one, for example `yaml` to `module`.
+
+```mjs
+export async function load(url, context, nextLoad) {
+  const { format } = context;
+
+  if (Math.random() > 0.5) { // Some condition
+    /*
+      For some or all URLs, do some custom logic for retrieving the source.
+      Always return an object of the form {
+        format: <format>,
+        source: <source>,
+      }.
+    */
+    return {
+      format,
+      shortCircuit: true,
+      source: '...',
+    };
+  }
+
+  // Defer to the next hook in the chain.
+  return nextLoad(url);
+}
+```
+
+In a more advanced scenario, this can also be used to transform an unsupported
+source to a supported one (see [Examples](#examples) below).
+
+#### `globalPreload()`
+
+
+> This hook will be removed in a future version. Use [`initialize`][] instead.
+> When a loader has an `initialize` export, `globalPreload` will be ignored.
+
+> In a previous version of this API, this hook was named
+> `getGlobalPreloadCode`.
+
+* `context` {Object} Information to assist the preload code
+  * `port` {MessagePort}
+* Returns: {string} Code to run before application startup
+
+Sometimes it might be necessary to run some code inside of the same global
+scope that the application runs in. This hook allows the return of a string
+that is run as a sloppy-mode script on startup.
+
+Similar to how CommonJS wrappers work, the code runs in an implicit function
+scope. The only argument is a `require`-like function that can be used to load
+builtins like "fs": `getBuiltin(request: string)`.
+
+If the code needs more advanced `require` features, it has to construct
+its own `require` using `module.createRequire()`.
+
+```mjs
+export function globalPreload(context) {
+  return `\
+globalThis.someInjectedProperty = 42;
+console.log('I just set some globals!');
+
+const { createRequire } = getBuiltin('module');
+const { cwd } = getBuiltin('process');
+
+const require = createRequire(cwd() + '/');
+// [...]
+`;
+}
+```
+
+In order to allow communication between the application and the loader, another
+argument is provided to the preload code: `port`. This is available as a
+parameter to the loader hook and inside of the source text returned by the hook.
+Some care must be taken in order to properly call [`port.ref()`][] and
+[`port.unref()`][] to prevent a process from being in a state where it won't
+close normally.
+ +```mjs +/** + * This example has the application context send a message to the loader + * and sends the message back to the application context + */ +export function globalPreload({ port }) { + port.onmessage = (evt) => { + port.postMessage(evt.data); + }; + return `\ + port.postMessage('console.log("I went to the Loader and back");'); + port.onmessage = (evt) => { + eval(evt.data); + }; + `; +} +``` + +### Examples + +The various loader hooks can be used together to accomplish wide-ranging +customizations of the Node.js code loading and evaluation behaviors. + +#### HTTPS loader + +In current Node.js, specifiers starting with `https://` are experimental (see +[HTTPS and HTTP imports][]). + +The loader below registers hooks to enable rudimentary support for such +specifiers. While this may seem like a significant improvement to Node.js core +functionality, there are substantial downsides to actually using this loader: +performance is much slower than loading files from disk, there is no caching, +and there is no security. + +```mjs +// https-loader.mjs +import { get } from 'node:https'; + +export function load(url, context, nextLoad) { + // For JavaScript to be loaded over the network, we need to fetch and + // return it. + if (url.startsWith('https://')) { + return new Promise((resolve, reject) => { + get(url, (res) => { + let data = ''; + res.setEncoding('utf8'); + res.on('data', (chunk) => data += chunk); + res.on('end', () => resolve({ + // This example assumes all network-provided JavaScript is ES module + // code. + format: 'module', + shortCircuit: true, + source: data, + })); + }).on('error', (err) => reject(err)); + }); + } + + // Let Node.js handle all other URLs. + return nextLoad(url); +} +``` + +```mjs +// main.mjs +import { VERSION } from 'https://coffeescript.org/browser-compiler-modern/coffeescript.js'; + +console.log(VERSION); +``` + +With the preceding loader, running +`node --experimental-loader ./https-loader.mjs ./main.mjs` +prints the current version of CoffeeScript per the module at the URL in +`main.mjs`. + +#### Transpiler loader + +Sources that are in formats Node.js doesn't understand can be converted into +JavaScript using the [`load` hook][load hook]. + +This is less performant than transpiling source files before running +Node.js; a transpiler loader should only be used for development and testing +purposes. + +```mjs +// coffeescript-loader.mjs +import { readFile } from 'node:fs/promises'; +import { dirname, extname, resolve as resolvePath } from 'node:path'; +import { cwd } from 'node:process'; +import { fileURLToPath, pathToFileURL } from 'node:url'; +import CoffeeScript from 'coffeescript'; + +const baseURL = pathToFileURL(`${cwd()}/`).href; + +export async function load(url, context, nextLoad) { + if (extensionsRegex.test(url)) { + // Now that we patched resolve to let CoffeeScript URLs through, we need to + // tell Node.js what format such URLs should be interpreted as. Because + // CoffeeScript transpiles into JavaScript, it should be one of the two + // JavaScript formats: 'commonjs' or 'module'. + + // CoffeeScript files can be either CommonJS or ES modules, so we want any + // CoffeeScript file to be treated by Node.js the same as a .js file at the + // same location. To determine how Node.js would interpret an arbitrary .js + // file, search up the file system for the nearest parent package.json file + // and read its "type" field. + const format = await getPackageType(url); + // When a hook returns a format of 'commonjs', `source` is ignored. 
+    // To handle CommonJS files, a handler needs to be registered with
+    // `require.extensions` in order to process the files with the CommonJS
+    // loader. Avoiding the need for a separate CommonJS handler is a future
+    // enhancement planned for ES module loaders.
+    if (format === 'commonjs') {
+      return {
+        format,
+        shortCircuit: true,
+      };
+    }
+
+    const { source: rawSource } = await nextLoad(url, { ...context, format });
+    // This hook converts CoffeeScript source code into JavaScript source code
+    // for all imported CoffeeScript files.
+    const transformedSource = coffeeCompile(rawSource.toString(), url);
+
+    return {
+      format,
+      shortCircuit: true,
+      source: transformedSource,
+    };
+  }
+
+  // Let Node.js handle all other URLs.
+  return nextLoad(url);
+}
+
+async function getPackageType(url) {
+  // `url` is only a file path during the first iteration when passed the
+  // resolved url from the load() hook
+  // an actual file path from load() will contain a file extension as it's
+  // required by the spec
+  // this simple truthy check for whether `url` contains a file extension will
+  // work for most projects but does not cover some edge-cases (such as
+  // extensionless files or a url ending in a trailing space)
+  const isFilePath = !!extname(url);
+  // If it is a file path, get the directory it's in
+  const dir = isFilePath ?
+    dirname(fileURLToPath(url)) :
+    url;
+  // Compose a file path to a package.json in the same directory,
+  // which may or may not exist
+  const packagePath = resolvePath(dir, 'package.json');
+  // Try to read the possibly nonexistent package.json
+  const type = await readFile(packagePath, { encoding: 'utf8' })
+    .then((filestring) => JSON.parse(filestring).type)
+    .catch((err) => {
+      if (err?.code !== 'ENOENT') console.error(err);
+    });
+  // If package.json existed and contained a `type` field with a value, voila
+  if (type) return type;
+  // Otherwise, (if not at the root) continue checking the next directory up
+  // If at the root, stop and return false
+  return dir.length > 1 && getPackageType(resolvePath(dir, '..'));
+}
+```
+
+```coffee
+# main.coffee
+import { scream } from './scream.coffee'
+console.log scream 'hello, world'
+
+import { version } from 'node:process'
+console.log "Brought to you by Node.js version #{version}"
+```
+
+```coffee
+# scream.coffee
+export scream = (str) -> str.toUpperCase()
+```
+
+With the preceding loader, running
+`node --experimental-loader ./coffeescript-loader.mjs main.coffee`
+causes `main.coffee` to be turned into JavaScript after its source code is
+loaded from disk but before Node.js executes it; and so on for any `.coffee`,
+`.litcoffee` or `.coffee.md` files referenced via `import` statements of any
+loaded file.
+
+#### "import map" loader
+
+The previous two loaders defined `load` hooks. This is an example of a loader
+that does its work via the `resolve` hook. This loader reads an
+`import-map.json` file that specifies which specifiers to override to another
+URL (this is a very simplistic implementation of a small subset of the
+"import maps" specification).
+ +```mjs +// import-map-loader.js +import fs from 'node:fs/promises'; + +const { imports } = JSON.parse(await fs.readFile('import-map.json')); + +export async function resolve(specifier, context, nextResolve) { + if (Object.hasOwn(imports, specifier)) { + return nextResolve(imports[specifier], context); + } + + return nextResolve(specifier, context); +} +``` + +Let's assume we have these files: + +```mjs +// main.js +import 'a-module'; +``` + +```json +// import-map.json +{ + "imports": { + "a-module": "./some-module.js" + } +} +``` + +```mjs +// some-module.js +console.log('some module!'); +``` + +If you run `node --experimental-loader ./import-map-loader.js main.js` +the output will be `some module!`. + +### Register loaders programmatically + + + +In addition to using the `--experimental-loader` option in the CLI, +loaders can also be registered programmatically. You can find +detailed information about this process in the documentation page +for [`module.register()`][]. + ## Source map v3 support > Stability: 1 - Experimental @@ -2025,6 +2029,8 @@ added: * `options` {Object} * `concurrency` {number} the maximum concurrent invocation of `fn` to call on the stream at once. **Default:** `1`. + * `highWaterMark` {number} how many items to buffer while waiting for user + consumption of the mapped items. **Default:** `concurrency * 2 - 1`. * `signal` {AbortSignal} allows destroying the stream if the signal is aborted. * Returns: {Readable} a stream mapped with the function `fn`. @@ -2059,6 +2065,10 @@ for await (const result of dnsResults) { added: - v17.4.0 - v16.14.0 +changes: + - version: REPLACEME + pr-url: https://github.com/nodejs/node/pull/49249 + description: added `highWaterMark` in options. --> > Stability: 1 - Experimental @@ -2071,6 +2081,8 @@ added: * `options` {Object} * `concurrency` {number} the maximum concurrent invocation of `fn` to call on the stream at once. **Default:** `1`. + * `highWaterMark` {number} how many items to buffer while waiting for user + consumption of the filtered items. **Default:** `concurrency * 2 - 1`. * `signal` {AbortSignal} allows destroying the stream if the signal is aborted. * Returns: {Readable} a stream filtered with the predicate `fn`. 
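The documentation hunks above describe the new `highWaterMark` option for `stream.Readable.prototype.map()` and `filter()`. The following is a minimal sketch of the option in use, assuming this patch is applied; the item values and the 10 ms delay are arbitrary:

```mjs
import { Readable } from 'node:stream';
import { setTimeout as sleep } from 'node:timers/promises';

// Double each item with up to 4 calls in flight, but keep at most 2
// finished results buffered ahead of the consumer (the default would be
// `concurrency * 2 - 1`, i.e. 7 here).
const doubled = Readable.from([1, 2, 3, 4, 5, 6, 7, 8]).map(async (n) => {
  await sleep(10); // stand-in for slow asynchronous work
  return n * 2;
}, { concurrency: 4, highWaterMark: 2 });

for await (const value of doubled) {
  console.log(value); // 2, 4, 6, ... in input order
}
```

A lower `highWaterMark` trades throughput for memory: fewer mapped items sit in the queue when the consumer is slower than the mapper.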
diff --git a/lib/internal/streams/operators.js b/lib/internal/streams/operators.js index 47208136e0916d..b8dde2a5b9ee8c 100644 --- a/lib/internal/streams/operators.js +++ b/lib/internal/streams/operators.js @@ -33,6 +33,7 @@ const { NumberIsNaN, Promise, PromiseReject, + PromiseResolve, PromisePrototypeThen, Symbol, } = primordials; @@ -82,7 +83,15 @@ function map(fn, options) { concurrency = MathFloor(options.concurrency); } - validateInteger(concurrency, 'concurrency', 1); + let highWaterMark = concurrency - 1; + if (options?.highWaterMark != null) { + highWaterMark = MathFloor(options.highWaterMark); + } + + validateInteger(concurrency, 'options.concurrency', 1); + validateInteger(highWaterMark, 'options.highWaterMark', 0); + + highWaterMark += concurrency; return async function* map() { const signal = AbortSignal.any([options?.signal].filter(Boolean)); @@ -93,9 +102,28 @@ function map(fn, options) { let next; let resume; let done = false; + let cnt = 0; - function onDone() { + function onCatch() { done = true; + afterItemProcessed(); + } + + function afterItemProcessed() { + cnt -= 1; + maybeResume(); + } + + function maybeResume() { + if ( + resume && + !done && + cnt < concurrency && + queue.length < highWaterMark + ) { + resume(); + resume = null; + } } async function pump() { @@ -111,17 +139,19 @@ function map(fn, options) { try { val = fn(val, signalOpt); + + if (val === kEmpty) { + continue; + } + + val = PromiseResolve(val); } catch (err) { val = PromiseReject(err); } - if (val === kEmpty) { - continue; - } + cnt += 1; - if (typeof val?.catch === 'function') { - val.catch(onDone); - } + PromisePrototypeThen(val, afterItemProcessed, onCatch); queue.push(val); if (next) { @@ -129,7 +159,7 @@ function map(fn, options) { next = null; } - if (!done && queue.length && queue.length >= concurrency) { + if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) { await new Promise((resolve) => { resume = resolve; }); @@ -138,7 +168,7 @@ function map(fn, options) { queue.push(kEof); } catch (err) { const val = PromiseReject(err); - PromisePrototypeThen(val, undefined, onDone); + PromisePrototypeThen(val, afterItemProcessed, onCatch); queue.push(val); } finally { done = true; @@ -169,10 +199,7 @@ function map(fn, options) { } queue.shift(); - if (resume) { - resume(); - resume = null; - } + maybeResume(); } await new Promise((resolve) => { diff --git a/test/parallel/test-stream-forEach.js b/test/parallel/test-stream-forEach.js index 7a21e299534742..627ea0ccf1be60 100644 --- a/test/parallel/test-stream-forEach.js +++ b/test/parallel/test-stream-forEach.js @@ -96,7 +96,7 @@ const { once } = require('events'); Readable.from([1, 2, 3, 4]).forEach(async (_, { signal }) => { calls++; await once(signal, 'abort'); - }, { signal: ac.signal, concurrency: 2 }); + }, { signal: ac.signal, concurrency: 2, highWaterMark: 0 }); // pump assert.rejects(async () => { await forEachPromise; diff --git a/test/parallel/test-stream-map.js b/test/parallel/test-stream-map.js index ba0571fe3a7b95..4a7a53c55960ea 100644 --- a/test/parallel/test-stream-map.js +++ b/test/parallel/test-stream-map.js @@ -8,6 +8,25 @@ const assert = require('assert'); const { once } = require('events'); const { setTimeout } = require('timers/promises'); +function createDependentPromises(n) { + const promiseAndResolveArray = []; + + for (let i = 0; i < n; i++) { + let res; + const promise = new Promise((resolve) => { + if (i === 0) { + res = resolve; + return; + } + res = () => promiseAndResolveArray[i - 1][0].then(resolve); + }); 
+ + promiseAndResolveArray.push([promise, res]); + } + + return promiseAndResolveArray; +} + { // Map works on synchronous streams with a synchronous mapper const stream = Readable.from([1, 2, 3, 4, 5]).map((x) => x + x); @@ -143,7 +162,7 @@ const { setTimeout } = require('timers/promises'); const stream = range.map(common.mustCall(async (_, { signal }) => { await once(signal, 'abort'); throw signal.reason; - }, 2), { signal: ac.signal, concurrency: 2 }); + }, 2), { signal: ac.signal, concurrency: 2, highWaterMark: 0 }); // pump assert.rejects(async () => { for await (const item of stream) { @@ -173,12 +192,164 @@ const { setTimeout } = require('timers/promises'); })().then(common.mustCall()); } + +{ + // highWaterMark with small concurrency + const finishOrder = []; + + const promises = createDependentPromises(4); + + const raw = Readable.from([2, 0, 1, 3]); + const stream = raw.map(async (item) => { + const [promise, resolve] = promises[item]; + resolve(); + + await promise; + finishOrder.push(item); + return item; + }, { concurrency: 2 }); + + (async () => { + await stream.toArray(); + + assert.deepStrictEqual(finishOrder, [0, 1, 2, 3]); + })().then(common.mustCall(), common.mustNotCall()); +} + +{ + // highWaterMark with a lot of items and large concurrency + const finishOrder = []; + + const promises = createDependentPromises(20); + + const input = [10, 1, 0, 3, 4, 2, 5, 7, 8, 9, 6, 11, 12, 13, 18, 15, 16, 17, 14, 19]; + const raw = Readable.from(input); + // Should be + // 10, 1, 0, 3, 4, 2 | next: 0 + // 10, 1, 3, 4, 2, 5 | next: 1 + // 10, 3, 4, 2, 5, 7 | next: 2 + // 10, 3, 4, 5, 7, 8 | next: 3 + // 10, 4, 5, 7, 8, 9 | next: 4 + // 10, 5, 7, 8, 9, 6 | next: 5 + // 10, 7, 8, 9, 6, 11 | next: 6 + // 10, 7, 8, 9, 11, 12 | next: 7 + // 10, 8, 9, 11, 12, 13 | next: 8 + // 10, 9, 11, 12, 13, 18 | next: 9 + // 10, 11, 12, 13, 18, 15 | next: 10 + // 11, 12, 13, 18, 15, 16 | next: 11 + // 12, 13, 18, 15, 16, 17 | next: 12 + // 13, 18, 15, 16, 17, 14 | next: 13 + // 18, 15, 16, 17, 14, 19 | next: 14 + // 18, 15, 16, 17, 19 | next: 15 + // 18, 16, 17, 19 | next: 16 + // 18, 17, 19 | next: 17 + // 18, 19 | next: 18 + // 19 | next: 19 + // + + const stream = raw.map(async (item) => { + const [promise, resolve] = promises[item]; + resolve(); + + await promise; + finishOrder.push(item); + return item; + }, { concurrency: 6 }); + + (async () => { + const outputOrder = await stream.toArray(); + + assert.deepStrictEqual(outputOrder, input); + assert.deepStrictEqual(finishOrder, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]); + })().then(common.mustCall(), common.mustNotCall()); +} + +{ + // Custom highWaterMark with a lot of items and large concurrency + const finishOrder = []; + + const promises = createDependentPromises(20); + + const input = [11, 1, 0, 3, 4, 2, 5, 7, 8, 9, 6, 10, 12, 13, 18, 15, 16, 17, 14, 19]; + const raw = Readable.from(input); + // Should be + // 11, 1, 0, 3, 4 | next: 0, buffer: [] + // 11, 1, 3, 4, 2 | next: 1, buffer: [0] + // 11, 3, 4, 2, 5 | next: 2, buffer: [0, 1] + // 11, 3, 4, 5, 7 | next: 3, buffer: [0, 1, 2] + // 11, 4, 5, 7, 8 | next: 4, buffer: [0, 1, 2, 3] + // 11, 5, 7, 8, 9 | next: 5, buffer: [0, 1, 2, 3, 4] + // 11, 7, 8, 9, 6 | next: 6, buffer: [0, 1, 2, 3, 4, 5] + // 11, 7, 8, 9, 10 | next: 7, buffer: [0, 1, 2, 3, 4, 5, 6] -- buffer full + // 11, 8, 9, 10, 12 | next: 8, buffer: [0, 1, 2, 3, 4, 5, 6] + // 11, 9, 10, 12, 13 | next: 9, buffer: [0, 1, 2, 3, 4, 5, 6] + // 11, 10, 12, 13, 18 | next: 10, buffer: [0, 1, 2, 3, 4, 5, 6] + // 
11, 12, 13, 18, 15    | next: 11, buffer: [0, 1, 2, 3, 4, 5, 6]
+  //  12, 13, 18, 15, 16    | next: 12, buffer: [] -- all items flushed as 11 is consumed and all the items wait for it
+  //  13, 18, 15, 16, 17    | next: 13, buffer: []
+  //  18, 15, 16, 17, 14    | next: 14, buffer: []
+  //  18, 15, 16, 17, 19    | next: 15, buffer: [14]
+  //  18, 16, 17, 19        | next: 16, buffer: [14, 15]
+  //  18, 17, 19            | next: 17, buffer: [14, 15, 16]
+  //  18, 19                | next: 18, buffer: [14, 15, 16, 17]
+  //  19                    | next: 19, buffer: [] -- all items flushed
+  //
+
+  const stream = raw.map(async (item) => {
+    const [promise, resolve] = promises[item];
+    resolve();
+
+    await promise;
+    finishOrder.push(item);
+    return item;
+  }, { concurrency: 5, highWaterMark: 7 });
+
+  (async () => {
+    const outputOrder = await stream.toArray();
+
+    assert.deepStrictEqual(outputOrder, input);
+    assert.deepStrictEqual(finishOrder, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]);
+  })().then(common.mustCall(), common.mustNotCall());
+}
+
+{
+  // Where there is a delay between the first and the next item, it should not
+  // wait for a filled queue before yielding to the user
+  const promises = createDependentPromises(3);
+
+  const raw = Readable.from([0, 1, 2]);
+
+  const stream = raw
+    .map(async (item) => {
+      if (item !== 0) {
+        await promises[item][0];
+      }
+
+      return item;
+    }, { concurrency: 2 })
+    .map((item) => {
+      // eslint-disable-next-line no-unused-vars
+      for (const [_, resolve] of promises) {
+        resolve();
+      }
+
+      return item;
+    });
+
+  (async () => {
+    await stream.toArray();
+  })().then(common.mustCall(), common.mustNotCall());
+}
+
 {
   // Error cases
   assert.throws(() => Readable.from([1]).map(1), /ERR_INVALID_ARG_TYPE/);
   assert.throws(() => Readable.from([1]).map((x) => x, {
     concurrency: 'Foo'
   }), /ERR_OUT_OF_RANGE/);
+  assert.throws(() => Readable.from([1]).map((x) => x, {
+    concurrency: -1
+  }), /ERR_OUT_OF_RANGE/);
   assert.throws(() => Readable.from([1]).map((x) => x, 1), /ERR_INVALID_ARG_TYPE/);
   assert.throws(() => Readable.from([1]).map((x) => x, {
     signal: true
   }), /ERR_INVALID_ARG_TYPE/);
 }

From cade5716df14a24fbd018ed5fedacee643c2237b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juan=20Jos=C3=A9?=
Date: Thu, 24 Aug 2023 09:02:04 -0500
Subject: [PATCH 065/125] build: add symlink to `compile_commands.json` file if
 needed
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Usually, LSP servers need the `compile_commands.json` file in the root
directory.
Signed-off-by: Juan José Arboleda PR-URL: https://github.com/nodejs/node/pull/49260 Reviewed-By: Rafael Gonzaga Reviewed-By: Yagiz Nizipli --- configure.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/configure.py b/configure.py index 9a478c5f983a51..2cb81f200c7194 100755 --- a/configure.py +++ b/configure.py @@ -2116,6 +2116,8 @@ def make_bin_override(): if options.compile_commands_json: gyp_args += ['-f', 'compile_commands_json'] + os.path.islink('./compile_commands.json') and os.unlink('./compile_commands.json') + os.symlink('./out/' + config['BUILDTYPE'] + '/compile_commands.json', './compile_commands.json') # override the variable `python` defined in common.gypi if bin_override is not None: From fefbdb92f2286bc566cdcf644aea26ce32db3673 Mon Sep 17 00:00:00 2001 From: Jungku Lee Date: Fri, 25 Aug 2023 01:00:28 +0900 Subject: [PATCH 066/125] doc: modify param description for end(),write() in `StringDecoder` PR-URL: https://github.com/nodejs/node/pull/49285 Reviewed-By: Luigi Pinca Reviewed-By: Deokjin Kim --- doc/api/string_decoder.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/doc/api/string_decoder.md b/doc/api/string_decoder.md index 70387d2edba696..4913b26cec042f 100644 --- a/doc/api/string_decoder.md +++ b/doc/api/string_decoder.md @@ -63,8 +63,7 @@ Creates a new `StringDecoder` instance. added: v0.9.3 --> -* `buffer` {Buffer|TypedArray|DataView} A `Buffer`, or `TypedArray`, or - `DataView` containing the bytes to decode. +* `buffer` {string|Buffer|TypedArray|DataView} The bytes to decode. * Returns: {string} Returns any remaining input stored in the internal buffer as a string. Bytes @@ -86,8 +85,7 @@ changes: character instead of one for each individual byte. --> -* `buffer` {Buffer|TypedArray|DataView} A `Buffer`, or `TypedArray`, or - `DataView` containing the bytes to decode. +* `buffer` {string|Buffer|TypedArray|DataView} The bytes to decode. * Returns: {string} Returns a decoded string, ensuring that any incomplete multibyte characters at From a11e64e09c3b1cd75849c0a1843f51939c849883 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Thu, 24 Aug 2023 18:25:09 +0200 Subject: [PATCH 067/125] test: fix compiler warning in NodeCryptoEnv This fixes a warning in line 26: "warning: value computed is not used" when calling BIO_seek(). 
Refs: https://github.com/nodejs/node/pull/47160 PR-URL: https://github.com/nodejs/node/pull/49206 Reviewed-By: Michael Dawson --- test/cctest/test_node_crypto_env.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/cctest/test_node_crypto_env.cc b/test/cctest/test_node_crypto_env.cc index b42cdc107e8a94..001867720f5e80 100644 --- a/test/cctest/test_node_crypto_env.cc +++ b/test/cctest/test_node_crypto_env.cc @@ -23,8 +23,9 @@ TEST_F(NodeCryptoEnv, LoadBIO) { Local key = String::NewFromUtf8(isolate_, "abcdef").ToLocalChecked(); node::crypto::BIOPointer bio(node::crypto::LoadBIO(*env, key)); #if OPENSSL_VERSION_NUMBER >= 0x30000000L - BIO_seek(bio.get(), 2); - ASSERT_EQ(BIO_tell(bio.get()), 2); + const int ofs = 2; + ASSERT_EQ(BIO_seek(bio.get(), ofs), ofs); + ASSERT_EQ(BIO_tell(bio.get()), ofs); #endif ASSERT_EQ(ERR_peek_error(), 0UL) << "There should not have left " "any errors on the OpenSSL error stack\n"; From ae656101c07c8faabc44b0a1224393125510e71b Mon Sep 17 00:00:00 2001 From: Geoffrey Booth Date: Thu, 24 Aug 2023 12:27:23 -0700 Subject: [PATCH 068/125] doc: update module hooks docs PR-URL: https://github.com/nodejs/node/pull/49265 Reviewed-By: Jacob Smith Reviewed-By: Antoine du Hamel --- doc/api/cli.md | 19 +- doc/api/esm.md | 2 +- doc/api/module.md | 556 +++++++++++++++++++++++++--------------------- 3 files changed, 321 insertions(+), 256 deletions(-) diff --git a/doc/api/cli.md b/doc/api/cli.md index 48d761ef5d8da6..5a4ded18e831f0 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -28,8 +28,8 @@ absolute path, it's resolved as a relative path from the current working directory. That path is then resolved by [CommonJS][] module loader. If no corresponding file is found, an error is thrown. -If a file is found, its path will be passed to the [ECMAScript module loader][] -under any of the following conditions: +If a file is found, its path will be passed to the +[ES module loader][Modules loaders] under any of the following conditions: * The program was started with a command-line flag that forces the entry point to be loaded with ECMAScript module loader. @@ -43,9 +43,9 @@ Otherwise, the file is loaded using the CommonJS module loader. See ### ECMAScript modules loader entry point caveat -When loading [ECMAScript module loader][] loads the program entry point, the `node` -command will only accept as input only files with `.js`, `.mjs`, or `.cjs` -extensions; and with `.wasm` extensions when +When loading, the [ES module loader][Modules loaders] loads the program +entry point, the `node` command will accept as input only files with `.js`, +`.mjs`, or `.cjs` extensions; and with `.wasm` extensions when [`--experimental-wasm-modules`][] is enabled. ## Options @@ -550,7 +550,11 @@ changes: `--experimental-loader`. --> -Specify the `module` of a custom experimental [ECMAScript module loader][]. +> This flag is discouraged and may be removed in a future version of Node.js. +> Please use +> [`--import` with `register()`][module customization hooks: enabling] instead. + +Specify the `module` containing exported [module customization hooks][]. `module` may be any string accepted as an [`import` specifier][]. 
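+
+For example, a hooks module can be registered at startup with `--import`
+instead (the file name here is illustrative):
+
+```bash
+node --import ./register-hooks.js ./my-app.js
+```
+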
### `--experimental-network-imports` @@ -2640,9 +2644,10 @@ done [CommonJS module]: modules.md [CustomEvent Web API]: https://dom.spec.whatwg.org/#customevent [ECMAScript module]: esm.md#modules-ecmascript-modules -[ECMAScript module loader]: esm.md#loaders [Fetch API]: https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API [File System Permissions]: permissions.md#file-system-permissions +[Module customization hooks]: module.md#customization-hooks +[Module customization hooks: enabling]: module.md#enabling [Modules loaders]: packages.md#modules-loaders [Node.js issue tracker]: https://github.com/nodejs/node/issues [OSSL_PROVIDER-legacy]: https://www.openssl.org/docs/man3.0/man7/OSSL_PROVIDER-legacy.html diff --git a/doc/api/esm.md b/doc/api/esm.md index 4004d5f03f0fcc..120d185d39bb64 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -1070,7 +1070,7 @@ resolution for ESM specifiers is [commonjs-extension-resolution-loader][]. [`process.dlopen`]: process.md#processdlopenmodule-filename-flags [cjs-module-lexer]: https://github.com/nodejs/cjs-module-lexer/tree/1.2.2 [commonjs-extension-resolution-loader]: https://github.com/nodejs/loaders-test/tree/main/commonjs-extension-resolution-loader -[custom https loader]: module.md#https-loader +[custom https loader]: module.md#import-from-https [import.meta.resolve]: #importmetaresolvespecifier [percent-encoded]: url.md#percent-encoding-in-urls [special scheme]: https://url.spec.whatwg.org/#special-scheme diff --git a/doc/api/module.md b/doc/api/module.md index 81b89ae0a3752b..e7b94d34a63b18 100644 --- a/doc/api/module.md +++ b/doc/api/module.md @@ -102,114 +102,7 @@ added: v20.6.0 * Returns: {any} returns whatever was returned by the `initialize` hook. Register a module that exports [hooks][] that customize Node.js module -resolution and loading behavior. - -```mjs -import { register } from 'node:module'; - -register('http-to-https', import.meta.url); - -// Because this is a dynamic `import()`, the `http-to-https` hooks will run -// before importing `./my-app.mjs`. -await import('./my-app.mjs'); -``` - -In the example above, we are registering the `http-to-https` loader, -but it will only be available for subsequently imported modules—in -this case, `my-app.mjs`. If the `await import('./my-app.mjs')` had -instead been a static `import './my-app.mjs'`, _the app would already -have been loaded_ before the `http-to-https` hooks were -registered. This is part of the design of ES modules, where static -imports are evaluated from the leaves of the tree first back to the -trunk. There can be static imports _within_ `my-app.mjs`, which -will not be evaluated until `my-app.mjs` is when it's dynamically -imported. - -The `--experimental-loader` flag of the CLI can be used together -with the `register` function; the hooks registered with the -function will follow the same evaluation chain of hooks registered -within the CLI: - -```console -node \ - --experimental-loader unpkg \ - --experimental-loader http-to-https \ - --experimental-loader cache-buster \ - entrypoint.mjs -``` - -```mjs -// entrypoint.mjs -import { URL } from 'node:url'; -import { register } from 'node:module'; - -const loaderURL = new URL('./my-programmatically-loader.mjs', import.meta.url); - -register(loaderURL); -await import('./my-app.mjs'); -``` - -The `my-programmatic-loader.mjs` can leverage `unpkg`, -`http-to-https`, and `cache-buster` loaders. 
- -It's also possible to use `register` more than once: - -```mjs -// entrypoint.mjs -import { URL } from 'node:url'; -import { register } from 'node:module'; - -register(new URL('./first-loader.mjs', import.meta.url)); -register('./second-loader.mjs', import.meta.url); -await import('./my-app.mjs'); -``` - -Both loaders (`first-loader.mjs` and `second-loader.mjs`) can use -all the resources provided by the loaders registered in the CLI. But -remember that they will only be available in the next imported -module (`my-app.mjs`). The evaluation order of the hooks when -importing `my-app.mjs` and consecutive modules in the example above -will be: - -```console -resolve: second-loader.mjs -resolve: first-loader.mjs -resolve: cache-buster -resolve: http-to-https -resolve: unpkg -load: second-loader.mjs -load: first-loader.mjs -load: cache-buster -load: http-to-https -load: unpkg -globalPreload: second-loader.mjs -globalPreload: first-loader.mjs -globalPreload: cache-buster -globalPreload: http-to-https -globalPreload: unpkg -``` - -This function can also be used to pass data to the loader's [`initialize`][] -hook; the data passed to the hook may include transferrable objects like ports. - -```mjs -import { register } from 'node:module'; -import { MessageChannel } from 'node:worker_threads'; - -// This example showcases how a message channel can be used to -// communicate to the loader, by sending `port2` to the loader. -const { port1, port2 } = new MessageChannel(); - -port1.on('message', (msg) => { - console.log(msg); -}); - -register('./my-programmatic-loader.mjs', { - parentURL: import.meta.url, - data: { number: 1, port: port2 }, - transferList: [port2], -}); -``` +resolution and loading behavior. See [Customization hooks][]. ### `module.syncBuiltinESMExports()` @@ -250,6 +143,8 @@ import('node:fs').then((esmFS) => { }); ``` + + ## Customization Hooks -> Stability: 1 - Experimental - -> This API is currently being redesigned and will still change. +> Stability: 1.1 - Active development -To customize the default module resolution, loader hooks can optionally be -provided via a `--experimental-loader ./loader-name.mjs` argument to Node.js. + + +### Enabling + +Module resolution and loading can be customized by registering a file which +exports a set of hooks. This can be done using the [`register`][] method +from `node:module`, which you can run before your application code by +using the `--import` flag: + +```bash +node --import ./register-hooks.js ./my-app.js +``` + +```mjs +// register-hooks.js +import { register } from 'node:module'; + +register('./hooks.mjs', import.meta.url); +``` + +```cjs +// register-hooks.js +const { register } = require('node:module'); +const { pathToFileURL } = require('node:url'); + +register('./hooks.mjs', pathToFileURL(__filename)); +``` + +The file passed to `--import` can also be an export from a dependency: + +```bash +node --import some-package/register ./my-app.js +``` + +Where `some-package` has an [`"exports"`][] field defining the `/register` +export to map to a file that calls `register()`, like the following `register-hooks.js` +example. + +Using `--import` ensures that the hooks are registered before any application +files are imported, including the entry point of the application. 
Alternatively, +`register` can be called from the entry point, but dynamic `import()` must be +used for any code that should be run after the hooks are registered: + +```mjs +import { register } from 'node:module'; + +register('http-to-https', import.meta.url); + +// Because this is a dynamic `import()`, the `http-to-https` hooks will run +// to handle `./my-app.js` and any other files it imports or requires. +await import('./my-app.js'); +``` + +```cjs +const { register } = require('node:module'); +const { pathToFileURL } = require('node:url'); + +register('http-to-https', pathToFileURL(__filename)); + +// Because this is a dynamic `import()`, the `http-to-https` hooks will run +// to handle `./my-app.js` and any other files it imports or requires. +import('./my-app.js'); +``` + +In this example, we are registering the `http-to-https` hooks, but they will +only be available for subsequently imported modules—in this case, `my-app.js` +and anything it references via `import` (and optionally `require`). If the +`import('./my-app.js')` had instead been a static `import './my-app.js'`, the +app would have _already_ been loaded **before** the `http-to-https` hooks were +registered. This due to the ES modules specification, where static imports are +evaluated from the leaves of the tree first, then back to the trunk. There can +be static imports _within_ `my-app.js`, which will not be evaluated until +`my-app.js` is dynamically imported. -When hooks are used they apply to each subsequent loader, the entry point, and -all `import` calls. They won't apply to `require` calls; those still follow -[CommonJS][] rules. +`my-app.js` can also be CommonJS. Customization hooks will run for any +modules that it references via `import` (and optionally `require`). -Loaders follow the pattern of `--require`: +Finally, if all you want to do is register hooks before your app runs and you +don't want to create a separate file for that purpose, you can pass a `data:` +URL to `--import`: ```bash -node \ - --experimental-loader unpkg \ - --experimental-loader http-to-https \ - --experimental-loader cache-buster +node --import 'data:text/javascript,import { register } from "node:module"; import { pathToFileURL } from "node:url"; register("http-to-https", pathToFileURL("./"));' ./my-app.js ``` -These are called in the following sequence: `cache-buster` calls -`http-to-https` which calls `unpkg`. +### Chaining + +It's possible to call `register` more than once: + +```mjs +// entrypoint.mjs +import { register } from 'node:module'; + +register('./first.mjs', import.meta.url); +register('./second.mjs', import.meta.url); +await import('./my-app.mjs'); +``` + +```cjs +// entrypoint.cjs +const { register } = require('node:module'); +const { pathToFileURL } = require('node:url'); + +const parentURL = pathToFileURL(__filename); +register('./first.mjs', parentURL); +register('./second.mjs', parentURL); +import('./my-app.mjs'); +``` + +In this example, the registered hooks will form chains. If both `first.mjs` and +`second.mjs` define a `resolve` hook, both will be called, in the order they +were registered. The same applies to all the other hooks. + +The registered hooks also affect `register` itself. In this example, +`second.mjs` will be resolved and loaded per the hooks registered by +`first.mjs`. This allows for things like writing hooks in non-JavaScript +languages, so long as an earlier registered loader is one that transpiles into +JavaScript. 
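+
+For illustration, a minimal sketch of a chainable `resolve` hook might look
+like this (the file name is illustrative, not part of the API):
+
+```mjs
+// first.mjs
+export async function resolve(specifier, context, nextResolve) {
+  // Inspect or rewrite `specifier` here, then defer to the rest of the
+  // chain; returning `shortCircuit: true` would end the chain instead.
+  return nextResolve(specifier, context);
+}
+```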
+ +The `register` method cannot be called from within the module that defines the +hooks. + +### Communication with module customization hooks + +Module customization hooks run on a dedicated thread, separate from the main +thread that runs application code. This means mutating global variables won't +affect the other thread(s), and message channels must be used to communicate +between the threads. + +The `register` method can be used to pass data to an [`initialize`][] hook. The +data passed to the hook may include transferrable objects like ports. + +```mjs +import { register } from 'node:module'; +import { MessageChannel } from 'node:worker_threads'; + +// This example demonstrates how a message channel can be used to +// communicate with the hooks, by sending `port2` to the hooks. +const { port1, port2 } = new MessageChannel(); + +port1.on('message', (msg) => { + console.log(msg); +}); + +register('./my-hooks.mjs', { + parentURL: import.meta.url, + data: { number: 1, port: port2 }, + transferList: [port2], +}); +``` + +```cjs +const { register } = require('node:module'); +const { pathToFileURL } = require('node:url'); +const { MessageChannel } = require('node:worker_threads'); + +// This example showcases how a message channel can be used to +// communicate with the hooks, by sending `port2` to the hooks. +const { port1, port2 } = new MessageChannel(); + +port1.on('message', (msg) => { + console.log(msg); +}); + +register('./my-hooks.mjs', { + parentURL: pathToFileURL(__filename), + data: { number: 1, port: port2 }, + transferList: [port2], +}); +``` ### Hooks +The [`register`][] method can be used to register a module that exports a set of +hooks. The hooks are functions that are called by Node.js to customize the +module resolution and loading process. The exported functions must have specific +names and signatures, and they must be exported as named exports. + +```mjs +export async function initialize({ number, port }) { + // Receive data from `register`, return data to `register`. +} + +export async function resolve(specifier, context, nextResolve) { + // Take an `import` or `require` specifier and resolve it to a URL. +} + +export async function load(url, context, nextLoad) { + // Take a resolved URL and return the source code to be evaluated. +} +``` + Hooks are part of a chain, even if that chain consists of only one custom (user-provided) hook and the default hook, which is always present. Hook functions nest: each one must always return a plain object, and chaining happens -as a result of each function calling `next()`, which is a reference -to the subsequent loader's hook. +as a result of each function calling `next()`, which is a reference to +the subsequent loader's hook. -A hook that returns a value lacking a required property triggers an exception. -A hook that returns without calling `next()` _and_ without returning +A hook that returns a value lacking a required property triggers an exception. A +hook that returns without calling `next()` _and_ without returning `shortCircuit: true` also triggers an exception. These errors are to help -prevent unintentional breaks in the chain. +prevent unintentional breaks in the chain. Return `shortCircuit: true` from a +hook to signal that the chain is intentionally ending at your hook. -Hooks are run in a separate thread, isolated from the main. That means it is a -different [realm](https://tc39.es/ecma262/#realm). 
The hooks thread may be -terminated by the main thread at any time, so do not depend on asynchronous -operations (like `console.log`) to complete. +Hooks are run in a separate thread, isolated from the main thread where +application code runs. That means it is a different [realm][]. The hooks thread +may be terminated by the main thread at any time, so do not depend on +asynchronous operations (like `console.log`) to complete. #### `initialize()` @@ -318,16 +383,14 @@ operations (like `console.log`) to complete. added: REPLACEME --> -> The loaders API is being redesigned. This hook may disappear or its -> signature may change. Do not rely on the API described below. +> Stability: 1.1 - Active development * `data` {any} The data from `register(loader, import.meta.url, { data })`. * Returns: {any} The data to be returned to the caller of `register`. -The `initialize` hook provides a way to define a custom function that runs -in the loader's thread when the loader is initialized. Initialization happens -when the loader is registered via [`register`][] or registered via the -`--experimental-loader` command line option. +The `initialize` hook provides a way to define a custom function that runs in +the hooks thread when the hooks module is initialized. Initialization happens +when the hooks module is registered via [`register`][]. This hook can send and receive data from a [`register`][] invocation, including ports and other transferrable objects. The return value of `initialize` must be @@ -338,11 +401,10 @@ either: [`port.postMessage`][]), * a `Promise` resolving to one of the aforementioned values. -Loader code: +Module customization code: ```mjs -// In the below example this file is referenced as -// '/path-to-my-loader.js' +// path-to-my-hooks.js export async function initialize({ number, port }) { port.postMessage(`increment: ${number + 1}`); @@ -357,16 +419,16 @@ import assert from 'node:assert'; import { register } from 'node:module'; import { MessageChannel } from 'node:worker_threads'; -// This example showcases how a message channel can be used to -// communicate between the main (application) thread and the loader -// running on the loaders thread, by sending `port2` to the loader. +// This example showcases how a message channel can be used to communicate +// between the main (application) thread and the hooks running on the hooks +// thread, by sending `port2` to the `initialize` hook. const { port1, port2 } = new MessageChannel(); port1.on('message', (msg) => { assert.strictEqual(msg, 'increment: 2'); }); -const result = register('/path-to-my-loader.js', { +const result = register('./path-to-my-hooks.js', { parentURL: import.meta.url, data: { number: 1, port: port2 }, transferList: [port2], @@ -375,6 +437,30 @@ const result = register('/path-to-my-loader.js', { assert.strictEqual(result, 'ok'); ``` +```cjs +const assert = require('node:assert'); +const { register } = require('node:module'); +const { pathToFileURL } = require('node:url'); +const { MessageChannel } = require('node:worker_threads'); + +// This example showcases how a message channel can be used to communicate +// between the main (application) thread and the hooks running on the hooks +// thread, by sending `port2` to the `initialize` hook. 
+const { port1, port2 } = new MessageChannel(); + +port1.on('message', (msg) => { + assert.strictEqual(msg, 'increment: 2'); +}); + +const result = register('./path-to-my-hooks.js', { + parentURL: pathToFileURL(__filename), + data: { number: 1, port: port2 }, + transferList: [port2], +}); + +assert.strictEqual(result, 'ok'); +``` + #### `resolve(specifier, context, nextResolve)` -> The loaders API is being redesigned. This hook may disappear or its -> signature may change. Do not rely on the API described below. +> Stability: 1.2 - Release candidate * `specifier` {string} * `context` {Object} @@ -417,21 +502,21 @@ changes: terminate the chain of `resolve` hooks. **Default:** `false` * `url` {string} The absolute URL to which this input resolves -> **Caveat** Despite support for returning promises and async functions, calls +> **Warning** Despite support for returning promises and async functions, calls > to `resolve` may block the main thread which can impact performance. The `resolve` hook chain is responsible for telling Node.js where to find and -how to cache a given `import` statement or expression. It can optionally return -its format (such as `'module'`) as a hint to the `load` hook. If a format is -specified, the `load` hook is ultimately responsible for providing the final -`format` value (and it is free to ignore the hint provided by `resolve`); if -`resolve` provides a `format`, a custom `load` hook is required even if only to -pass the value to the Node.js default `load` hook. +how to cache a given `import` statement or expression, or `require` call. It can +optionally return a format (such as `'module'`) as a hint to the `load` hook. If +a format is specified, the `load` hook is ultimately responsible for providing +the final `format` value (and it is free to ignore the hint provided by +`resolve`); if `resolve` provides a `format`, a custom `load` hook is required +even if only to pass the value to the Node.js default `load` hook. Import type assertions are part of the cache key for saving loaded modules into -the internal module cache. The `resolve` hook is responsible for -returning an `importAssertions` object if the module should be cached with -different assertions than were present in the source code. +the internal module cache. The `resolve` hook is responsible for returning an +`importAssertions` object if the module should be cached with different +assertions than were present in the source code. The `conditions` property in `context` is an array of conditions for [package exports conditions][Conditional exports] that apply to this resolution @@ -445,7 +530,7 @@ Node.js module specifier resolution behavior_ when calling `defaultResolve`, the `context.conditions` array originally passed into the `resolve` hook. ```mjs -export function resolve(specifier, context, nextResolve) { +export async function resolve(specifier, context, nextResolve) { const { parentURL = null } = context; if (Math.random() > 0.5) { // Some condition. @@ -490,11 +575,7 @@ changes: its return. --> -> The loaders API is being redesigned. This hook may disappear or its -> signature may change. Do not rely on the API described below. - -> In a previous version of this API, this was split across 3 separate, now -> deprecated, hooks (`getFormat`, `getSource`, and `transformSource`). +> Stability: 1.2 - Release candidate * `url` {string} The URL returned by the `resolve` chain * `context` {Object} @@ -512,8 +593,8 @@ changes: terminate the chain of `resolve` hooks. 
**Default:** `false` * `source` {string|ArrayBuffer|TypedArray} The source for Node.js to evaluate -The `load` hook provides a way to define a custom method of determining how -a URL should be interpreted, retrieved, and parsed. It is also in charge of +The `load` hook provides a way to define a custom method of determining how a +URL should be interpreted, retrieved, and parsed. It is also in charge of validating the import assertion. The final value of `format` must be one of the following: @@ -529,19 +610,23 @@ The final value of `format` must be one of the following: The value of `source` is ignored for type `'builtin'` because currently it is not possible to replace the value of a Node.js builtin (core) module. -The value of `source` can be omitted for type `'commonjs'`. When a `source` is -provided, all `require` calls from this module will be processed by the ESM -loader with registered `resolve` and `load` hooks; all `require.resolve` calls -from this module will be processed by the ESM loader with registered `resolve` -hooks; `require.extensions` and monkey-patching on the CommonJS module loader -will not apply. If `source` is undefined or `null`, it will be handled by the -CommonJS module loader and `require`/`require.resolve` calls will not go through -the registered hooks. This behavior for nullish `source` is temporary — in the -future, nullish `source` will not be supported. - -The Node.js own `load` implementation, which is the value of `next` for the last -loader in the `load` chain, returns `null` for `source` when `format` is -`'commonjs'` for backward compatibility. Here is an example loader that would +Omitting vs providing a `source` for `'commonjs'` has very different effects: + +* When a `source` is provided, all `require` calls from this module will be + processed by the ESM loader with registered `resolve` and `load` hooks; all + `require.resolve` calls from this module will be processed by the ESM loader + with registered `resolve` hooks; only a subset of the CommonJS API will be + available (e.g. no `require.extensions`, no `require.cache`, no + `require.resolve.paths`) and monkey-patching on the CommonJS module loader + will not apply. +* If `source` is undefined or `null`, it will be handled by the CommonJS module + loader and `require`/`require.resolve` calls will not go through the + registered hooks. This behavior for nullish `source` is temporary — in the + future, nullish `source` will not be supported. + +The Node.js internal `load` implementation, which is the value of `next` for the +last hook in the `load` chain, returns `null` for `source` when `format` is +`'commonjs'` for backward compatibility. Here is an example hook that would opt-in to using the non-default behavior: ```mjs @@ -556,7 +641,7 @@ export async function load(url, context, nextLoad) { } ``` -> **Caveat**: The ESM `load` hook and namespaced exports from CommonJS modules +> **Warning**: The ESM `load` hook and namespaced exports from CommonJS modules > are incompatible. Attempting to use them together will result in an empty > object from the import. This may be addressed in the future. @@ -569,9 +654,9 @@ If the source value of a text-based format (i.e., `'json'`, `'module'`) is not a string, it is converted to a string using [`util.TextDecoder`][]. The `load` hook provides a way to define a custom method for retrieving the -source code of an ES module specifier. This would allow a loader to potentially -avoid reading files from disk. 
It could also be used to map an unrecognized -format to a supported one, for example `yaml` to `module`. +source code of a resolved URL. This would allow a loader to potentially avoid +reading files from disk. It could also be used to map an unrecognized format to +a supported one, for example `yaml` to `module`. ```mjs export async function load(url, context, nextLoad) { @@ -611,11 +696,11 @@ changes: description: Add support for chaining globalPreload hooks. --> -> This hook will be removed in a future version. Use [`initialize`][] instead. -> When a loader has an `initialize` export, `globalPreload` will be ignored. +> Stability: 1.0 - Early development -> In a previous version of this API, this hook was named -> `getGlobalPreloadCode`. +> **Warning:** This hook will be removed in a future version. Use +> [`initialize`][] instead. When a hooks module has an `initialize` export, +> `globalPreload` will be ignored. * `context` {Object} Information to assist the preload code * `port` {MessagePort} @@ -647,16 +732,17 @@ const require = createRequire(cwd() + '/'); } ``` -In order to allow communication between the application and the loader, another -argument is provided to the preload code: `port`. This is available as a -parameter to the loader hook and inside of the source text returned by the hook. -Some care must be taken in order to properly call [`port.ref()`][] and +Another argument is provided to the preload code: `port`. This is available as a +parameter to the hook and inside of the source text returned by the hook. This +functionality has been moved to the `initialize` hook. + +Care must be taken in order to properly call [`port.ref()`][] and [`port.unref()`][] to prevent a process from being in a state where it won't close normally. ```mjs /** - * This example has the application context send a message to the loader + * This example has the application context send a message to the hook * and sends the message back to the application context */ export function globalPreload({ port }) { @@ -664,7 +750,7 @@ export function globalPreload({ port }) { port.postMessage(evt.data); }; return `\ - port.postMessage('console.log("I went to the Loader and back");'); + port.postMessage('console.log("I went to the hook and back");'); port.onmessage = (evt) => { eval(evt.data); }; @@ -674,22 +760,23 @@ export function globalPreload({ port }) { ### Examples -The various loader hooks can be used together to accomplish wide-ranging -customizations of the Node.js code loading and evaluation behaviors. +The various module customization hooks can be used together to accomplish +wide-ranging customizations of the Node.js code loading and evaluation +behaviors. -#### HTTPS loader +#### Import from HTTPS In current Node.js, specifiers starting with `https://` are experimental (see [HTTPS and HTTP imports][]). -The loader below registers hooks to enable rudimentary support for such +The hook below registers hooks to enable rudimentary support for such specifiers. While this may seem like a significant improvement to Node.js core -functionality, there are substantial downsides to actually using this loader: +functionality, there are substantial downsides to actually using these hooks: performance is much slower than loading files from disk, there is no caching, and there is no security. 
```mjs -// https-loader.mjs +// https-hooks.mjs import { get } from 'node:https'; export function load(url, context, nextLoad) { @@ -724,59 +811,42 @@ import { VERSION } from 'https://coffeescript.org/browser-compiler-modern/coffee console.log(VERSION); ``` -With the preceding loader, running -`node --experimental-loader ./https-loader.mjs ./main.mjs` +With the preceding hooks module, running +`node --import 'data:text/javascript,import { register } from "node:module"; import { pathToFileURL } from "node:url"; register(pathToFileURL("./https-hooks.mjs"));' ./main.mjs` prints the current version of CoffeeScript per the module at the URL in `main.mjs`. -#### Transpiler loader +#### Transpilation Sources that are in formats Node.js doesn't understand can be converted into JavaScript using the [`load` hook][load hook]. -This is less performant than transpiling source files before running -Node.js; a transpiler loader should only be used for development and testing -purposes. +This is less performant than transpiling source files before running Node.js; +transpiler hooks should only be used for development and testing purposes. ```mjs -// coffeescript-loader.mjs +// coffeescript-hooks.mjs import { readFile } from 'node:fs/promises'; import { dirname, extname, resolve as resolvePath } from 'node:path'; import { cwd } from 'node:process'; import { fileURLToPath, pathToFileURL } from 'node:url'; -import CoffeeScript from 'coffeescript'; +import coffeescript from 'coffeescript'; -const baseURL = pathToFileURL(`${cwd()}/`).href; +const extensionsRegex = /\.(coffee|litcoffee|coffee\.md)$/; export async function load(url, context, nextLoad) { if (extensionsRegex.test(url)) { - // Now that we patched resolve to let CoffeeScript URLs through, we need to - // tell Node.js what format such URLs should be interpreted as. Because - // CoffeeScript transpiles into JavaScript, it should be one of the two - // JavaScript formats: 'commonjs' or 'module'. - // CoffeeScript files can be either CommonJS or ES modules, so we want any // CoffeeScript file to be treated by Node.js the same as a .js file at the // same location. To determine how Node.js would interpret an arbitrary .js // file, search up the file system for the nearest parent package.json file // and read its "type" field. const format = await getPackageType(url); - // When a hook returns a format of 'commonjs', `source` is ignored. - // To handle CommonJS files, a handler needs to be registered with - // `require.extensions` in order to process the files with the CommonJS - // loader. Avoiding the need for a separate CommonJS handler is a future - // enhancement planned for ES module loaders. - if (format === 'commonjs') { - return { - format, - shortCircuit: true, - }; - } const { source: rawSource } = await nextLoad(url, { ...context, format }); // This hook converts CoffeeScript source code into JavaScript source code // for all imported CoffeeScript files. 
- const transformedSource = coffeeCompile(rawSource.toString(), url); + const transformedSource = coffeescript.compile(rawSource.toString(), url); return { format, @@ -833,23 +903,22 @@ console.log "Brought to you by Node.js version #{version}" export scream = (str) -> str.toUpperCase() ``` -With the preceding loader, running -`node --experimental-loader ./coffeescript-loader.mjs main.coffee` +With the preceding hooks module, running +`node --import 'data:text/javascript,import { register } from "node:module"; import { pathToFileURL } from "node:url"; register(pathToFileURL("./coffeescript-hooks.mjs"));' ./main.coffee` causes `main.coffee` to be turned into JavaScript after its source code is loaded from disk but before Node.js executes it; and so on for any `.coffee`, `.litcoffee` or `.coffee.md` files referenced via `import` statements of any loaded file. -#### "import map" loader +#### Import maps -The previous two loaders defined `load` hooks. This is an example of a loader -that does its work via the `resolve` hook. This loader reads an -`import-map.json` file that specifies which specifiers to override to another -URL (this is a very simplistic implemenation of a small subset of the -"import maps" specification). +The previous two examples defined `load` hooks. This is an example of a +`resolve` hook. This hooks module reads an `import-map.json` file that defines +which specifiers to override to other URLs (this is a very simplistic +implementation of a small subset of the "import maps" specification). ```mjs -// import-map-loader.js +// import-map-hooks.js import fs from 'node:fs/promises'; const { imports } = JSON.parse(await fs.readFile('import-map.json')); @@ -863,7 +932,7 @@ export async function resolve(specifier, context, nextResolve) { } ``` -Let's assume we have these files: +With these files: ```mjs // main.js @@ -884,19 +953,8 @@ import 'a-module'; console.log('some module!'); ``` -If you run `node --experimental-loader ./import-map-loader.js main.js` -the output will be `some module!`. - -### Register loaders programmatically - - - -In addition to using the `--experimental-loader` option in the CLI, -loaders can also be registered programmatically. You can find -detailed information about this process in the documentation page -for [`module.register()`][]. +Running `node --import 'data:text/javascript,import { register } from "node:module"; import { pathToFileURL } from "node:url"; register(pathToFileURL("./import-map-hooks.js"));' main.js` +should print `some module!`. 
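+
+The same `resolve`-hook approach works without a configuration file; a
+hypothetical single-alias version could be as small as:
+
+```mjs
+// alias-hooks.js (illustrative)
+export async function resolve(specifier, context, nextResolve) {
+  // Redirect one bare specifier; pass everything else through unchanged.
+  const target = specifier === 'a-module' ? './some-module.mjs' : specifier;
+  return nextResolve(target, context);
+}
+```
+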
## Source map v3 support @@ -1044,9 +1102,11 @@ returned object contains the following keys: [CommonJS]: modules.md [Conditional exports]: packages.md#conditional-exports +[Customization hooks]: #customization-hooks [ES Modules]: esm.md [HTTPS and HTTP imports]: esm.md#https-and-http-imports [Source map v3 format]: https://sourcemaps.info/spec.html#h.mofvlxcwqzej +[`"exports"`]: packages.md#exports [`--enable-source-maps`]: cli.md#--enable-source-maps [`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer [`NODE_V8_COVERAGE=dir`]: cli.md#node_v8_coveragedir @@ -1055,7 +1115,6 @@ returned object contains the following keys: [`TypedArray`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray [`Uint8Array`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array [`initialize`]: #initialize -[`module.register()`]: #moduleregisterspecifier-parenturl-options [`module`]: modules.md#the-module-object [`port.postMessage`]: worker_threads.md#portpostmessagevalue-transferlist [`port.ref()`]: worker_threads.md#portref @@ -1066,5 +1125,6 @@ returned object contains the following keys: [hooks]: #customization-hooks [load hook]: #loadurl-context-nextload [module wrapper]: modules.md#the-module-wrapper +[realm]: https://tc39.es/ecma262/#realm [source map include directives]: https://sourcemaps.info/spec.html#h.lmz475t4mvbx [transferrable objects]: worker_threads.md#portpostmessagevalue-transferlist From 92772a8175c26a87252c9f1f680533b19f42e3cb Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Sat, 26 Aug 2023 00:23:15 +0900 Subject: [PATCH 069/125] fs: remove redundant code in readableWebStream() Remove redundant code by moving it to outside of `if/else`. Plus, make `options` optional in doc. PR-URL: https://github.com/nodejs/node/pull/49298 Reviewed-By: Debadree Chatterjee Reviewed-By: Luigi Pinca --- doc/api/fs.md | 2 +- lib/internal/fs/promises.js | 23 ++++++++--------------- 2 files changed, 9 insertions(+), 16 deletions(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index ecdce382a9bc35..4438465882d25f 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -446,7 +446,7 @@ Reads data from the file and stores that in the given buffer. If the file is not modified concurrently, the end-of-file is reached when the number of bytes read is zero. -#### `filehandle.readableWebStream(options)` +#### `filehandle.readableWebStream([options])` regexps + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + // check if it's a drive or unc path. + const isUNC = s[0] === '' && + s[1] === '' && + (s[2] === '?' || !globMagic.test(s[2])) && + !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; + } + else if (isDrive) { + return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; + } + } + return s.map(ss => this.parse(ss)); + }); + this.debug(this.pattern, set); + // filter out everything that didn't compile properly. + this.set = set.filter(s => s.indexOf(false) === -1); + // do not treat the ? in UNC paths as magic + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === '' && + p[1] === '' && + this.globParts[i][2] === '?' 
&& + typeof p[3] === 'string' && + /^[a-z]:$/i.test(p[3])) { + p[2] = '?'; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + // if we're not in globstar mode, then turn all ** into * + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === '**') { + globParts[i][j] = '*'; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + // aggressive optimization for the purpose of fs walking + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } + else if (optimizationLevel >= 1) { + // just basic optimizations to remove some .. parts + globParts = this.levelOneOptimize(globParts); + } + else { + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map(parts => { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let i = gs; + while (parts[i + 1] === '**') { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map(parts => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === '**' && prev === '**') { + return set; + } + if (part === '..') { + if (prev && prev !== '..' && prev !== '.' && prev !== '**') { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? [''] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + //
+            // <pre>/<e>/<rest> -> <pre>/<rest>
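+            // e.g. a/./b and a//b both reduce to a/b (when multiple
+            // slashes are not preserved)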
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
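+            // e.g. a/b/../c reduces to a/c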
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
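+    //
+    // Concretely: with <pre> = a, <p> = b and c, and <rest> = d, the first
+    // transform above rewrites a/**/../b/c/d into {a/../b/c/d,a/**/b/c/d}.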
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
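+                        // e.g. a/**/**/b collapses to a/**/b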
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
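+    //
+    // Concretely: {a/*/c,a/b/c} dedupes to a/*/c, and {a/**/c,a/c}
+    // dedupes to a/**/c.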
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (!matched)
+                    continue;
+                globParts[i] = matched;
+                globParts[j] = [];
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means they two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('should be unreachable');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        return fastTest ? Object.assign(re, { test: fastTest }) : re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
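
The CJS entry point above exposes the same surface as the ESM build added
later in this patch. A minimal usage sketch (the bare 'minimatch' specifier
assumes normal package resolution; nothing below is part of the patch itself):

    // try-minimatch.cjs (illustrative only)
    const { minimatch, Minimatch } = require('minimatch');

    // globstar: ** may swallow any number of path segments
    console.log(minimatch('a/b/x/y/z/c', 'a/**/b/**/c')); // true

    // dotfiles only match when { dot: true } is set
    console.log(minimatch('.profile', '*'));                // false
    console.log(minimatch('.profile', '*', { dot: true })); // true

    // Minimatch instances cache their compiled regexp
    const mm = new Minimatch('*.js', { nocase: true });
    console.log(mm.match('INDEX.JS')); // true
    console.log(mm.makeRe());          // compiled RegExp, cached on mm.regexp
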
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json
new file mode 100644
index 00000000000000..5bbefffbabee39
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
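
This nested package.json forces everything under dist/cjs to load as
CommonJS, regardless of the "type" field at the package root (assumed here
to be "module", as is typical for dual builds; the root file is not in this
patch). A sketch of how consumers reach each build, assuming a conditional
"exports" map in the root package.json:

    const { minimatch } = require('minimatch'); // resolves to dist/cjs/index.js
    // import { minimatch } from 'minimatch';   // resolves to dist/mjs/index.js
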
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js
new file mode 100644
index 00000000000000..47c36bcee5a02a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `'*'`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
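
A sketch of the behavior documented above, following the two replace
patterns in this file (results shown as JS string literals):

    const { unescape } = require('minimatch');

    unescape('[*]');                                 // '*'   (brace escape removed)
    unescape('\\*');                                 // '*'   (backslash escape removed)
    unescape('\\*', { windowsPathsNoEscape: true }); // '\\*' (backslash kept: path separator)
    unescape('[*]', { windowsPathsNoEscape: true }); // '*'
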
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js
new file mode 100644
index 00000000000000..7b534fc30200bb
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
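
The guard above rejects non-strings and anything over the 64 KiB cap. A
short sketch (the relative import path is assumed):

    import { assertValidPattern } from './assert-valid-pattern.js';

    assertValidPattern('a/**/b.js'); // ok, returns undefined

    try { assertValidPattern(42); }
    catch (e) { console.log(e.message); } // 'invalid pattern'

    try { assertValidPattern('x'.repeat(64 * 1024 + 1)); }
    catch (e) { console.log(e.message); } // 'pattern is too long'
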
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js
new file mode 100644
index 00000000000000..7fb1f83e6182a0
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js
@@ -0,0 +1,585 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of an extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/)), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav =
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
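
toMMPattern() only produces a RegExp when the parsed pattern actually needs
one; patterns with no magic come back as plain unescaped strings. A sketch:

    import { AST } from './ast.js';

    // extglobs are inherently magical, so this yields a RegExp
    const magic = AST.fromGlob('+(a|b)*.js', {}).toMMPattern();
    console.log(magic instanceof RegExp); // true
    console.log(magic.test('abab.js'));   // true

    // no magic characters: the unescaped literal string is returned
    const literal = AST.fromGlob('plain.txt', {}).toMMPattern();
    console.log(literal); // 'plain.txt'
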
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js
new file mode 100644
index 00000000000000..c629d6ae816e27
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated] }
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true, true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, a boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out-of-order ranges, and returns ($.) if the
+// entire class is no good.
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c] or c<more...> at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
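
parseClass returns [regexpSource, uflagNeeded, charsConsumed, hasMagic]. A
few sketched calls, traced from the logic above:

    import { parseClass } from './brace-expressions.js';

    parseClass('[a-c]', 0);       // ['[a-c]', false, 5, true]
    parseClass('[[:digit:]]', 0); // ['[\\p{Nd}]', true, 11, true]  (needs /u)
    parseClass('[_]', 0);         // ['_', false, 3, false]  (single literal, not magic)
    parseClass('[a-', 0);         // ['', false, 0, false]   (unclosed, treat as literal)
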
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js
new file mode 100644
index 00000000000000..16f7c8c7bdc646
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
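
A sketch of the two escaping modes described above:

    import { escape } from './escape.js';

    escape('a*(b)?');                               // 'a\\*\\(b\\)\\?'
    escape('a*b?', { windowsPathsNoEscape: true }); // 'a[*]b[?]'
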
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js
new file mode 100644
index 00000000000000..831b6a67f63fb4
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js
@@ -0,0 +1,995 @@
+import expand from 'brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li <https://github.com/yetingli> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
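+// A few concrete expansions, following the table above (shown as comments
+// only; nothing here executes as part of this module):
+//   braceExpand('a{b,c}d')  // => ['abd', 'acd']
+//   braceExpand('a{0..3}d') // => ['a0d', 'a1d', 'a2d', 'a3d']
+//   braceExpand('a{2..}b')  // => ['a{2..}b']  (invalid sets are not expanded)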
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
+// detect unescaped glob magic characters that force a regexp match
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
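+    // Sketch of the default level-1 optimization (illustrative only, not part
+    // of the upstream file):
+    //   const mm = new Minimatch('a/b/../c');
+    //   mm.globParts;    // [['a', 'c']] -- the 'b/..' pair was resolved away
+    //   mm.match('a/c'); // true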
+    // just get rid of adjacent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjacent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
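+    // Sketch (illustrative only, not part of the upstream file): at
+    // optimizationLevel >= 2, a **/.. sequence is split into the two pattern
+    // shapes it can denote (** matching zero portions, or one-or-more):
+    //   const mm = new Minimatch('a/**/../b/c', { optimizationLevel: 2 });
+    //   mm.globParts;  // [['b', 'c'], ['a', '**', 'b', 'c']]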
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (!matched)
+                    continue;
+                globParts[i] = matched;
+                globParts[j] = [];
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
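+    // Sketch (illustrative only, not part of the upstream file): brace
+    // expansions that differ only where one side has a * dedupe to the *:
+    //   const mm = new Minimatch('{a/*/c,a/b/c}', { optimizationLevel: 2 });
+    //   mm.globParts;  // [['a', '*', 'c']] -- 'a/b/c' is subsumed by 'a/*/c'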
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
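+    // Sketch of negation parsing (illustrative only, not part of the upstream
+    // file):
+    //   new Minimatch('!*.js').negate;   // true; this.pattern becomes '*.js'
+    //   new Minimatch('!!*.js').negate;  // false: each leading ! flips negation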
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
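+    // Partial-match sketch (illustrative only, not part of the upstream file),
+    // per the comment above matchOne():
+    //   const mm = new Minimatch('/*/b/*/d');
+    //   mm.matchOne(['', 'a', 'b'], mm.set[0], true);   // true: file ran out first
+    //   mm.matchOne(['', 'a', 'b'], mm.set[0], false);  // false: pattern left over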
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
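+    // Brace expansion sketch (illustrative only, not part of the upstream file):
+    //   new Minimatch('a{b,c}d').globSet;  // ['abd', 'acd']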
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        return fastTest ? Object.assign(re, { test: fastTest }) : re;
+    }
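+    // Fast-path sketch (illustrative only, not part of the upstream file): a
+    // common shape like '*.js' keeps its compiled RegExp, but test() is
+    // replaced by a cheap string check instead of regexp matching.
+    //   const part = new Minimatch('*.js').set[0][0];
+    //   part.test('foo.js');  // true, via a startsWith/endsWith-style check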
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
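+    // makeRe() sketch (illustrative only, not part of the upstream file):
+    //   const re = new Minimatch('*.js', { nocase: true }).makeRe();
+    //   re.flags.includes('i');  // true, because of nocase
+    //   re.test('FOO.JS');       // true; the source is anchored with ^...$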
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
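+    // slashSplit() sketch (illustrative only, not part of the upstream file):
+    //   const mm = new Minimatch('*', { platform: 'win32' });
+    //   mm.slashSplit('//host/share//x');  // ['', '', 'host', 'share', 'x']
+    //   mm.slashSplit('a///b');            // ['a', 'b'] -- slashes coalesced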
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
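+    // match() sketch (illustrative only, not part of the upstream file):
+    //   new Minimatch('*.js', { matchBase: true }).match('a/b/c.js');  // true
+    //   new Minimatch('!a/*').match('a/b');  // false: a hit on a negated pattern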
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json
new file mode 100644
index 00000000000000..3dbc1ca591c055
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js
new file mode 100644
index 00000000000000..0faf9a2b7306f7
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
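+// Usage sketch (illustrative only, not part of the upstream file):
+//   unescape('[*]');                                  // '*'
+//   unescape('\\*');                                  // '*'
+//   unescape('\\*', { windowsPathsNoEscape: true });  // '\\*' -- left alone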
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json
new file mode 100644
index 00000000000000..061c3b9f343306
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json
@@ -0,0 +1,86 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "9.0.3",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+    "postprepare": "bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  },
+  "dependencies": {
+    "brace-expansion": "^2.0.1"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.15.11",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.12.0",
+    "eslint-config-prettier": "^8.6.0",
+    "mkdirp": "1",
+    "prettier": "^2.8.2",
+    "tap": "^16.3.7",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE
new file mode 100644
index 00000000000000..97f8e32ed82e4c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
new file mode 100644
index 00000000000000..b6cdae8eb514b8
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
@@ -0,0 +1,1028 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+const events_1 = require("events");
+const stream_1 = __importDefault(require("stream"));
+const string_decoder_1 = require("string_decoder");
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof stream_1.default ||
+        (0, exports.isReadable)(s) ||
+        (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+exports.isWritable = isWritable;
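+// Type-guard sketch (illustrative only, not part of the upstream file):
+//   const { Minipass, isStream } = require('minipass');
+//   isStream(new Minipass());  // true
+//   isStream(process.stdout);  // true: an EventEmitter with write() and end()
+//   isStream({});              // false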
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends events_1.EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new string_decoder_1.StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
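+    // Construction sketch (illustrative only, not part of the upstream file):
+    //   new Minipass();                      // Buffer in, Buffer out
+    //   new Minipass({ encoding: 'utf8' });  // emits decoded strings
+    //   new Minipass({ objectMode: true });  // emits arbitrary values
+    //   new Minipass({ objectMode: true, encoding: 'utf8' });  // TypeError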
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer strings, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  Ie, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything is only allowed if in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
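+    // write() sketch (illustrative only, not part of the upstream file):
+    // writes buffer synchronously until a consumer starts the flow.
+    //   const mp = new Minipass({ encoding: 'utf8' });
+    //   mp.write('hello');                   // false: buffered, nobody reading
+    //   mp.on('data', c => console.log(c));  // starts flow, logs 'hello'
+    //   mp.write('world');                   // true: flowing, emitted at once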
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater that the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
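+    // read() sketch (illustrative only, not part of the upstream file):
+    //   const mp = new Minipass({ encoding: 'utf8' });
+    //   mp.write('abcdef');
+    //   mp.read(3);  // 'abc'
+    //   mp.read();   // 'def' -- no argument drains the whole buffer
+    //   mp.read();   // null -- nothing left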
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
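+    // end() sketch (illustrative only, not part of the upstream file):
+    //   const mp = new Minipass({ encoding: 'utf8' });
+    //   mp.on('data', c => console.log('data:', c));
+    //   mp.end('bye');  // logs 'data: bye', then 'end' is emitted
+    //   mp.on('end', () => console.log('done'));  // still fires: see on() below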
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
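+    // pipe() sketch (illustrative only, not part of the upstream file):
+    //   const mp = new Minipass();
+    //   mp.pipe(process.stdout);  // starts the flow of data
+    //   mp.end('hello\n');        // passed through to stdout
+    //   // note: opts.end defaults to false for stdout/stderr destinations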
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
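+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): destroy() drops buffered data and rejects any pending promise().
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.promise().catch(er => console.log(er.message)) // 'stream destroyed'
+    //   mp.write('this chunk is discarded')
+    //   mp.destroy()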
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return exports.isStream;
+    }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
new file mode 100644
index 00000000000000..5bbefffbabee39
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
new file mode 100644
index 00000000000000..b65fafbae43a4e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
@@ -0,0 +1,1018 @@
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+import { EventEmitter } from 'events';
+import Stream from 'stream';
+import { StringDecoder } from 'string_decoder';
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+export const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof Stream ||
+        isReadable(s) ||
+        isWritable(s));
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+export const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== Stream.Writable.prototype.pipe;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+export const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
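+// Illustrative sketch (comment added for clarity; not in the published file):
+// these guards can vet a stream before piping. `src` and `candidate` are
+// hypothetical.
+//
+//   if (isWritable(candidate)) src.pipe(candidate)
+//   else if (!isStream(candidate)) throw new TypeError('not a stream')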
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+export class Minipass extends EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
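+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): encoding and objectMode are mutually exclusive, as enforced above.
+    //
+    //   const bufStream = new Minipass()                     // emits Buffers
+    //   const strStream = new Minipass({ encoding: 'utf8' }) // emits strings
+    //   const objStream = new Minipass({ objectMode: true }) // emits any value
+    //   // new Minipass({ objectMode: true, encoding: 'utf8' }) throws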
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  Ie, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in objectMode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
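+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): write() returns the flowing state, so `false` signals "buffered,
+    // apply backpressure", not failure.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('hello')        // => false: no consumer yet, chunk buffered
+    //   mp.on('data', () => {})  // adding a consumer starts the flow
+    //   mp.write('world')        // => true: emitted immediately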
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
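+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): read(n) consumes exactly n bytes/characters, returning null when
+    // the buffer holds fewer than n.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('abcdef')
+    //   mp.read(4)  // => 'abcd'
+    //   mp.read(9)  // => null ('ef' remains, which is fewer than 9)
+    //   mp.read()   // => 'ef'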
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
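+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): piping starts the flow; proxyErrors forwards this stream's
+    // 'error' events to the destination. The output path is hypothetical.
+    //
+    //   import { createWriteStream } from 'fs'
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.pipe(createWriteStream('./out.txt'), { proxyErrors: true })
+    //   mp.end('written through the pipe')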
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
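+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): per the deviations documented above, a 'data' handler resumes
+    // the flow, and an 'end' handler added after the fact still fires.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('hi')
+    //   mp.on('data', c => console.log('data:', c)) // starts flow, logs 'hi'
+    //   mp.on('end', () => console.log('end'))      // emitted immediately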
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
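+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): collect(), concat(), and promise() all trigger the flow and
+    // settle when the stream ends.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('hello, world')
+    //   mp.concat().then(s => console.log(s)) // => 'hello, world'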
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
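+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): inside an async function, `for await` consumes chunks until 'end'.
+    //
+    //   const mp = new Minipass({ objectMode: true })
+    //   mp.write(1); mp.write(2); mp.end(3)
+    //   for await (const item of mp) console.log(item) // 1, 2, 3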
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
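+    // Illustrative sketch (comment added for clarity; not in the published
+    // file): synchronous `for of` drains only what is already buffered,
+    // then stops.
+    //
+    //   const mp = new Minipass({ objectMode: true })
+    //   mp.write('a'); mp.write('b')
+    //   for (const item of mp) console.log(item) // 'a', 'b'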
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return isStream;
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
new file mode 100644
index 00000000000000..3dbc1ca591c055
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json
new file mode 100644
index 00000000000000..6faaa247a5bc66
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "minipass",
+  "version": "7.0.3",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^20.1.2",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.13.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.3.0",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.3",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "sync-content": "^1.0.2",
+    "through2": "^2.0.3"
+  },
+  "repository": "https://github.com/isaacs/minipass",
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  }
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json
new file mode 100644
index 00000000000000..ab58cb8b7c50f4
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/LICENSE
similarity index 100%
rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/LICENSE
rename to deps/npm/node_modules/node-gyp/node_modules/lru-cache/LICENSE
diff --git a/deps/npm/node_modules/lru-cache/index.js b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/index.js
similarity index 100%
rename from deps/npm/node_modules/lru-cache/index.js
rename to deps/npm/node_modules/node-gyp/node_modules/lru-cache/index.js
diff --git a/deps/npm/node_modules/lru-cache/index.mjs b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/index.mjs
similarity index 100%
rename from deps/npm/node_modules/lru-cache/index.mjs
rename to deps/npm/node_modules/node-gyp/node_modules/lru-cache/index.mjs
diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/package.json b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/package.json
similarity index 55%
rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/package.json
rename to deps/npm/node_modules/node-gyp/node_modules/lru-cache/package.json
index 69a20582ff9b6f..9684991727e7a2 100644
--- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/package.json
+++ b/deps/npm/node_modules/node-gyp/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
 {
   "name": "lru-cache",
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "9.1.1",
+  "version": "7.18.3",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -11,47 +11,34 @@
   "sideEffects": false,
   "scripts": {
     "build": "npm run prepare",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
     "pretest": "npm run prepare",
     "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "size": "size-limit",
+    "test": "tap",
+    "snap": "tap",
     "preversion": "npm test",
     "postversion": "npm publish",
     "prepublishOnly": "git push origin --follow-tags",
     "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
+    "typedoc": "typedoc ./index.d.ts"
   },
-  "main": "./dist/cjs/index.js",
-  "module": "./dist/mjs/index.js",
+  "type": "commonjs",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
   "exports": {
-    "./min": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.min.js"
-      },
-      "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.min.js"
-      }
-    },
     ".": {
       "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
       },
       "require": {
-        "types": "./dist/cjs/index.d.ts",
-        "default": "./dist/cjs/index.js"
+        "types": "./index.d.ts",
+        "default": "./index.js"
       }
-    }
+    },
+    "./package.json": "./package.json"
   },
   "repository": "git://github.com/isaacs/node-lru-cache.git",
   "devDependencies": {
@@ -61,10 +48,7 @@
     "benchmark": "^2.1.4",
     "c8": "^7.11.2",
     "clock-mock": "^1.0.6",
-    "esbuild": "^0.17.11",
     "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
     "prettier": "^2.6.2",
     "size-limit": "^7.0.8",
     "tap": "^16.3.4",
@@ -75,10 +59,12 @@
   },
   "license": "ISC",
   "files": [
-    "dist"
+    "index.js",
+    "index.mjs",
+    "index.d.ts"
   ],
   "engines": {
-    "node": "14 || >=16.14"
+    "node": ">=12"
   },
   "prettier": {
     "semi": false,
@@ -92,18 +78,19 @@
     "endOfLine": "lf"
   },
   "tap": {
-    "coverage": false,
+    "nyc-arg": [
+      "--include=index.js"
+    ],
     "node-arg": [
       "--expose-gc",
-      "--no-warnings",
-      "--loader",
-      "ts-node/esm"
+      "--require",
+      "ts-node/register"
     ],
     "ts": false
   },
   "size-limit": [
     {
-      "path": "./dist/mjs/index.js"
+      "path": "./index.js"
     }
   ]
 }
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
new file mode 100644
index 00000000000000..1808eb2844231c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright 2017-2022 (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/make-fetch-happen/lib/agent.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/lib/agent.js
rename to deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js
new file mode 100644
index 00000000000000..45141095074ecb
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js
@@ -0,0 +1,469 @@
+const { Request, Response } = require('minipass-fetch')
+const { Minipass } = require('minipass')
+const MinipassFlush = require('minipass-flush')
+const cacache = require('cacache')
+const url = require('url')
+
+const CachingMinipassPipeline = require('../pipeline.js')
+const CachePolicy = require('./policy.js')
+const cacheKey = require('./key.js')
+const remote = require('../remote.js')
+
+const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
+
+// allow list for request headers that will be written to the cache index
+// note: we will also store any request headers
+// that are named in a response's vary header
+const KEEP_REQUEST_HEADERS = [
+  'accept-charset',
+  'accept-encoding',
+  'accept-language',
+  'accept',
+  'cache-control',
+]
+
+// allow list for response headers that will be written to the cache index
+// note: we must not store the real response's age header, or when we load
+// a cache policy based on the metadata it will think the cached response
+// is always stale
+const KEEP_RESPONSE_HEADERS = [
+  'cache-control',
+  'content-encoding',
+  'content-language',
+  'content-type',
+  'date',
+  'etag',
+  'expires',
+  'last-modified',
+  'link',
+  'location',
+  'pragma',
+  'vary',
+]
+
+// return an object containing all metadata to be written to the index
+const getMetadata = (request, response, options) => {
+  const metadata = {
+    time: Date.now(),
+    url: request.url,
+    reqHeaders: {},
+    resHeaders: {},
+
+    // options on which we must match the request and vary the response
+    options: {
+      compress: options.compress != null ? options.compress : request.compress,
+    },
+  }
+
+  // only save the status if it's not a 200 or 304
+  if (response.status !== 200 && response.status !== 304) {
+    metadata.status = response.status
+  }
+
+  for (const name of KEEP_REQUEST_HEADERS) {
+    if (request.headers.has(name)) {
+      metadata.reqHeaders[name] = request.headers.get(name)
+    }
+  }
+
+  // if the request's host header differs from the host in the url
+  // we need to keep it, otherwise it's just noise and we ignore it
+  const host = request.headers.get('host')
+  const parsedUrl = new url.URL(request.url)
+  if (host && parsedUrl.host !== host) {
+    metadata.reqHeaders.host = host
+  }
+
+  // if the response has a vary header, make sure
+  // we store the relevant request headers too
+  if (response.headers.has('vary')) {
+    const vary = response.headers.get('vary')
+    // a vary of "*" means every header causes a different response.
+    // in that scenario, we do not include any additional headers
+    // as the freshness check will always fail anyway and we don't
+    // want to bloat the cache indexes
+    if (vary !== '*') {
+      // copy any other request headers that will vary the response
+      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
+      for (const name of varyHeaders) {
+        if (request.headers.has(name)) {
+          metadata.reqHeaders[name] = request.headers.get(name)
+        }
+      }
+    }
+  }
+
+  for (const name of KEEP_RESPONSE_HEADERS) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  for (const name of options.cacheAdditionalHeaders) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  return metadata
+}
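+
+// a rough sketch (illustrative only, not executed) of the metadata shape the
+// function above produces for a GET of https://registry.example.com/abbrev
+// sent with `accept: application/json` (hypothetical URL and values):
+//   {
+//     time: 1690000000000,
+//     url: 'https://registry.example.com/abbrev',
+//     reqHeaders: { accept: 'application/json' },
+//     resHeaders: { 'content-type': 'application/json', etag: '"abc123"' },
+//     options: { compress: true },
+//   }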
+
+// symbols used to hide objects that may be lazily evaluated in a getter
+const _request = Symbol('request')
+const _response = Symbol('response')
+const _policy = Symbol('policy')
+
+class CacheEntry {
+  constructor ({ entry, request, response, options }) {
+    if (entry) {
+      this.key = entry.key
+      this.entry = entry
+      // previous versions of this module didn't write an explicit timestamp in
+      // the metadata, so fall back to the entry's timestamp. we can't use the
+      // entry timestamp to determine staleness because cacache will update it
+      // when it verifies its data
+      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
+    } else {
+      this.key = cacheKey(request)
+    }
+
+    this.options = options
+
+    // these properties are behind getters that lazily evaluate
+    this[_request] = request
+    this[_response] = response
+    this[_policy] = null
+  }
+
+  // returns a CacheEntry instance that satisfies the given request
+  // or undefined if no existing entry satisfies
+  static async find (request, options) {
+    try {
+      // compacts the index and returns an array of unique entries
+      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
+        const entryA = new CacheEntry({ entry: A, options })
+        const entryB = new CacheEntry({ entry: B, options })
+        return entryA.policy.satisfies(entryB.request)
+      }, {
+        validateEntry: (entry) => {
+          // clean out entries with a buggy content-encoding value
+          if (entry.metadata &&
+              entry.metadata.resHeaders &&
+              entry.metadata.resHeaders['content-encoding'] === null) {
+            return false
+          }
+
+          // if an integrity is null, it needs to have a status specified
+          if (entry.integrity === null) {
+            return !!(entry.metadata && entry.metadata.status)
+          }
+
+          return true
+        },
+      })
+    } catch (err) {
+      // if the compact request fails, ignore the error and return
+      return
+    }
+
+    // a cache mode of 'reload' means to behave as though we have no cache
+    // on the way to the network. return undefined to allow cacheFetch to
+    // create a brand new request no matter what.
+    if (options.cache === 'reload') {
+      return
+    }
+
+    // find the specific entry that satisfies the request
+    let match
+    for (const entry of matches) {
+      const _entry = new CacheEntry({
+        entry,
+        options,
+      })
+
+      if (_entry.policy.satisfies(request)) {
+        match = _entry
+        break
+      }
+    }
+
+    return match
+  }
+
+  // if the user made a PUT/POST/PATCH then we invalidate our
+  // cache for the same url by deleting the index entirely
+  static async invalidate (request, options) {
+    const key = cacheKey(request)
+    try {
+      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
+    } catch (err) {
+      // ignore errors
+    }
+  }
+
+  get request () {
+    if (!this[_request]) {
+      this[_request] = new Request(this.entry.metadata.url, {
+        method: 'GET',
+        headers: this.entry.metadata.reqHeaders,
+        ...this.entry.metadata.options,
+      })
+    }
+
+    return this[_request]
+  }
+
+  get response () {
+    if (!this[_response]) {
+      this[_response] = new Response(null, {
+        url: this.entry.metadata.url,
+        counter: this.options.counter,
+        status: this.entry.metadata.status || 200,
+        headers: {
+          ...this.entry.metadata.resHeaders,
+          'content-length': this.entry.size,
+        },
+      })
+    }
+
+    return this[_response]
+  }
+
+  get policy () {
+    if (!this[_policy]) {
+      this[_policy] = new CachePolicy({
+        entry: this.entry,
+        request: this.request,
+        response: this.response,
+        options: this.options,
+      })
+    }
+
+    return this[_policy]
+  }
+
+  // wraps the response in a pipeline that stores the data
+  // in the cache while the user consumes it
+  async store (status) {
+    // if the request wasn't a GET, we got a status other than 200, 301,
+    // or 308, or the CachePolicy forbids storage, append the cache
+    // status header and return the response untouched
+    if (
+      this.request.method !== 'GET' ||
+      ![200, 301, 308].includes(this.response.status) ||
+      !this.policy.storable()
+    ) {
+      this.response.headers.set('x-local-cache-status', 'skip')
+      return this.response
+    }
+
+    const size = this.response.headers.get('content-length')
+    const cacheOpts = {
+      algorithms: this.options.algorithms,
+      metadata: getMetadata(this.request, this.response, this.options),
+      size,
+      integrity: this.options.integrity,
+      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
+    }
+
+    let body = null
+    // we only set a body if the status is a 200; redirects are
+    // stored as metadata only
+    if (this.response.status === 200) {
+      let cacheWriteResolve, cacheWriteReject
+      const cacheWritePromise = new Promise((resolve, reject) => {
+        cacheWriteResolve = resolve
+        cacheWriteReject = reject
+      })
+
+      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
+        flush () {
+          return cacheWritePromise
+        },
+      }))
+      // this is always true: if we aren't reusing the integrity emitter from
+      // the remote fetch, we are using the one from cacache
+      body.hasIntegrityEmitter = true
+
+      const onResume = () => {
+        const tee = new Minipass()
+        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
+        // re-emit the integrity and size events on our new response body so they can be reused
+        cacheStream.on('integrity', i => body.emit('integrity', i))
+        cacheStream.on('size', s => body.emit('size', s))
+        // pipe one copy of the response body into the cache write stream
+        tee.pipe(cacheStream)
+        // TODO if the cache write fails, log a warning but return the response anyway
+        // eslint-disable-next-line promise/catch-or-return
+        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
+        body.unshift(tee)
+        body.unshift(this.response.body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+    } else {
+      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
+    }
+
+    // note: we do not set the x-local-cache-hash header because we do not know
+    // the hash value until after the write to the cache completes, which doesn't
+    // happen until after the response has been sent and it's too late to write
+    // the header anyway
+    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    this.response.headers.set('x-local-cache-mode', 'stream')
+    this.response.headers.set('x-local-cache-status', status)
+    this.response.headers.set('x-local-cache-time', new Date().toISOString())
+    const newResponse = new Response(body, {
+      url: this.response.url,
+      status: this.response.status,
+      headers: this.response.headers,
+      counter: this.options.counter,
+    })
+    return newResponse
+  }
+
+  // use the cached data to create a response and return it
+  async respond (method, options, status) {
+    let response
+    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
+      // if the request is a HEAD, or the response is a redirect,
+      // then the metadata in the entry already includes everything
+      // we need to build a response
+      response = this.response
+    } else {
+      // we're responding with a full cached response, so create a body
+      // that reads from cacache and attach it to a new Response
+      const body = new Minipass()
+      const headers = { ...this.policy.responseHeaders() }
+
+      const onResume = () => {
+        const cacheStream = cacache.get.stream.byDigest(
+          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+        )
+        cacheStream.on('error', async (err) => {
+          cacheStream.pause()
+          if (err.code === 'EINTEGRITY') {
+            await cacache.rm.content(
+              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+            )
+          }
+          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
+            await CacheEntry.invalidate(this.request, this.options)
+          }
+          body.emit('error', err)
+          cacheStream.resume()
+        })
+        // emit the integrity and size events based on our metadata so we're consistent
+        body.emit('integrity', this.entry.integrity)
+        body.emit('size', Number(headers['content-length']))
+        cacheStream.pipe(body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+      response = new Response(body, {
+        url: this.entry.metadata.url,
+        counter: options.counter,
+        status: 200,
+        headers,
+      })
+    }
+
+    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
+    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    response.headers.set('x-local-cache-mode', 'stream')
+    response.headers.set('x-local-cache-status', status)
+    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
+    return response
+  }
+
+  // use the provided request along with this cache entry to
+  // revalidate the stored response. returns a response, either
+  // from the cache or from the update
+  async revalidate (request, options) {
+    const revalidateRequest = new Request(request, {
+      headers: this.policy.revalidationHeaders(request),
+    })
+
+    try {
+      // NOTE: be sure to remove the headers property from the
+      // user supplied options, since we have already defined
+      // them on the new request object. if they're still in the
+      // options then those will overwrite the ones from the policy
+      var response = await remote(revalidateRequest, {
+        ...options,
+        headers: undefined,
+      })
+    } catch (err) {
+      // if the network fetch fails, return the stale
+      // cached response unless it has a cache-control
+      // of 'must-revalidate'
+      if (!this.policy.mustRevalidate) {
+        return this.respond(request.method, options, 'stale')
+      }
+
+      throw err
+    }
+
+    if (this.policy.revalidated(revalidateRequest, response)) {
+      // we got a 304, write a new index to the cache and respond from cache
+      const metadata = getMetadata(request, response, options)
+      // 304 responses do not include headers that are specific to the response data
+      // since they do not include a body, so we copy values for headers that were
+      // in the old cache entry to the new one, if the new metadata does not already
+      // include that header
+      for (const name of KEEP_RESPONSE_HEADERS) {
+        if (
+          !hasOwnProperty(metadata.resHeaders, name) &&
+          hasOwnProperty(this.entry.metadata.resHeaders, name)
+        ) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+      }
+
+      for (const name of options.cacheAdditionalHeaders) {
+        const inMeta = hasOwnProperty(metadata.resHeaders, name)
+        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
+        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
+
+        // if the header is in the existing entry, but it is not in the metadata
+        // then we need to write it to the metadata as this will refresh the on-disk cache
+        if (!inMeta && inEntry) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+        // if the header is in the metadata, but not in the policy, then we need to set
+        // it in the policy so that it's included in the immediate response. future
+        // responses will load a new cache entry, so we don't need to change that
+        if (!inPolicy && inMeta) {
+          this.policy.response.headers[name] = metadata.resHeaders[name]
+        }
+      }
+
+      try {
+        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
+          size: this.entry.size,
+          metadata,
+        })
+      } catch (err) {
+        // if updating the cache index fails, we ignore it and
+        // respond anyway
+      }
+      return this.respond(request.method, options, 'revalidated')
+    }
+
+    // if we got a modified response, create a new entry based on it
+    const newEntry = new CacheEntry({
+      request,
+      response,
+      options,
+    })
+
+    // respond with the new entry while writing it to the cache
+    return newEntry.store('updated')
+  }
+}
+
+module.exports = CacheEntry
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js
new file mode 100644
index 00000000000000..67a66573bebe66
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js
@@ -0,0 +1,11 @@
+class NotCachedError extends Error {
+  constructor (url) {
+    /* eslint-disable-next-line max-len */
+    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
+    this.code = 'ENOTCACHED'
+  }
+}
+
+module.exports = {
+  NotCachedError,
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js
new file mode 100644
index 00000000000000..0de49d23fb9336
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js
@@ -0,0 +1,49 @@
+const { NotCachedError } = require('./errors.js')
+const CacheEntry = require('./entry.js')
+const remote = require('../remote.js')
+
+// do whatever is necessary to get a Response and return it
+const cacheFetch = async (request, options) => {
+  // try to find a cached entry that satisfies this request
+  const entry = await CacheEntry.find(request, options)
+  if (!entry) {
+    // no cached result, if the cache mode is 'only-if-cached' that's a failure
+    if (options.cache === 'only-if-cached') {
+      throw new NotCachedError(request.url)
+    }
+
+    // otherwise, we make a request, store it and return it
+    const response = await remote(request, options)
+    const newEntry = new CacheEntry({ request, response, options })
+    return newEntry.store('miss')
+  }
+
+  // we have a cached response that satisfies this request, however if the cache
+  // mode is 'no-cache' then we send the revalidation request no matter what
+  if (options.cache === 'no-cache') {
+    return entry.revalidate(request, options)
+  }
+
+  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
+  // 'only-if-cached' we can respond with the cached entry. set the status
+  // based on the result of needsRevalidation and respond
+  const _needsRevalidation = entry.policy.needsRevalidation(request)
+  if (options.cache === 'force-cache' ||
+      options.cache === 'only-if-cached' ||
+      !_needsRevalidation) {
+    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
+  }
+
+  // if we got here, the cache entry is stale so revalidate it
+  return entry.revalidate(request, options)
+}
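+
+// rough decision table for cacheFetch above (illustrative summary):
+//   no entry + cache: 'only-if-cached'          -> throws NotCachedError
+//   no entry                                    -> remote fetch, store('miss')
+//   entry + cache: 'no-cache'                   -> revalidate
+//   fresh entry (or force-cache/only-if-cached) -> respond('hit' or 'stale')
+//   stale entry                                 -> revalidate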
+
+cacheFetch.invalidate = async (request, options) => {
+  if (!options.cachePath) {
+    return
+  }
+
+  return CacheEntry.invalidate(request, options)
+}
+
+module.exports = cacheFetch
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js
new file mode 100644
index 00000000000000..f7684d562b7fae
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js
@@ -0,0 +1,17 @@
+const { URL, format } = require('url')
+
+// options passed to url.format() when generating a key
+const formatOptions = {
+  auth: false,
+  fragment: false,
+  search: true,
+  unicode: false,
+}
+
+// returns a string to be used as the cache key for the Request
+const cacheKey = (request) => {
+  const parsed = new URL(request.url)
+  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
+}
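+
+// illustrative example (not executed): auth and fragment are dropped while
+// the query string is kept, so a request for
+//   https://user:pass@registry.example.com/abbrev?write=true#top
+// produces the key
+//   make-fetch-happen:request-cache:https://registry.example.com/abbrev?write=true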
+
+module.exports = cacheKey
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js
new file mode 100644
index 00000000000000..ada3c8600dae92
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js
@@ -0,0 +1,161 @@
+const CacheSemantics = require('http-cache-semantics')
+const Negotiator = require('negotiator')
+const ssri = require('ssri')
+
+// options passed to http-cache-semantics constructor
+const policyOptions = {
+  shared: false,
+  ignoreCargoCult: true,
+}
+
+// a fake empty response, used when only testing the
+// request for storability
+const emptyResponse = { status: 200, headers: {} }
+
+// returns a plain object representation of the Request
+const requestObject = (request) => {
+  const _obj = {
+    method: request.method,
+    url: request.url,
+    headers: {},
+    compress: request.compress,
+  }
+
+  request.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+// returns a plain object representation of the Response
+const responseObject = (response) => {
+  const _obj = {
+    status: response.status,
+    headers: {},
+  }
+
+  response.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+class CachePolicy {
+  constructor ({ entry, request, response, options }) {
+    this.entry = entry
+    this.request = requestObject(request)
+    this.response = responseObject(response)
+    this.options = options
+    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
+
+    if (this.entry) {
+      // if we have an entry, copy the timestamp to the _responseTime
+      // this is necessary because the CacheSemantics constructor forces
+      // the value to Date.now() which means a policy created from a
+      // cache entry is likely to always identify itself as stale
+      this.policy._responseTime = this.entry.metadata.time
+    }
+  }
+
+  // static method to quickly determine if a request alone is storable
+  static storable (request, options) {
+    // no cachePath means no caching
+    if (!options.cachePath) {
+      return false
+    }
+
+    // user explicitly asked not to cache
+    if (options.cache === 'no-store') {
+      return false
+    }
+
+    // we only cache GET and HEAD requests
+    if (!['GET', 'HEAD'].includes(request.method)) {
+      return false
+    }
+
+    // otherwise, let http-cache-semantics make the decision
+    // based on the request's headers
+    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
+    return policy.storable()
+  }
+
+  // returns true if the policy satisfies the request
+  satisfies (request) {
+    const _req = requestObject(request)
+    if (this.request.headers.host !== _req.headers.host) {
+      return false
+    }
+
+    if (this.request.compress !== _req.compress) {
+      return false
+    }
+
+    const negotiatorA = new Negotiator(this.request)
+    const negotiatorB = new Negotiator(_req)
+
+    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
+      return false
+    }
+
+    if (this.options.integrity) {
+      return ssri.parse(this.options.integrity).match(this.entry.integrity)
+    }
+
+    return true
+  }
+
+  // returns true if the request and response allow caching
+  storable () {
+    return this.policy.storable()
+  }
+
+  // NOTE: this is a hack to avoid parsing the cache-control
+  // header ourselves, it returns true if the response's
+  // cache-control contains must-revalidate
+  get mustRevalidate () {
+    return !!this.policy._rescc['must-revalidate']
+  }
+
+  // returns true if the cached response requires revalidation
+  // for the given request
+  needsRevalidation (request) {
+    const _req = requestObject(request)
+    // force method to GET because we only cache GETs
+    // but can serve a HEAD from a cached GET
+    _req.method = 'GET'
+    return !this.policy.satisfiesWithoutRevalidation(_req)
+  }
+
+  responseHeaders () {
+    return this.policy.responseHeaders()
+  }
+
+  // returns a new object containing the appropriate headers
+  // to send a revalidation request
+  revalidationHeaders (request) {
+    const _req = requestObject(request)
+    return this.policy.revalidationHeaders(_req)
+  }
+
+  // returns true if the request/response was revalidated
+  // successfully. returns false if a new response was received
+  revalidated (request, response) {
+    const _req = requestObject(request)
+    const _res = responseObject(response)
+    const policy = this.policy.revalidatedPolicy(_req, _res)
+    return !policy.modified
+  }
+}
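+
+// illustrative examples (not executed) of the static storable() check above,
+// with hypothetical request objects:
+//   CachePolicy.storable(get, {})                                     // false: no cachePath
+//   CachePolicy.storable(get, { cachePath: '/c', cache: 'no-store' }) // false: opted out
+//   CachePolicy.storable(post, { cachePath: '/c' })                   // false: not GET/HEAD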
+
+module.exports = CachePolicy
diff --git a/deps/npm/node_modules/make-fetch-happen/lib/dns.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/lib/dns.js
rename to deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js
new file mode 100644
index 00000000000000..233ba67e165502
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js
@@ -0,0 +1,118 @@
+'use strict'
+
+const { FetchError, Request, isRedirect } = require('minipass-fetch')
+const url = require('url')
+
+const CachePolicy = require('./cache/policy.js')
+const cache = require('./cache/index.js')
+const remote = require('./remote.js')
+
+// given a Request, a Response and user options
+// return true if the response is a redirect that
+// can be followed. we throw errors that will result
+// in the fetch being rejected if the redirect is
+// possible but invalid for some reason
+const canFollowRedirect = (request, response, options) => {
+  if (!isRedirect(response.status)) {
+    return false
+  }
+
+  if (options.redirect === 'manual') {
+    return false
+  }
+
+  if (options.redirect === 'error') {
+    throw new FetchError(`redirect mode is set to error: ${request.url}`,
+      'no-redirect', { code: 'ENOREDIRECT' })
+  }
+
+  if (!response.headers.has('location')) {
+    throw new FetchError(`redirect location header missing for: ${request.url}`,
+      'no-location', { code: 'EINVALIDREDIRECT' })
+  }
+
+  if (request.counter >= request.follow) {
+    throw new FetchError(`maximum redirect reached at: ${request.url}`,
+      'max-redirect', { code: 'EMAXREDIRECT' })
+  }
+
+  return true
+}
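+
+// illustrative behavior of the check above (not executed): with
+// { redirect: 'manual' } a 301 response is returned to the caller untouched;
+// with { redirect: 'error' } the same response rejects with ENOREDIRECT; the
+// default 'follow' mode falls through to getRedirect() below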
+
+// given a Request, a Response, and the user's options return an object
+// with a new Request and a new options object that will be used for
+// following the redirect
+const getRedirect = (request, response, options) => {
+  const _opts = { ...options }
+  const location = response.headers.get('location')
+  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
+  // Comment below is used under the following license:
+  /**
+   * @license
+   * Copyright (c) 2010-2012 Mikeal Rogers
+   * Licensed under the Apache License, Version 2.0 (the "License");
+   * you may not use this file except in compliance with the License.
+   * You may obtain a copy of the License at
+   * http://www.apache.org/licenses/LICENSE-2.0
+   * Unless required by applicable law or agreed to in writing,
+   * software distributed under the License is distributed on an "AS
+   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+   * express or implied. See the License for the specific language
+   * governing permissions and limitations under the License.
+   */
+
+  // Remove authorization if changing hostnames (but not if just
+  // changing ports or protocols).  This matches the behavior of request:
+  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
+  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
+    request.headers.delete('authorization')
+    request.headers.delete('cookie')
+  }
+
+  // for POST request with 301/302 response, or any request with 303 response,
+  // use GET when following redirect
+  if (
+    response.status === 303 ||
+    (request.method === 'POST' && [301, 302].includes(response.status))
+  ) {
+    _opts.method = 'GET'
+    _opts.body = null
+    request.headers.delete('content-length')
+  }
+
+  _opts.headers = {}
+  request.headers.forEach((value, key) => {
+    _opts.headers[key] = value
+  })
+
+  _opts.counter = ++request.counter
+  const redirectReq = new Request(url.format(redirectUrl), _opts)
+  return {
+    request: redirectReq,
+    options: _opts,
+  }
+}
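+
+// illustrative example (not executed): a 301 from https://a.example.com/pkg
+// to https://b.example.com/pkg.tgz drops the authorization and cookie headers
+// on the follow-up request, while a redirect that only changes the path,
+// port, or protocol keeps them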
+
+const fetch = async (request, options) => {
+  const response = CachePolicy.storable(request, options)
+    ? await cache(request, options)
+    : await remote(request, options)
+
+  // if the request wasn't a GET or HEAD, and the response
+  // status is between 200 and 399 inclusive, invalidate the
+  // request url
+  if (!['GET', 'HEAD'].includes(request.method) &&
+      response.status >= 200 &&
+      response.status <= 399) {
+    await cache.invalidate(request, options)
+  }
+
+  if (!canFollowRedirect(request, response, options)) {
+    return response
+  }
+
+  const redirect = getRedirect(request, response, options)
+  return fetch(redirect.request, redirect.options)
+}
+
+module.exports = fetch
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js
new file mode 100644
index 00000000000000..2f12e8e1b61131
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js
@@ -0,0 +1,41 @@
+const { FetchError, Headers, Request, Response } = require('minipass-fetch')
+
+const configureOptions = require('./options.js')
+const fetch = require('./fetch.js')
+
+const makeFetchHappen = (url, opts) => {
+  const options = configureOptions(opts)
+
+  const request = new Request(url, options)
+  return fetch(request, options)
+}
+
+makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
+  if (typeof defaultUrl === 'object') {
+    defaultOptions = defaultUrl
+    defaultUrl = null
+  }
+
+  const defaultedFetch = (url, options = {}) => {
+    const finalUrl = url || defaultUrl
+    const finalOptions = {
+      ...defaultOptions,
+      ...options,
+      headers: {
+        ...defaultOptions.headers,
+        ...options.headers,
+      },
+    }
+    return wrappedFetch(finalUrl, finalOptions)
+  }
+
+  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
+    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
+  return defaultedFetch
+}
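+
+// illustrative usage (hypothetical cache path, not part of this module):
+//   const cachingFetch = makeFetchHappen.defaults({ cachePath: '/tmp/mfh-cache' })
+//   const res = await cachingFetch('https://registry.example.com/abbrev')
+//   res.headers.get('x-local-cache-status') // 'miss', 'hit', 'stale', ...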
+
+module.exports = makeFetchHappen
+module.exports.FetchError = FetchError
+module.exports.Headers = Headers
+module.exports.Request = Request
+module.exports.Response = Response
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js
new file mode 100644
index 00000000000000..f77511279f831d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js
@@ -0,0 +1,54 @@
+const dns = require('dns')
+
+const conditionalHeaders = [
+  'if-modified-since',
+  'if-none-match',
+  'if-unmodified-since',
+  'if-match',
+  'if-range',
+]
+
+const configureOptions = (opts) => {
+  const { strictSSL, ...options } = { ...opts }
+  options.method = options.method ? options.method.toUpperCase() : 'GET'
+  options.rejectUnauthorized = strictSSL !== false
+
+  if (!options.retry) {
+    options.retry = { retries: 0 }
+  } else if (typeof options.retry === 'string') {
+    const retries = parseInt(options.retry, 10)
+    if (isFinite(retries)) {
+      options.retry = { retries }
+    } else {
+      options.retry = { retries: 0 }
+    }
+  } else if (typeof options.retry === 'number') {
+    options.retry = { retries: options.retry }
+  } else {
+    options.retry = { retries: 0, ...options.retry }
+  }
+
+  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
+
+  options.cache = options.cache || 'default'
+  if (options.cache === 'default') {
+    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
+      return conditionalHeaders.includes(name.toLowerCase())
+    })
+    if (hasConditionalHeader) {
+      options.cache = 'no-store'
+    }
+  }
+
+  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
+
+  // cacheManager is deprecated, but if it's set and
+  // cachePath is not we should copy it to the new field
+  if (options.cacheManager && !options.cachePath) {
+    options.cachePath = options.cacheManager
+  }
+
+  return options
+}
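+
+// illustrative results (not executed) of the retry normalization above:
+//   configureOptions({ retry: 2 }).retry       // { retries: 2 }
+//   configureOptions({ retry: '3' }).retry     // { retries: 3 }
+//   configureOptions({ retry: 'never' }).retry // { retries: 0 }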
+
+module.exports = configureOptions
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js
new file mode 100644
index 00000000000000..b1d221b2d0ce31
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const MinipassPipeline = require('minipass-pipeline')
+
+class CachingMinipassPipeline extends MinipassPipeline {
+  #events = []
+  #data = new Map()
+
+  constructor (opts, ...streams) {
+    // CRITICAL: do NOT pass the streams to the call to super(), this will start
+    // the flow of data and potentially cause the events we need to catch to emit
+    // before we've finished our own setup. instead we call super() with no args,
+    // finish our setup, and then push the streams into ourselves to start the
+    // data flow
+    super()
+    this.#events = opts.events
+
+    /* istanbul ignore next - coverage disabled because this is pointless to test here */
+    if (streams.length) {
+      this.push(...streams)
+    }
+  }
+
+  on (event, handler) {
+    if (this.#events.includes(event) && this.#data.has(event)) {
+      return handler(...this.#data.get(event))
+    }
+
+    return super.on(event, handler)
+  }
+
+  emit (event, ...data) {
+    if (this.#events.includes(event)) {
+      this.#data.set(event, data)
+    }
+
+    return super.emit(event, ...data)
+  }
+}
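+
+// illustrative example (not executed): a listener attached after a tracked
+// event has already fired is invoked immediately with the recorded arguments:
+//   const pipeline = new CachingMinipassPipeline({ events: ['integrity'] }, src)
+//   pipeline.emit('integrity', sri)        // arguments recorded in #data
+//   pipeline.on('integrity', i => use(i))  // called right away with sri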
+
+module.exports = CachingMinipassPipeline
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js
new file mode 100644
index 00000000000000..bdbcc79cad908d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js
@@ -0,0 +1,121 @@
+const { Minipass } = require('minipass')
+const fetch = require('minipass-fetch')
+const promiseRetry = require('promise-retry')
+const ssri = require('ssri')
+
+const CachingMinipassPipeline = require('./pipeline.js')
+const getAgent = require('./agent.js')
+const pkg = require('../package.json')
+
+const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
+
+const RETRY_ERRORS = [
+  'ECONNRESET', // remote socket closed on us
+  'ECONNREFUSED', // remote host refused to open connection
+  'EADDRINUSE', // failed to bind to a local port (proxy?)
+  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
+  'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive
+  // Known codes we do NOT retry on:
+  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
+]
+
+const RETRY_TYPES = [
+  'request-timeout',
+]
+
+// make a request directly to the remote source,
+// retrying certain classes of errors as well as
+// following redirects (through the cache if necessary)
+// and verifying response integrity
+const remoteFetch = (request, options) => {
+  const agent = getAgent(request.url, options)
+  if (!request.headers.has('connection')) {
+    request.headers.set('connection', agent ? 'keep-alive' : 'close')
+  }
+
+  if (!request.headers.has('user-agent')) {
+    request.headers.set('user-agent', USER_AGENT)
+  }
+
+  // keep our own options since we're overriding the agent
+  // and the redirect mode
+  const _opts = {
+    ...options,
+    agent,
+    redirect: 'manual',
+  }
+
+  return promiseRetry(async (retryHandler, attemptNum) => {
+    const req = new fetch.Request(request, _opts)
+    try {
+      let res = await fetch(req, _opts)
+      if (_opts.integrity && res.status === 200) {
+        // we got a 200 response and the user has specified an expected
+        // integrity value, so wrap the response in an ssri stream to verify it
+        const integrityStream = ssri.integrityStream({
+          algorithms: _opts.algorithms,
+          integrity: _opts.integrity,
+          size: _opts.size,
+        })
+        const pipeline = new CachingMinipassPipeline({
+          events: ['integrity', 'size'],
+        }, res.body, integrityStream)
+        // we also propagate the integrity and size events out to the pipeline so we can use
+        // this new response body as an integrityEmitter for cacache
+        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
+        integrityStream.on('size', s => pipeline.emit('size', s))
+        res = new fetch.Response(pipeline, res)
+        // set an explicit flag so we know if our response body will emit integrity and size
+        res.body.hasIntegrityEmitter = true
+      }
+
+      res.headers.set('x-fetch-attempts', attemptNum)
+
+      // do not retry POST requests, or requests with a streaming body
+      // do retry requests with a 408, 420, 429 or 500+ status in the response
+      const isStream = Minipass.isStream(req.body)
+      const isRetriable = req.method !== 'POST' &&
+          !isStream &&
+          ([408, 420, 429].includes(res.status) || res.status >= 500)
+
+      if (isRetriable) {
+        if (typeof options.onRetry === 'function') {
+          options.onRetry(res)
+        }
+
+        return retryHandler(res)
+      }
+
+      return res
+    } catch (err) {
+      const code = (err.code === 'EPROMISERETRY')
+        ? err.retried.code
+        : err.code
+
+      // err.retried will be the thing that was thrown from above
+      // if it's a response, we just got a bad status code and we
+      // can re-throw to allow the retry
+      const isRetryError = err.retried instanceof fetch.Response ||
+        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
+
+      if (req.method === 'POST' || isRetryError) {
+        throw err
+      }
+
+      if (typeof options.onRetry === 'function') {
+        options.onRetry(err)
+      }
+
+      return retryHandler(err)
+    }
+  }, options.retry).catch((err) => {
+    // don't reject for http errors, just return them
+    if (err.status >= 400 && err.type !== 'system') {
+      return err
+    }
+
+    throw err
+  })
+}
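+
+// illustrative note (not executed): with options.retry = { retries: 2 }, an
+// ECONNRESET on the first attempt triggers up to two further attempts, and a
+// successful response reports the attempt count in its 'x-fetch-attempts'
+// header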
+
+module.exports = remoteFetch
diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
new file mode 100644
index 00000000000000..fd415dc9966faa
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
@@ -0,0 +1,78 @@
+{
+  "name": "make-fetch-happen",
+  "version": "11.1.1",
+  "description": "Opinionated, caching, retrying fetch client",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint",
+    "lint": "eslint \"**/*.js\"",
+    "lintfix": "npm run lint -- --fix",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/make-fetch-happen.git"
+  },
+  "keywords": [
+    "http",
+    "request",
+    "fetch",
+    "mean girls",
+    "caching",
+    "cache",
+    "subresource integrity"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "agentkeepalive": "^4.2.1",
+    "cacache": "^17.0.0",
+    "http-cache-semantics": "^4.1.1",
+    "http-proxy-agent": "^5.0.0",
+    "https-proxy-agent": "^5.0.0",
+    "is-lambda": "^1.0.1",
+    "lru-cache": "^7.7.1",
+    "minipass": "^5.0.0",
+    "minipass-fetch": "^3.0.0",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "negotiator": "^0.6.3",
+    "promise-retry": "^2.0.1",
+    "socks-proxy-agent": "^7.0.0",
+    "ssri": "^10.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.14.1",
+    "nock": "^13.2.4",
+    "safe-buffer": "^5.2.1",
+    "standard-version": "^9.3.2",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "tap": {
+    "color": 1,
+    "files": "test/*.js",
+    "check-coverage": true,
+    "timeout": 60,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.14.1",
+    "publish": "true"
+  }
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/minipass/LICENSE
new file mode 100644
index 00000000000000..97f8e32ed82e4c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/minipass/index.js b/deps/npm/node_modules/node-gyp/node_modules/minipass/index.js
similarity index 100%
rename from deps/npm/node_modules/minipass/index.js
rename to deps/npm/node_modules/node-gyp/node_modules/minipass/index.js
diff --git a/deps/npm/node_modules/minipass/index.mjs b/deps/npm/node_modules/node-gyp/node_modules/minipass/index.mjs
similarity index 99%
rename from deps/npm/node_modules/minipass/index.mjs
rename to deps/npm/node_modules/node-gyp/node_modules/minipass/index.mjs
index d1be109c9fc063..89b3fbf1a4d445 100644
--- a/deps/npm/node_modules/minipass/index.mjs
+++ b/deps/npm/node_modules/node-gyp/node_modules/minipass/index.mjs
@@ -698,4 +698,3 @@ export class Minipass extends Stream {
     )
   }
 }
-
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass/package.json b/deps/npm/node_modules/node-gyp/node_modules/minipass/package.json
new file mode 100644
index 00000000000000..0e20e988047f23
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minipass/package.json
@@ -0,0 +1,76 @@
+{
+  "name": "minipass",
+  "version": "5.0.0",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
+      },
+      "require": {
+        "types": "./index.d.ts",
+        "default": "./index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "devDependencies": {
+    "@types/node": "^17.0.41",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "prettier": "^2.6.2",
+    "tap": "^16.2.0",
+    "through2": "^2.0.3",
+    "ts-node": "^10.8.1",
+    "typedoc": "^0.23.24",
+    "typescript": "^4.7.3"
+  },
+  "scripts": {
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "snap": "tap",
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --follow-tags",
+    "typedoc": "typedoc ./index.d.ts",
+    "format": "prettier --write . --loglevel warn"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minipass.git"
+  },
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "files": [
+    "index.d.ts",
+    "index.js",
+    "index.mjs"
+  ],
+  "tap": {
+    "check-coverage": true
+  },
+  "engines": {
+    "node": ">=8"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  }
+}
diff --git a/deps/npm/node_modules/normalize-package-data/package.json b/deps/npm/node_modules/normalize-package-data/package.json
index ec2773bfbe6bf8..48d2371d4a66b5 100644
--- a/deps/npm/node_modules/normalize-package-data/package.json
+++ b/deps/npm/node_modules/normalize-package-data/package.json
@@ -1,6 +1,6 @@
 {
   "name": "normalize-package-data",
-  "version": "5.0.0",
+  "version": "6.0.0",
   "author": "GitHub Inc.",
   "description": "Normalizes data that can be found in package.json files.",
   "license": "BSD-2-Clause",
@@ -21,14 +21,14 @@
     "template-oss-apply": "template-oss-apply --force"
   },
   "dependencies": {
-    "hosted-git-info": "^6.0.0",
+    "hosted-git-info": "^7.0.0",
     "is-core-module": "^2.8.1",
     "semver": "^7.3.5",
     "validate-npm-package-license": "^3.0.4"
   },
   "devDependencies": {
-    "@npmcli/eslint-config": "^3.0.1",
-    "@npmcli/template-oss": "4.5.1",
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "files": [
@@ -36,11 +36,18 @@
     "lib/"
   ],
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.5.1"
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   },
   "tap": {
     "branches": 86,
diff --git a/deps/npm/node_modules/npm-install-checks/lib/index.js b/deps/npm/node_modules/npm-install-checks/lib/index.js
index fa5f593aaac647..f0ba2c07ad0812 100644
--- a/deps/npm/node_modules/npm-install-checks/lib/index.js
+++ b/deps/npm/node_modules/npm-install-checks/lib/index.js
@@ -22,13 +22,13 @@ const checkEngine = (target, npmVer, nodeVer, force = false) => {
 
 const isMusl = (file) => file.includes('libc.musl-') || file.includes('ld-musl-')
 
-const checkPlatform = (target, force = false) => {
+const checkPlatform = (target, force = false, environment = {}) => {
   if (force) {
     return
   }
 
-  const platform = process.platform
-  const arch = process.arch
+  const platform = environment.os || process.platform
+  const arch = environment.cpu || process.arch
   const osOk = target.os ? checkList(platform, target.os) : true
   const cpuOk = target.cpu ? checkList(arch, target.cpu) : true
 
diff --git a/deps/npm/node_modules/npm-install-checks/package.json b/deps/npm/node_modules/npm-install-checks/package.json
index 192cf68837146f..50378808d75d08 100644
--- a/deps/npm/node_modules/npm-install-checks/package.json
+++ b/deps/npm/node_modules/npm-install-checks/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-install-checks",
-  "version": "6.1.1",
+  "version": "6.2.0",
   "description": "Check the engines and platform fields in package.json",
   "main": "lib/index.js",
   "dependencies": {
@@ -8,7 +8,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.13.0",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "scripts": {
@@ -39,7 +39,7 @@
   "author": "GitHub Inc.",
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.13.0",
+    "version": "4.18.0",
     "publish": "true"
   },
   "tap": {
diff --git a/deps/npm/node_modules/npm-package-arg/lib/npa.js b/deps/npm/node_modules/npm-package-arg/lib/npa.js
index 36bd18cd9f9a6e..f5ede2326e7b47 100644
--- a/deps/npm/node_modules/npm-package-arg/lib/npa.js
+++ b/deps/npm/node_modules/npm-package-arg/lib/npa.js
@@ -257,40 +257,23 @@ function fromFile (res, where) {
     })
   }
 
-  // environment switch for testing
-  if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') {
-    // XXX backwards compatibility lack of compliance with 8909
-    // Remove when we want a breaking change to come into RFC compliance.
-    if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-      const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // turn file:/../foo into file:../foo
-    // for 1, 2 or 3 leading slashes since we attempted
-    // in the previous step to make it a file protocol url with a leading slash
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
-      const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
-      resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
-      specUrl = new url.URL(rawSpec)
-      rawNoPrefix = rawSpec.replace(/^file:/, '')
-    }
-    // XXX end 8909 violation backwards compatibility section
-  }
-
-  // file:foo - relative url to ./foo
-  // file:/foo - absolute path /foo
-  // file:///foo - absolute path to /foo, no authority host
-  // file://localhost/foo - absolute path to /foo, on localhost
-  // file://foo - absolute path to / on foo host (error!)
+  // XXX backwards compatibility lack of compliance with RFC 8909
   if (resolvedUrl.host && resolvedUrl.host !== 'localhost') {
-    const msg = `Invalid file: URL, must be absolute if // present`
-    throw Object.assign(new Error(msg), {
-      raw: res.rawSpec,
-      parsed: resolvedUrl,
-    })
+    const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///')
+    resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawSpec)
+    rawNoPrefix = rawSpec.replace(/^file:/, '')
+  }
+  // turn file:/../foo into file:../foo
+  // for 1, 2 or 3 leading slashes since we attempted
+  // in the previous step to make it a file protocol url with a leading slash
+  if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) {
+    const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:')
+    resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`)
+    specUrl = new url.URL(rawSpec)
+    rawNoPrefix = rawSpec.replace(/^file:/, '')
   }
+  // XXX end RFC 8909 violation backwards compatibility section
 
   // turn /C:/blah into just C:/blah on windows
   let specPath = decodeURIComponent(specUrl.pathname)
diff --git a/deps/npm/node_modules/npm-package-arg/package.json b/deps/npm/node_modules/npm-package-arg/package.json
index bb9e71b258a939..9ba1d135f3ebf0 100644
--- a/deps/npm/node_modules/npm-package-arg/package.json
+++ b/deps/npm/node_modules/npm-package-arg/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-package-arg",
-  "version": "10.1.0",
+  "version": "11.0.0",
   "description": "Parse the things that can be arguments to `npm install`",
   "main": "./lib/npa.js",
   "directories": {
@@ -11,14 +11,14 @@
     "lib/"
   ],
   "dependencies": {
-    "hosted-git-info": "^6.0.0",
+    "hosted-git-info": "^7.0.0",
     "proc-log": "^3.0.0",
     "semver": "^7.3.5",
     "validate-npm-package-name": "^5.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "scripts": {
@@ -43,7 +43,7 @@
   },
   "homepage": "https://github.com/npm/npm-package-arg",
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "tap": {
     "branches": 97,
@@ -54,6 +54,13 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
+    "version": "4.18.0",
+    "publish": true,
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
diff --git a/deps/npm/node_modules/npm-packlist/lib/index.js b/deps/npm/node_modules/npm-packlist/lib/index.js
index 887018bd7d424c..7577cba0b865d4 100644
--- a/deps/npm/node_modules/npm-packlist/lib/index.js
+++ b/deps/npm/node_modules/npm-packlist/lib/index.js
@@ -38,13 +38,22 @@ const defaults = [
 ]
 
 const strictDefaults = [
-  // these are forcibly included at all levels
+  // these are forcibly excluded
+  '/.git',
+]
+
+const allLevels = [
+  // these are included by default but can be excluded by the package.json files array
   '!/readme{,.*[^~$]}',
   '!/copying{,.*[^~$]}',
   '!/license{,.*[^~$]}',
   '!/licence{,.*[^~$]}',
-  // these are forcibly excluded
-  '/.git',
+]
+
+const rootOnly = [
+  /^!.*readme/i,
+  /^!.*copying/i,
+  /^!.*licen[sc]e/i,
 ]
 
 const normalizePath = (path) => path.split('\\').join('/')
@@ -132,6 +141,7 @@ class PackWalker extends IgnoreWalker {
       // known required files for this directory
       this.injectRules(strictRules, [
         ...strictDefaults,
+        ...allLevels,
         ...this.requiredFiles.map((file) => `!${file}`),
       ])
     }
@@ -284,6 +294,7 @@ class PackWalker extends IgnoreWalker {
     const ignores = []
     const strict = [
       ...strictDefaults,
+      ...allLevels,
       '!/package.json',
       '/.git',
       '/node_modules',
@@ -304,6 +315,9 @@ class PackWalker extends IgnoreWalker {
           file = file.slice(0, -2)
         }
         const inverse = `!${file}`
+
+        this.excludeNonRoot(file)
+
         try {
           // if an entry in the files array is a specific file, then we need to include it as a
           // strict requirement for this package. if it's a directory or a pattern, it's a default
@@ -352,6 +366,20 @@ class PackWalker extends IgnoreWalker {
     this.injectRules(strictRules, strict, callback)
   }
 
+  // excludes non-root files: when an entry in the package.json files array
+  // negates readme/license/licence/copying with a leading !, the matching
+  // readme/license/licence/copying pattern is removed from the allLevels defaults
+  excludeNonRoot (file) {
+    // find the first rootOnly pattern that matches this files entry
+    const matchingPattern = rootOnly.find(regex => regex.test(file))
+
+    if (matchingPattern) {
+      // find the allLevels default matching the same pattern and remove it
+      const indexToRemove = allLevels.findIndex(element => matchingPattern.test(element))
+      allLevels.splice(indexToRemove, 1)
+    }
+  }
+
   // custom method: after we've finished gathering the files for the root package, we call this
   // before emitting the 'done' event in order to gather all of the files for bundled deps
   async gatherBundles () {
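To make the `allLevels`/`rootOnly` interplay above concrete, here is a small self-contained sketch (the `'!readme.md'` files entry is a made-up example, not from the patch): a negated readme/license/licence/copying entry in a package's `files` array knocks the corresponding default out of `allLevels`, so it stops being force-included in subdirectories.

```javascript
// Copies of the patch's tables, reduced to what the sketch needs.
const rootOnly = [
  /^!.*readme/i,
  /^!.*copying/i,
  /^!.*licen[sc]e/i,
]
const allLevels = [
  '!/readme{,.*[^~$]}',
  '!/copying{,.*[^~$]}',
  '!/license{,.*[^~$]}',
  '!/licence{,.*[^~$]}',
]

// Hypothetical entry from a package.json "files" array:
const file = '!readme.md'

const matchingPattern = rootOnly.find((regex) => regex.test(file))
if (matchingPattern) {
  // '!/readme{,.*[^~$]}' also matches /^!.*readme/i, so it is removed.
  allLevels.splice(allLevels.findIndex((el) => matchingPattern.test(el)), 1)
}
console.log(allLevels) // no readme rule left; the user's negation wins
```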
diff --git a/deps/npm/node_modules/npm-packlist/package.json b/deps/npm/node_modules/npm-packlist/package.json
index 6023ad34df3b42..460ca7e30ad23f 100644
--- a/deps/npm/node_modules/npm-packlist/package.json
+++ b/deps/npm/node_modules/npm-packlist/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-packlist",
-  "version": "7.0.4",
+  "version": "8.0.0",
   "description": "Get a list of the files to add from a folder into an npm package",
   "directories": {
     "test": "test"
@@ -18,7 +18,7 @@
   "devDependencies": {
     "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0",
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.10.0",
+    "@npmcli/template-oss": "4.18.0",
     "mutate-fs": "^2.1.1",
     "tap": "^16.0.1"
   },
@@ -55,6 +55,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.10.0"
+    "version": "4.18.0",
+    "publish": true
   }
 }
diff --git a/deps/npm/node_modules/npm-pick-manifest/lib/index.js b/deps/npm/node_modules/npm-pick-manifest/lib/index.js
index f2934e9ca1822a..8dbd2721c89963 100644
--- a/deps/npm/node_modules/npm-pick-manifest/lib/index.js
+++ b/deps/npm/node_modules/npm-pick-manifest/lib/index.js
@@ -210,7 +210,7 @@ module.exports = (packument, wanted, opts = {}) => {
     code,
     type: npa.resolve(packument.name, wanted).type,
     wanted,
-    versions: Object.keys(packument.versions),
+    versions: Object.keys(packument.versions ?? {}),
     name,
     distTags: packument['dist-tags'],
     defaultTag,
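The one-line change above guards against packuments that carry no `versions` object at all (for example, an error document for an unpublished package). A quick sketch of the difference, with a made-up packument:

```javascript
const packument = { name: 'example-pkg', 'dist-tags': {} } // hypothetical

Object.keys(packument.versions ?? {}) // -> [], error object builds fine
// Object.keys(packument.versions)    // would throw:
// TypeError: Cannot convert undefined or null to object
```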
diff --git a/deps/npm/node_modules/npm-pick-manifest/package.json b/deps/npm/node_modules/npm-pick-manifest/package.json
index 89ff8966f1a39b..e30c2cfe341fc6 100644
--- a/deps/npm/node_modules/npm-pick-manifest/package.json
+++ b/deps/npm/node_modules/npm-pick-manifest/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-pick-manifest",
-  "version": "8.0.1",
+  "version": "9.0.0",
   "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
   "main": "./lib",
   "files": [
@@ -31,12 +31,12 @@
   "dependencies": {
     "npm-install-checks": "^6.0.0",
     "npm-normalize-package-bin": "^3.0.0",
-    "npm-package-arg": "^10.0.0",
+    "npm-package-arg": "^11.0.0",
     "semver": "^7.3.5"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.6.1",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "tap": {
@@ -47,10 +47,17 @@
     ]
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.6.1"
+    "version": "4.18.0",
+    "publish": true,
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
diff --git a/deps/npm/node_modules/npm-profile/package.json b/deps/npm/node_modules/npm-profile/package.json
index 9c0b77b8a6dd5d..af57e9e73509c3 100644
--- a/deps/npm/node_modules/npm-profile/package.json
+++ b/deps/npm/node_modules/npm-profile/package.json
@@ -1,12 +1,12 @@
 {
   "name": "npm-profile",
-  "version": "7.0.1",
+  "version": "9.0.0",
   "description": "Library for updating an npmjs.com profile",
   "keywords": [],
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "npm-registry-fetch": "^14.0.0",
+    "npm-registry-fetch": "^16.0.0",
     "proc-log": "^3.0.0"
   },
   "main": "./lib/index.js",
@@ -20,7 +20,7 @@
   ],
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.5.1",
+    "@npmcli/template-oss": "4.18.0",
     "nock": "^13.2.4",
     "tap": "^16.0.1"
   },
@@ -41,10 +41,17 @@
     ]
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.5.1"
+    "version": "4.18.0",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "publish": true
   }
 }
diff --git a/deps/npm/node_modules/npm-registry-fetch/package.json b/deps/npm/node_modules/npm-registry-fetch/package.json
index 63a44725886ccf..2afadf939743b8 100644
--- a/deps/npm/node_modules/npm-registry-fetch/package.json
+++ b/deps/npm/node_modules/npm-registry-fetch/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-registry-fetch",
-  "version": "14.0.5",
+  "version": "16.0.0",
   "description": "Fetch-based http client for use with npm registry APIs",
   "main": "lib",
   "files": [
@@ -31,18 +31,18 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "make-fetch-happen": "^11.0.0",
-    "minipass": "^5.0.0",
+    "make-fetch-happen": "^13.0.0",
+    "minipass": "^7.0.2",
     "minipass-fetch": "^3.0.0",
     "minipass-json-stream": "^1.0.1",
     "minizlib": "^2.1.2",
-    "npm-package-arg": "^10.0.0",
+    "npm-package-arg": "^11.0.0",
     "proc-log": "^3.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
-    "cacache": "^17.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "cacache": "^18.0.0",
     "nock": "^13.2.4",
     "require-inject": "^1.4.4",
     "ssri": "^10.0.0",
@@ -57,11 +57,17 @@
     ]
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
-    "publish": "true"
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
diff --git a/deps/npm/node_modules/pacote/lib/registry.js b/deps/npm/node_modules/pacote/lib/registry.js
index 34d9b2b87f3f3d..993fd3f08a6d91 100644
--- a/deps/npm/node_modules/pacote/lib/registry.js
+++ b/deps/npm/node_modules/pacote/lib/registry.js
@@ -8,7 +8,7 @@ const pickManifest = require('npm-pick-manifest')
 const ssri = require('ssri')
 const crypto = require('crypto')
 const npa = require('npm-package-arg')
-const { sigstore } = require('sigstore')
+const sigstore = require('sigstore')
 
 // Corgis are cute. 🐕🐶
 const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
@@ -299,7 +299,7 @@ class RegistryFetcher extends Fetcher {
                 tufCachePath: this.tufCache,
                 keySelector: publicKey ? () => publicKey.pemkey : undefined,
               }
-              await sigstore.verify(bundle, null, options)
+              await sigstore.verify(bundle, options)
             } catch (e) {
               throw Object.assign(new Error(
                 `${mani._id} failed to verify attestation: ${e.message}`
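The two edits above track sigstore's 1.x to 2.x API change: the module no longer exposes a nested `sigstore` namespace, and bundle verification drops the middle `payload` argument. A hedged sketch of the call shape the new code assumes (option names mirror the surrounding hunk; the bundle itself is a placeholder):

```javascript
const sigstore = require('sigstore')

async function verifyAttestation (bundle, publicKey, tufCache) {
  const options = {
    tufCachePath: tufCache,
    keySelector: publicKey ? () => publicKey.pemkey : undefined,
  }
  // sigstore 1.x: sigstore.verify(bundle, null, options)
  // sigstore 2.x: the null payload slot is gone
  await sigstore.verify(bundle, options)
}
```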
diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json
index bc8d984704af5b..4654b03d988c32 100644
--- a/deps/npm/node_modules/pacote/package.json
+++ b/deps/npm/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pacote",
-  "version": "15.2.0",
+  "version": "17.0.4",
   "description": "JavaScript package downloader",
   "author": "GitHub Inc.",
   "bin": {
@@ -27,8 +27,8 @@
   "devDependencies": {
     "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0",
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
-    "hosted-git-info": "^6.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "hosted-git-info": "^7.0.0",
     "mutate-fs": "^2.1.1",
     "nock": "^13.2.4",
     "npm-registry-mock": "^1.3.2",
@@ -44,27 +44,27 @@
     "git"
   ],
   "dependencies": {
-    "@npmcli/git": "^4.0.0",
+    "@npmcli/git": "^5.0.0",
     "@npmcli/installed-package-contents": "^2.0.1",
-    "@npmcli/promise-spawn": "^6.0.1",
-    "@npmcli/run-script": "^6.0.0",
-    "cacache": "^17.0.0",
+    "@npmcli/promise-spawn": "^7.0.0",
+    "@npmcli/run-script": "^7.0.0",
+    "cacache": "^18.0.0",
     "fs-minipass": "^3.0.0",
-    "minipass": "^5.0.0",
-    "npm-package-arg": "^10.0.0",
-    "npm-packlist": "^7.0.0",
-    "npm-pick-manifest": "^8.0.0",
-    "npm-registry-fetch": "^14.0.0",
+    "minipass": "^7.0.2",
+    "npm-package-arg": "^11.0.0",
+    "npm-packlist": "^8.0.0",
+    "npm-pick-manifest": "^9.0.0",
+    "npm-registry-fetch": "^16.0.0",
     "proc-log": "^3.0.0",
     "promise-retry": "^2.0.1",
-    "read-package-json": "^6.0.0",
+    "read-package-json": "^7.0.0",
     "read-package-json-fast": "^3.0.0",
-    "sigstore": "^1.3.0",
+    "sigstore": "^2.0.0",
     "ssri": "^10.0.0",
     "tar": "^6.1.11"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "repository": {
     "type": "git",
@@ -72,7 +72,13 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "version": "4.18.0",
     "windowsCI": false,
     "publish": "true"
   }
diff --git a/deps/npm/node_modules/path-scurry/dist/cjs/index.js b/deps/npm/node_modules/path-scurry/dist/cjs/index.js
index 8044c7e581d2e4..23eb5b0853ff28 100644
--- a/deps/npm/node_modules/path-scurry/dist/cjs/index.js
+++ b/deps/npm/node_modules/path-scurry/dist/cjs/index.js
@@ -521,6 +521,29 @@ class PathBase {
     isUnknown() {
         return (this.#type & IFMT) === UNKNOWN;
     }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return this.isUnknown()
+            ? 'Unknown'
+            : this.isDirectory()
+                ? 'Directory'
+                : this.isFile()
+                    ? 'File'
+                    : this.isSymbolicLink()
+                        ? 'SymbolicLink'
+                        : this.isFIFO()
+                            ? 'FIFO'
+                            : this.isCharacterDevice()
+                                ? 'CharacterDevice'
+                                : this.isBlockDevice()
+                                    ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket()
+                                        ? 'Socket'
+                                        : 'Unknown';
+        /* c8 ignore stop */
+    }
     /**
      * Is the Path a regular file?
      */
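`getType()` flattens the existing `is*()` predicates into a single readable label, and `isType()` is its dynamic inverse (`isType('File')` dispatches to `isFile()`). A short usage sketch against a hypothetical directory:

```javascript
const { PathScurry } = require('path-scurry')

const pw = new PathScurry('/some/dir') // placeholder path
for (const entry of pw.readdirSync(pw.cwd)) {
  console.log(entry.name, entry.getType()) // e.g. 'index.js File'
  if (entry.isType('Directory')) {
    // descend, collect, etc.
  }
}
```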
diff --git a/deps/npm/node_modules/path-scurry/dist/mjs/index.js b/deps/npm/node_modules/path-scurry/dist/mjs/index.js
index 957f087c865147..079253a6aee967 100644
--- a/deps/npm/node_modules/path-scurry/dist/mjs/index.js
+++ b/deps/npm/node_modules/path-scurry/dist/mjs/index.js
@@ -493,6 +493,29 @@ export class PathBase {
     isUnknown() {
         return (this.#type & IFMT) === UNKNOWN;
     }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return this.isUnknown()
+            ? 'Unknown'
+            : this.isDirectory()
+                ? 'Directory'
+                : this.isFile()
+                    ? 'File'
+                    : this.isSymbolicLink()
+                        ? 'SymbolicLink'
+                        : this.isFIFO()
+                            ? 'FIFO'
+                            : this.isCharacterDevice()
+                                ? 'CharacterDevice'
+                                : this.isBlockDevice()
+                                    ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket()
+                                        ? 'Socket'
+                                        : 'Unknown';
+        /* c8 ignore stop */
+    }
     /**
      * Is the Path a regular file?
      */
diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js b/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js
deleted file mode 100644
index d854bf570d346c..00000000000000
--- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),k=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=k(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#_;#g;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#p;#n;#i;#t;#l;#c;#o;#h;#w;#r;#m;#F;#S;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#S,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#w,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#p}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#_}get disposeAfter(){return this.#g}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:u,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:d,maxSize:p=0,maxEntrySize:F=0,sizeCalculation:c,fetchMethod:w,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:S,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:g,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?k(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=p,this.maxEntrySize=F||this.#f,this.sizeCalculation=c,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=w,this.#T=!!w,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#w=R.create(e),this.#s=0,this.#p=0,typeof u=="function"&&(this.#_=u),typeof b=="function"?(this.#g=b,this.#r=[]):(this.#g=void 0,this.#r=void 0),this.#b=!!this.#_,this.#a=!!this.#g,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!d,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!g,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!S,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#S=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let u=n.now-r;n.remainingTTL=a-u}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let u=(i||s())-r;return a-u},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#p=0,this.#m=t,this.#E=e=>{this.#p-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#p>n;)this.#W(!0)}this.#p+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#p)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#S&&this.#F){h.ttl=this.#S[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:u=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#w.length!==0?this.#w.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),u=!1;else{this.#v(f);let d=this.#t[f];if(e!==d){if(this.#T&&this.#e(d)?d.__abortController.abort(new Error("replaced")):h||(this.#b&&this.#_?.(d,t,"set"),this.#a&&this.#r?.push([d,t,"set"])),this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let p=d&&this.#e(d)?d.__staleWhileFetching:d;p!==void 0&&(r.oldValue=p)}}else r&&(r.set="update")}if(s!==0&&!this.#S&&this.#L(),this.#S&&(u||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let d=this.#r,p;for(;p=d?.shift();)this.#g?.(...p)}return this}pop(){try{for(;this.#s;){let 
t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#w.push(e)),this.#s===1?(this.#o=this.#h=0,this.#w.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},u=(c,w=!1)=>{let{aborted:l}=h.signal,S=i.ignoreFetchAbort&&c!==void 0;if(i.status&&(l&&!w?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,S&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!S&&!w)return f(h.signal.reason);let y=p;return this.#t[e]===p&&(c===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,c,r.options))),c},b=c=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=c),f(c)),f=c=>{let{aborted:w}=h.signal,l=w&&i.allowStaleOnFetchAbort,S=l||i.allowStaleOnFetchRejection,y=S||i.noDeleteOnFetchRejection,g=p;if(this.#t[e]===p&&(!y||g.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=g.__staleWhileFetching)),S)return i.status&&g.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),g.__staleWhileFetching;if(g.__returned===g)throw c},d=(c,w)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(S=>c(S),w),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(c(),i.allowStaleOnFetchAbort&&(c=S=>u(S,!0)))})};i.status&&(i.status.fetchDispatched=!0);let p=new Promise(d).then(u,b),F=Object.assign(p,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:u=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:d=this.allowStaleOnFetchRejection,ignoreFetchAbort:p=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:c,forceRefresh:w=!1,status:l,signal:S}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let 
y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:u,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:d,allowStaleOnFetchAbort:F,ignoreFetchAbort:p,status:l,signal:S},g=this.#n.get(t);if(g===void 0){l&&(l.fetch="miss");let _=this.#D(t,g,y,c);return _.__returned=_}else{let _=this.#t[g];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(g);if(!w&&!O)return l&&(l.fetch="hit"),this.#v(g),s&&this.#z(g),l&&this.#O(l,g),_;let A=this.#D(t,g,y,c),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],u=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),u?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),u?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#w.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#g?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#_?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#S&&this.#F&&(this.#S.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#w.length=0,this.#p=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}};exports.LRUCache=C;
-//# sourceMappingURL=index.min.js.map
diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js b/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js
deleted file mode 100644
index 44bd1c23b86e74..00000000000000
--- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},R=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof R>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},R=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),k=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=k(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var W=class{#d;#f;#_;#g;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#p;#n;#i;#t;#l;#c;#o;#h;#w;#r;#m;#F;#S;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#S,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#w,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#p}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#_}get disposeAfter(){return this.#g}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:u,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:d,maxSize:p=0,maxEntrySize:F=0,sizeCalculation:c,fetchMethod:w,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:S,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:g,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?k(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=p,this.maxEntrySize=F||this.#f,this.sizeCalculation=c,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=w,this.#T=!!w,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#w=C.create(e),this.#s=0,this.#p=0,typeof u=="function"&&(this.#_=u),typeof b=="function"?(this.#g=b,this.#r=[]):(this.#g=void 0,this.#r=void 0),this.#b=!!this.#_,this.#a=!!this.#g,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!d,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!g,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!S,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,W))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#S=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let u=n.now-r;n.remainingTTL=a-u}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let u=(i||s())-r;return a-u},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#p=0,this.#m=t,this.#E=e=>{this.#p-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#p>n;)this.#W(!0)}this.#p+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#p)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#S&&this.#F){h.ttl=this.#S[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:u=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#w.length!==0?this.#w.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),u=!1;else{this.#v(f);let d=this.#t[f];if(e!==d){if(this.#T&&this.#e(d)?d.__abortController.abort(new Error("replaced")):h||(this.#b&&this.#_?.(d,t,"set"),this.#a&&this.#r?.push([d,t,"set"])),this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let p=d&&this.#e(d)?d.__staleWhileFetching:d;p!==void 0&&(r.oldValue=p)}}else r&&(r.set="update")}if(s!==0&&!this.#S&&this.#L(),this.#S&&(u||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let d=this.#r,p;for(;p=d?.shift();)this.#g?.(...p)}return this}pop(){try{for(;this.#s;){let 
t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#w.push(e)),this.#s===1?(this.#o=this.#h=0,this.#w.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new R,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},u=(c,w=!1)=>{let{aborted:l}=h.signal,S=i.ignoreFetchAbort&&c!==void 0;if(i.status&&(l&&!w?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,S&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!S&&!w)return f(h.signal.reason);let y=p;return this.#t[e]===p&&(c===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,c,r.options))),c},b=c=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=c),f(c)),f=c=>{let{aborted:w}=h.signal,l=w&&i.allowStaleOnFetchAbort,S=l||i.allowStaleOnFetchRejection,y=S||i.noDeleteOnFetchRejection,g=p;if(this.#t[e]===p&&(!y||g.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=g.__staleWhileFetching)),S)return i.status&&g.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),g.__staleWhileFetching;if(g.__returned===g)throw c},d=(c,w)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(S=>c(S),w),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(c(),i.allowStaleOnFetchAbort&&(c=S=>u(S,!0)))})};i.status&&(i.status.fetchDispatched=!0);let p=new Promise(d).then(u,b),F=Object.assign(p,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof R}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:u=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:d=this.allowStaleOnFetchRejection,ignoreFetchAbort:p=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:c,forceRefresh:w=!1,status:l,signal:S}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let 
y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:u,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:d,allowStaleOnFetchAbort:F,ignoreFetchAbort:p,status:l,signal:S},g=this.#n.get(t);if(g===void 0){l&&(l.fetch="miss");let _=this.#D(t,g,y,c);return _.__returned=_}else{let _=this.#t[g];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(g);if(!w&&!O)return l&&(l.fetch="hit"),this.#v(g),s&&this.#z(g),l&&this.#O(l,g),_;let A=this.#D(t,g,y,c),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],u=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),u?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),u?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#w.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#g?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#_?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#S&&this.#F&&(this.#S.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#w.length=0,this.#p=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}};export{W as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/deps/npm/node_modules/path-scurry/package.json b/deps/npm/node_modules/path-scurry/package.json
index 5b900825e44e00..af04f807fed2bc 100644
--- a/deps/npm/node_modules/path-scurry/package.json
+++ b/deps/npm/node_modules/path-scurry/package.json
@@ -1,6 +1,6 @@
 {
   "name": "path-scurry",
-  "version": "1.9.2",
+  "version": "1.10.1",
   "description": "walk paths fast and efficiently",
   "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
   "main": "./dist/cjs/index.js",
@@ -64,7 +64,7 @@
     "eslint-config-prettier": "^8.6.0",
     "mkdirp": "^3.0.0",
     "prettier": "^2.8.3",
-    "rimraf": "^4.1.2",
+    "rimraf": "^5.0.1",
     "tap": "^16.3.4",
     "ts-node": "^10.9.1",
     "typedoc": "^0.23.24",
@@ -78,10 +78,10 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/isaacs/path-walker"
+    "url": "git+https://github.com/isaacs/path-scurry"
   },
   "dependencies": {
-    "lru-cache": "^9.1.1",
-    "minipass": "^5.0.0 || ^6.0.2"
+    "lru-cache": "^9.1.1 || ^10.0.0",
+    "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
   }
 }
diff --git a/deps/npm/node_modules/read-package-json/package.json b/deps/npm/node_modules/read-package-json/package.json
index 90ab321d51743e..01061f2bc27921 100644
--- a/deps/npm/node_modules/read-package-json/package.json
+++ b/deps/npm/node_modules/read-package-json/package.json
@@ -1,6 +1,6 @@
 {
   "name": "read-package-json",
-  "version": "6.0.4",
+  "version": "7.0.0",
   "author": "GitHub Inc.",
   "description": "The thing npm uses to read package.json files with semantics and defaults and validation",
   "repository": {
@@ -25,12 +25,12 @@
   "dependencies": {
     "glob": "^10.2.2",
     "json-parse-even-better-errors": "^3.0.0",
-    "normalize-package-data": "^5.0.0",
+    "normalize-package-data": "^6.0.0",
     "npm-normalize-package-bin": "^3.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.15.1",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "license": "ISC",
@@ -39,7 +39,7 @@
     "lib/"
   ],
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   },
   "tap": {
     "branches": 73,
@@ -53,7 +53,13 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.15.1",
-    "publish": "true"
+    "version": "4.18.0",
+    "publish": "true",
+    "ciVersions": [
+      "16.14.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ]
   }
 }
diff --git a/deps/npm/node_modules/sigstore/README.md b/deps/npm/node_modules/sigstore/README.md
deleted file mode 100644
index 2540fa808b9796..00000000000000
--- a/deps/npm/node_modules/sigstore/README.md
+++ /dev/null
@@ -1,165 +0,0 @@
-# sigstore · [![npm version](https://img.shields.io/npm/v/sigstore.svg?style=flat)](https://www.npmjs.com/package/sigstore) [![CI Status](https://github.com/sigstore/sigstore-js/workflows/CI/badge.svg)](https://github.com/sigstore/sigstore-js/actions/workflows/ci.yml) [![Smoke Test Status](https://github.com/sigstore/sigstore-js/workflows/smoke-test/badge.svg)](https://github.com/sigstore/sigstore-js/actions/workflows/smoke-test.yml)
-
-A JavaScript library for generating and verifying Sigstore signatures. One of
-the intended uses is to sign and verify npm packages but it can be used to sign
-and verify any file.
-
-## Features
-
-* Support for signing using an OpenID Connect identity
-* Support for publishing signatures to a [Rekor][1] instance
-* Support for verifying Sigstore bundles
-
-## Prerequisites
-
-- Node.js version >= 14.17.0
-
-## Installation
-
-```
-npm install sigstore
-```
-
-## Usage
-
-```javascript
-const { sigstore } = require('sigstore')
-```
-
-```javascript
-import { sigstore } from 'sigstore'
-```
-
-### sign(payload[, options])
-
-Generates a Sigstore signature for the supplied payload. Returns a
-[Sigstore bundle][2] containing the signature and the verification material
-necessary to verify the signature.
-
-* `payload` `<Buffer>`: The bytes of the artifact to be signed.
-* `options` `<Object>`
-  * `fulcioURL` `<string>`: The base URL of the Fulcio instance to use for retrieving the signing certificate. Defaults to `'https://fulcio.sigstore.dev'`.
-  * `rekorURL` `<string>`: The base URL of the Rekor instance to use when adding the signature to the transparency log. Defaults to `'https://rekor.sigstore.dev'`.
-  * `tsaServerURL` `<string>`: The base URL of the Timestamp Authority instance to use when requesting a signed timestamp. If omitted, no timestamp will be requested.
-  * `tlogUpload` `<boolean>`: Flag indicating whether or not the signature should be recorded on the Rekor transparency log. Defaults to `true`.
-  * `identityToken` `<string>`: The OIDC token identifying the signer. If no explicit token is supplied, an attempt will be made to retrieve one from the environment. This config cannot be used with `identityProvider`.
-  * `identityProvider` `<Provider>`: Object which implements `getToken: () => Promise<string>`. The supplied provider will be used to retrieve an OIDC token. If no provider is supplied, an attempt will be made to retrieve an OIDC token from the environment. This config cannot be used with `identityToken`.
-
-### attest(payload, payloadType[, options])
-
-Generates a Sigstore signature for the supplied in-toto statement. Returns a
-[Sigstore bundle][2] containing the [DSSE][3]-wrapped statement and signature
-as well as the verification material necessary to verify the signature.
-
-* `payload` `<Buffer>`: The bytes of the statement to be signed.
-* `payloadType` `<string>`: MIME or content type describing the statement to be signed.
-* `options` `<Object>`
-  * `fulcioURL` `<string>`: The base URL of the Fulcio instance to use for retrieving the signing certificate. Defaults to `'https://fulcio.sigstore.dev'`.
-  * `rekorURL` `<string>`: The base URL of the Rekor instance to use when adding the signature to the transparency log. Defaults to `'https://rekor.sigstore.dev'`.
-  * `tsaServerURL` `<string>`: The base URL of the Timestamp Authority instance to use when requesting a signed timestamp. If omitted, no timestamp will be requested.
-  * `tlogUpload` `<boolean>`: Flag indicating whether or not the signed statement should be recorded on the Rekor transparency log. Defaults to `true`.
-  * `identityToken` `<string>`: The OIDC token identifying the signer. If no explicit token is supplied, an attempt will be made to retrieve one from the environment. This config cannot be used with `identityProvider`.
-  * `identityProvider` `<Provider>`: Object which implements `getToken: () => Promise<string>`. The supplied provider will be used to retrieve an OIDC token. If no provider is supplied, an attempt will be made to retrieve an OIDC token from the environment. This config cannot be used with `identityToken`.
-
-
-### verify(bundle[, payload][, options])
-
-Verifies the signature in the supplied bundle.
-
-* `bundle` `<Bundle>`: The Sigstore bundle containing the signature to be verified and the verification material necessary to verify the signature.
-* `payload` `<Buffer>`: The bytes of the artifact over which the signature was created. Only necessary when the `sign` function was used to generate the signature, since the Bundle does not contain any information about the artifact which was signed. Not required when the `attest` function was used to generate the Bundle.
-* `options` `<Object>`
-  * `ctLogThreshold` `<number>`: The number of certificate transparency logs on which the signing certificate must appear. Defaults to `1`.
-  * `tlogThreshold` `<number>`: The number of transparency logs on which the signature must appear. Defaults to `1`.
-  * `certificateIssuer` `<string>`: Value that must appear in the signing certificate's issuer extension (OID 1.3.6.1.4.1.57264.1.1). Not verified if no value is supplied.
-  * `certificateIdentityEmail` `<string>`: Email address which must appear in the signing certificate's Subject Alternative Name (SAN) extension. Must be specified in conjunction with the `certificateIssuer` option. Takes precedence over the `certificateIdentityURI` option. Not verified if no value is supplied.
-  * `certificateIdentityURI` `<string>`: URI which must appear in the signing certificate's Subject Alternative Name (SAN) extension. Must be specified in conjunction with the `certificateIssuer` option. Ignored if the `certificateIdentityEmail` option is set. Not verified if no value is supplied.
-  * `certificateOIDs` `<Record<string, string>>`: A collection of OID/value pairs which must be present in the certificate's extension list. Not verified if no value is supplied.
-  * `keySelector` `<Function>`: Callback invoked to retrieve the public key (as either `string` or `Buffer`) necessary to verify the bundle signature. Not used when the signature was generated from a Fulcio-issued signing certificate.
-    * `hint` `<string>`: The hint from the bundle used to identify the signing key.
-
-### tuf
-
-The `tuf` object contains utility functions for working with the Sigstore TUF repository.
-
-#### client([options])
-
-Returns a TUF client which can be used to retrieve targets from the Sigstore TUF repository.
-
-* `options` `<Object>`
-  * `tufMirrorURL` `<string>`: Base URL for the Sigstore TUF repository. Defaults to `'https://tuf-repo-cdn.sigstore.dev'`
-  * `tufRootPath` `<string>`: Path to the initial trusted root for the TUF repository. Defaults to the embedded root.
-  * `tufCachePath` `<string>`: Absolute path to the directory to be used for caching downloaded TUF metadata and targets. Defaults to a directory named "sigstore-js" within the platform-specific application data directory.
-
-The returned object exposes a `getTarget(path)` function which returns the
-contents of the target at the specified path in the Sigstore TUF repository.
-
-#### getTarget(path[, options]) (deprecated)
-
-Returns the contents of the target at the specified path in the Sigstore TUF repository.
-This method has been deprecated and will be removed in the next major version.
-You should use the TUF `client` function to retrieve a stateful TUF client and
-then call `getTarget` against that object. This will avoid re-initializing the
-internal TUF state between requests.
-
-* `path` `<string>`: The [path-relative-url string](https://url.spec.whatwg.org/#path-relative-url-string) that uniquely identifies the target within the Sigstore TUF repository.
-* `options` `<Object>`
-  * `tufMirrorURL` `<string>`: Base URL for the Sigstore TUF repository. Defaults to `'https://tuf-repo-cdn.sigstore.dev'`
-  * `tufRootPath` `<string>`: Path to the initial trusted root for the TUF repository. Defaults to the embedded root.
-  * `tufCachePath` `<string>`: Absolute path to the directory to be used for caching downloaded TUF metadata and targets. Defaults to a directory named "sigstore-js" within the platform-specific application data directory.
-
-
-### utils
-
-The `utils` object contains a few internal utility functions. These are exposed
-to support the needs of specific `sigstore-js` consumers but should **NOT** be
-considered part of the stable public interface.
-
-## CLI
-
-The `sigstore-js` library comes packaged with a basic command line interface
-for testing and demo purposes. However, the CLI should **NOT** be considered
-part of the stable interface of the library. If you require a production-ready
-Sigstore CLI, we recommend you use [`cosign`][4].
-
-```shell
-$ npx sigstore help
-sigstore <subcommand> <artifact>
-
-  Usage:
-
-  sigstore sign         sign an artifact
-  sigstore attest       sign an artifact using dsse (Dead Simple Signing Envelope)
-  sigstore verify       verify an artifact
-  sigstore version      print version information
-  sigstore help         print help information
-```
-
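For example, a typical round trip with the demo CLI might look like the following (file names are illustrative):

```shell
$ npx sigstore sign artifact.tgz > bundle.sigstore
$ npx sigstore verify bundle.sigstore artifact.tgz
Verified OK
```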
-## Credential Sources
-
-### GitHub Actions
-
-If sigstore-js detects that it is being executed on GitHub Actions, it will use the `ACTIONS_ID_TOKEN_REQUEST_URL`
-and `ACTIONS_ID_TOKEN_REQUEST_TOKEN` environment variables to request an OIDC token with the correct scope.
-
-Note: the `id_token: write` permission must be granted to the GitHub Actions job.
-
-See https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect
-for more details.
-
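As a reference point, a minimal workflow snippet granting that permission might look like this (the job and step contents are illustrative):

```yaml
permissions:
  id-token: write # required so sigstore-js can request an OIDC token

jobs:
  sign:
    runs-on: ubuntu-latest
    steps:
      - run: npx sigstore sign artifact.tgz > bundle.sigstore
```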
-### Environment Variables
-
-If the `SIGSTORE_ID_TOKEN` environment variable is set, it will use this to authenticate to Fulcio.
-It is the caller's responsibility to make sure that this token has the correct scopes.
-
-### Interactive Flow
-
-If sigstore-js cannot detect ambient credentials, then it will prompt the user to go through the
-interactive flow.
-
-
-
-[1]: https://github.com/sigstore/rekor
-[2]: https://github.com/sigstore/protobuf-specs/blob/9b722b68a717778ba4f11543afa4ef93205ab502/protos/sigstore_bundle.proto#L63-L84
-[3]: https://github.com/secure-systems-lab/dsse
-[4]: https://github.com/sigstore/cosign
diff --git a/deps/npm/node_modules/sigstore/dist/ca/format.d.ts b/deps/npm/node_modules/sigstore/dist/ca/format.d.ts
deleted file mode 100644
index b29f51a71f5647..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/ca/format.d.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-/// <reference types="node" />
-/// <reference types="node" />
-import { KeyObject } from 'crypto';
-import type { SigningCertificateRequest } from '../external/fulcio';
-export declare function toCertificateRequest(identityToken: string, publicKey: KeyObject, challenge: Buffer): SigningCertificateRequest;
diff --git a/deps/npm/node_modules/sigstore/dist/ca/format.js b/deps/npm/node_modules/sigstore/dist/ca/format.js
deleted file mode 100644
index 6374243e80e026..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/ca/format.js
+++ /dev/null
@@ -1,20 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toCertificateRequest = void 0;
-function toCertificateRequest(identityToken, publicKey, challenge) {
-    return {
-        credentials: {
-            oidcIdentityToken: identityToken,
-        },
-        publicKeyRequest: {
-            publicKey: {
-                algorithm: 'ECDSA',
-                content: publicKey
-                    .export({ format: 'pem', type: 'spki' })
-                    .toString('ascii'),
-            },
-            proofOfPossession: challenge.toString('base64'),
-        },
-    };
-}
-exports.toCertificateRequest = toCertificateRequest;
diff --git a/deps/npm/node_modules/sigstore/dist/ca/index.d.ts b/deps/npm/node_modules/sigstore/dist/ca/index.d.ts
deleted file mode 100644
index 3a6347293aaa8b..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/ca/index.d.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-/// <reference types="node" />
-/// <reference types="node" />
-import { KeyObject } from 'crypto';
-import type { FetchOptions } from '../types/fetch';
-export interface CA {
-    createSigningCertificate: (identityToken: string, publicKey: KeyObject, challenge: Buffer) => Promise<string[]>;
-}
-export type CAClientOptions = {
-    fulcioBaseURL: string;
-} & FetchOptions;
-export declare class CAClient implements CA {
-    private fulcio;
-    constructor(options: CAClientOptions);
-    createSigningCertificate(identityToken: string, publicKey: KeyObject, challenge: Buffer): Promise<string[]>;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/ca/index.js b/deps/npm/node_modules/sigstore/dist/ca/index.js
deleted file mode 100644
index 340dd46609aad2..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/ca/index.js
+++ /dev/null
@@ -1,39 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CAClient = void 0;
-const error_1 = require("../error");
-const external_1 = require("../external");
-const format_1 = require("./format");
-class CAClient {
-    constructor(options) {
-        this.fulcio = new external_1.Fulcio({
-            baseURL: options.fulcioBaseURL,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async createSigningCertificate(identityToken, publicKey, challenge) {
-        const request = (0, format_1.toCertificateRequest)(identityToken, publicKey, challenge);
-        try {
-            const resp = await this.fulcio.createSigningCertificate(request);
-            // Account for the fact that the response may contain either a
-            // signedCertificateEmbeddedSct or a signedCertificateDetachedSct.
-            const cert = resp.signedCertificateEmbeddedSct
-                ? resp.signedCertificateEmbeddedSct
-                : resp.signedCertificateDetachedSct;
-            // Return the first certificate in the chain, which is the signing
-            // certificate. Specifically not returning the rest of the chain to
-            // mitigate the risk of errors when verifying the certificate chain.
-            // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-            return cert.chain.certificates.slice(0, 1);
-        }
-        catch (err) {
-            throw new error_1.InternalError({
-                code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
-                message: 'error creating signing certificate',
-                cause: err,
-            });
-        }
-    }
-}
-exports.CAClient = CAClient;
diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/chain.d.ts b/deps/npm/node_modules/sigstore/dist/ca/verify/chain.d.ts
deleted file mode 100644
index 0a79b42f714a0f..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/ca/verify/chain.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import * as sigstore from '../../types/sigstore';
-import { x509Certificate } from '../../x509/cert';
-export declare function verifyChain(certificate: sigstore.X509Certificate, certificateAuthorities: sigstore.CertificateAuthority[]): x509Certificate[];
diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/index.d.ts b/deps/npm/node_modules/sigstore/dist/ca/verify/index.d.ts
deleted file mode 100644
index ddf65ff6dfffd8..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/ca/verify/index.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-import * as sigstore from '../../types/sigstore';
-export declare function verifySigningCertificate(bundle: sigstore.BundleWithCertificateChain, trustedRoot: sigstore.TrustedRoot, options: sigstore.CAArtifactVerificationOptions): void;
diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/sct.d.ts b/deps/npm/node_modules/sigstore/dist/ca/verify/sct.d.ts
deleted file mode 100644
index 29391a74cb65e6..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/ca/verify/sct.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import * as sigstore from '../../types/sigstore';
-import { x509Certificate } from '../../x509/cert';
-export declare function verifySCTs(certificateChain: x509Certificate[], ctLogs: sigstore.TransparencyLogInstance[], options: sigstore.ArtifactVerificationOptions_CtlogOptions): void;
diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/signer.d.ts b/deps/npm/node_modules/sigstore/dist/ca/verify/signer.d.ts
deleted file mode 100644
index 7241b90f6ac5c8..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/ca/verify/signer.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import * as sigstore from '../../types/sigstore';
-import { x509Certificate } from '../../x509/cert';
-export declare function verifySignerIdentity(signingCert: x509Certificate, identities: sigstore.CertificateIdentities): void;
diff --git a/deps/npm/node_modules/sigstore/dist/ca/verify/signer.js b/deps/npm/node_modules/sigstore/dist/ca/verify/signer.js
index 51d722d7631ee0..6f47651b944c94 100644
--- a/deps/npm/node_modules/sigstore/dist/ca/verify/signer.js
+++ b/deps/npm/node_modules/sigstore/dist/ca/verify/signer.js
@@ -54,7 +54,10 @@ function verifySignerIdentity(signingCert, identities) {
     // specified identities
     const signerVerified = identities.identities.some((identity) => verifyIdentity(signingCert, identity));
     if (!signerVerified) {
-        throw new error_1.PolicyError('Certificate issued to untrusted signer');
+        throw new error_1.PolicyError({
+            code: 'UNTRUSTED_SIGNER_ERROR',
+            message: 'Certificate issued to untrusted signer',
+        });
     }
 }
 exports.verifySignerIdentity = verifySignerIdentity;
diff --git a/deps/npm/node_modules/sigstore/dist/cli/index.d.ts b/deps/npm/node_modules/sigstore/dist/cli/index.d.ts
deleted file mode 100644
index 395f0a5a69d304..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/cli/index.d.ts
+++ /dev/null
@@ -1 +0,0 @@
-export declare function processArgv(): Promise<void>;
diff --git a/deps/npm/node_modules/sigstore/dist/cli/index.js b/deps/npm/node_modules/sigstore/dist/cli/index.js
deleted file mode 100644
index 6015cd9df74eac..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/cli/index.js
+++ /dev/null
@@ -1,125 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.processArgv = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fs_1 = __importDefault(require("fs"));
-const index_1 = require("../index");
-const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json';
-async function cli(args) {
-    switch (args[0]) {
-        case 'sign':
-            await sign(args[1]);
-            break;
-        case 'attest':
-            await attest(args[1], args[2]);
-            break;
-        case 'verify':
-            await verify(args[1], args[2]);
-            break;
-        case 'version':
-        case '-version':
-        case '--version':
-        case '-v':
-            // eslint-disable-next-line @typescript-eslint/no-var-requires
-            console.log(require('../../package.json').version);
-            break;
-        case 'help':
-        case '--help':
-        case '-h':
-        case '-?':
-            printUsage();
-            break;
-        default:
-            throw 'Unknown command';
-    }
-}
-function printUsage() {
-    console.log(`sigstore <subcommand> <artifact>
-
-  Usage:
-
-  sigstore sign         sign an artifact
-  sigstore attest       sign an artifact using dsse (Dead Simple Signing Envelope)
-  sigstore verify       verify an artifact
-  sigstore version      print version information
-  sigstore help         print help information
-  `);
-}
-function printRekorEntry(bundle, options) {
-    let url;
-    if (options.rekorURL === index_1.sigstore.DEFAULT_REKOR_URL) {
-        url = `https://search.sigstore.dev`;
-    }
-    else {
-        url = `${options.rekorURL}/api/v1/log/entries`;
-    }
-    const logIndex = bundle.verificationMaterial?.tlogEntries[0].logIndex;
-    console.error(`Created entry at index ${logIndex}, available at`);
-    console.error(`${url}?logIndex=${logIndex}`);
-}
-// TODO: Allow customizing these options
-const signOptions = {
-    oidcClientID: 'sigstore',
-    oidcIssuer: 'https://oauth2.sigstore.dev/auth',
-    oidcRedirectURL: process.env.OIDC_REDIRECT_URL,
-    rekorURL: index_1.sigstore.DEFAULT_REKOR_URL,
-};
-async function sign(artifactPath) {
-    const buffer = fs_1.default.readFileSync(artifactPath);
-    const bundle = await index_1.sigstore.sign(buffer, signOptions);
-    printRekorEntry(bundle, signOptions);
-    console.log(JSON.stringify(bundle));
-}
-async function attest(artifactPath, payloadType = INTOTO_PAYLOAD_TYPE) {
-    const buffer = fs_1.default.readFileSync(artifactPath);
-    const bundle = await index_1.sigstore.attest(buffer, payloadType, signOptions);
-    printRekorEntry(bundle, signOptions);
-    console.log(JSON.stringify(bundle));
-}
-async function verify(bundlePath, artifactPath) {
-    let payload = undefined;
-    if (artifactPath) {
-        payload = fs_1.default.readFileSync(artifactPath);
-    }
-    const bundleFile = fs_1.default.readFileSync(bundlePath);
-    const bundle = JSON.parse(bundleFile.toString('utf-8'));
-    try {
-        await index_1.sigstore.verify(bundle, payload, {});
-        console.error('Verified OK');
-    }
-    catch (e) {
-        console.error('Verification failed');
-        if (e instanceof Error) {
-            console.error('Error: ' + e.message);
-        }
-        process.exit(1);
-    }
-}
-async function processArgv() {
-    try {
-        await cli(process.argv.slice(2));
-        process.exit(0);
-    }
-    catch (e) {
-        console.error(e);
-        process.exit(1);
-    }
-}
-exports.processArgv = processArgv;
diff --git a/deps/npm/node_modules/sigstore/dist/config.d.ts b/deps/npm/node_modules/sigstore/dist/config.d.ts
deleted file mode 100644
index 89f42038099530..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/config.d.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-import { CA } from './ca';
-import { Provider } from './identity';
-import { TLog } from './tlog';
-import { TSA } from './tsa';
-import * as sigstore from './types/sigstore';
-import type { FetchOptions, Retry } from './types/fetch';
-import type { KeySelector } from './verify';
-interface CAOptions {
-    fulcioURL?: string;
-}
-interface TLogOptions {
-    rekorURL?: string;
-}
-interface TSAOptions {
-    tsaServerURL?: string;
-}
-export interface IdentityProviderOptions {
-    identityToken?: string;
-    oidcIssuer?: string;
-    oidcClientID?: string;
-    oidcClientSecret?: string;
-    oidcRedirectURL?: string;
-}
-export type TUFOptions = {
-    tufMirrorURL?: string;
-    tufRootPath?: string;
-    tufCachePath?: string;
-} & FetchOptions;
-export type SignOptions = {
-    identityProvider?: Provider;
-    tlogUpload?: boolean;
-} & CAOptions & TLogOptions & TSAOptions & FetchOptions & IdentityProviderOptions;
-export type VerifyOptions = {
-    ctLogThreshold?: number;
-    tlogThreshold?: number;
-    certificateIssuer?: string;
-    certificateIdentityEmail?: string;
-    certificateIdentityURI?: string;
-    certificateOIDs?: Record<string, string>;
-    keySelector?: KeySelector;
-} & TLogOptions & TUFOptions;
-export type CreateVerifierOptions = {
-    keySelector?: KeySelector;
-} & TUFOptions;
-export declare const DEFAULT_FULCIO_URL = "https://fulcio.sigstore.dev";
-export declare const DEFAULT_REKOR_URL = "https://rekor.sigstore.dev";
-export declare const DEFAULT_RETRY: Retry;
-export declare const DEFAULT_TIMEOUT = 5000;
-export declare function createCAClient(options: CAOptions & FetchOptions): CA;
-export declare function createTLogClient(options: TLogOptions & FetchOptions): TLog;
-export declare function createTSAClient(options: TSAOptions & FetchOptions): TSA | undefined;
-export declare function artifactVerificationOptions(options: VerifyOptions): sigstore.RequiredArtifactVerificationOptions;
-export declare function identityProviders(options: IdentityProviderOptions): Provider[];
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/config.js b/deps/npm/node_modules/sigstore/dist/config.js
index 1a22c5fef313b7..43c236f0eebd07 100644
--- a/deps/npm/node_modules/sigstore/dist/config.js
+++ b/deps/npm/node_modules/sigstore/dist/config.js
@@ -22,11 +22,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
     __setModuleDefault(result, mod);
     return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.identityProviders = exports.artifactVerificationOptions = exports.createTSAClient = exports.createTLogClient = exports.createCAClient = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = void 0;
+exports.artifactVerificationOptions = exports.createBundleBuilder = exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -42,41 +39,72 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
-const ca_1 = require("./ca");
-const identity_1 = __importDefault(require("./identity"));
-const tlog_1 = require("./tlog");
-const tsa_1 = require("./tsa");
+const sign_1 = require("@sigstore/sign");
 const sigstore = __importStar(require("./types/sigstore"));
-exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
-exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
 exports.DEFAULT_RETRY = { retries: 2 };
 exports.DEFAULT_TIMEOUT = 5000;
-function createCAClient(options) {
-    return new ca_1.CAClient({
-        fulcioBaseURL: options.fulcioURL || exports.DEFAULT_FULCIO_URL,
-        retry: options.retry ?? exports.DEFAULT_RETRY,
-        timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
-    });
+function createBundleBuilder(bundleType, options) {
+    const bundlerOptions = {
+        signer: initSigner(options),
+        witnesses: initWitnesses(options),
+    };
+    switch (bundleType) {
+        case 'messageSignature':
+            return new sign_1.MessageSignatureBundleBuilder(bundlerOptions);
+        case 'dsseEnvelope':
+            return new sign_1.DSSEBundleBuilder(bundlerOptions);
+    }
 }
-exports.createCAClient = createCAClient;
-function createTLogClient(options) {
-    return new tlog_1.TLogClient({
-        rekorBaseURL: options.rekorURL || exports.DEFAULT_REKOR_URL,
+exports.createBundleBuilder = createBundleBuilder;
+// Instantiate the FulcioSigner based on the supplied options.
+function initSigner(options) {
+    return new sign_1.FulcioSigner({
+        fulcioBaseURL: options.fulcioURL,
+        identityProvider: options.identityProvider || initIdentityProvider(options),
         retry: options.retry ?? exports.DEFAULT_RETRY,
         timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
     });
 }
-exports.createTLogClient = createTLogClient;
-function createTSAClient(options) {
-    return options.tsaServerURL
-        ? new tsa_1.TSAClient({
+// Instantiate an identity provider based on the supplied options. If an
+// explicit identity token is provided, use that. Otherwise, use the CI
+// context provider.
+function initIdentityProvider(options) {
+    const token = options.identityToken;
+    if (token) {
+        return { getToken: () => Promise.resolve(token) };
+    }
+    else {
+        return new sign_1.CIContextProvider('sigstore');
+    }
+}
+// Instantiate a collection of witnesses based on the supplied options.
+function initWitnesses(options) {
+    const witnesses = [];
+    if (isRekorEnabled(options)) {
+        witnesses.push(new sign_1.RekorWitness({
+            rekorBaseURL: options.rekorURL,
+            fetchOnConflict: false,
+            retry: options.retry ?? exports.DEFAULT_RETRY,
+            timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
+        }));
+    }
+    if (isTSAEnabled(options)) {
+        witnesses.push(new sign_1.TSAWitness({
             tsaBaseURL: options.tsaServerURL,
             retry: options.retry ?? exports.DEFAULT_RETRY,
             timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
-        })
-        : undefined;
+        }));
+    }
+    return witnesses;
+}
+// Type assertion to ensure that Rekor is enabled
+function isRekorEnabled(options) {
+    return options.tlogUpload !== false;
+}
+// Type assertion to ensure that TSA is enabled
+function isTSAEnabled(options) {
+    return options.tsaServerURL !== undefined;
 }
-exports.createTSAClient = createTSAClient;
 // Assembles the ArtifactVerificationOptions from the supplied VerifyOptions.
 function artifactVerificationOptions(options) {
     // The trusted signers are only used if the options contain a certificate
@@ -102,7 +130,7 @@ function artifactVerificationOptions(options) {
                 },
             };
         }
-        const oids = Object.entries(options.certificateOIDs || {}).map(([oid, value]) => ({
+        const oids = Object.entries(options.certificateOIDs || /* istanbul ignore next */ {}).map(([oid, value]) => ({
             oid: { id: oid.split('.').map((s) => parseInt(s, 10)) },
             value: Buffer.from(value),
         }));
@@ -122,41 +150,16 @@ function artifactVerificationOptions(options) {
     // Construct the artifact verification options w/ defaults
     return {
         ctlogOptions: {
-            disable: false,
-            threshold: options.ctLogThreshold || 1,
+            disable: options.ctLogThreshold === 0,
+            threshold: options.ctLogThreshold ?? 1,
             detachedSct: false,
         },
         tlogOptions: {
-            disable: false,
-            threshold: options.tlogThreshold || 1,
+            disable: options.tlogThreshold === 0,
+            threshold: options.tlogThreshold ?? 1,
             performOnlineVerification: false,
         },
         signers,
     };
 }
 exports.artifactVerificationOptions = artifactVerificationOptions;
-// Translates the IdentityProviderOptions into a list of Providers which
-// should be queried to retrieve an identity token.
-function identityProviders(options) {
-    const idps = [];
-    const token = options.identityToken;
-    // If an explicit identity token is provided, use that. Setup a dummy
-    // provider that just returns the token. Otherwise, setup the CI context
-    // provider and (optionally) the OAuth provider.
-    if (token) {
-        idps.push({ getToken: () => Promise.resolve(token) });
-    }
-    else {
-        idps.push(identity_1.default.ciContextProvider());
-        if (options.oidcIssuer && options.oidcClientID) {
-            idps.push(identity_1.default.oauthProvider({
-                issuer: options.oidcIssuer,
-                clientID: options.oidcClientID,
-                clientSecret: options.oidcClientSecret,
-                redirectURL: options.oidcRedirectURL,
-            }));
-        }
-    }
-    return idps;
-}
-exports.identityProviders = identityProviders;
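For orientation, the refactored `config.js` above delegates signing to the `@sigstore/sign` bundle builders. A hedged sketch of how the new `createBundleBuilder` helper might be used (the module path and the `create({ data })` call shape are assumptions based on the `@sigstore/sign` API, not part of this package's public interface):

```javascript
// Illustrative only: createBundleBuilder is an internal helper of this package.
const { createBundleBuilder } = require('sigstore/dist/config');

async function main() {
  const bundler = createBundleBuilder('messageSignature', {
    identityToken: process.env.SIGSTORE_ID_TOKEN, // an explicit token wins over the CI context provider
    tlogUpload: true, // keeps the RekorWitness in the witness list (the default)
  });

  // BundleBuilder#create comes from @sigstore/sign; the artifact shape is assumed.
  const bundle = await bundler.create({ data: Buffer.from('hello world') });
  console.log(JSON.stringify(bundle));
}

main();
```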
diff --git a/deps/npm/node_modules/sigstore/dist/error.d.ts b/deps/npm/node_modules/sigstore/dist/error.d.ts
deleted file mode 100644
index c03bbc31697745..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/error.d.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-declare class BaseError extends Error {
-    cause: any | undefined;
-    constructor(message: string, cause?: any);
-}
-export declare class VerificationError extends BaseError {
-}
-export declare class ValidationError extends BaseError {
-}
-export declare class PolicyError extends BaseError {
-}
-type InternalErrorCode = 'TLOG_FETCH_ENTRY_ERROR' | 'TLOG_CREATE_ENTRY_ERROR' | 'CA_CREATE_SIGNING_CERTIFICATE_ERROR' | 'TSA_CREATE_TIMESTAMP_ERROR' | 'TUF_FIND_TARGET_ERROR' | 'TUF_REFRESH_METADATA_ERROR' | 'TUF_DOWNLOAD_TARGET_ERROR' | 'TUF_READ_TARGET_ERROR';
-export declare class InternalError extends BaseError {
-    code: InternalErrorCode;
-    constructor({ code, message, cause, }: {
-        code: InternalErrorCode;
-        message: string;
-        cause?: any;
-    });
-}
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/error.js b/deps/npm/node_modules/sigstore/dist/error.js
index cee15dff90b614..b0a7dbc83f7105 100644
--- a/deps/npm/node_modules/sigstore/dist/error.js
+++ b/deps/npm/node_modules/sigstore/dist/error.js
@@ -1,6 +1,4 @@
 "use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.InternalError = exports.PolicyError = exports.ValidationError = exports.VerificationError = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -16,27 +14,22 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
-/* eslint-disable @typescript-eslint/no-explicit-any */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PolicyError = exports.VerificationError = void 0;
 class BaseError extends Error {
-    constructor(message, cause) {
+    constructor({ code, message, cause, }) {
         super(message);
         this.name = this.constructor.name;
+        this.code = code;
         this.cause = cause;
     }
 }
 class VerificationError extends BaseError {
+    constructor(message) {
+        super({ code: 'VERIFICATION_ERROR', message });
+    }
 }
 exports.VerificationError = VerificationError;
-class ValidationError extends BaseError {
-}
-exports.ValidationError = ValidationError;
 class PolicyError extends BaseError {
 }
 exports.PolicyError = PolicyError;
-class InternalError extends BaseError {
-    constructor({ code, message, cause, }) {
-        super(message, cause);
-        this.code = code;
-    }
-}
-exports.InternalError = InternalError;
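With this change every public error carries a `code` alongside its `message`, so consumers can branch on it. A hedged sketch (the bundle and artifact arguments are illustrative; the exports match the rewritten entry point later in this patch):

```javascript
const { verify, PolicyError, VerificationError } = require('sigstore');

async function checkBundle(bundle, artifact) {
  try {
    await verify(bundle, artifact);
    return true;
  } catch (err) {
    // 'UNTRUSTED_SIGNER_ERROR' is the code set by verifySignerIdentity above.
    if (err instanceof PolicyError && err.code === 'UNTRUSTED_SIGNER_ERROR') {
      console.error('signer identity did not match the verification policy');
    } else if (err instanceof VerificationError) {
      console.error(`verification failed: ${err.message}`);
    } else {
      throw err;
    }
    return false;
  }
}
```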
diff --git a/deps/npm/node_modules/sigstore/dist/external/error.d.ts b/deps/npm/node_modules/sigstore/dist/external/error.d.ts
deleted file mode 100644
index 87a4bc5451a3de..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/external/error.d.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-import fetch from 'make-fetch-happen';
-type Response = Awaited<ReturnType<typeof fetch>>;
-export declare class HTTPError extends Error {
-    response: Response;
-    statusCode: number;
-    location?: string;
-    constructor(response: Response);
-}
-export declare const checkStatus: (response: Response) => Response;
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/external/error.js b/deps/npm/node_modules/sigstore/dist/external/error.js
deleted file mode 100644
index d1e1c3df8a8787..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/external/error.js
+++ /dev/null
@@ -1,21 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.checkStatus = exports.HTTPError = void 0;
-class HTTPError extends Error {
-    constructor(response) {
-        super(`HTTP Error: ${response.status} ${response.statusText}`);
-        this.response = response;
-        this.statusCode = response.status;
-        this.location = response.headers?.get('Location') || undefined;
-    }
-}
-exports.HTTPError = HTTPError;
-const checkStatus = (response) => {
-    if (response.ok) {
-        return response;
-    }
-    else {
-        throw new HTTPError(response);
-    }
-};
-exports.checkStatus = checkStatus;
diff --git a/deps/npm/node_modules/sigstore/dist/external/fulcio.d.ts b/deps/npm/node_modules/sigstore/dist/external/fulcio.d.ts
deleted file mode 100644
index 64b0fc5e347982..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/external/fulcio.d.ts
+++ /dev/null
@@ -1,38 +0,0 @@
-import type { FetchOptions } from '../types/fetch';
-export type FulcioOptions = {
-    baseURL: string;
-} & FetchOptions;
-export interface SigningCertificateRequest {
-    credentials: {
-        oidcIdentityToken: string;
-    };
-    publicKeyRequest: {
-        publicKey: {
-            algorithm: string;
-            content: string;
-        };
-        proofOfPossession: string;
-    };
-}
-export interface SigningCertificateResponse {
-    signedCertificateEmbeddedSct?: {
-        chain: {
-            certificates: string[];
-        };
-    };
-    signedCertificateDetachedSct?: {
-        chain: {
-            certificates: string[];
-        };
-        signedCertificateTimestamp: string;
-    };
-}
-/**
- * Fulcio API client.
- */
-export declare class Fulcio {
-    private fetch;
-    private baseUrl;
-    constructor(options: FulcioOptions);
-    createSigningCertificate(request: SigningCertificateRequest): Promise<SigningCertificateResponse>;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/external/index.d.ts b/deps/npm/node_modules/sigstore/dist/external/index.d.ts
deleted file mode 100644
index ef28eca4a951dd..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/external/index.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-export { HTTPError } from './error';
-export { Fulcio } from './fulcio';
-export { Rekor } from './rekor';
-export { TimestampAuthority } from './tsa';
diff --git a/deps/npm/node_modules/sigstore/dist/external/index.js b/deps/npm/node_modules/sigstore/dist/external/index.js
deleted file mode 100644
index f40816e9b7ca40..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/external/index.js
+++ /dev/null
@@ -1,26 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimestampAuthority = exports.Rekor = exports.Fulcio = exports.HTTPError = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var error_1 = require("./error");
-Object.defineProperty(exports, "HTTPError", { enumerable: true, get: function () { return error_1.HTTPError; } });
-var fulcio_1 = require("./fulcio");
-Object.defineProperty(exports, "Fulcio", { enumerable: true, get: function () { return fulcio_1.Fulcio; } });
-var rekor_1 = require("./rekor");
-Object.defineProperty(exports, "Rekor", { enumerable: true, get: function () { return rekor_1.Rekor; } });
-var tsa_1 = require("./tsa");
-Object.defineProperty(exports, "TimestampAuthority", { enumerable: true, get: function () { return tsa_1.TimestampAuthority; } });
diff --git a/deps/npm/node_modules/sigstore/dist/external/rekor.d.ts b/deps/npm/node_modules/sigstore/dist/external/rekor.d.ts
deleted file mode 100644
index 6729ad3e2aacf6..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/external/rekor.d.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-import type { LogEntry, ProposedDSSEEntry, ProposedEntry, ProposedHashedRekordEntry, ProposedIntotoEntry, InclusionProof as RekorInclusionProof, SearchIndex, SearchLogQuery } from '@sigstore/rekor-types';
-import type { FetchOptions } from '../types/fetch';
-export type { ProposedDSSEEntry, ProposedEntry, ProposedHashedRekordEntry, ProposedIntotoEntry, RekorInclusionProof, SearchIndex, SearchLogQuery, };
-export type Entry = {
-    uuid: string;
-} & LogEntry['x'];
-export type RekorOptions = {
-    baseURL: string;
-} & FetchOptions;
-/**
- * Rekor API client.
- */
-export declare class Rekor {
-    private fetch;
-    private baseUrl;
-    constructor(options: RekorOptions);
-    /**
-     * Create a new entry in the Rekor log.
-     * @param proposedEntry {ProposedEntry} Data to create a new entry
-     * @returns {Promise<Entry>} The created entry
-     */
-    createEntry(proposedEntry: ProposedEntry): Promise<Entry>;
-    /**
-     * Get an entry from the Rekor log.
-     * @param uuid {string} The UUID of the entry to retrieve
-     * @returns {Promise<Entry>} The retrieved entry
-     */
-    getEntry(uuid: string): Promise<Entry>;
-    /**
-     * Search the Rekor log index for entries matching the given query.
-     * @param opts {SearchIndex} Options to search the Rekor log
-     * @returns {Promise<string[]>} UUIDs of matching entries
-     */
-    searchIndex(opts: SearchIndex): Promise<string[]>;
-    /**
-     * Search the Rekor log for entries matching the given query.
-     * @param opts {SearchLogQuery} Query to search the Rekor log
-     * @returns {Promise<Entry[]>} List of matching entries
-     */
-    searchLog(opts: SearchLogQuery): Promise<Entry[]>;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/external/tsa.d.ts b/deps/npm/node_modules/sigstore/dist/external/tsa.d.ts
deleted file mode 100644
index 9b5f31151a83d8..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/external/tsa.d.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-/// <reference types="node" />
-import type { FetchOptions } from '../types/fetch';
-export interface TimestampRequest {
-    artifactHash: string;
-    hashAlgorithm: string;
-    certificates?: boolean;
-    nonce?: number;
-    tsaPolicyOID?: string;
-}
-export type TimestampAuthorityOptions = {
-    baseURL: string;
-} & FetchOptions;
-export declare class TimestampAuthority {
-    private fetch;
-    private baseUrl;
-    constructor(options: TimestampAuthorityOptions);
-    createTimestamp(request: TimestampRequest): Promise<Buffer>;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/identity/ci.d.ts b/deps/npm/node_modules/sigstore/dist/identity/ci.d.ts
deleted file mode 100644
index 428606f26524bb..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/identity/ci.d.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Provider } from './provider';
-/**
- * CIContextProvider is a composite identity provider which will iterate
- * over all of the CI-specific providers and return the token from the first
- * one that resolves.
- */
-export declare class CIContextProvider implements Provider {
-    private audience;
-    constructor(audience: string);
-    getToken(): Promise<string>;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/identity/index.d.ts b/deps/npm/node_modules/sigstore/dist/identity/index.d.ts
deleted file mode 100644
index 3eb0b444d120ff..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/identity/index.d.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import { Provider } from './provider';
-/**
- * oauthProvider returns a new Provider instance which attempts to retrieve
- * an identity token from the configured OAuth2 issuer.
- *
- * @param issuer Base URL of the issuer
- * @param clientID Client ID for the issuer
- * @param clientSecret Client secret for the issuer (optional)
- * @returns {Provider}
- */
-declare function oauthProvider(options: {
-    issuer: string;
-    clientID: string;
-    clientSecret?: string;
-    redirectURL?: string;
-}): Provider;
-/**
- * ciContextProvider returns a new Provider instance which attempts to retrieve
- * an identity token from the CI context.
- *
- * @param audience audience claim for the generated token
- * @returns {Provider}
- */
-declare function ciContextProvider(audience?: string): Provider;
-declare const _default: {
-    ciContextProvider: typeof ciContextProvider;
-    oauthProvider: typeof oauthProvider;
-};
-export default _default;
-export { Provider } from './provider';
diff --git a/deps/npm/node_modules/sigstore/dist/identity/index.js b/deps/npm/node_modules/sigstore/dist/identity/index.js
deleted file mode 100644
index 351d607106700f..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/identity/index.js
+++ /dev/null
@@ -1,51 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const ci_1 = require("./ci");
-const issuer_1 = require("./issuer");
-const oauth_1 = require("./oauth");
-/**
- * oauthProvider returns a new Provider instance which attempts to retrieve
- * an identity token from the configured OAuth2 issuer.
- *
- * @param issuer Base URL of the issuer
- * @param clientID Client ID for the issuer
- * @param clientSecret Client secret for the issuer (optional)
- * @returns {Provider}
- */
-function oauthProvider(options) {
-    return new oauth_1.OAuthProvider({
-        issuer: new issuer_1.Issuer(options.issuer),
-        clientID: options.clientID,
-        clientSecret: options.clientSecret,
-        redirectURL: options.redirectURL,
-    });
-}
-/**
- * ciContextProvider returns a new Provider instance which attempts to retrieve
- * an identity token from the CI context.
- *
- * @param audience audience claim for the generated token
- * @returns {Provider}
- */
-function ciContextProvider(audience = 'sigstore') {
-    return new ci_1.CIContextProvider(audience);
-}
-exports.default = {
-    ciContextProvider,
-    oauthProvider,
-};
diff --git a/deps/npm/node_modules/sigstore/dist/identity/issuer.d.ts b/deps/npm/node_modules/sigstore/dist/identity/issuer.d.ts
deleted file mode 100644
index 37ad713f4d89a7..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/identity/issuer.d.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-/**
- * The Issuer represents a single OAuth2 provider.
- *
- * The Issuer is configured with a provider's base OAuth2 endpoint which is
- * used to retrieve the associated configuration information.
- */
-export declare class Issuer {
-    private baseURL;
-    private fetch;
-    private config?;
-    constructor(baseURL: string);
-    authEndpoint(): Promise<string>;
-    tokenEndpoint(): Promise<string>;
-    private loadOpenIDConfig;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/identity/issuer.js b/deps/npm/node_modules/sigstore/dist/identity/issuer.js
deleted file mode 100644
index 2bf6c20f34932d..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/identity/issuer.js
+++ /dev/null
@@ -1,53 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Issuer = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-// Standard endpoint for retrieving OpenID configuration information
-const OPENID_CONFIG_PATH = '/.well-known/openid-configuration';
-/**
- * The Issuer represents a single OAuth2 provider.
- *
- * The Issuer is configured with a provider's base OAuth2 endpoint which is
- * used to retrieve the associated configuration information.
- */
-class Issuer {
-    constructor(baseURL) {
-        this.baseURL = baseURL;
-        this.fetch = make_fetch_happen_1.default.defaults({ retry: 2 });
-    }
-    async authEndpoint() {
-        if (!this.config) {
-            this.config = await this.loadOpenIDConfig();
-        }
-        return this.config.authorization_endpoint;
-    }
-    async tokenEndpoint() {
-        if (!this.config) {
-            this.config = await this.loadOpenIDConfig();
-        }
-        return this.config.token_endpoint;
-    }
-    async loadOpenIDConfig() {
-        const url = `${this.baseURL}${OPENID_CONFIG_PATH}`;
-        return this.fetch(url).then((res) => res.json());
-    }
-}
-exports.Issuer = Issuer;
diff --git a/deps/npm/node_modules/sigstore/dist/identity/oauth.d.ts b/deps/npm/node_modules/sigstore/dist/identity/oauth.d.ts
deleted file mode 100644
index 3c9fae9ac15387..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/identity/oauth.d.ts
+++ /dev/null
@@ -1,26 +0,0 @@
-import { Issuer } from './issuer';
-import { Provider } from './provider';
-interface OAuthProviderOptions {
-    issuer: Issuer;
-    clientID: string;
-    clientSecret?: string;
-    redirectURL?: string;
-}
-export declare class OAuthProvider implements Provider {
-    private clientID;
-    private clientSecret;
-    private issuer;
-    private codeVerifier;
-    private state;
-    private redirectURI?;
-    constructor(options: OAuthProviderOptions);
-    getToken(): Promise<string>;
-    private initiateAuthRequest;
-    private getIDToken;
-    private getBasicAuthHeaderValue;
-    private getAuthRequestURL;
-    private getAuthRequestParams;
-    private getCodeChallenge;
-    private openURL;
-}
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/identity/oauth.js b/deps/npm/node_modules/sigstore/dist/identity/oauth.js
deleted file mode 100644
index 7cb5a00cdb6942..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/identity/oauth.js
+++ /dev/null
@@ -1,197 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.OAuthProvider = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const assert_1 = __importDefault(require("assert"));
-const child_process_1 = __importDefault(require("child_process"));
-const http_1 = __importDefault(require("http"));
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const url_1 = require("url");
-const util_1 = require("../util");
-class OAuthProvider {
-    constructor(options) {
-        this.clientID = options.clientID;
-        this.clientSecret = options.clientSecret || '';
-        this.issuer = options.issuer;
-        this.redirectURI = options.redirectURL;
-        this.codeVerifier = generateRandomString(32);
-        this.state = generateRandomString(16);
-    }
-    async getToken() {
-        const authCode = await this.initiateAuthRequest();
-        return this.getIDToken(authCode);
-    }
-    // Initiates the authorization request. This will start an HTTP server to
-    // receive the post-auth redirect and then open the user's default browser to
-    // the provider's authorization page.
-    async initiateAuthRequest() {
-        const server = http_1.default.createServer();
-        const sockets = new Set();
-        // Start server and wait till it is listening. If a redirect URL was
-        // provided, use that. Otherwise, use a random port and construct the
-        // redirect URL.
-        await new Promise((resolve) => {
-            if (this.redirectURI) {
-                const url = new url_1.URL(this.redirectURI);
-                server.listen(Number(url.port), url.hostname, resolve);
-            }
-            else {
-                server.listen(0, resolve);
-                // Get port the server is listening on and construct the server URL
-                const port = server.address().port;
-                this.redirectURI = `http://localhost:${port}`;
-            }
-        });
-        // Keep track of connections to the server so we can force a shutdown
-        server.on('connection', (socket) => {
-            sockets.add(socket);
-            socket.once('close', () => {
-                sockets.delete(socket);
-            });
-        });
-        const result = new Promise((resolve, reject) => {
-            // Set up handler for post-auth redirect
-            server.on('request', (req, res) => {
-                if (!req.url) {
-                    reject('invalid server request');
-                    return;
-                }
-                res.writeHead(200);
-                res.end('Auth Successful');
-                // Parse incoming request URL
-                const query = new url_1.URL(req.url, this.redirectURI).searchParams;
-                // Check to see if the state matches
-                if (query.get('state') !== this.state) {
-                    reject('invalid state value');
-                    return;
-                }
-                const authCode = query.get('code');
-                // Force-close any open connections to the server so we can get a
-                // clean shutdown
-                for (const socket of sockets) {
-                    socket.destroy();
-                    sockets.delete(socket);
-                }
-                // Return auth code once we've shutdown server
-                server.close(() => {
-                    if (!authCode) {
-                        reject('authorization code not found');
-                    }
-                    else {
-                        resolve(authCode);
-                    }
-                });
-            });
-        });
-        try {
-            // Open browser to start authorization request
-            const authBaseURL = await this.issuer.authEndpoint();
-            const authURL = this.getAuthRequestURL(authBaseURL);
-            await this.openURL(authURL);
-        }
-        catch (err) {
-            // Prevent leaked server handler on error
-            server.close();
-            throw err;
-        }
-        return result;
-    }
-    // Uses the provided authorization code, to retrieve the ID token from the
-    // provider
-    async getIDToken(authCode) {
-        (0, assert_1.default)(this.redirectURI);
-        const tokenEndpointURL = await this.issuer.tokenEndpoint();
-        const params = new url_1.URLSearchParams();
-        params.append('grant_type', 'authorization_code');
-        params.append('code', authCode);
-        params.append('redirect_uri', this.redirectURI);
-        params.append('code_verifier', this.codeVerifier);
-        const response = await (0, make_fetch_happen_1.default)(tokenEndpointURL, {
-            method: 'POST',
-            headers: { Authorization: `Basic ${this.getBasicAuthHeaderValue()}` },
-            body: params,
-        }).then((r) => r.json());
-        return response.id_token;
-    }
-    // Construct the basic auth header value from the client ID and secret
-    getBasicAuthHeaderValue() {
-        return util_1.encoding.base64Encode(`${this.clientID}:${this.clientSecret}`);
-    }
-    // Generate starting URL for authorization request
-    getAuthRequestURL(baseURL) {
-        const params = this.getAuthRequestParams();
-        return `${baseURL}?${params.toString()}`;
-    }
-    // Collect parameters for authorization request
-    getAuthRequestParams() {
-        (0, assert_1.default)(this.redirectURI);
-        const codeChallenge = this.getCodeChallenge();
-        return new url_1.URLSearchParams({
-            response_type: 'code',
-            client_id: this.clientID,
-            client_secret: this.clientSecret,
-            scope: 'openid email',
-            redirect_uri: this.redirectURI,
-            code_challenge: codeChallenge,
-            code_challenge_method: 'S256',
-            state: this.state,
-            nonce: generateRandomString(16),
-        });
-    }
-    // Generate code challenge for authorization request
-    getCodeChallenge() {
-        return util_1.encoding.base64URLEscape(util_1.crypto.hash(this.codeVerifier).toString('base64'));
-    }
-    // Open the supplied URL in the user's default browser
-    async openURL(url) {
-        return new Promise((resolve, reject) => {
-            let open = null;
-            let command = `"${url}"`;
-            switch (process.platform) {
-                case 'darwin':
-                    open = 'open';
-                    break;
-                case 'linux':
-                case 'freebsd':
-                case 'netbsd':
-                case 'openbsd':
-                    open = 'xdg-open';
-                    break;
-                case 'win32':
-                    open = 'start';
-                    command = `"" ${command}`;
-                    break;
-                default:
-                    return reject(`OAuth: unsupported platform: ${process.platform}`);
-            }
-            console.error(`Your browser will now be opened to: ${url}`);
-            child_process_1.default.exec(`${open} ${command}`, undefined, (err) => {
-                if (err) {
-                    reject(err);
-                }
-                else {
-                    resolve();
-                }
-            });
-        });
-    }
-}
-exports.OAuthProvider = OAuthProvider;
-// Generate random code verifier value
-function generateRandomString(len) {
-    return util_1.encoding.base64URLEscape(util_1.crypto.randomBytes(len).toString('base64'));
-}
diff --git a/deps/npm/node_modules/sigstore/dist/identity/provider.d.ts b/deps/npm/node_modules/sigstore/dist/identity/provider.d.ts
deleted file mode 100644
index 95ec03e9ffff6c..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/identity/provider.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export interface Provider {
-    getToken: () => Promise<string>;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/index.d.ts b/deps/npm/node_modules/sigstore/dist/index.d.ts
deleted file mode 100644
index dbac0640092b04..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/index.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-export type { Provider as IdentityProvider } from './identity';
-export * as sigstore from './sigstore';
diff --git a/deps/npm/node_modules/sigstore/dist/index.js b/deps/npm/node_modules/sigstore/dist/index.js
index 126fce58e45bde..341c1fa504d1e8 100644
--- a/deps/npm/node_modules/sigstore/dist/index.js
+++ b/deps/npm/node_modules/sigstore/dist/index.js
@@ -1,27 +1,34 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.sigstore = void 0;
-exports.sigstore = __importStar(require("./sigstore"));
+exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var bundle_1 = require("@sigstore/bundle");
+Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } });
+var sign_1 = require("@sigstore/sign");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } });
+Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } });
+var tuf_1 = require("@sigstore/tuf");
+Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
+Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
+var sigstore_1 = require("./sigstore");
+Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } });
+Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } });
+Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } });
+Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } });
diff --git a/deps/npm/node_modules/sigstore/dist/sign.d.ts b/deps/npm/node_modules/sigstore/dist/sign.d.ts
deleted file mode 100644
index 7d903c06e120a0..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/sign.d.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-/// <reference types="node" />
-import { SignerFunc } from './types/signature';
-import * as sigstore from './types/sigstore';
-import type { CA } from './ca';
-import type { Provider } from './identity';
-import type { TLog } from './tlog';
-import type { TSA } from './tsa';
-export interface SignOptions {
-    ca: CA;
-    tlog: TLog;
-    tsa?: TSA;
-    identityProviders: Provider[];
-    tlogUpload?: boolean;
-    signer?: SignerFunc;
-}
-export declare class Signer {
-    private ca;
-    private tlog;
-    private tsa?;
-    private tlogUpload;
-    private signer;
-    private identityProviders;
-    constructor(options: SignOptions);
-    signBlob(payload: Buffer): Promise<sigstore.Bundle>;
-    signAttestation(payload: Buffer, payloadType: string): Promise<sigstore.Bundle>;
-    private signWithEphemeralKey;
-    private getIdentityToken;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/sign.js b/deps/npm/node_modules/sigstore/dist/sign.js
deleted file mode 100644
index 96e6272750b493..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/sign.js
+++ /dev/null
@@ -1,120 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signer = void 0;
-const sigstore = __importStar(require("./types/sigstore"));
-const util_1 = require("./util");
-class Signer {
-    constructor(options) {
-        this.identityProviders = [];
-        this.ca = options.ca;
-        this.tlog = options.tlog;
-        this.tsa = options.tsa;
-        this.identityProviders = options.identityProviders;
-        this.tlogUpload = options.tlogUpload ?? true;
-        this.signer = options.signer || this.signWithEphemeralKey.bind(this);
-    }
-    async signBlob(payload) {
-        // Get signature and verification material for payload
-        const sigMaterial = await this.signer(payload);
-        // Calculate artifact digest
-        const digest = util_1.crypto.hash(payload);
-        // Create a Rekor entry (if tlogUpload is enabled)
-        const entry = this.tlogUpload
-            ? await this.tlog.createMessageSignatureEntry(digest, sigMaterial)
-            : undefined;
-        return sigstore.toMessageSignatureBundle({
-            digest,
-            signature: sigMaterial,
-            tlogEntry: entry,
-            timestamp: this.tsa
-                ? await this.tsa.createTimestamp(sigMaterial.signature)
-                : undefined,
-        });
-    }
-    async signAttestation(payload, payloadType) {
-        // Pre-authentication encoding to be signed
-        const paeBuffer = util_1.dsse.preAuthEncoding(payloadType, payload);
-        // Get signature and verification material for pae
-        const sigMaterial = await this.signer(paeBuffer);
-        const envelope = {
-            payloadType,
-            payload: payload,
-            signatures: [
-                {
-                    keyid: sigMaterial.key?.id || '',
-                    sig: sigMaterial.signature,
-                },
-            ],
-        };
-        // Create a Rekor entry (if tlogUpload is enabled)
-        const entry = this.tlogUpload
-            ? await this.tlog.createDSSEEntry(envelope, sigMaterial)
-            : undefined;
-        return sigstore.toDSSEBundle({
-            envelope,
-            signature: sigMaterial,
-            tlogEntry: entry,
-            timestamp: this.tsa
-                ? await this.tsa.createTimestamp(sigMaterial.signature)
-                : undefined,
-        });
-    }
-    async signWithEphemeralKey(payload) {
-        // Create ephemeral key pair
-        const keypair = util_1.crypto.generateKeyPair();
-        // Retrieve identity token from one of the supplied identity providers
-        const identityToken = await this.getIdentityToken();
-        // Extract challenge claim from OIDC token
-        const subject = util_1.oidc.extractJWTSubject(identityToken);
-        // Construct challenge value by signing the subject with the private key
-        const challenge = util_1.crypto.signBlob(Buffer.from(subject), keypair.privateKey);
-        // Create signing certificate
-        const certificates = await this.ca.createSigningCertificate(identityToken, keypair.publicKey, challenge);
-        // Generate artifact signature
-        const signature = util_1.crypto.signBlob(payload, keypair.privateKey);
-        return {
-            signature,
-            certificates,
-            key: undefined,
-        };
-    }
-    async getIdentityToken() {
-        const aggErrs = [];
-        for (const provider of this.identityProviders) {
-            try {
-                const token = await provider.getToken();
-                if (token) {
-                    return token;
-                }
-            }
-            catch (err) {
-                aggErrs.push(err);
-            }
-        }
-        throw new Error(`Identity token providers failed: ${aggErrs}`);
-    }
-}
-exports.Signer = Signer;
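
For orientation: the Signer class deleted above implemented the keyless flow that now lives in @sigstore/sign. It generates an ephemeral P-256 key pair, trades an OIDC token to Fulcio for a short-lived certificate, then signs the payload. A minimal sketch of just the ephemeral-key portion, using only Node's built-in crypto module (the Fulcio exchange is elided; this is an illustration, not the library's API):

    const crypto = require('crypto');

    // Throwaway P-256 key pair, as in signWithEphemeralKey above.
    const { privateKey, publicKey } = crypto.generateKeyPairSync('ec', {
      namedCurve: 'P-256',
    });

    // SHA-256 signature over the payload, as util.crypto.signBlob did.
    const payload = Buffer.from('hello, sigstore');
    const signature = crypto.sign('sha256', payload, privateKey);

    // Locally verifiable; in the real flow the public key is instead bound
    // to an OIDC identity via the Fulcio-issued signing certificate.
    console.log(crypto.verify('sha256', payload, publicKey, signature)); // true
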
diff --git a/deps/npm/node_modules/sigstore/dist/sigstore-utils.d.ts b/deps/npm/node_modules/sigstore/dist/sigstore-utils.d.ts
deleted file mode 100644
index 38f15dc7340d29..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/sigstore-utils.d.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-/// <reference types="node" />
-import { SignOptions } from './config';
-import { SignerFunc } from './types/signature';
-import * as sigstore from './types/sigstore';
-export declare function createDSSEEnvelope(payload: Buffer, payloadType: string, options: {
-    signer: SignerFunc;
-}): Promise<sigstore.SerializedEnvelope>;
-export declare function createRekorEntry(dsseEnvelope: sigstore.SerializedEnvelope, publicKey: string, options?: SignOptions): Promise<sigstore.SerializedBundle>;
diff --git a/deps/npm/node_modules/sigstore/dist/sigstore-utils.js b/deps/npm/node_modules/sigstore/dist/sigstore-utils.js
deleted file mode 100644
index dc75692f40bf02..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/sigstore-utils.js
+++ /dev/null
@@ -1,80 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.createRekorEntry = exports.createDSSEEnvelope = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const config_1 = require("./config");
-const signature_1 = require("./types/signature");
-const sigstore = __importStar(require("./types/sigstore"));
-const util_1 = require("./util");
-async function createDSSEEnvelope(payload, payloadType, options) {
-    // Pre-authentication encoding to be signed
-    const paeBuffer = util_1.dsse.preAuthEncoding(payloadType, payload);
-    // Get signature and verification material for pae
-    const sigMaterial = await options.signer(paeBuffer);
-    const envelope = {
-        payloadType,
-        payload,
-        signatures: [
-            {
-                keyid: sigMaterial.key?.id || '',
-                sig: sigMaterial.signature,
-            },
-        ],
-    };
-    return sigstore.Envelope.toJSON(envelope);
-}
-exports.createDSSEEnvelope = createDSSEEnvelope;
-// Accepts a signed DSSE envelope and a PEM-encoded public key to be added to the
-// transparency log. Returns a Sigstore bundle suitable for offline verification.
-async function createRekorEntry(dsseEnvelope, publicKey, options = {}) {
-    const envelope = sigstore.Envelope.fromJSON(dsseEnvelope);
-    const tlog = (0, config_1.createTLogClient)(options);
-    const sigMaterial = (0, signature_1.extractSignatureMaterial)(envelope, publicKey);
-    const entry = await tlog.createDSSEEntry(envelope, sigMaterial, {
-        fetchOnConflict: true,
-    });
-    const bundle = sigstore.toDSSEBundle({
-        envelope,
-        signature: sigMaterial,
-        tlogEntry: entry,
-    });
-    return sigstore.bundleToJSON(bundle);
-}
-exports.createRekorEntry = createRekorEntry;
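
The createDSSEEnvelope helper removed here leaned on util.dsse.preAuthEncoding. That pre-authentication encoding (PAE) is fixed by the DSSE spec, so it is easy to show standalone; a sketch (not the library's export):

    // DSSE PAE: "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body,
    // where LEN is a decimal byte count. The signature is computed over
    // this buffer rather than over the raw payload.
    function preAuthEncoding(payloadType, payload) {
      const prefix = `DSSEv1 ${Buffer.byteLength(payloadType)} ${payloadType} ${payload.length} `;
      return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]);
    }

    preAuthEncoding('application/vnd.in-toto+json', Buffer.from('{}'));
    // => Buffer containing "DSSEv1 28 application/vnd.in-toto+json 2 {}"
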
diff --git a/deps/npm/node_modules/sigstore/dist/sigstore.d.ts b/deps/npm/node_modules/sigstore/dist/sigstore.d.ts
deleted file mode 100644
index 1da5e8ecc5fe5b..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/sigstore.d.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-/// <reference types="node" />
-import * as tuf from '@sigstore/tuf';
-import * as config from './config';
-import * as sigstore from './types/sigstore';
-export declare function sign(payload: Buffer, options?: config.SignOptions): Promise<sigstore.SerializedBundle>;
-export declare function attest(payload: Buffer, payloadType: string, options?: config.SignOptions): Promise<sigstore.SerializedBundle>;
-export declare function verify(bundle: sigstore.SerializedBundle, payload?: Buffer, options?: config.VerifyOptions): Promise<void>;
-export interface BundleVerifier {
-    verify(bundle: sigstore.SerializedBundle): void;
-}
-export declare function createVerifier(options: config.CreateVerifierOptions): Promise<BundleVerifier>;
-declare const tufUtils: {
-    client: (options?: config.TUFOptions) => Promise<tuf.TUF>;
-    getTarget: (path: string, options?: config.TUFOptions) => Promise<string>;
-};
-export type { TUF } from '@sigstore/tuf';
-export type { SignOptions, VerifyOptions } from './config';
-export { InternalError, PolicyError, ValidationError, VerificationError, } from './error';
-export * as utils from './sigstore-utils';
-export type { SerializedBundle as Bundle, SerializedEnvelope as Envelope, } from './types/sigstore';
-export { tufUtils as tuf };
-export declare const DEFAULT_FULCIO_URL = "https://fulcio.sigstore.dev";
-export declare const DEFAULT_REKOR_URL = "https://rekor.sigstore.dev";
diff --git a/deps/npm/node_modules/sigstore/dist/sigstore.js b/deps/npm/node_modules/sigstore/dist/sigstore.js
index dca476dd292030..24fff291ab2b7e 100644
--- a/deps/npm/node_modules/sigstore/dist/sigstore.js
+++ b/deps/npm/node_modules/sigstore/dist/sigstore.js
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.tuf = exports.utils = exports.VerificationError = exports.ValidationError = exports.PolicyError = exports.InternalError = exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0;
+exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0;
 /*
 Copyright 2023 The Sigstore Authors.
 
@@ -39,60 +39,40 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
+const bundle_1 = require("@sigstore/bundle");
 const tuf = __importStar(require("@sigstore/tuf"));
 const config = __importStar(require("./config"));
-const sign_1 = require("./sign");
-const sigstore = __importStar(require("./types/sigstore"));
 const verify_1 = require("./verify");
-async function sign(payload, options = {}) {
-    const ca = config.createCAClient(options);
-    const tlog = config.createTLogClient(options);
-    const idps = config.identityProviders(options);
-    const signer = new sign_1.Signer({
-        ca,
-        tlog,
-        identityProviders: options.identityProvider
-            ? [options.identityProvider]
-            : idps,
-        tlogUpload: options.tlogUpload,
-    });
-    const bundle = await signer.signBlob(payload);
-    return sigstore.bundleToJSON(bundle);
+async function sign(payload,
+/* istanbul ignore next */
+options = {}) {
+    const bundler = config.createBundleBuilder('messageSignature', options);
+    const bundle = await bundler.create({ data: payload });
+    return (0, bundle_1.bundleToJSON)(bundle);
 }
 exports.sign = sign;
-async function attest(payload, payloadType, options = {}) {
-    const ca = config.createCAClient(options);
-    const tlog = config.createTLogClient(options);
-    const tsa = config.createTSAClient(options);
-    const idps = config.identityProviders(options);
-    const signer = new sign_1.Signer({
-        ca,
-        tlog,
-        tsa,
-        identityProviders: options.identityProvider
-            ? [options.identityProvider]
-            : idps,
-        tlogUpload: options.tlogUpload,
-    });
-    const bundle = await signer.signAttestation(payload, payloadType);
-    return sigstore.bundleToJSON(bundle);
+async function attest(payload, payloadType,
+/* istanbul ignore next */
+options = {}) {
+    const bundler = config.createBundleBuilder('dsseEnvelope', options);
+    const bundle = await bundler.create({ data: payload, type: payloadType });
+    return (0, bundle_1.bundleToJSON)(bundle);
 }
 exports.attest = attest;
-async function verify(bundle, payload, options = {}) {
-    const trustedRoot = await tuf.getTrustedRoot({
-        mirrorURL: options.tufMirrorURL,
-        rootPath: options.tufRootPath,
-        cachePath: options.tufCachePath,
-        retry: options.retry ?? config.DEFAULT_RETRY,
-        timeout: options.timeout ?? config.DEFAULT_TIMEOUT,
-    });
-    const verifier = new verify_1.Verifier(trustedRoot, options.keySelector);
-    const deserializedBundle = sigstore.bundleFromJSON(bundle);
-    const opts = config.artifactVerificationOptions(options);
-    return verifier.verify(deserializedBundle, opts, payload);
+async function verify(bundle, dataOrOptions, options) {
+    let data;
+    if (Buffer.isBuffer(dataOrOptions)) {
+        data = dataOrOptions;
+    }
+    else {
+        options = dataOrOptions;
+    }
+    return createVerifier(options).then((verifier) => verifier.verify(bundle, data));
 }
 exports.verify = verify;
-async function createVerifier(options) {
+async function createVerifier(
+/* istanbul ignore next */
+options = {}) {
     const trustedRoot = await tuf.getTrustedRoot({
         mirrorURL: options.tufMirrorURL,
         rootPath: options.tufRootPath,
@@ -103,44 +83,10 @@ async function createVerifier(options) {
     const verifier = new verify_1.Verifier(trustedRoot, options.keySelector);
     const verifyOpts = config.artifactVerificationOptions(options);
     return {
-        verify: (bundle) => {
-            const deserializedBundle = sigstore.bundleFromJSON(bundle);
-            return verifier.verify(deserializedBundle, verifyOpts);
+        verify: (bundle, payload) => {
+            const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle);
+            return verifier.verify(deserializedBundle, verifyOpts, payload);
         },
     };
 }
 exports.createVerifier = createVerifier;
-const tufUtils = {
-    client: (options = {}) => {
-        return tuf.initTUF({
-            mirrorURL: options.tufMirrorURL,
-            rootPath: options.tufRootPath,
-            cachePath: options.tufCachePath,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    },
-    /*
-     * @deprecated Use tufUtils.client instead.
-     */
-    getTarget: (path, options = {}) => {
-        return tuf
-            .initTUF({
-            mirrorURL: options.tufMirrorURL,
-            rootPath: options.tufRootPath,
-            cachePath: options.tufCachePath,
-            retry: options.retry,
-            timeout: options.timeout,
-        })
-            .then((t) => t.getTarget(path));
-    },
-};
-exports.tuf = tufUtils;
-var error_1 = require("./error");
-Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } });
-Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
-Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
-Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
-exports.utils = __importStar(require("./sigstore-utils"));
-exports.DEFAULT_FULCIO_URL = config.DEFAULT_FULCIO_URL;
-exports.DEFAULT_REKOR_URL = config.DEFAULT_REKOR_URL;
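
With bundle construction delegated to config.createBundleBuilder, the module's public surface is reduced to sign, attest, verify, and createVerifier. A usage sketch against the new signatures (OIDC/identity configuration elided; options shown empty):

    const sigstore = require('sigstore');

    async function main() {
      const artifact = Buffer.from('hello, world');

      // sign() now returns a serialized message-signature bundle.
      const bundle = await sigstore.sign(artifact);

      // verify() accepts (bundle), (bundle, payload), or (bundle, options);
      // the Buffer.isBuffer() check above disambiguates the overloads.
      await sigstore.verify(bundle, artifact);

      // Or amortize the TUF trusted-root fetch across many verifications:
      const verifier = await sigstore.createVerifier({});
      verifier.verify(bundle, artifact);
    }

    main();
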
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/format.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/format.d.ts
deleted file mode 100644
index 8a00f546b874fd..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/tlog/format.d.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-/// <reference types="node" />
-import { SignatureMaterial } from '../types/signature';
-import { Envelope } from '../types/sigstore';
-import type { ProposedDSSEEntry, ProposedHashedRekordEntry, ProposedIntotoEntry } from '../external/rekor';
-export declare function toProposedDSSEEntry(envelope: Envelope, signature: SignatureMaterial, apiVersion?: string): ProposedDSSEEntry;
-export declare function toProposedHashedRekordEntry(digest: Buffer, signature: SignatureMaterial): ProposedHashedRekordEntry;
-export declare function toProposedIntotoEntry(envelope: Envelope, signature: SignatureMaterial, apiVersion?: string): ProposedIntotoEntry;
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/index.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/index.d.ts
deleted file mode 100644
index 6bb7d42861dc2c..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/tlog/index.d.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-/// <reference types="node" />
-import { SignatureMaterial } from '../types/signature';
-import * as sigstore from '../types/sigstore';
-import type { Entry } from '../external/rekor';
-import type { FetchOptions } from '../types/fetch';
-interface CreateEntryOptions {
-    fetchOnConflict?: boolean;
-}
-export interface TLog {
-    createMessageSignatureEntry: (digest: Buffer, sigMaterial: SignatureMaterial) => Promise<Entry>;
-    createDSSEEntry: (envelope: sigstore.Envelope, sigMaterial: SignatureMaterial, options?: CreateEntryOptions) => Promise<Entry>;
-}
-export type TLogClientOptions = {
-    rekorBaseURL: string;
-} & FetchOptions;
-export declare class TLogClient implements TLog {
-    private rekor;
-    constructor(options: TLogClientOptions);
-    createMessageSignatureEntry(digest: Buffer, sigMaterial: SignatureMaterial, options?: CreateEntryOptions): Promise<Entry>;
-    createDSSEEntry(envelope: sigstore.Envelope, sigMaterial: SignatureMaterial, options?: CreateEntryOptions): Promise<Entry>;
-    private createEntry;
-}
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/body.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/verify/body.d.ts
deleted file mode 100644
index 17de4f5c9698ab..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/tlog/verify/body.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-import * as sigstore from '../../types/sigstore';
-export declare function verifyTLogBody(entry: sigstore.VerifiableTransparencyLogEntry, bundleContent: sigstore.Bundle['content']): boolean;
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/checkpoint.js b/deps/npm/node_modules/sigstore/dist/tlog/verify/checkpoint.js
new file mode 100644
index 00000000000000..f6f35a5cad64dd
--- /dev/null
+++ b/deps/npm/node_modules/sigstore/dist/tlog/verify/checkpoint.js
@@ -0,0 +1,148 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyCheckpoint = void 0;
+const error_1 = require("../../error");
+const util_1 = require("../../util");
+// Separator between the note and the signatures in a checkpoint
+const CHECKPOINT_SEPARATOR = '\n\n';
+// Checkpoint signatures are of the following form:
+// "–  \n"
+// where:
+// - the prefix is an emdash (U+2014).
+// -  gives a human-readable representation of the signing ID.
+// -  is the first 4 bytes of the SHA256 hash of the
+//   associated public key followed by the signature bytes.
+const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g;
+// Verifies the checkpoint value in the given tlog entry. There are two steps
+// to the verification:
+// 1. Verify that all signatures in the checkpoint can be verified against a
+//    trusted public key
+// 2. Verify that the root hash in the checkpoint matches the root hash in the
+//    inclusion proof
+// See: https://github.com/transparency-dev/formats/blob/main/log/README.md
+function verifyCheckpoint(entry, tlogs) {
+    // Filter tlog instances to just those which were valid at the time of the
+    // entry
+    const validTLogs = filterTLogInstances(tlogs, entry.integratedTime);
+    const inclusionProof = entry.inclusionProof;
+    const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope);
+    const checkpoint = LogCheckpoint.fromString(signedNote.note);
+    // Verify that the signatures in the checkpoint are all valid, also check
+    // that the root hash from the checkpoint matches the root hash in the
+    // inclusion proof
+    return (signedNote.verify(validTLogs) &&
+        util_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash));
+}
+exports.verifyCheckpoint = verifyCheckpoint;
+// SignedNote represents a signed note from a transparency log checkpoint. Consists
+// of a body (or note) and one or more signatures calculated over the body. See
+// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope
+class SignedNote {
+    constructor(note, signatures) {
+        this.note = note;
+        this.signatures = signatures;
+    }
+    // Deserialize a SignedNote from a string
+    static fromString(envelope) {
+        if (!envelope.includes(CHECKPOINT_SEPARATOR)) {
+            throw new error_1.VerificationError('malformed checkpoint: no separator');
+        }
+        // Split the note into the header and the data portions at the separator
+        const split = envelope.indexOf(CHECKPOINT_SEPARATOR);
+        const header = envelope.slice(0, split + 1);
+        const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length);
+        // Find all the signature lines in the data portion
+        const matches = data.matchAll(SIGNATURE_REGEX);
+        // Parse each of the matched signature lines into the name and signature.
+        // The first four bytes of the signature are the key hint (should match the
+        // first four bytes of the log ID), and the rest is the signature itself.
+        const signatures = Array.from(matches, (match) => {
+            const [, name, signature] = match;
+            const sigBytes = Buffer.from(signature, 'base64');
+            if (sigBytes.length < 5) {
+                throw new error_1.VerificationError('malformed checkpoint: invalid signature');
+            }
+            return {
+                name,
+                keyHint: sigBytes.subarray(0, 4),
+                signature: sigBytes.subarray(4),
+            };
+        });
+        if (signatures.length === 0) {
+            throw new error_1.VerificationError('malformed checkpoint: no signatures');
+        }
+        return new SignedNote(header, signatures);
+    }
+    // Verifies the signatures in the SignedNote. For each signature, the
+    // corresponding transparency log is looked up by the key hint and the
+    // signature is verified against the public key in the transparency log.
+    // Returns false if any of the signatures cannot be verified.
+    verify(tlogs) {
+        const data = Buffer.from(this.note, 'utf-8');
+        return this.signatures.every((signature) => {
+            // Find the transparency log instance with the matching key hint
+            const tlog = tlogs.find((tlog) => util_1.crypto.bufferEqual(tlog.logId.keyId.subarray(0, 4), signature.keyHint));
+            if (!tlog) {
+                return false;
+            }
+            const publicKey = util_1.crypto.createPublicKey(tlog.publicKey.rawBytes);
+            return util_1.crypto.verifyBlob(data, publicKey, signature.signature);
+        });
+    }
+}
+// LogCheckpoint represents a transparency log checkpoint. Consists of the
+// following:
+//  - origin: the name of the transparency log
+//  - logSize: the size of the log at the time of the checkpoint
+//  - logHash: the root hash of the log at the time of the checkpoint
+//  - rest: the rest of the checkpoint body, which is a list of log entries
+// See:
+// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body
+class LogCheckpoint {
+    constructor(origin, logSize, logHash, rest) {
+        this.origin = origin;
+        this.logSize = logSize;
+        this.logHash = logHash;
+        this.rest = rest;
+    }
+    static fromString(note) {
+        const lines = note.trim().split('\n');
+        if (lines.length < 4) {
+            throw new error_1.VerificationError('malformed checkpoint: too few lines in header');
+        }
+        const origin = lines[0];
+        const logSize = BigInt(lines[1]);
+        const rootHash = Buffer.from(lines[2], 'base64');
+        const rest = lines.slice(3);
+        return new LogCheckpoint(origin, logSize, rootHash, rest);
+    }
+}
+// Filter the list of tlog instances to only those which have usable public
+// keys and were valid at the given time.
+function filterTLogInstances(tlogInstances, integratedTime) {
+    const targetDate = new Date(Number(integratedTime) * 1000);
+    return tlogInstances.filter((tlog) => {
+        // Must have a log ID
+        if (!tlog.logId) {
+            return false;
+        }
+        // If the tlog doesn't have a public key, we can't use it
+        const publicKey = tlog.publicKey;
+        if (publicKey === undefined) {
+            return false;
+        }
+        // If the tlog doesn't have a rawBytes field, we can't use it
+        if (publicKey.rawBytes === undefined) {
+            return false;
+        }
+        // If the tlog doesn't have a validFor field, we don't need to check it
+        const validFor = publicKey.validFor;
+        if (validFor === undefined) {
+            return true;
+        }
+        // Check that the integrated time is within the validFor range
+        return (validFor.start !== undefined &&
+            validFor.start <= targetDate &&
+            (validFor.end === undefined || targetDate <= validFor.end));
+    });
+}
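
The envelope layout parsed above is compact enough to show inline: a note body, a blank-line separator, then one signature line per signer, each an em dash (U+2014) followed by a name and a base64 blob whose first four bytes are the key hint. A sketch of the same split, with fabricated sample values:

    const envelope =
      'rekor.sigstore.dev - 2605736670972794746\n' +       // origin
      '21428036\n' +                                       // log size
      'rxnoKyFZlJ7z5PMNSSrCBFhgk7UvvRBFJCcUKhNgCBY=\n' +   // root hash (base64)
      '\n' +                                               // CHECKPOINT_SEPARATOR
      '\u2014 rekor.sigstore.dev wNI9ajBEAiBmZzdQpGJh\n';  // signature line

    const split = envelope.indexOf('\n\n');
    const note = envelope.slice(0, split + 1);  // signed portion (keeps '\n')
    const sigs = envelope.slice(split + 2);     // signature lines

    const [origin, logSize, rootHash] = note.trim().split('\n');
    console.log(origin, BigInt(logSize), Buffer.from(rootHash, 'base64'));
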
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts
deleted file mode 100644
index 4f96f820731f03..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-import * as sigstore from '../../types/sigstore';
-export declare function verifyTLogEntries(bundle: sigstore.Bundle, trustedRoot: sigstore.TrustedRoot, options: sigstore.ArtifactVerificationOptions_TlogOptions): void;
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/index.js b/deps/npm/node_modules/sigstore/dist/tlog/verify/index.js
index cbb93133c2685f..9224feffde00b0 100644
--- a/deps/npm/node_modules/sigstore/dist/tlog/verify/index.js
+++ b/deps/npm/node_modules/sigstore/dist/tlog/verify/index.js
@@ -1,27 +1,4 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.verifyTLogEntries = void 0;
 /*
@@ -39,31 +16,51 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
+const bundle_1 = require("@sigstore/bundle");
 const error_1 = require("../../error");
-const sigstore = __importStar(require("../../types/sigstore"));
 const cert_1 = require("../../x509/cert");
 const body_1 = require("./body");
+const checkpoint_1 = require("./checkpoint");
+const merkle_1 = require("./merkle");
 const set_1 = require("./set");
 // Verifies that the number of tlog entries that pass offline verification
 // is greater than or equal to the threshold specified in the options.
 function verifyTLogEntries(bundle, trustedRoot, options) {
+    if (bundle.mediaType === bundle_1.BUNDLE_V01_MEDIA_TYPE) {
+        (0, bundle_1.assertBundleV01)(bundle);
+        verifyTLogEntriesForBundleV01(bundle, trustedRoot, options);
+    }
+    else {
+        (0, bundle_1.assertBundleLatest)(bundle);
+        verifyTLogEntriesForBundleLatest(bundle, trustedRoot, options);
+    }
+}
+exports.verifyTLogEntries = verifyTLogEntries;
+function verifyTLogEntriesForBundleV01(bundle, trustedRoot, options) {
     if (options.performOnlineVerification) {
         throw new error_1.VerificationError('Online verification not implemented');
     }
     // Extract the signing cert, if available
     const signingCert = signingCertificate(bundle);
     // Iterate over the tlog entries and verify each one
-    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryOffline(entry, bundle.content, trustedRoot.tlogs, signingCert));
+    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryWithInclusionPromise(entry, bundle.content, trustedRoot.tlogs, signingCert));
     if (verifiedEntries.length < options.threshold) {
         throw new error_1.VerificationError('tlog verification failed');
     }
 }
-exports.verifyTLogEntries = verifyTLogEntries;
-function verifyTLogEntryOffline(entry, bundleContent, tlogs, signingCert) {
-    // Check that the TLog entry has the fields necessary for verification
-    if (!sigstore.isVerifiableTransparencyLogEntry(entry)) {
-        return false;
+function verifyTLogEntriesForBundleLatest(bundle, trustedRoot, options) {
+    if (options.performOnlineVerification) {
+        throw new error_1.VerificationError('Online verification not implemented');
+    }
+    // Extract the signing cert, if available
+    const signingCert = signingCertificate(bundle);
+    // Iterate over the tlog entries and verify each one
+    const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryWithInclusionProof(entry, bundle.content, trustedRoot.tlogs, signingCert));
+    if (verifiedEntries.length < options.threshold) {
+        throw new error_1.VerificationError('tlog verification failed');
     }
+}
+function verifyTLogEntryWithInclusionPromise(entry, bundleContent, tlogs, signingCert) {
    // If there is a signing certificate available, check that the tlog integrated
     // time is within the certificate's validity period; otherwise, skip this
     // check.
@@ -74,8 +71,20 @@ function verifyTLogEntryOffline(entry, bundleContent, tlogs, signingCert) {
         (0, set_1.verifyTLogSET)(entry, tlogs) &&
         verifyTLogIntegrationTime());
 }
+function verifyTLogEntryWithInclusionProof(entry, bundleContent, tlogs, signingCert) {
+    // If there is a signing certificate available, check that the tlog integrated
+    // time is within the certificate's validity period; otherwise, skip this
+    // check.
+    const verifyTLogIntegrationTime = signingCert
+        ? () => signingCert.validForDate(new Date(Number(entry.integratedTime) * 1000))
+        : () => true;
+    return ((0, body_1.verifyTLogBody)(entry, bundleContent) &&
+        (0, merkle_1.verifyMerkleInclusion)(entry) &&
+        (0, checkpoint_1.verifyCheckpoint)(entry, tlogs) &&
+        verifyTLogIntegrationTime());
+}
 function signingCertificate(bundle) {
-    if (!sigstore.isBundleWithCertificateChain(bundle)) {
+    if (!(0, bundle_1.isBundleWithCertificateChain)(bundle)) {
         return undefined;
     }
     const signingCert = bundle.verificationMaterial.content.x509CertificateChain.certificates[0];
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts
deleted file mode 100644
index a2c47626d01f84..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-import * as sigstore from '../../types/sigstore';
-export declare function verifyMerkleInclusion(entry: sigstore.TransparencyLogEntry): boolean;
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.js b/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.js
index 90609cb73576fe..0f246af4a28a3b 100644
--- a/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.js
+++ b/deps/npm/node_modules/sigstore/dist/tlog/verify/merkle.js
@@ -25,9 +25,6 @@ const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]);
 const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]);
 function verifyMerkleInclusion(entry) {
     const inclusionProof = entry.inclusionProof;
-    if (!inclusionProof) {
-        throw new error_1.VerificationError('tlog entry has no inclusion proof');
-    }
     const logIndex = BigInt(inclusionProof.logIndex);
     const treeSize = BigInt(inclusionProof.treeSize);
     if (logIndex < 0n || logIndex >= treeSize) {
@@ -76,13 +73,20 @@ function chainBorderRight(seed, hashes) {
     return hashes.reduce((acc, h) => hashChildren(h, acc), seed);
 }
 function innerProofSize(index, size) {
-    return (index ^ (size - BigInt(1))).toString(2).length;
+    return bitLength(index ^ (size - BigInt(1)));
 }
 // Counts the number of ones in the binary representation of the given number.
 // https://en.wikipedia.org/wiki/Hamming_weight
 function onesCount(x) {
     return x.toString(2).split('1').length - 1;
 }
+// Returns the number of bits necessary to represent an integer in binary.
+function bitLength(n) {
+    if (n === 0n) {
+        return 0;
+    }
+    return n.toString(2).length;
+}
 // Hashing logic according to RFC6962.
 // https://datatracker.ietf.org/doc/html/rfc6962#section-2
 function hashChildren(left, right) {
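
For reference, the hashing that verifyMerkleInclusion builds on is RFC 6962 section 2.1: leaf inputs are hashed with a 0x00 prefix and interior nodes with 0x01 (the RFC6962_LEAF_HASH_PREFIX / RFC6962_NODE_HASH_PREFIX constants above), so a leaf's hash can never collide with a node's. A standalone sketch:

    const crypto = require('crypto');

    const sha256 = (...chunks) =>
      crypto.createHash('sha256').update(Buffer.concat(chunks)).digest();

    // RFC 6962: leaf hash = SHA-256(0x00 || leaf)
    const hashLeaf = (leaf) => sha256(Buffer.from([0x00]), leaf);

    // RFC 6962: node hash = SHA-256(0x01 || left || right)
    const hashChildren = (left, right) =>
      sha256(Buffer.from([0x01]), left, right);

    const root = hashChildren(hashLeaf(Buffer.from('a')),
                              hashLeaf(Buffer.from('b')));
    console.log(root.toString('hex'));
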
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/set.d.ts b/deps/npm/node_modules/sigstore/dist/tlog/verify/set.d.ts
deleted file mode 100644
index 278317489a7e49..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/tlog/verify/set.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-import * as sigstore from '../../types/sigstore';
-export declare function verifyTLogSET(entry: sigstore.VerifiableTransparencyLogEntry, tlogs: sigstore.TransparencyLogInstance[]): boolean;
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/verify/set.js b/deps/npm/node_modules/sigstore/dist/tlog/verify/set.js
index 89a544283d73d9..959cd5883f1cad 100644
--- a/deps/npm/node_modules/sigstore/dist/tlog/verify/set.js
+++ b/deps/npm/node_modules/sigstore/dist/tlog/verify/set.js
@@ -11,9 +11,6 @@ function verifyTLogSET(entry, tlogs) {
     const validTLogs = filterTLogInstances(tlogs, entry.logId.keyId, entry.integratedTime);
     // Check to see if we can verify the SET against any of the valid tlogs
     return validTLogs.some((tlog) => {
-        if (!tlog.publicKey?.rawBytes) {
-            return false;
-        }
         const publicKey = util_1.crypto.createPublicKey(tlog.publicKey.rawBytes);
         // Re-create the original Rekor verification payload
         const payload = toVerificationPayload(entry);
@@ -60,7 +57,7 @@ function filterTLogInstances(tlogInstances, logID, integratedTime) {
             return true;
         }
         // Check that the integrated time is within the validFor range
-        return (publicKey.validFor.start &&
+        return (publicKey.validFor.start !== undefined &&
             publicKey.validFor.start <= targetDate &&
             (!publicKey.validFor.end || targetDate <= publicKey.validFor.end));
     });
diff --git a/deps/npm/node_modules/sigstore/dist/tsa/index.d.ts b/deps/npm/node_modules/sigstore/dist/tsa/index.d.ts
deleted file mode 100644
index e94b20c075e557..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/tsa/index.d.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-/// <reference types="node" />
-import type { FetchOptions } from '../types/fetch';
-export interface TSA {
-    createTimestamp: (signature: Buffer) => Promise<Buffer>;
-}
-export type TSAClientOptions = {
-    tsaBaseURL: string;
-} & FetchOptions;
-export declare class TSAClient implements TSA {
-    private tsa;
-    constructor(options: TSAClientOptions);
-    createTimestamp(signature: Buffer): Promise<Buffer>;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/types/fetch.d.ts b/deps/npm/node_modules/sigstore/dist/types/fetch.d.ts
deleted file mode 100644
index 510aeee6a37d72..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/types/fetch.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-import type { MakeFetchHappenOptions } from 'make-fetch-happen';
-export type Retry = MakeFetchHappenOptions['retry'];
-export type FetchOptions = {
-    retry?: Retry;
-    timeout?: number | undefined;
-};
diff --git a/deps/npm/node_modules/sigstore/dist/types/signature.d.ts b/deps/npm/node_modules/sigstore/dist/types/signature.d.ts
deleted file mode 100644
index 40b4fbe6339ca6..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/types/signature.d.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-/// <reference types="node" />
-import { Envelope } from './sigstore';
-import { OneOf } from './utility';
-interface VerificationMaterial {
-    certificates: string[];
-    key: {
-        id?: string;
-        value: string;
-    };
-}
-export type SignatureMaterial = {
-    signature: Buffer;
-} & OneOf<VerificationMaterial>;
-export type SignerFunc = (payload: Buffer) => Promise<SignatureMaterial>;
-export declare function extractSignatureMaterial(dsseEnvelope: Envelope, publicKey: string): SignatureMaterial;
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/types/signature.js b/deps/npm/node_modules/sigstore/dist/types/signature.js
deleted file mode 100644
index 339e2a2731b413..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/types/signature.js
+++ /dev/null
@@ -1,15 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.extractSignatureMaterial = void 0;
-function extractSignatureMaterial(dsseEnvelope, publicKey) {
-    const signature = dsseEnvelope.signatures[0];
-    return {
-        signature: signature.sig,
-        key: {
-            id: signature.keyid,
-            value: publicKey,
-        },
-        certificates: undefined,
-    };
-}
-exports.extractSignatureMaterial = extractSignatureMaterial;
diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore.js b/deps/npm/node_modules/sigstore/dist/types/sigstore.js
new file mode 100644
index 00000000000000..36efb67e38a5eb
--- /dev/null
+++ b/deps/npm/node_modules/sigstore/dist/types/sigstore.js
@@ -0,0 +1,27 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isCAVerificationOptions = exports.SubjectAlternativeNameType = void 0;
+// Enums from protobuf-specs
+var protobuf_specs_1 = require("@sigstore/protobuf-specs");
+Object.defineProperty(exports, "SubjectAlternativeNameType", { enumerable: true, get: function () { return protobuf_specs_1.SubjectAlternativeNameType; } });
+function isCAVerificationOptions(options) {
+    return (options.ctlogOptions !== undefined &&
+        (options.signers === undefined ||
+            options.signers.$case === 'certificateIdentities'));
+}
+exports.isCAVerificationOptions = isCAVerificationOptions;
diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts b/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts
deleted file mode 100644
index 2be598d923048f..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-/// <reference types="node" />
-import { SignatureMaterial } from '../signature';
-import { ValidBundle } from './validate';
-import type { ArtifactVerificationOptions, Envelope, TransparencyLogEntry, VerificationMaterial } from '@sigstore/protobuf-specs';
-import type { Entry } from '../../external/rekor';
-import type { WithRequired } from '../utility';
-import type { SerializedBundle } from './serialized';
-export { Envelope, HashAlgorithm, PublicKeyDetails, SubjectAlternativeNameType, } from '@sigstore/protobuf-specs';
-export type { ArtifactVerificationOptions, ArtifactVerificationOptions_CtlogOptions, ArtifactVerificationOptions_TlogOptions, CertificateAuthority, CertificateIdentities, CertificateIdentity, MessageSignature, ObjectIdentifierValuePair, PublicKey, PublicKeyIdentifier, RFC3161SignedTimestamp, Signature, SubjectAlternativeName, TimestampVerificationData, TransparencyLogEntry, TransparencyLogInstance, TrustedRoot, X509Certificate, X509CertificateChain, } from '@sigstore/protobuf-specs';
-export type { SerializedBundle, SerializedEnvelope } from './serialized';
-export type { ValidBundle as Bundle };
-export declare const bundleFromJSON: (obj: any) => ValidBundle;
-export declare const bundleToJSON: (bundle: ValidBundle) => SerializedBundle;
-export type BundleWithCertificateChain = ValidBundle & {
-    verificationMaterial: VerificationMaterial & {
-        content: Extract<VerificationMaterial['content'], { $case: 'x509CertificateChain' }>;
-    };
-};
-export declare function isBundleWithCertificateChain(bundle: ValidBundle): bundle is BundleWithCertificateChain;
-export type RequiredArtifactVerificationOptions = WithRequired<ArtifactVerificationOptions, 'ctlogOptions' | 'tlogOptions'>;
-export type CAArtifactVerificationOptions = WithRequired<ArtifactVerificationOptions, 'ctlogOptions'> & {
-    signers?: Extract<ArtifactVerificationOptions['signers'], { $case: 'certificateIdentities' }>;
-};
-export declare function isCAVerificationOptions(options: ArtifactVerificationOptions): options is CAArtifactVerificationOptions;
-export type VerifiableTransparencyLogEntry = WithRequired<TransparencyLogEntry, 'logId' | 'inclusionPromise' | 'kindVersion'>;
-export declare function isVerifiableTransparencyLogEntry(entry: TransparencyLogEntry): entry is VerifiableTransparencyLogEntry;
-export declare function toDSSEBundle({ envelope, signature, tlogEntry, timestamp, }: {
-    envelope: Envelope;
-    signature: SignatureMaterial;
-    tlogEntry?: Entry;
-    timestamp?: Buffer;
-}): ValidBundle;
-export declare function toMessageSignatureBundle({ digest, signature, tlogEntry, timestamp, }: {
-    digest: Buffer;
-    signature: SignatureMaterial;
-    tlogEntry?: Entry;
-    timestamp?: Buffer;
-}): ValidBundle;
diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js b/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js
deleted file mode 100644
index 2c240c865cf37a..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/types/sigstore/index.js
+++ /dev/null
@@ -1,162 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toMessageSignatureBundle = exports.toDSSEBundle = exports.isVerifiableTransparencyLogEntry = exports.isCAVerificationOptions = exports.isBundleWithCertificateChain = exports.bundleToJSON = exports.bundleFromJSON = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = exports.Envelope = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const protobuf_specs_1 = require("@sigstore/protobuf-specs");
-const util_1 = require("../../util");
-const validate_1 = require("./validate");
-// Enums from protobuf-specs
-// TODO: Move Envelope to "type" export once @sigstore/sign is a thing
-var protobuf_specs_2 = require("@sigstore/protobuf-specs");
-Object.defineProperty(exports, "Envelope", { enumerable: true, get: function () { return protobuf_specs_2.Envelope; } });
-Object.defineProperty(exports, "HashAlgorithm", { enumerable: true, get: function () { return protobuf_specs_2.HashAlgorithm; } });
-Object.defineProperty(exports, "PublicKeyDetails", { enumerable: true, get: function () { return protobuf_specs_2.PublicKeyDetails; } });
-Object.defineProperty(exports, "SubjectAlternativeNameType", { enumerable: true, get: function () { return protobuf_specs_2.SubjectAlternativeNameType; } });
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const bundleFromJSON = (obj) => {
-    const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
-    (0, validate_1.assertValidBundle)(bundle);
-    return bundle;
-};
-exports.bundleFromJSON = bundleFromJSON;
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const bundleToJSON = (bundle) => {
-    return protobuf_specs_1.Bundle.toJSON(bundle);
-};
-exports.bundleToJSON = bundleToJSON;
-const BUNDLE_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
-// Type guard for narrowing a Bundle to a BundleWithCertificateChain
-function isBundleWithCertificateChain(bundle) {
-    return (bundle.verificationMaterial.content !== undefined &&
-        bundle.verificationMaterial.content.$case === 'x509CertificateChain');
-}
-exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
-function isCAVerificationOptions(options) {
-    return (options.ctlogOptions !== undefined &&
-        (options.signers === undefined ||
-            options.signers.$case === 'certificateIdentities'));
-}
-exports.isCAVerificationOptions = isCAVerificationOptions;
-function isVerifiableTransparencyLogEntry(entry) {
-    return (entry.logId !== undefined &&
-        entry.inclusionPromise !== undefined &&
-        entry.kindVersion !== undefined);
-}
-exports.isVerifiableTransparencyLogEntry = isVerifiableTransparencyLogEntry;
-// All of the following functions are used to construct a ValidBundle
-// from various types of input. When this code moves into the
-// @sigstore/sign package, these functions will be exported from there.
-function toDSSEBundle({ envelope, signature, tlogEntry, timestamp, }) {
-    return {
-        mediaType: BUNDLE_MEDIA_TYPE,
-        content: { $case: 'dsseEnvelope', dsseEnvelope: envelope },
-        verificationMaterial: toVerificationMaterial({
-            signature,
-            tlogEntry,
-            timestamp,
-        }),
-    };
-}
-exports.toDSSEBundle = toDSSEBundle;
-function toMessageSignatureBundle({ digest, signature, tlogEntry, timestamp, }) {
-    return {
-        mediaType: BUNDLE_MEDIA_TYPE,
-        content: {
-            $case: 'messageSignature',
-            messageSignature: {
-                messageDigest: {
-                    algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
-                    digest: digest,
-                },
-                signature: signature.signature,
-            },
-        },
-        verificationMaterial: toVerificationMaterial({
-            signature,
-            tlogEntry,
-            timestamp,
-        }),
-    };
-}
-exports.toMessageSignatureBundle = toMessageSignatureBundle;
-function toTransparencyLogEntry(entry) {
-    const b64SET = entry.verification?.signedEntryTimestamp || '';
-    const set = Buffer.from(b64SET, 'base64');
-    const logID = Buffer.from(entry.logID, 'hex');
-    const proof = entry.verification?.inclusionProof
-        ? toInclusionProof(entry.verification.inclusionProof)
-        : undefined;
-    // Parse entry body so we can extract the kind and version.
-    const bodyJSON = util_1.encoding.base64Decode(entry.body);
-    const entryBody = JSON.parse(bodyJSON);
-    return {
-        inclusionPromise: {
-            signedEntryTimestamp: set,
-        },
-        logIndex: entry.logIndex.toString(),
-        logId: {
-            keyId: logID,
-        },
-        integratedTime: entry.integratedTime.toString(),
-        kindVersion: {
-            kind: entryBody.kind,
-            version: entryBody.apiVersion,
-        },
-        inclusionProof: proof,
-        canonicalizedBody: Buffer.from(entry.body, 'base64'),
-    };
-}
-function toInclusionProof(proof) {
-    return {
-        logIndex: proof.logIndex.toString(),
-        rootHash: Buffer.from(proof.rootHash, 'hex'),
-        treeSize: proof.treeSize.toString(),
-        checkpoint: {
-            envelope: proof.checkpoint,
-        },
-        hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')),
-    };
-}
-function toVerificationMaterial({ signature, tlogEntry, timestamp, }) {
-    return {
-        content: signature.certificates
-            ? toVerificationMaterialx509CertificateChain(signature.certificates)
-            : toVerificationMaterialPublicKey(signature.key.id || ''),
-        tlogEntries: tlogEntry ? [toTransparencyLogEntry(tlogEntry)] : [],
-        timestampVerificationData: timestamp
-            ? toTimestampVerificationData(timestamp)
-            : undefined,
-    };
-}
-function toVerificationMaterialx509CertificateChain(certificates) {
-    return {
-        $case: 'x509CertificateChain',
-        x509CertificateChain: {
-            certificates: certificates.map((c) => ({
-                rawBytes: util_1.pem.toDER(c),
-            })),
-        },
-    };
-}
-function toVerificationMaterialPublicKey(hint) {
-    return { $case: 'publicKey', publicKey: { hint } };
-}
-function toTimestampVerificationData(timestamp) {
-    return {
-        rfc3161Timestamps: [{ signedTimestamp: timestamp }],
-    };
-}
diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts b/deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts
deleted file mode 100644
index 8ea3b5cff35ee9..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-import { OneOf } from '../utility';
-type SerializedTLogEntry = {
-    logIndex: string;
-    logId: {
-        keyId: string;
-    };
-    kindVersion: {
-        kind: string;
-        version: string;
-    } | undefined;
-    integratedTime: string;
-    inclusionPromise: {
-        signedEntryTimestamp: string;
-    };
-    inclusionProof: {
-        logIndex: string;
-        rootHash: string;
-        treeSize: string;
-        hashes: string[];
-        checkpoint: {
-            envelope: string;
-        };
-    } | undefined;
-    canonicalizedBody: string;
-};
-type SerializedTimestampVerificationData = {
-    rfc3161Timestamps: {
-        signedTimestamp: string;
-    }[];
-};
-type SerializedMessageSignature = {
-    messageDigest: {
-        algorithm: string;
-        digest: string;
-    } | undefined;
-    signature: string;
-};
-type SerializedDSSEEnvelope = {
-    payload: string;
-    payloadType: string;
-    signatures: {
-        sig: string;
-        keyid: string;
-    }[];
-};
-export type { SerializedDSSEEnvelope as SerializedEnvelope };
-export type SerializedBundle = {
-    mediaType: string;
-    verificationMaterial: (OneOf<{
-        x509CertificateChain: {
-            certificates: {
-                rawBytes: string;
-            }[];
-        };
-        publicKey: {
-            hint: string;
-        };
-    }> | undefined) & {
-        tlogEntries: SerializedTLogEntry[];
-        timestampVerificationData: SerializedTimestampVerificationData | undefined;
-    };
-} & OneOf<{
-    dsseEnvelope: SerializedDSSEEnvelope;
-    messageSignature: SerializedMessageSignature;
-}>;
diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts b/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts
deleted file mode 100644
index a6c33b3c7c0f28..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-import { WithRequired } from '../utility';
-import type { Bundle, MessageSignature, VerificationMaterial } from '@sigstore/protobuf-specs';
-export type ValidBundle = Bundle & {
-    verificationMaterial: VerificationMaterial & {
-        content: NonNullable<VerificationMaterial['content']>;
-    };
-    content: (Extract<Bundle['content'], { $case: 'messageSignature' }> & {
-        messageSignature: WithRequired<MessageSignature, 'messageDigest'>;
-    }) | Extract<Bundle['content'], { $case: 'dsseEnvelope' }>;
-};
-export declare function assertValidBundle(b: Bundle): asserts b is ValidBundle;
diff --git a/deps/npm/node_modules/sigstore/dist/types/utility.d.ts b/deps/npm/node_modules/sigstore/dist/types/utility.d.ts
deleted file mode 100644
index df993d503f8ea1..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/types/utility.d.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-type ValueOf<Obj> = Obj[keyof Obj];
-type OneOnly<Obj, K extends keyof Obj> = {
-    [key in Exclude<keyof Obj, K>]: undefined;
-} & {
-    [key in K]: Obj[K];
-};
-type OneOfByKey<Obj> = {
-    [key in keyof Obj]: OneOnly<Obj, key>;
-};
-export type OneOf<T> = ValueOf<OneOfByKey<T>>;
-export type WithRequired<T, K extends keyof T> = T & {
-    [P in K]-?: NonNullable<T[P]>;
-};
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/types/utility.js b/deps/npm/node_modules/sigstore/dist/types/utility.js
index 132848cd7587e7..77c91b1923ca08 100644
--- a/deps/npm/node_modules/sigstore/dist/types/utility.js
+++ b/deps/npm/node_modules/sigstore/dist/types/utility.js
@@ -14,5 +14,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
-// https://dev.to/maxime1992/implement-a-generic-oneof-type-with-typescript-22em
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts
deleted file mode 100644
index 3f192dea45445c..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-import { ASN1Obj } from './obj';
-export declare function dump(obj: ASN1Obj, indent?: number): void;
diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/error.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/error.d.ts
deleted file mode 100644
index fcd908f47036ac..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/asn1/error.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-export declare class ASN1ParseError extends Error {
-}
-export declare class ASN1TypeError extends Error {
-}
diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/index.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/index.d.ts
deleted file mode 100644
index da45453d4eab7b..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/asn1/index.d.ts
+++ /dev/null
@@ -1 +0,0 @@
-export { ASN1Obj } from './obj';
diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/length.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/length.d.ts
deleted file mode 100644
index 97c7114af29091..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/asn1/length.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-/// <reference types="node" />
-import { ByteStream } from '../stream';
-export declare function decodeLength(stream: ByteStream): number;
-export declare function encodeLength(len: number): Buffer;
diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts
deleted file mode 100644
index de54996c87faac..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-/// <reference types="node" />
-import { ASN1Tag } from './tag';
-export declare class ASN1Obj {
-    readonly tag: ASN1Tag;
-    readonly subs: ASN1Obj[];
-    readonly value: Buffer;
-    constructor(tag: ASN1Tag, value: Buffer, subs: ASN1Obj[]);
-    static parseBuffer(buf: Buffer): ASN1Obj;
-    toDER(): Buffer;
-    toBoolean(): boolean;
-    toInteger(): bigint;
-    toOID(): string;
-    toDate(): Date;
-    toBitString(): number[];
-}
diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts
deleted file mode 100644
index 35989d5510e26b..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-/// <reference types="node" />
-export declare function parseInteger(buf: Buffer): bigint;
-export declare function parseStringASCII(buf: Buffer): string;
-export declare function parseTime(buf: Buffer, shortYear: boolean): Date;
-export declare function parseOID(buf: Buffer): string;
-export declare function parseBoolean(buf: Buffer): boolean;
-export declare function parseBitString(buf: Buffer): number[];
diff --git a/deps/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts b/deps/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts
deleted file mode 100644
index cdc9a69097b380..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-export declare const UNIVERSAL_TAG: {
-    BOOLEAN: number;
-    INTEGER: number;
-    BIT_STRING: number;
-    OCTET_STRING: number;
-    OBJECT_IDENTIFIER: number;
-    SEQUENCE: number;
-    SET: number;
-    PRINTABLE_STRING: number;
-    UTC_TIME: number;
-    GENERALIZED_TIME: number;
-};
-export declare class ASN1Tag {
-    readonly number: number;
-    readonly constructed: boolean;
-    readonly class: number;
-    constructor(enc: number);
-    isUniversal(): boolean;
-    isContextSpecific(num?: number): boolean;
-    isBoolean(): boolean;
-    isInteger(): boolean;
-    isBitString(): boolean;
-    isOctetString(): boolean;
-    isOID(): boolean;
-    isUTCTime(): boolean;
-    isGeneralizedTime(): boolean;
-    toDER(): number;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/util/crypto.d.ts b/deps/npm/node_modules/sigstore/dist/util/crypto.d.ts
deleted file mode 100644
index a726dd260750c3..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/crypto.d.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/// <reference types="node" />
-/// <reference types="node" />
-/// <reference types="node" />
-import { BinaryLike, KeyLike, KeyPairKeyObjectResult } from 'crypto';
-export declare function generateKeyPair(): KeyPairKeyObjectResult;
-export declare function createPublicKey(key: string | Buffer): KeyLike;
-export declare function signBlob(data: NodeJS.ArrayBufferView, privateKey: KeyLike): Buffer;
-export declare function verifyBlob(data: Buffer, key: KeyLike, signature: Buffer, algorithm?: string): boolean;
-export declare function hash(data: BinaryLike): Buffer;
-export declare function randomBytes(count: number): Buffer;
diff --git a/deps/npm/node_modules/sigstore/dist/util/crypto.js b/deps/npm/node_modules/sigstore/dist/util/crypto.js
index 0b1e0bc62d8abb..c26de091ecdb62 100644
--- a/deps/npm/node_modules/sigstore/dist/util/crypto.js
+++ b/deps/npm/node_modules/sigstore/dist/util/crypto.js
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.randomBytes = exports.hash = exports.verifyBlob = exports.signBlob = exports.createPublicKey = exports.generateKeyPair = void 0;
+exports.bufferEqual = exports.randomBytes = exports.hash = exports.verifyBlob = exports.createPublicKey = void 0;
 /*
 Copyright 2022 The Sigstore Authors.
 
@@ -20,15 +20,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 const crypto_1 = __importDefault(require("crypto"));
-const EC_KEYPAIR_TYPE = 'ec';
-const P256_CURVE = 'P-256';
 const SHA256_ALGORITHM = 'sha256';
-function generateKeyPair() {
-    return crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
-        namedCurve: P256_CURVE,
-    });
-}
-exports.generateKeyPair = generateKeyPair;
 function createPublicKey(key) {
     if (typeof key === 'string') {
         return crypto_1.default.createPublicKey(key);
@@ -38,10 +30,6 @@ function createPublicKey(key) {
     }
 }
 exports.createPublicKey = createPublicKey;
-function signBlob(data, privateKey) {
-    return crypto_1.default.sign(null, data, privateKey);
-}
-exports.signBlob = signBlob;
 function verifyBlob(data, key, signature, algorithm) {
     // The try/catch is to work around an issue in Node 14.x where verify throws
     // an error in some scenarios if the signature is invalid.
@@ -49,6 +37,7 @@ function verifyBlob(data, key, signature, algorithm) {
         return crypto_1.default.verify(algorithm, data, key, signature);
     }
     catch (e) {
+        /* istanbul ignore next */
         return false;
     }
 }
@@ -62,3 +51,13 @@ function randomBytes(count) {
     return crypto_1.default.randomBytes(count);
 }
 exports.randomBytes = randomBytes;
+function bufferEqual(a, b) {
+    try {
+        return crypto_1.default.timingSafeEqual(a, b);
+    }
+    catch {
+        /* istanbul ignore next */
+        return false;
+    }
+}
+exports.bufferEqual = bufferEqual;
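crypto.timingSafeEqual compares two buffers in constant time but throws when their lengths differ, so the new bufferEqual wraps it in try/catch to report a length mismatch as an ordinary false. The same pattern in isolation:

  const crypto = require('crypto');

  // Constant-time equality that treats a length mismatch as "not equal"
  // rather than letting timingSafeEqual throw a RangeError.
  function bufferEqual(a, b) {
    try {
      return crypto.timingSafeEqual(a, b);
    } catch {
      return false;
    }
  }

  bufferEqual(Buffer.from('abc'), Buffer.from('abc'));  // true
  bufferEqual(Buffer.from('abc'), Buffer.from('abcd')); // false
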
diff --git a/deps/npm/node_modules/sigstore/dist/util/dsse.d.ts b/deps/npm/node_modules/sigstore/dist/util/dsse.d.ts
deleted file mode 100644
index 839b9c03ce38c7..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/dsse.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-/// <reference types="node" />
-export declare function preAuthEncoding(payloadType: string, payload: Buffer): Buffer;
diff --git a/deps/npm/node_modules/sigstore/dist/util/encoding.d.ts b/deps/npm/node_modules/sigstore/dist/util/encoding.d.ts
deleted file mode 100644
index f1347c241ed0c4..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/encoding.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-export declare function base64Encode(str: string): string;
-export declare function base64Decode(str: string): string;
-export declare function base64URLEncode(str: string): string;
-export declare function base64URLDecode(str: string): string;
-export declare function base64URLEscape(str: string): string;
-export declare function base64URLUnescape(str: string): string;
diff --git a/deps/npm/node_modules/sigstore/dist/util/index.d.ts b/deps/npm/node_modules/sigstore/dist/util/index.d.ts
deleted file mode 100644
index f062a1c9d3c57d..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/index.d.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-export * as asn1 from './asn1';
-export * as crypto from './crypto';
-export * as dsse from './dsse';
-export * as encoding from './encoding';
-export * as json from './json';
-export * as oidc from './oidc';
-export * as pem from './pem';
-export * as promise from './promise';
-export * as ua from './ua';
diff --git a/deps/npm/node_modules/sigstore/dist/util/index.js b/deps/npm/node_modules/sigstore/dist/util/index.js
index b7d6ce21aafd3b..ff4cec375af8f8 100644
--- a/deps/npm/node_modules/sigstore/dist/util/index.js
+++ b/deps/npm/node_modules/sigstore/dist/util/index.js
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.asn1 = void 0;
+exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.asn1 = void 0;
 /*
 Copyright 2022 The Sigstore Authors.
 
@@ -44,7 +44,4 @@ exports.crypto = __importStar(require("./crypto"));
 exports.dsse = __importStar(require("./dsse"));
 exports.encoding = __importStar(require("./encoding"));
 exports.json = __importStar(require("./json"));
-exports.oidc = __importStar(require("./oidc"));
 exports.pem = __importStar(require("./pem"));
-exports.promise = __importStar(require("./promise"));
-exports.ua = __importStar(require("./ua"));
diff --git a/deps/npm/node_modules/sigstore/dist/util/json.d.ts b/deps/npm/node_modules/sigstore/dist/util/json.d.ts
deleted file mode 100644
index ed331817ef2360..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/json.d.ts
+++ /dev/null
@@ -1 +0,0 @@
-export declare function canonicalize(object: any): string;
diff --git a/deps/npm/node_modules/sigstore/dist/util/oidc.d.ts b/deps/npm/node_modules/sigstore/dist/util/oidc.d.ts
deleted file mode 100644
index b4513891a3527f..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/oidc.d.ts
+++ /dev/null
@@ -1 +0,0 @@
-export declare function extractJWTSubject(jwt: string): string;
diff --git a/deps/npm/node_modules/sigstore/dist/util/pem.d.ts b/deps/npm/node_modules/sigstore/dist/util/pem.d.ts
deleted file mode 100644
index 6910679cae0654..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/pem.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-/// <reference types="node" />
-export declare function toDER(certificate: string): Buffer;
-export declare function fromDER(certificate: Buffer, type?: string): string;
diff --git a/deps/npm/node_modules/sigstore/dist/util/promise.d.ts b/deps/npm/node_modules/sigstore/dist/util/promise.d.ts
deleted file mode 100644
index bbc501a85a7c60..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/promise.d.ts
+++ /dev/null
@@ -1 +0,0 @@
-export declare const promiseAny: <T>(values: Iterable<PromiseLike<T>>) => Promise<T>;
diff --git a/deps/npm/node_modules/sigstore/dist/util/promise.js b/deps/npm/node_modules/sigstore/dist/util/promise.js
deleted file mode 100644
index 8101dd47afe026..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/promise.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.promiseAny = void 0;
-// Implementation of Promise.any (not available until Node v15).
-// We're basically inverting the logic of Promise.all and taking advantage
-// of the fact that Promise.all will return early on the first rejection.
-// By reversing the resolve/reject logic we can use this to return early
-// on the first resolved promise.
-const promiseAny = async (values) => {
-    return Promise.all([...values].map((promise) => new Promise((resolve, reject) => promise.then(reject, resolve)))).then((errors) => Promise.reject(errors), (value) => Promise.resolve(value));
-};
-exports.promiseAny = promiseAny;
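The deleted polyfill leans on an inversion trick: wrapping each input so that fulfillment rejects and rejection resolves makes Promise.all bail out on the first fulfilled input, and swapping resolve/reject back on the way out recovers Promise.any semantics. With the package's engine floor raised to Node 16 (see package.json below), the native API covers this directly:

  // Promise.any has been native since Node 15: it fulfills with the first
  // fulfilled input and rejects with an AggregateError if all inputs reject.
  const slow = new Promise(res => setTimeout(() => res('slow'), 100));
  const fast = Promise.resolve('fast');
  Promise.any([slow, fast]).then(value => console.log(value)); // 'fast'
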
diff --git a/deps/npm/node_modules/sigstore/dist/util/stream.d.ts b/deps/npm/node_modules/sigstore/dist/util/stream.d.ts
deleted file mode 100644
index 4d509565942e14..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/stream.d.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-/// <reference types="node" />
-export declare class StreamError extends Error {
-}
-export declare class ByteStream {
-    private static BLOCK_SIZE;
-    private buf;
-    private view;
-    private start;
-    constructor(buffer?: ArrayBuffer);
-    get buffer(): Buffer;
-    get length(): number;
-    get position(): number;
-    seek(position: number): void;
-    slice(start: number, len: number): Buffer;
-    appendChar(char: number): void;
-    appendUint16(num: number): void;
-    appendUint24(num: number): void;
-    appendView(view: Uint8Array): void;
-    getBlock(size: number): Buffer;
-    getUint8(): number;
-    getUint16(): number;
-    private ensureCapacity;
-    private realloc;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/util/ua.d.ts b/deps/npm/node_modules/sigstore/dist/util/ua.d.ts
deleted file mode 100644
index b60e2e9c3e5374..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/util/ua.d.ts
+++ /dev/null
@@ -1 +0,0 @@
-export declare const getUserAgent: () => string;
diff --git a/deps/npm/node_modules/sigstore/dist/verify.d.ts b/deps/npm/node_modules/sigstore/dist/verify.d.ts
deleted file mode 100644
index 850d0f37f09817..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/verify.d.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-/// <reference types="node" />
-import * as sigstore from './types/sigstore';
-export type KeySelector = (hint: string) => string | Buffer | undefined;
-export declare class Verifier {
-    private trustedRoot;
-    private keySelector;
-    constructor(trustedRoot: sigstore.TrustedRoot, keySelector?: KeySelector);
-    verify(bundle: sigstore.Bundle, options: sigstore.RequiredArtifactVerificationOptions, data?: Buffer): void;
-    private verifyArtifactSignature;
-    private verifySigningCertificate;
-    private verifyTLogEntries;
-    private getPublicKey;
-}
diff --git a/deps/npm/node_modules/sigstore/dist/verify.js b/deps/npm/node_modules/sigstore/dist/verify.js
index 49f63d93abb268..a3dc4b307e4953 100644
--- a/deps/npm/node_modules/sigstore/dist/verify.js
+++ b/deps/npm/node_modules/sigstore/dist/verify.js
@@ -24,6 +24,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Verifier = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("@sigstore/bundle");
 const ca = __importStar(require("./ca/verify"));
 const error_1 = require("./error");
 const tlog = __importStar(require("./tlog/verify"));
@@ -38,7 +54,7 @@ class Verifier {
     // and the bundle's transparency log entries.
     verify(bundle, options, data) {
         this.verifyArtifactSignature(bundle, data);
-        if (sigstore.isBundleWithCertificateChain(bundle)) {
+        if ((0, bundle_1.isBundleWithCertificateChain)(bundle)) {
             this.verifySigningCertificate(bundle, options);
         }
         if (options.tlogOptions.disable === false) {
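The `(0, bundle_1.isBundleWithCertificateChain)(bundle)` spelling is simply how tsc emits a call to a named import: the comma expression yields the bare function, so it is invoked without `bundle_1` as its `this`. The corresponding source is roughly:

  import { isBundleWithCertificateChain } from '@sigstore/bundle';

  if (isBundleWithCertificateChain(bundle)) {
    // The guard narrows `bundle` to one whose verification material
    // includes an X.509 certificate chain, so certificate checks apply.
  }
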
diff --git a/deps/npm/node_modules/sigstore/dist/x509/cert.d.ts b/deps/npm/node_modules/sigstore/dist/x509/cert.d.ts
deleted file mode 100644
index 216dbd39cb1f7d..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/x509/cert.d.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-/// <reference types="node" />
-import * as sigstore from '../types/sigstore';
-import { ASN1Obj } from '../util/asn1';
-import { x509AuthorityKeyIDExtension, x509BasicConstraintsExtension, x509Extension, x509KeyUsageExtension, x509SCTExtension, x509SubjectAlternativeNameExtension, x509SubjectKeyIDExtension } from './ext';
-interface SCTVerificationResult {
-    verified: boolean;
-    logID: Buffer;
-}
-export declare class x509Certificate {
-    root: ASN1Obj;
-    constructor(asn1: ASN1Obj);
-    static parse(cert: Buffer | string): x509Certificate;
-    get tbsCertificate(): ASN1Obj;
-    get version(): string;
-    get notBefore(): Date;
-    get notAfter(): Date;
-    get issuer(): Buffer;
-    get subject(): Buffer;
-    get publicKey(): Buffer;
-    get signatureAlgorithm(): string;
-    get signatureValue(): Buffer;
-    get extensions(): ASN1Obj[];
-    get extKeyUsage(): x509KeyUsageExtension | undefined;
-    get extBasicConstraints(): x509BasicConstraintsExtension | undefined;
-    get extSubjectAltName(): x509SubjectAlternativeNameExtension | undefined;
-    get extAuthorityKeyID(): x509AuthorityKeyIDExtension | undefined;
-    get extSubjectKeyID(): x509SubjectKeyIDExtension | undefined;
-    get extSCT(): x509SCTExtension | undefined;
-    get isCA(): boolean;
-    extension(oid: string): x509Extension | undefined;
-    verify(issuerCertificate?: x509Certificate): boolean;
-    validForDate(date: Date): boolean;
-    equals(other: x509Certificate): boolean;
-    verifySCTs(issuer: x509Certificate, logs: sigstore.TransparencyLogInstance[]): SCTVerificationResult[];
-    private clone;
-    private findExtension;
-    private checkRecognizedExtensions;
-    private get tbsCertificateObj();
-    private get signatureAlgorithmObj();
-    private get signatureValueObj();
-    private get versionObj();
-    private get issuerObj();
-    private get validityObj();
-    private get subjectObj();
-    private get subjectPublicKeyInfoObj();
-    private get extensionsObj();
-}
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/x509/ext.d.ts b/deps/npm/node_modules/sigstore/dist/x509/ext.d.ts
deleted file mode 100644
index d6285f306f6adc..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/x509/ext.d.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-/// <reference types="node" />
-import { ASN1Obj } from '../util/asn1';
-import { SignedCertificateTimestamp } from './sct';
-export declare class x509Extension {
-    protected root: ASN1Obj;
-    constructor(asn1: ASN1Obj);
-    get oid(): string;
-    get critical(): boolean;
-    get value(): Buffer;
-    get valueObj(): ASN1Obj;
-    protected get extnValueObj(): ASN1Obj;
-}
-export declare class x509BasicConstraintsExtension extends x509Extension {
-    get isCA(): boolean;
-    get pathLenConstraint(): bigint | undefined;
-    private get sequence();
-}
-export declare class x509KeyUsageExtension extends x509Extension {
-    get digitalSignature(): boolean;
-    get keyCertSign(): boolean;
-    get crlSign(): boolean;
-    private get bitString();
-}
-export declare class x509SubjectAlternativeNameExtension extends x509Extension {
-    get rfc822Name(): string | undefined;
-    get uri(): string | undefined;
-    otherName(oid: string): string | undefined;
-    private findGeneralName;
-    private get generalNames();
-}
-export declare class x509AuthorityKeyIDExtension extends x509Extension {
-    get keyIdentifier(): Buffer | undefined;
-    private findSequenceMember;
-    private get sequence();
-}
-export declare class x509SubjectKeyIDExtension extends x509Extension {
-    get keyIdentifier(): Buffer;
-}
-export declare class x509SCTExtension extends x509Extension {
-    constructor(asn1: ASN1Obj);
-    get signedCertificateTimestamps(): SignedCertificateTimestamp[];
-}
diff --git a/deps/npm/node_modules/sigstore/dist/x509/sct.d.ts b/deps/npm/node_modules/sigstore/dist/x509/sct.d.ts
deleted file mode 100644
index 076a532984c6b8..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/x509/sct.d.ts
+++ /dev/null
@@ -1,26 +0,0 @@
-/// <reference types="node" />
-import * as sigstore from '../types/sigstore';
-interface SCTOptions {
-    version: number;
-    logID: Buffer;
-    timestamp: Buffer;
-    extensions: Buffer;
-    hashAlgorithm: number;
-    signatureAlgorithm: number;
-    signature: Buffer;
-}
-export declare class SignedCertificateTimestamp {
-    readonly version: number;
-    readonly logID: Buffer;
-    readonly timestamp: Buffer;
-    readonly extensions: Buffer;
-    readonly hashAlgorithm: number;
-    readonly signatureAlgorithm: number;
-    readonly signature: Buffer;
-    constructor(options: SCTOptions);
-    get datetime(): Date;
-    get algorithm(): string;
-    verify(preCert: Buffer, logs: sigstore.TransparencyLogInstance[]): boolean;
-    static parse(buf: Buffer): SignedCertificateTimestamp;
-}
-export {};
diff --git a/deps/npm/node_modules/sigstore/dist/x509/verify.d.ts b/deps/npm/node_modules/sigstore/dist/x509/verify.d.ts
deleted file mode 100644
index b12594adb2ea88..00000000000000
--- a/deps/npm/node_modules/sigstore/dist/x509/verify.d.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import { x509Certificate } from './cert';
-interface VerifyCertificateChainOptions {
-    trustedCerts: x509Certificate[];
-    untrustedCert: x509Certificate;
-    validAt?: Date;
-}
-export declare function verifyCertificateChain(opts: VerifyCertificateChainOptions): x509Certificate[];
-export {};
diff --git a/deps/npm/node_modules/sigstore/package.json b/deps/npm/node_modules/sigstore/package.json
index 02655a6c79bc81..daf50ba601884c 100644
--- a/deps/npm/node_modules/sigstore/package.json
+++ b/deps/npm/node_modules/sigstore/package.json
@@ -1,6 +1,6 @@
 {
   "name": "sigstore",
-  "version": "1.7.0",
+  "version": "2.1.0",
   "description": "code-signing for npm packages",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -9,9 +9,6 @@
     "build": "tsc --build",
     "test": "jest"
   },
-  "bin": {
-    "sigstore": "bin/sigstore.js"
-  },
   "files": [
     "dist",
     "store"
@@ -30,17 +27,19 @@
     "provenance": true
   },
   "devDependencies": {
-    "@sigstore/rekor-types": "^1.0.0",
+    "@sigstore/rekor-types": "^2.0.0",
     "@sigstore/jest": "^0.0.0",
-    "@tufjs/repo-mock": "^1.1.0",
+    "@sigstore/mock": "^0.4.0",
+    "@tufjs/repo-mock": "^2.0.0",
     "@types/make-fetch-happen": "^10.0.0"
   },
   "dependencies": {
-    "@sigstore/protobuf-specs": "^0.1.0",
-    "@sigstore/tuf": "^1.0.1",
-    "make-fetch-happen": "^11.0.1"
+    "@sigstore/bundle": "^2.1.0",
+    "@sigstore/protobuf-specs": "^0.2.1",
+    "@sigstore/sign": "^2.1.0",
+    "@sigstore/tuf": "^2.1.0"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   }
 }
diff --git a/deps/npm/node_modules/ssri/package.json b/deps/npm/node_modules/ssri/package.json
index 815c7f3ed03ae9..8750bd744d28bd 100644
--- a/deps/npm/node_modules/ssri/package.json
+++ b/deps/npm/node_modules/ssri/package.json
@@ -1,6 +1,6 @@
 {
   "name": "ssri",
-  "version": "10.0.4",
+  "version": "10.0.5",
   "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
   "main": "lib/index.js",
   "files": [
@@ -47,11 +47,11 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "minipass": "^5.0.0"
+    "minipass": "^7.0.3"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.0.1"
   },
   "engines": {
@@ -59,7 +59,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
+    "version": "4.18.0",
     "publish": "true"
   }
 }
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/LICENSE b/deps/npm/node_modules/tar/node_modules/minipass/LICENSE
new file mode 100644
index 00000000000000..97f8e32ed82e4c
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/index.js b/deps/npm/node_modules/tar/node_modules/minipass/index.js
new file mode 100644
index 00000000000000..ed07c17acd97b7
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/index.js
@@ -0,0 +1,702 @@
+'use strict'
+const proc =
+  typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+      }
+const EE = require('events')
+const Stream = require('stream')
+const stringdecoder = require('string_decoder')
+const SD = stringdecoder.StringDecoder
+
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
+const EMITTED_END = Symbol('emittedEnd')
+const EMITTING_END = Symbol('emittingEnd')
+const EMITTED_ERROR = Symbol('emittedError')
+const CLOSED = Symbol('closed')
+const READ = Symbol('read')
+const FLUSH = Symbol('flush')
+const FLUSHCHUNK = Symbol('flushChunk')
+const ENCODING = Symbol('encoding')
+const DECODER = Symbol('decoder')
+const FLOWING = Symbol('flowing')
+const PAUSED = Symbol('paused')
+const RESUME = Symbol('resume')
+const BUFFER = Symbol('buffer')
+const PIPES = Symbol('pipes')
+const BUFFERLENGTH = Symbol('bufferLength')
+const BUFFERPUSH = Symbol('bufferPush')
+const BUFFERSHIFT = Symbol('bufferShift')
+const OBJECTMODE = Symbol('objectMode')
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed')
+// internal event when stream has an error
+const ERROR = Symbol('error')
+const EMITDATA = Symbol('emitData')
+const EMITEND = Symbol('emitEnd')
+const EMITEND2 = Symbol('emitEnd2')
+const ASYNC = Symbol('async')
+const ABORT = Symbol('abort')
+const ABORTED = Symbol('aborted')
+const SIGNAL = Symbol('signal')
+
+const defer = fn => Promise.resolve().then(fn)
+
+// TODO remove when Node v8 support drops
+const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
+const ASYNCITERATOR =
+  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
+const ITERATOR =
+  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
+
+// events that mean 'the stream is over'
+// these are treated specially, and re-emitted
+// if they are listened for after emitting.
+const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
+
+const isArrayBuffer = b =>
+  b instanceof ArrayBuffer ||
+  (typeof b === 'object' &&
+    b.constructor &&
+    b.constructor.name === 'ArrayBuffer' &&
+    b.byteLength >= 0)
+
+const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
+
+class Pipe {
+  constructor(src, dest, opts) {
+    this.src = src
+    this.dest = dest
+    this.opts = opts
+    this.ondrain = () => src[RESUME]()
+    dest.on('drain', this.ondrain)
+  }
+  unpipe() {
+    this.dest.removeListener('drain', this.ondrain)
+  }
+  // istanbul ignore next - only here for the prototype
+  proxyErrors() {}
+  end() {
+    this.unpipe()
+    if (this.opts.end) this.dest.end()
+  }
+}
+
+class PipeProxyErrors extends Pipe {
+  unpipe() {
+    this.src.removeListener('error', this.proxyErrors)
+    super.unpipe()
+  }
+  constructor(src, dest, opts) {
+    super(src, dest, opts)
+    this.proxyErrors = er => dest.emit('error', er)
+    src.on('error', this.proxyErrors)
+  }
+}
+
+class Minipass extends Stream {
+  constructor(options) {
+    super()
+    this[FLOWING] = false
+    // whether we're explicitly paused
+    this[PAUSED] = false
+    this[PIPES] = []
+    this[BUFFER] = []
+    this[OBJECTMODE] = (options && options.objectMode) || false
+    if (this[OBJECTMODE]) this[ENCODING] = null
+    else this[ENCODING] = (options && options.encoding) || null
+    if (this[ENCODING] === 'buffer') this[ENCODING] = null
+    this[ASYNC] = (options && !!options.async) || false
+    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
+    this[EOF] = false
+    this[EMITTED_END] = false
+    this[EMITTING_END] = false
+    this[CLOSED] = false
+    this[EMITTED_ERROR] = null
+    this.writable = true
+    this.readable = true
+    this[BUFFERLENGTH] = 0
+    this[DESTROYED] = false
+    if (options && options.debugExposeBuffer === true) {
+      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
+    }
+    if (options && options.debugExposePipes === true) {
+      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
+    }
+    this[SIGNAL] = options && options.signal
+    this[ABORTED] = false
+    if (this[SIGNAL]) {
+      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
+      if (this[SIGNAL].aborted) {
+        this[ABORT]()
+      }
+    }
+  }
+
+  get bufferLength() {
+    return this[BUFFERLENGTH]
+  }
+
+  get encoding() {
+    return this[ENCODING]
+  }
+  set encoding(enc) {
+    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
+
+    if (
+      this[ENCODING] &&
+      enc !== this[ENCODING] &&
+      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
+    )
+      throw new Error('cannot change encoding')
+
+    if (this[ENCODING] !== enc) {
+      this[DECODER] = enc ? new SD(enc) : null
+      if (this[BUFFER].length)
+        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
+    }
+
+    this[ENCODING] = enc
+  }
+
+  setEncoding(enc) {
+    this.encoding = enc
+  }
+
+  get objectMode() {
+    return this[OBJECTMODE]
+  }
+  set objectMode(om) {
+    this[OBJECTMODE] = this[OBJECTMODE] || !!om
+  }
+
+  get ['async']() {
+    return this[ASYNC]
+  }
+  set ['async'](a) {
+    this[ASYNC] = this[ASYNC] || !!a
+  }
+
+  // drop everything and get out of the flow completely
+  [ABORT]() {
+    this[ABORTED] = true
+    this.emit('abort', this[SIGNAL].reason)
+    this.destroy(this[SIGNAL].reason)
+  }
+
+  get aborted() {
+    return this[ABORTED]
+  }
+  set aborted(_) {}
+
+  write(chunk, encoding, cb) {
+    if (this[ABORTED]) return false
+    if (this[EOF]) throw new Error('write after end')
+
+    if (this[DESTROYED]) {
+      this.emit(
+        'error',
+        Object.assign(
+          new Error('Cannot call write after a stream was destroyed'),
+          { code: 'ERR_STREAM_DESTROYED' }
+        )
+      )
+      return true
+    }
+
+    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
+
+    if (!encoding) encoding = 'utf8'
+
+    const fn = this[ASYNC] ? defer : f => f()
+
+    // convert array buffers and typed array views into buffers
+    // at some point in the future, we may want to do the opposite!
+    // leave strings and buffers as-is
+    // anything else switches us into object mode
+    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+      if (isArrayBufferView(chunk))
+        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
+      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
+      else if (typeof chunk !== 'string')
+        // use the setter so we throw if we have encoding set
+        this.objectMode = true
+    }
+
+    // handle object mode up front, since it's simpler
+    // this yields better performance, fewer checks later.
+    if (this[OBJECTMODE]) {
+      /* istanbul ignore if - maybe impossible? */
+      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
+
+      if (this.flowing) this.emit('data', chunk)
+      else this[BUFFERPUSH](chunk)
+
+      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
+
+      if (cb) fn(cb)
+
+      return this.flowing
+    }
+
+    // at this point the chunk is a buffer or string
+    // don't buffer it up or send it to the decoder
+    if (!chunk.length) {
+      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
+      if (cb) fn(cb)
+      return this.flowing
+    }
+
+    // fast-path writing strings of same encoding to a stream with
+    // an empty buffer, skipping the buffer/decoder dance
+    if (
+      typeof chunk === 'string' &&
+      // unless it is a string already ready for us to use
+      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
+    ) {
+      chunk = Buffer.from(chunk, encoding)
+    }
+
+    if (Buffer.isBuffer(chunk) && this[ENCODING])
+      chunk = this[DECODER].write(chunk)
+
+    // Note: flushing CAN potentially switch us into not-flowing mode
+    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
+
+    if (this.flowing) this.emit('data', chunk)
+    else this[BUFFERPUSH](chunk)
+
+    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
+
+    if (cb) fn(cb)
+
+    return this.flowing
+  }
+
+  read(n) {
+    if (this[DESTROYED]) return null
+
+    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
+      this[MAYBE_EMIT_END]()
+      return null
+    }
+
+    if (this[OBJECTMODE]) n = null
+
+    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
+      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
+    }
+
+    const ret = this[READ](n || null, this[BUFFER][0])
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [READ](n, chunk) {
+    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
+    else {
+      this[BUFFER][0] = chunk.slice(n)
+      chunk = chunk.slice(0, n)
+      this[BUFFERLENGTH] -= n
+    }
+
+    this.emit('data', chunk)
+
+    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
+
+    return chunk
+  }
+
+  end(chunk, encoding, cb) {
+    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
+    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
+    if (chunk) this.write(chunk, encoding)
+    if (cb) this.once('end', cb)
+    this[EOF] = true
+    this.writable = false
+
+    // if we haven't written anything, then go ahead and emit,
+    // even if we're not reading.
+    // we'll re-emit if a new 'end' listener is added anyway.
+    // This makes MP more suitable to write-only use cases.
+    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
+    return this
+  }
+
+  // don't let the internal resume be overwritten
+  [RESUME]() {
+    if (this[DESTROYED]) return
+
+    this[PAUSED] = false
+    this[FLOWING] = true
+    this.emit('resume')
+    if (this[BUFFER].length) this[FLUSH]()
+    else if (this[EOF]) this[MAYBE_EMIT_END]()
+    else this.emit('drain')
+  }
+
+  resume() {
+    return this[RESUME]()
+  }
+
+  pause() {
+    this[FLOWING] = false
+    this[PAUSED] = true
+  }
+
+  get destroyed() {
+    return this[DESTROYED]
+  }
+
+  get flowing() {
+    return this[FLOWING]
+  }
+
+  get paused() {
+    return this[PAUSED]
+  }
+
+  [BUFFERPUSH](chunk) {
+    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
+    else this[BUFFERLENGTH] += chunk.length
+    this[BUFFER].push(chunk)
+  }
+
+  [BUFFERSHIFT]() {
+    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
+    else this[BUFFERLENGTH] -= this[BUFFER][0].length
+    return this[BUFFER].shift()
+  }
+
+  [FLUSH](noDrain) {
+    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
+
+    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
+  }
+
+  [FLUSHCHUNK](chunk) {
+    this.emit('data', chunk)
+    return this.flowing
+  }
+
+  pipe(dest, opts) {
+    if (this[DESTROYED]) return
+
+    const ended = this[EMITTED_END]
+    opts = opts || {}
+    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
+    else opts.end = opts.end !== false
+    opts.proxyErrors = !!opts.proxyErrors
+
+    // piping an ended stream ends immediately
+    if (ended) {
+      if (opts.end) dest.end()
+    } else {
+      this[PIPES].push(
+        !opts.proxyErrors
+          ? new Pipe(this, dest, opts)
+          : new PipeProxyErrors(this, dest, opts)
+      )
+      if (this[ASYNC]) defer(() => this[RESUME]())
+      else this[RESUME]()
+    }
+
+    return dest
+  }
+
+  unpipe(dest) {
+    const p = this[PIPES].find(p => p.dest === dest)
+    if (p) {
+      this[PIPES].splice(this[PIPES].indexOf(p), 1)
+      p.unpipe()
+    }
+  }
+
+  addListener(ev, fn) {
+    return this.on(ev, fn)
+  }
+
+  on(ev, fn) {
+    const ret = super.on(ev, fn)
+    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
+    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
+      super.emit('readable')
+    else if (isEndish(ev) && this[EMITTED_END]) {
+      super.emit(ev)
+      this.removeAllListeners(ev)
+    } else if (ev === 'error' && this[EMITTED_ERROR]) {
+      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
+      else fn.call(this, this[EMITTED_ERROR])
+    }
+    return ret
+  }
+
+  get emittedEnd() {
+    return this[EMITTED_END]
+  }
+
+  [MAYBE_EMIT_END]() {
+    if (
+      !this[EMITTING_END] &&
+      !this[EMITTED_END] &&
+      !this[DESTROYED] &&
+      this[BUFFER].length === 0 &&
+      this[EOF]
+    ) {
+      this[EMITTING_END] = true
+      this.emit('end')
+      this.emit('prefinish')
+      this.emit('finish')
+      if (this[CLOSED]) this.emit('close')
+      this[EMITTING_END] = false
+    }
+  }
+
+  emit(ev, data, ...extra) {
+    // error and close are only events allowed after calling destroy()
+    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
+      return
+    else if (ev === 'data') {
+      return !this[OBJECTMODE] && !data
+        ? false
+        : this[ASYNC]
+        ? defer(() => this[EMITDATA](data))
+        : this[EMITDATA](data)
+    } else if (ev === 'end') {
+      return this[EMITEND]()
+    } else if (ev === 'close') {
+      this[CLOSED] = true
+      // don't emit close before 'end' and 'finish'
+      if (!this[EMITTED_END] && !this[DESTROYED]) return
+      const ret = super.emit('close')
+      this.removeAllListeners('close')
+      return ret
+    } else if (ev === 'error') {
+      this[EMITTED_ERROR] = data
+      super.emit(ERROR, data)
+      const ret =
+        !this[SIGNAL] || this.listeners('error').length
+          ? super.emit('error', data)
+          : false
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'resume') {
+      const ret = super.emit('resume')
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'finish' || ev === 'prefinish') {
+      const ret = super.emit(ev)
+      this.removeAllListeners(ev)
+      return ret
+    }
+
+    // Some other unknown event
+    const ret = super.emit(ev, data, ...extra)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITDATA](data) {
+    for (const p of this[PIPES]) {
+      if (p.dest.write(data) === false) this.pause()
+    }
+    const ret = super.emit('data', data)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITEND]() {
+    if (this[EMITTED_END]) return
+
+    this[EMITTED_END] = true
+    this.readable = false
+    if (this[ASYNC]) defer(() => this[EMITEND2]())
+    else this[EMITEND2]()
+  }
+
+  [EMITEND2]() {
+    if (this[DECODER]) {
+      const data = this[DECODER].end()
+      if (data) {
+        for (const p of this[PIPES]) {
+          p.dest.write(data)
+        }
+        super.emit('data', data)
+      }
+    }
+
+    for (const p of this[PIPES]) {
+      p.end()
+    }
+    const ret = super.emit('end')
+    this.removeAllListeners('end')
+    return ret
+  }
+
+  // const all = await stream.collect()
+  collect() {
+    const buf = []
+    if (!this[OBJECTMODE]) buf.dataLength = 0
+    // set the promise first, in case an error is raised
+    // by triggering the flow here.
+    const p = this.promise()
+    this.on('data', c => {
+      buf.push(c)
+      if (!this[OBJECTMODE]) buf.dataLength += c.length
+    })
+    return p.then(() => buf)
+  }
+
+  // const data = await stream.concat()
+  concat() {
+    return this[OBJECTMODE]
+      ? Promise.reject(new Error('cannot concat in objectMode'))
+      : this.collect().then(buf =>
+          this[OBJECTMODE]
+            ? Promise.reject(new Error('cannot concat in objectMode'))
+            : this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength)
+        )
+  }
+
+  // stream.promise().then(() => done, er => emitted error)
+  promise() {
+    return new Promise((resolve, reject) => {
+      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
+      this.on('error', er => reject(er))
+      this.on('end', () => resolve())
+    })
+  }
+
+  // for await (let chunk of stream)
+  [ASYNCITERATOR]() {
+    let stopped = false
+    const stop = () => {
+      this.pause()
+      stopped = true
+      return Promise.resolve({ done: true })
+    }
+    const next = () => {
+      if (stopped) return stop()
+      const res = this.read()
+      if (res !== null) return Promise.resolve({ done: false, value: res })
+
+      if (this[EOF]) return stop()
+
+      let resolve = null
+      let reject = null
+      const onerr = er => {
+        this.removeListener('data', ondata)
+        this.removeListener('end', onend)
+        this.removeListener(DESTROYED, ondestroy)
+        stop()
+        reject(er)
+      }
+      const ondata = value => {
+        this.removeListener('error', onerr)
+        this.removeListener('end', onend)
+        this.removeListener(DESTROYED, ondestroy)
+        this.pause()
+        resolve({ value: value, done: !!this[EOF] })
+      }
+      const onend = () => {
+        this.removeListener('error', onerr)
+        this.removeListener('data', ondata)
+        this.removeListener(DESTROYED, ondestroy)
+        stop()
+        resolve({ done: true })
+      }
+      const ondestroy = () => onerr(new Error('stream destroyed'))
+      return new Promise((res, rej) => {
+        reject = rej
+        resolve = res
+        this.once(DESTROYED, ondestroy)
+        this.once('error', onerr)
+        this.once('end', onend)
+        this.once('data', ondata)
+      })
+    }
+
+    return {
+      next,
+      throw: stop,
+      return: stop,
+      [ASYNCITERATOR]() {
+        return this
+      },
+    }
+  }
+
+  // for (let chunk of stream)
+  [ITERATOR]() {
+    let stopped = false
+    const stop = () => {
+      this.pause()
+      this.removeListener(ERROR, stop)
+      this.removeListener(DESTROYED, stop)
+      this.removeListener('end', stop)
+      stopped = true
+      return { done: true }
+    }
+
+    const next = () => {
+      if (stopped) return stop()
+      const value = this.read()
+      return value === null ? stop() : { value }
+    }
+    this.once('end', stop)
+    this.once(ERROR, stop)
+    this.once(DESTROYED, stop)
+
+    return {
+      next,
+      throw: stop,
+      return: stop,
+      [ITERATOR]() {
+        return this
+      },
+    }
+  }
+
+  destroy(er) {
+    if (this[DESTROYED]) {
+      if (er) this.emit('error', er)
+      else this.emit(DESTROYED)
+      return this
+    }
+
+    this[DESTROYED] = true
+
+    // throw away all buffered data, it's never coming out
+    this[BUFFER].length = 0
+    this[BUFFERLENGTH] = 0
+
+    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
+
+    if (er) this.emit('error', er)
+    // if no error to emit, still reject pending promises
+    else this.emit(DESTROYED)
+
+    return this
+  }
+
+  static isStream(s) {
+    return (
+      !!s &&
+      (s instanceof Minipass ||
+        s instanceof Stream ||
+        (s instanceof EE &&
+          // readable
+          (typeof s.pipe === 'function' ||
+            // writable
+            (typeof s.write === 'function' && typeof s.end === 'function'))))
+    )
+  }
+}
+
+exports.Minipass = Minipass
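Minipass deliberately skips most of core streams' buffering and nextTick machinery: a write to a flowing stream is emitted synchronously as 'data', and the class is both sync- and async-iterable. A minimal usage sketch against the API defined above, assuming the package resolves as 'minipass':

  const { Minipass } = require('minipass');

  const mp = new Minipass({ encoding: 'utf8' });
  mp.end('hello');

  // collect() attaches a 'data' listener (which starts the flow) and
  // resolves with every chunk once 'end' has been emitted.
  mp.collect().then(chunks => console.log(chunks.join(''))); // 'hello'
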
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/index.mjs b/deps/npm/node_modules/tar/node_modules/minipass/index.mjs
new file mode 100644
index 00000000000000..89b3fbf1a4d445
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/index.mjs
@@ -0,0 +1,700 @@
+'use strict'
+const proc =
+  typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+      }
+import EE from 'events'
+import Stream from 'stream'
+import stringdecoder from 'string_decoder'
+const SD = stringdecoder.StringDecoder
+
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
+const EMITTED_END = Symbol('emittedEnd')
+const EMITTING_END = Symbol('emittingEnd')
+const EMITTED_ERROR = Symbol('emittedError')
+const CLOSED = Symbol('closed')
+const READ = Symbol('read')
+const FLUSH = Symbol('flush')
+const FLUSHCHUNK = Symbol('flushChunk')
+const ENCODING = Symbol('encoding')
+const DECODER = Symbol('decoder')
+const FLOWING = Symbol('flowing')
+const PAUSED = Symbol('paused')
+const RESUME = Symbol('resume')
+const BUFFER = Symbol('buffer')
+const PIPES = Symbol('pipes')
+const BUFFERLENGTH = Symbol('bufferLength')
+const BUFFERPUSH = Symbol('bufferPush')
+const BUFFERSHIFT = Symbol('bufferShift')
+const OBJECTMODE = Symbol('objectMode')
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed')
+// internal event when stream has an error
+const ERROR = Symbol('error')
+const EMITDATA = Symbol('emitData')
+const EMITEND = Symbol('emitEnd')
+const EMITEND2 = Symbol('emitEnd2')
+const ASYNC = Symbol('async')
+const ABORT = Symbol('abort')
+const ABORTED = Symbol('aborted')
+const SIGNAL = Symbol('signal')
+
+const defer = fn => Promise.resolve().then(fn)
+
+// TODO remove when Node v8 support drops
+const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
+const ASYNCITERATOR =
+  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
+const ITERATOR =
+  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
+
+// events that mean 'the stream is over'
+// these are treated specially, and re-emitted
+// if they are listened for after emitting.
+const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
+
+const isArrayBuffer = b =>
+  b instanceof ArrayBuffer ||
+  (typeof b === 'object' &&
+    b.constructor &&
+    b.constructor.name === 'ArrayBuffer' &&
+    b.byteLength >= 0)
+
+const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
+
+class Pipe {
+  constructor(src, dest, opts) {
+    this.src = src
+    this.dest = dest
+    this.opts = opts
+    this.ondrain = () => src[RESUME]()
+    dest.on('drain', this.ondrain)
+  }
+  unpipe() {
+    this.dest.removeListener('drain', this.ondrain)
+  }
+  // istanbul ignore next - only here for the prototype
+  proxyErrors() {}
+  end() {
+    this.unpipe()
+    if (this.opts.end) this.dest.end()
+  }
+}
+
+class PipeProxyErrors extends Pipe {
+  unpipe() {
+    this.src.removeListener('error', this.proxyErrors)
+    super.unpipe()
+  }
+  constructor(src, dest, opts) {
+    super(src, dest, opts)
+    this.proxyErrors = er => dest.emit('error', er)
+    src.on('error', this.proxyErrors)
+  }
+}
+
+export class Minipass extends Stream {
+  constructor(options) {
+    super()
+    this[FLOWING] = false
+    // whether we're explicitly paused
+    this[PAUSED] = false
+    this[PIPES] = []
+    this[BUFFER] = []
+    this[OBJECTMODE] = (options && options.objectMode) || false
+    if (this[OBJECTMODE]) this[ENCODING] = null
+    else this[ENCODING] = (options && options.encoding) || null
+    if (this[ENCODING] === 'buffer') this[ENCODING] = null
+    this[ASYNC] = (options && !!options.async) || false
+    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
+    this[EOF] = false
+    this[EMITTED_END] = false
+    this[EMITTING_END] = false
+    this[CLOSED] = false
+    this[EMITTED_ERROR] = null
+    this.writable = true
+    this.readable = true
+    this[BUFFERLENGTH] = 0
+    this[DESTROYED] = false
+    if (options && options.debugExposeBuffer === true) {
+      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
+    }
+    if (options && options.debugExposePipes === true) {
+      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
+    }
+    this[SIGNAL] = options && options.signal
+    this[ABORTED] = false
+    if (this[SIGNAL]) {
+      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
+      if (this[SIGNAL].aborted) {
+        this[ABORT]()
+      }
+    }
+  }
+
+  get bufferLength() {
+    return this[BUFFERLENGTH]
+  }
+
+  get encoding() {
+    return this[ENCODING]
+  }
+  set encoding(enc) {
+    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
+
+    if (
+      this[ENCODING] &&
+      enc !== this[ENCODING] &&
+      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
+    )
+      throw new Error('cannot change encoding')
+
+    if (this[ENCODING] !== enc) {
+      this[DECODER] = enc ? new SD(enc) : null
+      if (this[BUFFER].length)
+        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
+    }
+
+    this[ENCODING] = enc
+  }
+
+  setEncoding(enc) {
+    this.encoding = enc
+  }
+
+  get objectMode() {
+    return this[OBJECTMODE]
+  }
+  set objectMode(om) {
+    this[OBJECTMODE] = this[OBJECTMODE] || !!om
+  }
+
+  get ['async']() {
+    return this[ASYNC]
+  }
+  set ['async'](a) {
+    this[ASYNC] = this[ASYNC] || !!a
+  }
+
+  // drop everything and get out of the flow completely
+  [ABORT]() {
+    this[ABORTED] = true
+    this.emit('abort', this[SIGNAL].reason)
+    this.destroy(this[SIGNAL].reason)
+  }
+
+  get aborted() {
+    return this[ABORTED]
+  }
+  set aborted(_) {}
+
+  write(chunk, encoding, cb) {
+    if (this[ABORTED]) return false
+    if (this[EOF]) throw new Error('write after end')
+
+    if (this[DESTROYED]) {
+      this.emit(
+        'error',
+        Object.assign(
+          new Error('Cannot call write after a stream was destroyed'),
+          { code: 'ERR_STREAM_DESTROYED' }
+        )
+      )
+      return true
+    }
+
+    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
+
+    if (!encoding) encoding = 'utf8'
+
+    const fn = this[ASYNC] ? defer : f => f()
+
+    // convert array buffers and typed array views into buffers
+    // at some point in the future, we may want to do the opposite!
+    // leave strings and buffers as-is
+    // anything else switches us into object mode
+    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+      if (isArrayBufferView(chunk))
+        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
+      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
+      else if (typeof chunk !== 'string')
+        // use the setter so we throw if we have encoding set
+        this.objectMode = true
+    }
+
+    // handle object mode up front, since it's simpler
+    // this yields better performance, fewer checks later.
+    if (this[OBJECTMODE]) {
+      /* istanbul ignore if - maybe impossible? */
+      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
+
+      if (this.flowing) this.emit('data', chunk)
+      else this[BUFFERPUSH](chunk)
+
+      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
+
+      if (cb) fn(cb)
+
+      return this.flowing
+    }
+
+    // at this point the chunk is a buffer or string
+    // don't buffer it up or send it to the decoder
+    if (!chunk.length) {
+      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
+      if (cb) fn(cb)
+      return this.flowing
+    }
+
+    // fast-path writing strings of same encoding to a stream with
+    // an empty buffer, skipping the buffer/decoder dance
+    if (
+      typeof chunk === 'string' &&
+      // unless it is a string already ready for us to use
+      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
+    ) {
+      chunk = Buffer.from(chunk, encoding)
+    }
+
+    if (Buffer.isBuffer(chunk) && this[ENCODING])
+      chunk = this[DECODER].write(chunk)
+
+    // Note: flushing CAN potentially switch us into not-flowing mode
+    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
+
+    if (this.flowing) this.emit('data', chunk)
+    else this[BUFFERPUSH](chunk)
+
+    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
+
+    if (cb) fn(cb)
+
+    return this.flowing
+  }
+
+  read(n) {
+    if (this[DESTROYED]) return null
+
+    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
+      this[MAYBE_EMIT_END]()
+      return null
+    }
+
+    if (this[OBJECTMODE]) n = null
+
+    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
+      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
+    }
+
+    const ret = this[READ](n || null, this[BUFFER][0])
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [READ](n, chunk) {
+    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
+    else {
+      this[BUFFER][0] = chunk.slice(n)
+      chunk = chunk.slice(0, n)
+      this[BUFFERLENGTH] -= n
+    }
+
+    this.emit('data', chunk)
+
+    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
+
+    return chunk
+  }
+
+  end(chunk, encoding, cb) {
+    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
+    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
+    if (chunk) this.write(chunk, encoding)
+    if (cb) this.once('end', cb)
+    this[EOF] = true
+    this.writable = false
+
+    // if we haven't written anything, then go ahead and emit,
+    // even if we're not reading.
+    // we'll re-emit if a new 'end' listener is added anyway.
+    // This makes MP more suitable to write-only use cases.
+    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
+    return this
+  }
+
+  // don't let the internal resume be overwritten
+  [RESUME]() {
+    if (this[DESTROYED]) return
+
+    this[PAUSED] = false
+    this[FLOWING] = true
+    this.emit('resume')
+    if (this[BUFFER].length) this[FLUSH]()
+    else if (this[EOF]) this[MAYBE_EMIT_END]()
+    else this.emit('drain')
+  }
+
+  resume() {
+    return this[RESUME]()
+  }
+
+  pause() {
+    this[FLOWING] = false
+    this[PAUSED] = true
+  }
+
+  get destroyed() {
+    return this[DESTROYED]
+  }
+
+  get flowing() {
+    return this[FLOWING]
+  }
+
+  get paused() {
+    return this[PAUSED]
+  }
+
+  [BUFFERPUSH](chunk) {
+    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
+    else this[BUFFERLENGTH] += chunk.length
+    this[BUFFER].push(chunk)
+  }
+
+  [BUFFERSHIFT]() {
+    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
+    else this[BUFFERLENGTH] -= this[BUFFER][0].length
+    return this[BUFFER].shift()
+  }
+
+  [FLUSH](noDrain) {
+    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
+
+    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
+  }
+
+  [FLUSHCHUNK](chunk) {
+    this.emit('data', chunk)
+    return this.flowing
+  }
+
+  pipe(dest, opts) {
+    if (this[DESTROYED]) return
+
+    const ended = this[EMITTED_END]
+    opts = opts || {}
+    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
+    else opts.end = opts.end !== false
+    opts.proxyErrors = !!opts.proxyErrors
+
+    // piping an ended stream ends immediately
+    if (ended) {
+      if (opts.end) dest.end()
+    } else {
+      this[PIPES].push(
+        !opts.proxyErrors
+          ? new Pipe(this, dest, opts)
+          : new PipeProxyErrors(this, dest, opts)
+      )
+      if (this[ASYNC]) defer(() => this[RESUME]())
+      else this[RESUME]()
+    }
+
+    return dest
+  }
+
+  unpipe(dest) {
+    const p = this[PIPES].find(p => p.dest === dest)
+    if (p) {
+      this[PIPES].splice(this[PIPES].indexOf(p), 1)
+      p.unpipe()
+    }
+  }
+
+  addListener(ev, fn) {
+    return this.on(ev, fn)
+  }
+
+  on(ev, fn) {
+    const ret = super.on(ev, fn)
+    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
+    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
+      super.emit('readable')
+    else if (isEndish(ev) && this[EMITTED_END]) {
+      super.emit(ev)
+      this.removeAllListeners(ev)
+    } else if (ev === 'error' && this[EMITTED_ERROR]) {
+      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
+      else fn.call(this, this[EMITTED_ERROR])
+    }
+    return ret
+  }
+
+  get emittedEnd() {
+    return this[EMITTED_END]
+  }
+
+  [MAYBE_EMIT_END]() {
+    if (
+      !this[EMITTING_END] &&
+      !this[EMITTED_END] &&
+      !this[DESTROYED] &&
+      this[BUFFER].length === 0 &&
+      this[EOF]
+    ) {
+      this[EMITTING_END] = true
+      this.emit('end')
+      this.emit('prefinish')
+      this.emit('finish')
+      if (this[CLOSED]) this.emit('close')
+      this[EMITTING_END] = false
+    }
+  }
+
+  emit(ev, data, ...extra) {
+    // error and close are only events allowed after calling destroy()
+    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
+      return
+    else if (ev === 'data') {
+      return !this[OBJECTMODE] && !data
+        ? false
+        : this[ASYNC]
+        ? defer(() => this[EMITDATA](data))
+        : this[EMITDATA](data)
+    } else if (ev === 'end') {
+      return this[EMITEND]()
+    } else if (ev === 'close') {
+      this[CLOSED] = true
+      // don't emit close before 'end' and 'finish'
+      if (!this[EMITTED_END] && !this[DESTROYED]) return
+      const ret = super.emit('close')
+      this.removeAllListeners('close')
+      return ret
+    } else if (ev === 'error') {
+      this[EMITTED_ERROR] = data
+      super.emit(ERROR, data)
+      const ret =
+        !this[SIGNAL] || this.listeners('error').length
+          ? super.emit('error', data)
+          : false
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'resume') {
+      const ret = super.emit('resume')
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'finish' || ev === 'prefinish') {
+      const ret = super.emit(ev)
+      this.removeAllListeners(ev)
+      return ret
+    }
+
+    // Some other unknown event
+    const ret = super.emit(ev, data, ...extra)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITDATA](data) {
+    for (const p of this[PIPES]) {
+      if (p.dest.write(data) === false) this.pause()
+    }
+    const ret = super.emit('data', data)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITEND]() {
+    if (this[EMITTED_END]) return
+
+    this[EMITTED_END] = true
+    this.readable = false
+    if (this[ASYNC]) defer(() => this[EMITEND2]())
+    else this[EMITEND2]()
+  }
+
+  [EMITEND2]() {
+    if (this[DECODER]) {
+      const data = this[DECODER].end()
+      if (data) {
+        for (const p of this[PIPES]) {
+          p.dest.write(data)
+        }
+        super.emit('data', data)
+      }
+    }
+
+    for (const p of this[PIPES]) {
+      p.end()
+    }
+    const ret = super.emit('end')
+    this.removeAllListeners('end')
+    return ret
+  }
+
+  // const all = await stream.collect()
+  collect() {
+    const buf = []
+    if (!this[OBJECTMODE]) buf.dataLength = 0
+    // set the promise first, in case an error is raised
+    // by triggering the flow here.
+    const p = this.promise()
+    this.on('data', c => {
+      buf.push(c)
+      if (!this[OBJECTMODE]) buf.dataLength += c.length
+    })
+    return p.then(() => buf)
+  }
+
+  // const data = await stream.concat()
+  concat() {
+    return this[OBJECTMODE]
+      ? Promise.reject(new Error('cannot concat in objectMode'))
+      : this.collect().then(buf =>
+          this[OBJECTMODE]
+            ? Promise.reject(new Error('cannot concat in objectMode'))
+            : this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength)
+        )
+  }
+
+  // stream.promise().then(() => done, er => emitted error)
+  promise() {
+    return new Promise((resolve, reject) => {
+      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
+      this.on('error', er => reject(er))
+      this.on('end', () => resolve())
+    })
+  }
+
+  // for await (let chunk of stream)
+  [ASYNCITERATOR]() {
+    let stopped = false
+    const stop = () => {
+      this.pause()
+      stopped = true
+      return Promise.resolve({ done: true })
+    }
+    const next = () => {
+      if (stopped) return stop()
+      const res = this.read()
+      if (res !== null) return Promise.resolve({ done: false, value: res })
+
+      if (this[EOF]) return stop()
+
+      let resolve = null
+      let reject = null
+      const onerr = er => {
+        this.removeListener('data', ondata)
+        this.removeListener('end', onend)
+        this.removeListener(DESTROYED, ondestroy)
+        stop()
+        reject(er)
+      }
+      const ondata = value => {
+        this.removeListener('error', onerr)
+        this.removeListener('end', onend)
+        this.removeListener(DESTROYED, ondestroy)
+        this.pause()
+        resolve({ value: value, done: !!this[EOF] })
+      }
+      const onend = () => {
+        this.removeListener('error', onerr)
+        this.removeListener('data', ondata)
+        this.removeListener(DESTROYED, ondestroy)
+        stop()
+        resolve({ done: true })
+      }
+      const ondestroy = () => onerr(new Error('stream destroyed'))
+      return new Promise((res, rej) => {
+        reject = rej
+        resolve = res
+        this.once(DESTROYED, ondestroy)
+        this.once('error', onerr)
+        this.once('end', onend)
+        this.once('data', ondata)
+      })
+    }
+
+    return {
+      next,
+      throw: stop,
+      return: stop,
+      [ASYNCITERATOR]() {
+        return this
+      },
+    }
+  }
+
+  // for (let chunk of stream)
+  [ITERATOR]() {
+    let stopped = false
+    const stop = () => {
+      this.pause()
+      this.removeListener(ERROR, stop)
+      this.removeListener(DESTROYED, stop)
+      this.removeListener('end', stop)
+      stopped = true
+      return { done: true }
+    }
+
+    const next = () => {
+      if (stopped) return stop()
+      const value = this.read()
+      return value === null ? stop() : { value }
+    }
+    this.once('end', stop)
+    this.once(ERROR, stop)
+    this.once(DESTROYED, stop)
+
+    return {
+      next,
+      throw: stop,
+      return: stop,
+      [ITERATOR]() {
+        return this
+      },
+    }
+  }
+
+  destroy(er) {
+    if (this[DESTROYED]) {
+      if (er) this.emit('error', er)
+      else this.emit(DESTROYED)
+      return this
+    }
+
+    this[DESTROYED] = true
+
+    // throw away all buffered data, it's never coming out
+    this[BUFFER].length = 0
+    this[BUFFERLENGTH] = 0
+
+    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
+
+    if (er) this.emit('error', er)
+    // if no error to emit, still reject pending promises
+    else this.emit(DESTROYED)
+
+    return this
+  }
+
+  static isStream(s) {
+    return (
+      !!s &&
+      (s instanceof Minipass ||
+        s instanceof Stream ||
+        (s instanceof EE &&
+          // readable
+          (typeof s.pipe === 'function' ||
+            // writable
+            (typeof s.write === 'function' && typeof s.end === 'function'))))
+    )
+  }
+}
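The Minipass class ends here. For orientation, a minimal usage sketch of the consumer-facing surface defined above (collect, concat, promise, and the iterators); this is illustrative only and assumes the package's named Minipass export:

    const { Minipass } = require('minipass')

    const mp = new Minipass({ encoding: 'utf8' })
    mp.write('hello, ')
    mp.end('world')

    // concat() joins the collected chunks: a string when an encoding is
    // set, a single Buffer otherwise, and a rejection in objectMode
    mp.concat().then(all => console.log(all)) // 'hello, world'

    // streams are also sync- and async-iterable
    async function drain (stream) {
      for await (const chunk of stream) console.log('chunk:', chunk)
    }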
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/package.json b/deps/npm/node_modules/tar/node_modules/minipass/package.json
new file mode 100644
index 00000000000000..0e20e988047f23
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/package.json
@@ -0,0 +1,76 @@
+{
+  "name": "minipass",
+  "version": "5.0.0",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./index.js",
+  "module": "./index.mjs",
+  "types": "./index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./index.d.ts",
+        "default": "./index.mjs"
+      },
+      "require": {
+        "types": "./index.d.ts",
+        "default": "./index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "devDependencies": {
+    "@types/node": "^17.0.41",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "prettier": "^2.6.2",
+    "tap": "^16.2.0",
+    "through2": "^2.0.3",
+    "ts-node": "^10.8.1",
+    "typedoc": "^0.23.24",
+    "typescript": "^4.7.3"
+  },
+  "scripts": {
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "node ./scripts/transpile-to-esm.js",
+    "snap": "tap",
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --follow-tags",
+    "typedoc": "typedoc ./index.d.ts",
+    "format": "prettier --write . --loglevel warn"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minipass.git"
+  },
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "files": [
+    "index.d.ts",
+    "index.js",
+    "index.mjs"
+  ],
+  "tap": {
+    "check-coverage": true
+  },
+  "engines": {
+    "node": ">=8"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  }
+}
diff --git a/deps/npm/node_modules/tuf-js/dist/config.js b/deps/npm/node_modules/tuf-js/dist/config.js
index c2d970e2562449..bafb33a8a1bf7c 100644
--- a/deps/npm/node_modules/tuf-js/dist/config.js
+++ b/deps/npm/node_modules/tuf-js/dist/config.js
@@ -10,5 +10,6 @@ exports.defaultConfig = {
     targetsMaxLength: 5000000,
     prefixTargetsWithHash: true,
     fetchTimeout: 100000,
-    fetchRetries: 2,
+    fetchRetries: undefined,
+    fetchRetry: 2,
 };
diff --git a/deps/npm/node_modules/tuf-js/dist/fetcher.js b/deps/npm/node_modules/tuf-js/dist/fetcher.js
index d3dcf53eeb8697..f966ce1bb0cdc6 100644
--- a/deps/npm/node_modules/tuf-js/dist/fetcher.js
+++ b/deps/npm/node_modules/tuf-js/dist/fetcher.js
@@ -57,13 +57,13 @@ class DefaultFetcher extends BaseFetcher {
     constructor(options = {}) {
         super();
         this.timeout = options.timeout;
-        this.retries = options.retries;
+        this.retry = options.retry;
     }
     async fetch(url) {
         log('GET %s', url);
         const response = await (0, make_fetch_happen_1.default)(url, {
             timeout: this.timeout,
-            retry: this.retries,
+            retry: this.retry,
         });
         if (!response.ok || !response?.body) {
             throw new error_1.DownloadHTTPError('Failed to download', response.status);
diff --git a/deps/npm/node_modules/tuf-js/dist/updater.js b/deps/npm/node_modules/tuf-js/dist/updater.js
index 2aba48d24affd5..2d0c769c7af647 100644
--- a/deps/npm/node_modules/tuf-js/dist/updater.js
+++ b/deps/npm/node_modules/tuf-js/dist/updater.js
@@ -51,7 +51,7 @@ class Updater {
             fetcher ||
                 new fetcher_1.DefaultFetcher({
                     timeout: this.config.fetchTimeout,
-                    retries: this.config.fetchRetries,
+                    retry: this.config.fetchRetries ?? this.config.fetchRetry,
                 });
     }
     // refresh and load the metadata before downloading the target
@@ -306,7 +306,7 @@ class Updater {
         const filePath = encodeURIComponent(targetInfo.path);
         return path.join(this.targetDir, filePath);
     }
-    async persistMetadata(metaDataName, bytesData) {
+    persistMetadata(metaDataName, bytesData) {
         try {
             const filePath = path.join(this.dir, `${metaDataName}.json`);
             log('WRITE %s', filePath);
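The config, fetcher, and updater hunks above track a rename in tuf-js: DefaultFetcher now takes a retry option instead of retries, while an explicitly set fetchRetries is still honored by the Updater. A sketch of the fallback semantics (illustrative only):

    // ?? only falls through on undefined/null, so an explicit
    // fetchRetries (even 0) still wins over the fetchRetry default
    const config = { fetchRetries: undefined, fetchRetry: 2 }
    const retry = config.fetchRetries ?? config.fetchRetry
    console.log(retry) // 2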
diff --git a/deps/npm/node_modules/tuf-js/package.json b/deps/npm/node_modules/tuf-js/package.json
index 9187d88083272c..c757d6a00d7008 100644
--- a/deps/npm/node_modules/tuf-js/package.json
+++ b/deps/npm/node_modules/tuf-js/package.json
@@ -1,6 +1,6 @@
 {
   "name": "tuf-js",
-  "version": "1.1.7",
+  "version": "2.1.0",
   "description": "JavaScript implementation of The Update Framework (TUF)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -28,19 +28,16 @@
   },
   "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
   "devDependencies": {
-    "@tufjs/repo-mock": "1.3.1",
+    "@tufjs/repo-mock": "2.0.0",
     "@types/debug": "^4.1.8",
-    "@types/make-fetch-happen": "^10.0.1",
-    "@types/node": "^20.2.5",
-    "nock": "^13.3.1",
-    "typescript": "^5.1.3"
+    "@types/make-fetch-happen": "^10.0.1"
   },
   "dependencies": {
-    "@tufjs/models": "1.0.4",
+    "@tufjs/models": "2.0.0",
     "debug": "^4.3.4",
-    "make-fetch-happen": "^11.1.1"
+    "make-fetch-happen": "^13.0.0"
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.14.0 || >=18.0.0"
   }
 }
diff --git a/deps/npm/node_modules/which/lib/index.js b/deps/npm/node_modules/which/lib/index.js
index 52e9ea62377e74..2fd358baf888fd 100644
--- a/deps/npm/node_modules/which/lib/index.js
+++ b/deps/npm/node_modules/which/lib/index.js
@@ -1,4 +1,4 @@
-const isexe = require('isexe')
+const { isexe, sync: isexeSync } = require('isexe')
 const { join, delimiter, sep, posix } = require('path')
 
 const isWindows = process.platform === 'win32'
@@ -31,11 +31,7 @@ const getPathInfo = (cmd, {
   if (isWindows) {
     const pathExtExe = optPathExt ||
       ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
-    const pathExt = pathExtExe.split(optDelimiter).reduce((acc, item) => {
-      acc.push(item)
-      acc.push(item.toLowerCase())
-      return acc
-    }, [])
+    const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()])
     if (cmd.includes('.') && pathExt[0] !== '') {
       pathExt.unshift('')
     }
@@ -90,7 +86,7 @@ const whichSync = (cmd, opt = {}) => {
 
     for (const ext of pathExt) {
       const withExt = p + ext
-      const is = isexe.sync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
+      const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
       if (is) {
         if (!opt.all) {
           return withExt
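The flatMap rewrite above is behavior-preserving: it builds the same interleaved original/lowercase extension list the old reduce produced. A quick illustrative check:

    const pathExtExe = ['.EXE', '.CMD', '.BAT', '.COM'].join(';')
    const pathExt = pathExtExe.split(';').flatMap(item => [item, item.toLowerCase()])
    console.log(pathExt)
    // ['.EXE', '.exe', '.CMD', '.cmd', '.BAT', '.bat', '.COM', '.com']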
diff --git a/deps/npm/node_modules/which/node_modules/isexe/LICENSE b/deps/npm/node_modules/which/node_modules/isexe/LICENSE
new file mode 100644
index 00000000000000..c925dbe826b670
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2016-2022 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/index.js b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/index.js
new file mode 100644
index 00000000000000..cefcb66b5c5434
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/index.js
@@ -0,0 +1,46 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sync = exports.isexe = exports.posix = exports.win32 = void 0;
+const posix = __importStar(require("./posix.js"));
+exports.posix = posix;
+const win32 = __importStar(require("./win32.js"));
+exports.win32 = win32;
+__exportStar(require("./options.js"), exports);
+const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform;
+const impl = platform === 'win32' ? win32 : posix;
+/**
+ * Determine whether a path is executable on the current platform.
+ */
+exports.isexe = impl.isexe;
+/**
+ * Synchronously determine whether a path is executable on the
+ * current platform.
+ */
+exports.sync = impl.sync;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
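The CommonJS entry above reads _ISEXE_TEST_PLATFORM_ once at load time to pick the posix or win32 implementation, so the dispatch can be forced off-platform. An illustrative sketch:

    // must be set before the module is first required, since the
    // platform is captured at load time
    process.env._ISEXE_TEST_PLATFORM_ = 'win32'
    const { isexe } = require('isexe')
    isexe(process.execPath, { ignoreErrors: true }).then(ok => console.log(ok))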
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/options.js b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/options.js
new file mode 100644
index 00000000000000..0dfad0762cc32c
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/options.js
@@ -0,0 +1,3 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/package.json b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/package.json
new file mode 100644
index 00000000000000..5bbefffbabee39
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/posix.js b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/posix.js
new file mode 100644
index 00000000000000..3bc5e79d7007e9
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/posix.js
@@ -0,0 +1,67 @@
+"use strict";
+/**
+ * This is the Posix implementation of isexe, which uses the file
+ * mode and uid/gid values.
+ *
+ * @module
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sync = exports.isexe = void 0;
+const fs_1 = require("fs");
+const promises_1 = require("fs/promises");
+/**
+ * Determine whether a path is executable according to the mode and
+ * current (or specified) user and group IDs.
+ */
+const isexe = async (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(await (0, promises_1.stat)(path), options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+exports.isexe = isexe;
+/**
+ * Synchronously determine whether a path is executable according to
+ * the mode and current (or specified) user and group IDs.
+ */
+const sync = (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat((0, fs_1.statSync)(path), options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+exports.sync = sync;
+const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options);
+const checkMode = (stat, options) => {
+    const myUid = options.uid ?? process.getuid?.();
+    const myGroups = options.groups ?? process.getgroups?.() ?? [];
+    const myGid = options.gid ?? process.getgid?.() ?? myGroups[0];
+    if (myUid === undefined || myGid === undefined) {
+        throw new Error('cannot get uid or gid');
+    }
+    const groups = new Set([myGid, ...myGroups]);
+    const mod = stat.mode;
+    const uid = stat.uid;
+    const gid = stat.gid;
+    const u = parseInt('100', 8);
+    const g = parseInt('010', 8);
+    const o = parseInt('001', 8);
+    const ug = u | g;
+    return !!(mod & o ||
+        (mod & g && groups.has(gid)) ||
+        (mod & u && uid === myUid) ||
+        (mod & ug && myUid === 0));
+};
+//# sourceMappingURL=posix.js.map
\ No newline at end of file
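checkMode above tests the classic execute bits (owner 0o100, group 0o010, other 0o001), with root granted through either the owner or group bit. A worked example, illustrative only:

    // mode 0o754 is rwxr-xr--
    const mod = 0o754
    const u = 0o100, g = 0o010, o = 0o001
    console.log(Boolean(mod & o))        // false: no other-execute bit
    console.log(Boolean(mod & g))        // true, granted when a gid matches
    console.log(Boolean(mod & u))        // true, granted when the uid matches
    console.log(Boolean(mod & (u | g)))  // true: uid 0 passes via either bit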
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/win32.js b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/win32.js
new file mode 100644
index 00000000000000..fa7a4d2f7d240d
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/cjs/win32.js
@@ -0,0 +1,62 @@
+"use strict";
+/**
+ * This is the Windows implementation of isexe, which uses the file
+ * extension and PATHEXT setting.
+ *
+ * @module
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sync = exports.isexe = void 0;
+const fs_1 = require("fs");
+const promises_1 = require("fs/promises");
+/**
+ * Determine whether a path is executable based on the file extension
+ * and PATHEXT environment variable (or specified pathExt option)
+ */
+const isexe = async (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(await (0, promises_1.stat)(path), path, options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+exports.isexe = isexe;
+/**
+ * Synchronously determine whether a path is executable based on the file
+ * extension and PATHEXT environment variable (or specified pathExt option)
+ */
+const sync = (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat((0, fs_1.statSync)(path), path, options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+exports.sync = sync;
+const checkPathExt = (path, options) => {
+    const { pathExt = process.env.PATHEXT || '' } = options;
+    const peSplit = pathExt.split(';');
+    if (peSplit.indexOf('') !== -1) {
+        return true;
+    }
+    for (let i = 0; i < peSplit.length; i++) {
+        const p = peSplit[i].toLowerCase();
+        const ext = path.substring(path.length - p.length).toLowerCase();
+        if (p && ext === p) {
+            return true;
+        }
+    }
+    return false;
+};
+const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options);
+//# sourceMappingURL=win32.js.map
\ No newline at end of file
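checkPathExt above treats an empty PATHEXT entry as "anything is executable" and otherwise does a case-insensitive suffix match against each entry. Illustrative:

    const path = 'C:\\tools\\build.CMD'
    const pathExt = '.COM;.EXE;.BAT;.CMD'
    const match = pathExt.split(';').some(p =>
      p && path.substring(path.length - p.length).toLowerCase() === p.toLowerCase())
    console.log(match) // true: '.cmd' matches the '.CMD' entry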
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/index.js b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/index.js
new file mode 100644
index 00000000000000..1e309acd7355ec
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/index.js
@@ -0,0 +1,16 @@
+import * as posix from './posix.js';
+import * as win32 from './win32.js';
+export * from './options.js';
+export { win32, posix };
+const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform;
+const impl = platform === 'win32' ? win32 : posix;
+/**
+ * Determine whether a path is executable on the current platform.
+ */
+export const isexe = impl.isexe;
+/**
+ * Synchronously determine whether a path is executable on the
+ * current platform.
+ */
+export const sync = impl.sync;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/options.js b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/options.js
new file mode 100644
index 00000000000000..e9ded40bd5b2cd
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/options.js
@@ -0,0 +1,2 @@
+export {};
+//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/package.json b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/package.json
new file mode 100644
index 00000000000000..3dbc1ca591c055
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/posix.js b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/posix.js
new file mode 100644
index 00000000000000..c453776c0452f7
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/posix.js
@@ -0,0 +1,62 @@
+/**
+ * This is the Posix implementation of isexe, which uses the file
+ * mode and uid/gid values.
+ *
+ * @module
+ */
+import { statSync } from 'fs';
+import { stat } from 'fs/promises';
+/**
+ * Determine whether a path is executable according to the mode and
+ * current (or specified) user and group IDs.
+ */
+export const isexe = async (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(await stat(path), options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+/**
+ * Synchronously determine whether a path is executable according to
+ * the mode and current (or specified) user and group IDs.
+ */
+export const sync = (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(statSync(path), options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options);
+const checkMode = (stat, options) => {
+    const myUid = options.uid ?? process.getuid?.();
+    const myGroups = options.groups ?? process.getgroups?.() ?? [];
+    const myGid = options.gid ?? process.getgid?.() ?? myGroups[0];
+    if (myUid === undefined || myGid === undefined) {
+        throw new Error('cannot get uid or gid');
+    }
+    const groups = new Set([myGid, ...myGroups]);
+    const mod = stat.mode;
+    const uid = stat.uid;
+    const gid = stat.gid;
+    const u = parseInt('100', 8);
+    const g = parseInt('010', 8);
+    const o = parseInt('001', 8);
+    const ug = u | g;
+    return !!(mod & o ||
+        (mod & g && groups.has(gid)) ||
+        (mod & u && uid === myUid) ||
+        (mod & ug && myUid === 0));
+};
+//# sourceMappingURL=posix.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/win32.js b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/win32.js
new file mode 100644
index 00000000000000..a354ee2a5115c7
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/dist/mjs/win32.js
@@ -0,0 +1,57 @@
+/**
+ * This is the Windows implementation of isexe, which uses the file
+ * extension and PATHEXT setting.
+ *
+ * @module
+ */
+import { statSync } from 'fs';
+import { stat } from 'fs/promises';
+/**
+ * Determine whether a path is executable based on the file extension
+ * and PATHEXT environment variable (or specified pathExt option)
+ */
+export const isexe = async (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(await stat(path), path, options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+/**
+ * Synchronously determine whether a path is executable based on the file
+ * extension and PATHEXT environment variable (or specified pathExt option)
+ */
+export const sync = (path, options = {}) => {
+    const { ignoreErrors = false } = options;
+    try {
+        return checkStat(statSync(path), path, options);
+    }
+    catch (e) {
+        const er = e;
+        if (ignoreErrors || er.code === 'EACCES')
+            return false;
+        throw er;
+    }
+};
+const checkPathExt = (path, options) => {
+    const { pathExt = process.env.PATHEXT || '' } = options;
+    const peSplit = pathExt.split(';');
+    if (peSplit.indexOf('') !== -1) {
+        return true;
+    }
+    for (let i = 0; i < peSplit.length; i++) {
+        const p = peSplit[i].toLowerCase();
+        const ext = path.substring(path.length - p.length).toLowerCase();
+        if (p && ext === p) {
+            return true;
+        }
+    }
+    return false;
+};
+const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options);
+//# sourceMappingURL=win32.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/which/node_modules/isexe/package.json b/deps/npm/node_modules/which/node_modules/isexe/package.json
new file mode 100644
index 00000000000000..a0e2cd04bfdbfe
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/package.json
@@ -0,0 +1,96 @@
+{
+  "name": "isexe",
+  "version": "3.1.1",
+  "description": "Minimal module to check if a file is executable.",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "files": [
+    "dist"
+  ],
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./posix": {
+      "import": {
+        "types": "./dist/mjs/posix.d.ts",
+        "default": "./dist/mjs/posix.js"
+      },
+      "require": {
+        "types": "./dist/cjs/posix.d.ts",
+        "default": "./dist/cjs/posix.js"
+      }
+    },
+    "./win32": {
+      "import": {
+        "types": "./dist/mjs/win32.d.ts",
+        "default": "./dist/mjs/win32.js"
+      },
+      "require": {
+        "types": "./dist/cjs/win32.d.ts",
+        "default": "./dist/cjs/win32.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "devDependencies": {
+    "@types/node": "^20.4.5",
+    "@types/tap": "^15.0.8",
+    "c8": "^8.0.1",
+    "mkdirp": "^0.5.1",
+    "prettier": "^2.8.8",
+    "rimraf": "^2.5.0",
+    "sync-content": "^1.0.2",
+    "tap": "^16.3.8",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.6"
+  },
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tsc -p tsconfig/cjs.json && tsc -p tsconfig/esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
+    "typedoc": "typedoc --tsconfig tsconfig/esm.json ./src/*.ts"
+  },
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "repository": "https://github.com/isaacs/isexe",
+  "engines": {
+    "node": ">=16"
+  }
+}
diff --git a/deps/npm/node_modules/which/package.json b/deps/npm/node_modules/which/package.json
index 989e01c9a36830..515bfb22ca0e1e 100644
--- a/deps/npm/node_modules/which/package.json
+++ b/deps/npm/node_modules/which/package.json
@@ -2,7 +2,7 @@
   "author": "GitHub Inc.",
   "name": "which",
   "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
-  "version": "3.0.1",
+  "version": "4.0.0",
   "repository": {
     "type": "git",
     "url": "https://github.com/npm/node-which.git"
@@ -13,11 +13,11 @@
   },
   "license": "ISC",
   "dependencies": {
-    "isexe": "^2.0.0"
+    "isexe": "^3.1.1"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.14.1",
+    "@npmcli/template-oss": "4.18.0",
     "tap": "^16.3.0"
   },
   "scripts": {
@@ -41,11 +41,17 @@
     ]
   },
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^16.13.0 || >=18.0.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.14.1",
+    "ciVersions": [
+      "16.13.0",
+      "16.x",
+      "18.0.0",
+      "18.x"
+    ],
+    "version": "4.18.0",
     "publish": "true"
   }
 }
diff --git a/deps/npm/package.json b/deps/npm/package.json
index 6e719a073893b0..8418f543b406ac 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,5 +1,5 @@
 {
-  "version": "9.8.1",
+  "version": "10.0.0",
   "name": "npm",
   "description": "a package manager for JavaScript",
   "workspaces": [
@@ -52,72 +52,72 @@
   },
   "dependencies": {
     "@isaacs/string-locale-compare": "^1.1.0",
-    "@npmcli/arborist": "^6.3.0",
-    "@npmcli/config": "^6.2.1",
+    "@npmcli/arborist": "^7.0.0",
+    "@npmcli/config": "^7.1.0",
     "@npmcli/fs": "^3.1.0",
     "@npmcli/map-workspaces": "^3.0.4",
-    "@npmcli/package-json": "^4.0.1",
-    "@npmcli/promise-spawn": "^6.0.2",
-    "@npmcli/run-script": "^6.0.2",
+    "@npmcli/package-json": "^5.0.0",
+    "@npmcli/promise-spawn": "^7.0.0",
+    "@npmcli/run-script": "^7.0.1",
+    "@sigstore/tuf": "^2.1.0",
     "abbrev": "^2.0.0",
     "archy": "~1.0.0",
-    "cacache": "^17.1.3",
+    "cacache": "^18.0.0",
     "chalk": "^5.3.0",
     "ci-info": "^3.8.0",
     "cli-columns": "^4.0.0",
     "cli-table3": "^0.6.3",
     "columnify": "^1.6.0",
     "fastest-levenshtein": "^1.0.16",
-    "fs-minipass": "^3.0.2",
-    "glob": "^10.2.7",
+    "fs-minipass": "^3.0.3",
+    "glob": "^10.3.3",
     "graceful-fs": "^4.2.11",
-    "hosted-git-info": "^6.1.1",
+    "hosted-git-info": "^7.0.0",
     "ini": "^4.1.1",
-    "init-package-json": "^5.0.0",
+    "init-package-json": "^6.0.0",
     "is-cidr": "^4.0.2",
     "json-parse-even-better-errors": "^3.0.0",
-    "libnpmaccess": "^7.0.2",
-    "libnpmdiff": "^5.0.19",
-    "libnpmexec": "^6.0.3",
-    "libnpmfund": "^4.0.19",
-    "libnpmhook": "^9.0.3",
-    "libnpmorg": "^5.0.4",
-    "libnpmpack": "^5.0.19",
-    "libnpmpublish": "^7.5.0",
-    "libnpmsearch": "^6.0.2",
-    "libnpmteam": "^5.0.3",
-    "libnpmversion": "^4.0.2",
-    "make-fetch-happen": "^11.1.1",
+    "libnpmaccess": "^8.0.0",
+    "libnpmdiff": "^6.0.0",
+    "libnpmexec": "^7.0.0",
+    "libnpmfund": "^4.1.0",
+    "libnpmhook": "^10.0.0",
+    "libnpmorg": "^6.0.0",
+    "libnpmpack": "^6.0.0",
+    "libnpmpublish": "^9.0.0",
+    "libnpmsearch": "^7.0.0",
+    "libnpmteam": "^6.0.0",
+    "libnpmversion": "^5.0.0",
+    "make-fetch-happen": "^13.0.0",
     "minimatch": "^9.0.3",
-    "minipass": "^5.0.0",
+    "minipass": "^7.0.3",
     "minipass-pipeline": "^1.2.4",
     "ms": "^2.1.2",
     "node-gyp": "^9.4.0",
     "nopt": "^7.2.0",
     "npm-audit-report": "^5.0.0",
-    "npm-install-checks": "^6.1.1",
-    "npm-package-arg": "^10.1.0",
-    "npm-pick-manifest": "^8.0.1",
-    "npm-profile": "^7.0.1",
-    "npm-registry-fetch": "^14.0.5",
+    "npm-install-checks": "^6.2.0",
+    "npm-package-arg": "^11.0.0",
+    "npm-pick-manifest": "^9.0.0",
+    "npm-profile": "^9.0.0",
+    "npm-registry-fetch": "^16.0.0",
     "npm-user-validate": "^2.0.0",
     "npmlog": "^7.0.1",
     "p-map": "^4.0.0",
-    "pacote": "^15.2.0",
+    "pacote": "^17.0.4",
     "parse-conflict-json": "^3.0.1",
     "proc-log": "^3.0.0",
     "qrcode-terminal": "^0.12.0",
     "read": "^2.1.0",
     "semver": "^7.5.4",
-    "sigstore": "^1.7.0",
-    "ssri": "^10.0.4",
+    "ssri": "^10.0.5",
     "supports-color": "^9.4.0",
     "tar": "^6.1.15",
     "text-table": "~0.2.0",
     "tiny-relative-date": "^1.3.0",
     "treeverse": "^3.0.0",
     "validate-npm-package-name": "^5.0.0",
-    "which": "^3.0.1",
+    "which": "^4.0.0",
     "write-file-atomic": "^5.0.1"
   },
   "bundleDependencies": [
@@ -129,6 +129,7 @@
     "@npmcli/package-json",
     "@npmcli/promise-spawn",
     "@npmcli/run-script",
+    "@sigstore/tuf",
     "abbrev",
     "archy",
     "cacache",
@@ -179,7 +180,6 @@
     "qrcode-terminal",
     "read",
     "semver",
-    "sigstore",
     "ssri",
     "supports-color",
     "tar",
@@ -193,20 +193,20 @@
   "devDependencies": {
     "@npmcli/docs": "^1.0.0",
     "@npmcli/eslint-config": "^4.0.2",
-    "@npmcli/git": "^4.1.0",
+    "@npmcli/git": "^5.0.3",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.18.0",
-    "@tufjs/repo-mock": "^1.3.1",
+    "@tufjs/repo-mock": "^2.0.0",
     "diff": "^5.1.0",
     "licensee": "^10.0.0",
-    "nock": "^13.3.0",
-    "npm-packlist": "^7.0.4",
+    "nock": "^13.3.3",
+    "npm-packlist": "^8.0.0",
     "remark": "^14.0.2",
     "remark-gfm": "^3.0.1",
     "remark-github": "^11.2.4",
     "spawk": "^1.7.1",
-    "tap": "^16.3.4"
+    "tap": "^16.3.8"
   },
   "scripts": {
     "dependencies": "node scripts/bundle-and-gitignore-deps.js && node scripts/dependency-graph.js",
@@ -254,6 +254,6 @@
   },
   "license": "Artistic-2.0",
   "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+    "node": "^18.17.0 || >=20.5.0"
   }
 }
diff --git a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
index af600062c980e7..8346e8d9131fd7 100644
--- a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
@@ -30,7 +30,6 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna
   "cafile": null,
   "call": "",
   "cert": null,
-  "ci-name": null,
   "cidr": null,
   "color": true,
   "commit-hooks": true,
@@ -147,7 +146,6 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna
   "tag": "latest",
   "tag-version-prefix": "v",
   "timing": false,
-  "tmp": "{TMP}",
   "umask": 0,
   "unicode": false,
   "update-notifier": true,
@@ -161,8 +159,7 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna
   "workspaces": null,
   "workspaces-update": true,
   "yes": null,
-  "npm-version": "{NPM-VERSION}",
-  "metrics-registry": "https://registry.npmjs.org/"
+  "npm-version": "{NPM-VERSION}"
 }
 `
 
@@ -187,7 +184,6 @@ cache-min = 0
 cafile = null
 call = ""
 cert = null
-ci-name = null
 cidr = null
 color = true
 commit-hooks = true
@@ -254,7 +250,6 @@ logs-max = 10
 ; long = false ; overridden by cli
 maxsockets = 15
 message = "%s"
-metrics-registry = "https://registry.npmjs.org/"
 node-options = null
 noproxy = [""]
 npm-version = "{NPM-VERSION}"
@@ -306,7 +301,6 @@ strict-ssl = true
 tag = "latest"
 tag-version-prefix = "v"
 timing = false
-tmp = "{TMP}"
 umask = 0
 unicode = false
 update-notifier = true
diff --git a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs
index b7ea39ac4de0ef..98d10c2bb5d4bb 100644
--- a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs
@@ -180,9 +180,9 @@ Object {
 
 exports[`test/lib/commands/doctor.js TAP bad proxy > output 1`] = `
 Check                               Value   Recommendation/Notes
-npm ping                            not ok  unsupported proxy protocol: 'ssh:'
-npm -v                              not ok  Error: unsupported proxy protocol: 'ssh:'
-node -v                             not ok  Error: unsupported proxy protocol: 'ssh:'
+npm ping                            not ok  Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
+npm -v                              not ok  Error: Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
+node -v                             not ok  Error: Invalid protocol \`ssh:\` connecting to proxy \`npmjs.org\`
 npm config get registry             ok      using default registry (https://registry.npmjs.org/)
 git executable in PATH              ok      /path/to/git
 global bin folder in PATH           ok      {CWD}/global/bin
diff --git a/deps/npm/tap-snapshots/test/lib/commands/search.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/search.js.test.cjs
index bfa4b42182e1ea..a47cdba22003fe 100644
--- a/deps/npm/tap-snapshots/test/lib/commands/search.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/commands/search.js.test.cjs
@@ -24,6 +24,7 @@ NAME                      | DESCRIPTION          | AUTHOR          | DATE
 @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
 libnpmversion             | library to do the…   | =nlf…           | 2020-11-04 | 1.0.7    |
 @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
+pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
 `
 
 exports[`test/lib/commands/search.js TAP search  --color > should have expected search results with color 1`] = `
@@ -41,6 +42,7 @@ NAME                      | DESCRIPTION          | AUTHOR          | DATE
 @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
 libnpmversion             | library to do the…   | =nlf…           | 2020-11-04 | 1.0.7    | 
 @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    | 
+pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    | 
 `
 
 exports[`test/lib/commands/search.js TAP search  --parseable > should have expected search results as parseable 1`] = `
@@ -57,6 +59,7 @@ libnpmfund	Programmatic API for npm fund	=nlf =ruyadorno =darcyclarke =isaacs	20
 @npmcli/map-workspaces	Retrieves a name:pathname Map for a given workspaces config	=nlf =ruyadorno =darcyclarke =isaacs	2020-09-30 	1.0.1	npm npmcli libnpm cli workspaces map-workspaces
 libnpmversion	library to do the things that 'npm version' does	=nlf =ruyadorno =darcyclarke =isaacs	2020-11-04 	1.0.7
 @types/libnpmsearch	TypeScript definitions for libnpmsearch	=types	2019-09-26 	2.0.1
+pkg-no-desc		=lukekarrys	2019-09-26 	1.0.0
 `
 
 exports[`test/lib/commands/search.js TAP search  > should have filtered expected search results 1`] = `
@@ -80,6 +83,7 @@ libnpmfund                | Programmatic API…    | =nlf…           | 2020-12
 @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
 libnpmversion             | library to do the…   | =nlf…           | 2020-11-04 | 1.0.7    |
 @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
+pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
 `
 
 exports[`test/lib/commands/search.js TAP search exclude forward slash > results should not have libnpmversion 1`] = `
@@ -96,6 +100,7 @@ libnpmpublish             | Programmatic API…    | =nlf…           | 2020-11
 libnpmfund                | Programmatic API…    | =nlf…           | 2020-12-08 | 1.0.2    | npm npmcli libnpm cli git fund gitfund
 @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
 @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
+pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
 `
 
 exports[`test/lib/commands/search.js TAP search exclude regex > results should not have libnpmversion 1`] = `
@@ -112,6 +117,7 @@ libnpmpublish             | Programmatic API…    | =nlf…           | 2020-11
 libnpmfund                | Programmatic API…    | =nlf…           | 2020-12-08 | 1.0.2    | npm npmcli libnpm cli git fund gitfund
 @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
 @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
+pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
 `
 
 exports[`test/lib/commands/search.js TAP search exclude string > results should not have libnpmversion 1`] = `
@@ -128,6 +134,7 @@ libnpmpublish             | Programmatic API…    | =nlf…           | 2020-11
 libnpmfund                | Programmatic API…    | =nlf…           | 2020-12-08 | 1.0.2    | npm npmcli libnpm cli git fund gitfund
 @npmcli/map-workspaces    | Retrieves a…         | =nlf…           | 2020-09-30 | 1.0.1    | npm npmcli libnpm cli workspaces map-workspaces
 @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
+pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
 `
 
 exports[`test/lib/commands/search.js TAP search exclude username with upper case letters > results should not have nlf 1`] = `
@@ -135,4 +142,5 @@ NAME                      | DESCRIPTION          | AUTHOR          | DATE
 @evocateur/libnpmaccess   | programmatic…        | =evocateur      | 2019-07-16 | 3.1.2    |
 @evocateur/libnpmpublish  | Programmatic API…    | =evocateur      | 2019-07-16 | 1.2.2    |
 @types/libnpmsearch       | TypeScript…          | =types          | 2019-09-26 | 2.0.1    |
+pkg-no-desc               |                      | =lukekarrys     | 2019-09-26 | 1.0.0    |
 `
diff --git a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
index 4875ebae6952b2..463b0862d2be8d 100644
--- a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
@@ -1822,20 +1822,6 @@ registry-scoped "certfile" path like
 
 
 
-#### \`ci-name\`
-
-* Default: The name of the current CI system, or \`null\` when not on a known CI
-  platform.
-* Type: null or String
-* DEPRECATED: This config is deprecated and will not be changeable in future
-  version of npm.
-
-The name of a continuous integration system. If not set explicitly, npm will
-detect the current CI environment using the
-[\`ci-info\`](http://npm.im/ci-info) module.
-
-
-
 #### \`dev\`
 
 * Default: false
@@ -1995,20 +1981,6 @@ Alias for \`--omit=dev\`
 Alias for --package-lock
 
 
-
-#### \`tmp\`
-
-* Default: The value returned by the Node.js \`os.tmpdir()\` method
-  
-* Type: Path
-* DEPRECATED: This setting is no longer used. npm stores temporary files in a
-  special location in the cache, and they are managed by
-  [\`cacache\`](http://npm.im/cacache).
-
-Historically, the location where temporary files were stored. No longer
-relevant.
-
-
 `
 
 exports[`test/lib/docs.js TAP config > all keys 1`] = `
@@ -2031,7 +2003,6 @@ Array [
   "cafile",
   "call",
   "cert",
-  "ci-name",
   "cidr",
   "color",
   "commit-hooks",
@@ -2148,7 +2119,6 @@ Array [
   "tag",
   "tag-version-prefix",
   "timing",
-  "tmp",
   "umask",
   "unicode",
   "update-notifier",
@@ -2186,7 +2156,6 @@ Array [
   "cafile",
   "call",
   "cert",
-  "ci-name",
   "cidr",
   "color",
   "commit-hooks",
@@ -2314,7 +2283,6 @@ Array [
   "node-options",
   "prefix",
   "timing",
-  "tmp",
   "update-notifier",
   "usage",
   "userconfig",
@@ -2343,7 +2311,6 @@ Object {
   "call": "",
   "cert": null,
   "cidr": null,
-  "ciName": "{ci}",
   "color": false,
   "commitHooks": true,
   "defaultTag": "latest",
@@ -2367,7 +2334,6 @@ Object {
   "gitTagVersion": true,
   "global": false,
   "globalconfig": "{CWD}/global/etc/npmrc",
-  "hashAlgorithm": "sha1",
   "heading": "npm",
   "httpsProxy": null,
   "ifPresent": false,
diff --git a/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs
index 93711275392339..3e7bc4570dd4ad 100644
--- a/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs
@@ -63,4 +63,5 @@ verbose exit 1
 timing npm Completed in {TIME}ms
 verbose code 1
 error  A complete log of this run can be found in: {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
 `
diff --git a/deps/npm/test/fixtures/libnpmsearch-stream-result.js b/deps/npm/test/fixtures/libnpmsearch-stream-result.js
index 1ec8b7b113d6b8..ac792b1c087c8f 100644
--- a/deps/npm/test/fixtures/libnpmsearch-stream-result.js
+++ b/deps/npm/test/fixtures/libnpmsearch-stream-result.js
@@ -275,4 +275,11 @@ module.exports = [
     publisher: { username: 'types', email: 'ts-npm-types@microsoft.com' },
     maintainers: [{ username: 'types', email: 'ts-npm-types@microsoft.com' }],
   },
+  {
+    name: 'pkg-no-desc',
+    scope: 'unscoped',
+    version: '1.0.0',
+    date: '2019-09-26T22:24:28.713Z',
+    maintainers: [{ username: 'lukekarrys', email: 'lukekarrys' }],
+  },
 ]
diff --git a/deps/npm/test/fixtures/sandbox.js b/deps/npm/test/fixtures/sandbox.js
index 2c4e5c2968a38c..5be02fcf80c1eb 100644
--- a/deps/npm/test/fixtures/sandbox.js
+++ b/deps/npm/test/fixtures/sandbox.js
@@ -42,11 +42,6 @@ const _get = Symbol('sandbox.proxy.get')
 const _set = Symbol('sandbox.proxy.set')
 const _logs = Symbol('sandbox.logs')
 
-// these config keys can be redacted widely
-const redactedDefaults = [
-  'tmp',
-]
-
 // we can't just replace these values everywhere because they're known to be
 // very short strings that could be present all over the place, so we only
 // replace them if they're located within quotes for now
@@ -161,12 +156,6 @@ class Sandbox extends EventEmitter {
     // and we replaced the node version first, the real execPath we're trying
     // to replace would no longer be represented, and be missed.
     if (this[_npm]) {
-      // replace default config values with placeholders
-      for (const name of redactedDefaults) {
-        const value = this[_npm].config.defaults[name]
-        clean = clean.split(normalize(value)).join(`{${name.toUpperCase()}}`)
-      }
-
       // replace vague default config values that are present within quotes
       // with placeholders
       for (const name of vagueRedactedDefaults) {
diff --git a/deps/npm/test/lib/commands/audit.js b/deps/npm/test/lib/commands/audit.js
index 4014e733873519..4a776e89bd9e9c 100644
--- a/deps/npm/test/lib/commands/audit.js
+++ b/deps/npm/test/lib/commands/audit.js
@@ -1699,16 +1699,12 @@ t.test('audit signatures', async t => {
     const { npm } = await loadMockNpm(t, {
       prefixDir: installWithMultipleDeps,
       mocks: {
-        sigstore: {
-          sigstore: {
-            tuf: {
-              client: async () => ({
-                getTarget: async () => {
-                  throw new Error('error refreshing TUF metadata')
-                },
-              }),
+        '@sigstore/tuf': {
+          initTUF: async () => ({
+            getTarget: async () => {
+              throw new Error('error refreshing TUF metadata')
             },
-          },
+          }),
         },
       },
     })
@@ -1877,9 +1873,7 @@ t.test('audit signatures', async t => {
       prefixDir: installWithValidAttestations,
       mocks: {
         pacote: t.mock('pacote', {
-          sigstore: {
-            sigstore: { verify: async () => true },
-          },
+          sigstore: { verify: async () => true },
         }),
       },
     })
@@ -1904,9 +1898,7 @@ t.test('audit signatures', async t => {
       prefixDir: installWithMultipleValidAttestations,
       mocks: {
         pacote: t.mock('pacote', {
-          sigstore: {
-            sigstore: { verify: async () => true },
-          },
+          sigstore: { verify: async () => true },
         }),
       },
     })
@@ -1937,10 +1929,8 @@ t.test('audit signatures', async t => {
       mocks: {
         pacote: t.mock('pacote', {
           sigstore: {
-            sigstore: {
-              verify: async () => {
-                throw new Error(`artifact signature verification failed`)
-              },
+            verify: async () => {
+              throw new Error(`artifact signature verification failed`)
             },
           },
         }),
@@ -1974,10 +1964,8 @@ t.test('audit signatures', async t => {
       mocks: {
         pacote: t.mock('pacote', {
           sigstore: {
-            sigstore: {
-              verify: async () => {
-                throw new Error(`artifact signature verification failed`)
-              },
+            verify: async () => {
+              throw new Error(`artifact signature verification failed`)
             },
           },
         }),
@@ -2005,10 +1993,8 @@ t.test('audit signatures', async t => {
       mocks: {
         pacote: t.mock('pacote', {
           sigstore: {
-            sigstore: {
-              verify: async () => {
-                throw new Error(`artifact signature verification failed`)
-              },
+            verify: async () => {
+              throw new Error(`artifact signature verification failed`)
             },
           },
         }),
diff --git a/deps/npm/test/lib/commands/run-script.js b/deps/npm/test/lib/commands/run-script.js
index cb54a7f51e9002..24f51400e8dfc3 100644
--- a/deps/npm/test/lib/commands/run-script.js
+++ b/deps/npm/test/lib/commands/run-script.js
@@ -781,12 +781,7 @@ t.test('workspaces', async t => {
   t.test('missing scripts in all workspaces', async t => {
     const { runScript, RUN_SCRIPTS, cleanLogs } = await mockWorkspaces(t, { exec: null })
 
-    await t.rejects(
-      runScript.exec(['missing-script']),
-      /Missing script: missing-script/,
-      'should throw missing script error'
-    )
-
+    await runScript.exec(['missing-script'])
     t.match(RUN_SCRIPTS(), [])
     t.strictSame(
       cleanLogs(),
diff --git a/deps/npm/test/lib/utils/exit-handler.js b/deps/npm/test/lib/utils/exit-handler.js
index f553e1a2ea518d..3eb5840985b8f5 100644
--- a/deps/npm/test/lib/utils/exit-handler.js
+++ b/deps/npm/test/lib/utils/exit-handler.js
@@ -132,6 +132,8 @@ t.test('handles unknown error with logs and debug file', async (t) => {
   const { exitHandler, debugFile, logs } = await mockExitHandler(t)
 
   await exitHandler(err('Unknown error', 'ECODE'))
+  // force logfile cleaning logs to happen since those are purposefully not awaited
+  await require('timers/promises').setTimeout(200)
 
   const fileLogs = await debugFile()
   const fileLines = fileLogs.split('\n')
@@ -141,14 +143,19 @@ t.test('handles unknown error with logs and debug file', async (t) => {
 
   t.equal(process.exitCode, 1)
 
+  let skippedLogs = 0
   logs.forEach((logItem, i) => {
     const logLines = format(i, ...logItem).trim().split(os.EOL)
-    logLines.forEach((line) => {
+    for (const line of logLines) {
+      if (line.includes('logfile') && line.includes('cleaning')) {
+        skippedLogs++
+        continue
+      }
       t.match(fileLogs.trim(), line, 'log appears in debug file')
-    })
+    }
   })
 
-  t.equal(logs.length, parseInt(lastLog) + 1)
+  t.equal(logs.length - skippedLogs, parseInt(lastLog) + 1)
   t.match(logs.error, [
     ['code', 'ECODE'],
     ['ERR SUMMARY', 'Unknown error'],

From a1a65f593cb0d47b8b0a73b42ea6713dca7b5b77 Mon Sep 17 00:00:00 2001
From: npm CLI robot 
Date: Sat, 9 Sep 2023 13:06:39 -0700
Subject: [PATCH 124/125] deps: upgrade npm to 10.1.0

PR-URL: https://github.com/nodejs/node/pull/49570
Reviewed-By: Luigi Pinca 
Reviewed-By: Moshe Atlow 
---
 deps/npm/README.md                            |  5 ++-
 .../docs/content/commands/npm-install-test.md | 20 +++++++++++
 deps/npm/docs/content/commands/npm-install.md | 20 +++++++++++
 deps/npm/docs/content/commands/npm-ls.md      |  2 +-
 deps/npm/docs/content/commands/npm.md         |  2 +-
 deps/npm/docs/content/using-npm/config.md     | 20 +++++++++++
 .../output/commands/npm-install-test.html     | 16 ++++++++-
 .../npm/docs/output/commands/npm-install.html | 16 ++++++++-
 deps/npm/docs/output/commands/npm-ls.html     |  2 +-
 deps/npm/docs/output/commands/npm.html        |  2 +-
 deps/npm/docs/output/using-npm/config.html    | 16 ++++++++-
 deps/npm/lib/commands/install.js              |  2 ++
 deps/npm/man/man1/npm-access.1                |  2 +-
 deps/npm/man/man1/npm-adduser.1               |  2 +-
 deps/npm/man/man1/npm-audit.1                 |  2 +-
 deps/npm/man/man1/npm-bugs.1                  |  2 +-
 deps/npm/man/man1/npm-cache.1                 |  2 +-
 deps/npm/man/man1/npm-ci.1                    |  2 +-
 deps/npm/man/man1/npm-completion.1            |  2 +-
 deps/npm/man/man1/npm-config.1                |  2 +-
 deps/npm/man/man1/npm-dedupe.1                |  2 +-
 deps/npm/man/man1/npm-deprecate.1             |  2 +-
 deps/npm/man/man1/npm-diff.1                  |  2 +-
 deps/npm/man/man1/npm-dist-tag.1              |  2 +-
 deps/npm/man/man1/npm-docs.1                  |  2 +-
 deps/npm/man/man1/npm-doctor.1                |  2 +-
 deps/npm/man/man1/npm-edit.1                  |  2 +-
 deps/npm/man/man1/npm-exec.1                  |  2 +-
 deps/npm/man/man1/npm-explain.1               |  2 +-
 deps/npm/man/man1/npm-explore.1               |  2 +-
 deps/npm/man/man1/npm-find-dupes.1            |  2 +-
 deps/npm/man/man1/npm-fund.1                  |  2 +-
 deps/npm/man/man1/npm-help-search.1           |  2 +-
 deps/npm/man/man1/npm-help.1                  |  2 +-
 deps/npm/man/man1/npm-hook.1                  |  2 +-
 deps/npm/man/man1/npm-init.1                  |  2 +-
 deps/npm/man/man1/npm-install-ci-test.1       |  2 +-
 deps/npm/man/man1/npm-install-test.1          | 22 +++++++++++-
 deps/npm/man/man1/npm-install.1               | 22 +++++++++++-
 deps/npm/man/man1/npm-link.1                  |  2 +-
 deps/npm/man/man1/npm-login.1                 |  2 +-
 deps/npm/man/man1/npm-logout.1                |  2 +-
 deps/npm/man/man1/npm-ls.1                    |  4 +--
 deps/npm/man/man1/npm-org.1                   |  2 +-
 deps/npm/man/man1/npm-outdated.1              |  2 +-
 deps/npm/man/man1/npm-owner.1                 |  2 +-
 deps/npm/man/man1/npm-pack.1                  |  2 +-
 deps/npm/man/man1/npm-ping.1                  |  2 +-
 deps/npm/man/man1/npm-pkg.1                   |  2 +-
 deps/npm/man/man1/npm-prefix.1                |  2 +-
 deps/npm/man/man1/npm-profile.1               |  2 +-
 deps/npm/man/man1/npm-prune.1                 |  2 +-
 deps/npm/man/man1/npm-publish.1               |  2 +-
 deps/npm/man/man1/npm-query.1                 |  2 +-
 deps/npm/man/man1/npm-rebuild.1               |  2 +-
 deps/npm/man/man1/npm-repo.1                  |  2 +-
 deps/npm/man/man1/npm-restart.1               |  2 +-
 deps/npm/man/man1/npm-root.1                  |  2 +-
 deps/npm/man/man1/npm-run-script.1            |  2 +-
 deps/npm/man/man1/npm-search.1                |  2 +-
 deps/npm/man/man1/npm-shrinkwrap.1            |  2 +-
 deps/npm/man/man1/npm-star.1                  |  2 +-
 deps/npm/man/man1/npm-stars.1                 |  2 +-
 deps/npm/man/man1/npm-start.1                 |  2 +-
 deps/npm/man/man1/npm-stop.1                  |  2 +-
 deps/npm/man/man1/npm-team.1                  |  2 +-
 deps/npm/man/man1/npm-test.1                  |  2 +-
 deps/npm/man/man1/npm-token.1                 |  2 +-
 deps/npm/man/man1/npm-uninstall.1             |  2 +-
 deps/npm/man/man1/npm-unpublish.1             |  2 +-
 deps/npm/man/man1/npm-unstar.1                |  2 +-
 deps/npm/man/man1/npm-update.1                |  2 +-
 deps/npm/man/man1/npm-version.1               |  2 +-
 deps/npm/man/man1/npm-view.1                  |  2 +-
 deps/npm/man/man1/npm-whoami.1                |  2 +-
 deps/npm/man/man1/npm.1                       |  4 +--
 deps/npm/man/man1/npx.1                       |  2 +-
 deps/npm/man/man5/folders.5                   |  2 +-
 deps/npm/man/man5/install.5                   |  2 +-
 deps/npm/man/man5/npm-global.5                |  2 +-
 deps/npm/man/man5/npm-json.5                  |  2 +-
 deps/npm/man/man5/npm-shrinkwrap-json.5       |  2 +-
 deps/npm/man/man5/npmrc.5                     |  2 +-
 deps/npm/man/man5/package-json.5              |  2 +-
 deps/npm/man/man5/package-lock-json.5         |  2 +-
 deps/npm/man/man7/config.7                    | 22 +++++++++++-
 deps/npm/man/man7/dependency-selectors.7      |  2 +-
 deps/npm/man/man7/developers.7                |  2 +-
 deps/npm/man/man7/logging.7                   |  2 +-
 deps/npm/man/man7/orgs.7                      |  2 +-
 deps/npm/man/man7/package-spec.7              |  2 +-
 deps/npm/man/man7/registry.7                  |  2 +-
 deps/npm/man/man7/removal.7                   |  2 +-
 deps/npm/man/man7/scope.7                     |  2 +-
 deps/npm/man/man7/scripts.7                   |  2 +-
 deps/npm/man/man7/workspaces.7                |  2 +-
 .../node_modules/@npmcli/agent/lib/agents.js  |  8 ++---
 .../node_modules/@npmcli/agent/lib/proxy.js   |  8 -----
 .../node_modules/@npmcli/agent/package.json   |  5 +--
 .../@npmcli/arborist/lib/arborist/reify.js    |  4 +--
 .../@npmcli/arborist/package.json             |  2 +-
 .../config/lib/definitions/definitions.js     | 22 ++++++++++++
 .../@npmcli/config/lib/definitions/index.js   |  7 ----
 .../node_modules/@npmcli/config/lib/index.js  |  3 ++
 .../@npmcli/config/lib/set-envs.js            |  5 +--
 .../node_modules/@npmcli/config/package.json  |  2 +-
 deps/npm/node_modules/libnpmdiff/package.json |  4 +--
 deps/npm/node_modules/libnpmexec/package.json |  4 +--
 deps/npm/node_modules/libnpmfund/package.json |  4 +--
 deps/npm/node_modules/libnpmpack/package.json |  4 +--
 deps/npm/package.json                         | 14 ++++----
 .../test/lib/commands/config.js.test.cjs      |  4 +++
 .../tap-snapshots/test/lib/docs.js.test.cjs   | 36 +++++++++++++++++--
 113 files changed, 347 insertions(+), 144 deletions(-)

diff --git a/deps/npm/README.md b/deps/npm/README.md
index da46ce7f38075e..cffee2429dddca 100644
--- a/deps/npm/README.md
+++ b/deps/npm/README.md
@@ -9,9 +9,8 @@
 
 One of the following versions of [Node.js](https://nodejs.org/en/download/) must be installed to run **`npm`**:
 
-* `14.x.x` >= `14.17.0`
-* `16.x.x` >= `16.13.0`
-* `18.0.0` or higher
+* `18.x.x` >= `18.17.0`
+* `20.5.0` or higher
 
 ### Installation
 
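Expressed as a semver range, the new support matrix is `^18.17.0 || >=20.5.0`. A quick check with the `semver` package (an npm dependency, used here only for illustration):

```js
// Sketch: the README's support matrix as a semver range check.
const semver = require('semver')

const supported = '^18.17.0 || >=20.5.0'
console.log(semver.satisfies('18.17.0', supported)) // true
console.log(semver.satisfies('20.4.0', supported))  // false
console.log(semver.satisfies('20.5.0', supported))  // true
```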
diff --git a/deps/npm/docs/content/commands/npm-install-test.md b/deps/npm/docs/content/commands/npm-install-test.md
index 587a0a15ec6318..443ccd670daa1f 100644
--- a/deps/npm/docs/content/commands/npm-install-test.md
+++ b/deps/npm/docs/content/commands/npm-install-test.md
@@ -256,6 +256,26 @@ Note: This is NOT honored by other network related commands, eg `dist-tags`,
 
 
 
+#### `cpu`
+
+* Default: null
+* Type: null or String
+
+Override CPU architecture of native modules to install. Acceptable values
+are same as `cpu` field of package.json, which comes from `process.arch`.
+
+
+
+#### `os`
+
+* Default: null
+* Type: null or String
+
+Override OS of native modules to install. Acceptable values are same as `os`
+field of package.json, which comes from `process.platform`.
+
+
+
 #### `workspace`
 
 * Default:
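These overrides flow through Arborist into npm-install-checks' `checkPlatform()`; the reify.js hunk later in this patch calls it as `checkPlatform(node.package, false, { cpu, os })`. A small usage sketch, assuming npm-install-checks >= 6.2 (the version with the environment argument) and treating the package shape as hypothetical:

```js
// Sketch: the 3-argument checkPlatform() form the new cpu/os config feeds.
const { checkPlatform } = require('npm-install-checks')

// Hypothetical optional dependency that only ships linux/arm64 binaries.
const pkg = { _id: 'example-addon@1.0.0', os: ['linux'], cpu: ['arm64'] }

// Overrides in place of process.platform / process.arch:
checkPlatform(pkg, false, { os: 'linux', cpu: 'arm64' }) // ok, no throw

try {
  checkPlatform(pkg, false, { os: 'win32', cpu: 'x64' })
} catch (er) {
  console.log(er.code) // EBADPLATFORM
}
```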
diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md
index 31f8261132d9ea..6be022c8f2340c 100644
--- a/deps/npm/docs/content/commands/npm-install.md
+++ b/deps/npm/docs/content/commands/npm-install.md
@@ -646,6 +646,26 @@ Note: This is NOT honored by other network related commands, eg `dist-tags`,
 
 
 
+#### `cpu`
+
+* Default: null
+* Type: null or String
+
+Override CPU architecture of native modules to install. Acceptable values
+are same as `cpu` field of package.json, which comes from `process.arch`.
+
+
+
+#### `os`
+
+* Default: null
+* Type: null or String
+
+Override OS of native modules to install. Acceptable values are same as `os`
+field of package.json, which comes from `process.platform`.
+
+
+
 #### `workspace`
 
 * Default:
diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md
index 3fd67ec372fd89..c7b8f674862a9c 100644
--- a/deps/npm/docs/content/commands/npm-ls.md
+++ b/deps/npm/docs/content/commands/npm-ls.md
@@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages.  For
 example, running `npm ls promzard` in npm's source tree will show:
 
 ```bash
-npm@10.0.0 /path/to/npm
+npm@10.1.0 /path/to/npm
 └─┬ init-package-json@0.0.4
   └── promzard@0.1.5
 ```
diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md
index 5923d67d48103d..e7ad76c74cf65e 100644
--- a/deps/npm/docs/content/commands/npm.md
+++ b/deps/npm/docs/content/commands/npm.md
@@ -14,7 +14,7 @@ Note: This command is unaware of workspaces.
 
 ### Version
 
-10.0.0
+10.1.0
 
 ### Description
 
diff --git a/deps/npm/docs/content/using-npm/config.md b/deps/npm/docs/content/using-npm/config.md
index 96b35edced7b3f..253cd3dffd1d1d 100644
--- a/deps/npm/docs/content/using-npm/config.md
+++ b/deps/npm/docs/content/using-npm/config.md
@@ -345,6 +345,16 @@ Run git commit hooks when using the `npm version` command.
 
 
 
+#### `cpu`
+
+* Default: null
+* Type: null or String
+
+Override CPU architecture of native modules to install. Acceptable values
+are same as `cpu` field of package.json, which comes from `process.arch`.
+
+
+
 #### `depth`
 
 * Default: `Infinity` if `--all` is set, otherwise `1`
@@ -1038,6 +1048,16 @@ time.
 
 
 
+#### `os`
+
+* Default: null
+* Type: null or String
+
+Override OS of native modules to install. Acceptable values are same as `os`
+field of package.json, which comes from `process.platform`.
+
+
+
 #### `otp`
 
 * Default: null
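Ignoring the `'!'`-negation form that package.json also allows, the rule these options feed is plain list membership; a minimal sketch of that check:

```js
// Sketch: a package is installable when each declared list (if present)
// contains the effective platform value, which is process.platform /
// process.arch by default or the --os/--cpu override when given.
const matches = (declared, actual) => !declared || declared.includes(actual)

// Hypothetical native package and an override target from --os/--cpu:
const pkg = { os: ['linux', 'darwin'], cpu: ['x64', 'arm64'] }
const target = { os: 'linux', cpu: 'arm64' }

console.log(matches(pkg.os, target.os) && matches(pkg.cpu, target.cpu)) // true
console.log(matches(pkg.os, 'win32')) // false
```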
[Regenerated HTML docs collapsed here: per the diffstat,
deps/npm/docs/output/commands/npm-install-test.html, npm-install.html,
npm-ls.html, and npm.html, plus deps/npm/docs/output/using-npm/config.html,
mirror the markdown changes above: new `cpu` and `os` option sections and
the npm@10.0.0 to npm@10.1.0 version bump.]
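Both flags are ultimately registered as config definitions in @npmcli/config, as the library hunks below show; here is a rough sketch of that registration pattern (the real `define()` also derives usage text, env handling, and flattening behavior):

```js
// Sketch of @npmcli/config's definition pattern for the new keys.
const definitions = {}

const define = (key, def) => {
  definitions[key] = { key, ...def }
}

define('cpu', {
  default: null,
  type: [null, String],
  description: 'Override CPU architecture of native modules to install.',
})

define('os', {
  default: null,
  type: [null, String],
  description: 'Override OS of native modules to install.',
})

console.log(Object.keys(definitions)) // [ 'cpu', 'os' ]
```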
diff --git a/deps/npm/lib/commands/install.js b/deps/npm/lib/commands/install.js
index 75f0e2f175b61d..3983c8d26c841b 100644
--- a/deps/npm/lib/commands/install.js
+++ b/deps/npm/lib/commands/install.js
@@ -34,6 +34,8 @@ class Install extends ArboristWorkspaceCmd {
     'bin-links',
     'fund',
     'dry-run',
+    'cpu',
+    'os',
     ...super.params,
   ]

[Regenerated man page hunks collapsed for readability: every page under
deps/npm/man/man1, man5, and man7 updates its .TH date stamp from
"August 2023" to "September 2023"; npm-install.1, npm-install-test.1,
and config.7 additionally gain the new `cpu` and `os` option sections,
and npm-ls.1 and npm.1 bump their embedded version string from 10.0.0
to 10.1.0.]
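The install.js hunk above is the whole CLI-side wiring: each npm command lists the config keys it accepts in a static `params` array, and usage output and flag validation follow from it. A minimal sketch of the pattern, with `BaseCommand` standing in (hypothetically) for npm's real base class:

```js
// Sketch: registering 'cpu' and 'os' is what surfaces
// `npm install --cpu <cpu> --os <os>` in usage and parsing.
class BaseCommand {
  static params = ['registry', 'ignore-scripts'] // hypothetical base params
}

class Install extends BaseCommand {
  static params = [
    'save',
    'bin-links',
    'fund',
    'dry-run',
    'cpu', // new in npm 10.1.0
    'os',  // new in npm 10.1.0
    ...BaseCommand.params, // npm's real code spreads super.params
  ]
}

console.log(Install.params.includes('cpu')) // true
```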
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/@npmcli/agent/lib/agents.js
index db997403f75794..7d32768817c18f 100644
--- a/deps/npm/node_modules/@npmcli/agent/lib/agents.js
+++ b/deps/npm/node_modules/@npmcli/agent/lib/agents.js
@@ -7,7 +7,7 @@ const tls = require('tls')
 const { once } = require('events')
 const { createTimeout, abortRace, urlify, appendPort, cacheAgent } = require('./util')
 const { normalizeOptions, cacheOptions } = require('./options')
-const { getProxy, getProxyType, isSecureProxy, proxyCache } = require('./proxy.js')
+const { getProxy, getProxyType, proxyCache } = require('./proxy.js')
 const Errors = require('./errors.js')
 
 const createAgent = (base, name) => {
@@ -43,18 +43,16 @@ const createAgent = (base, name) => {
         return
       }
 
-      const secure = isSecureProxy(proxy)
-
       return cacheAgent({
         key: cacheOptions({
           ...options,
           ...this.#options,
-          secure,
+          secure: SECURE,
           timeouts: this.#timeouts,
           proxy,
         }),
         cache: proxyCache,
-        secure,
+        secure: SECURE,
         proxies: this.#proxy.proxies,
       }, proxy, this.#options)
     }
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/@npmcli/agent/lib/proxy.js
index 81afdad74c1e56..babedad45ff99f 100644
--- a/deps/npm/node_modules/@npmcli/agent/lib/proxy.js
+++ b/deps/npm/node_modules/@npmcli/agent/lib/proxy.js
@@ -22,13 +22,6 @@ const PROXY_ENV = (() => {
 })()
 
 const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
-const SECURE_PROTOCOLS = new Set([...SocksProxyAgent.protocols, 'https'])
-
-const isSecureProxy = (url) => {
-  url = urlify(url)
-  const protocol = url.protocol.slice(0, -1)
-  return SECURE_PROTOCOLS.has(protocol)
-}
 
 const getProxyType = (url) => {
   url = urlify(url)
@@ -91,6 +84,5 @@ const getProxy = (url, {
 module.exports = {
   getProxyType,
   getProxy,
-  isSecureProxy,
   proxyCache: PROXY_CACHE,
 }
diff --git a/deps/npm/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/@npmcli/agent/package.json
index c0bf65719db9a6..32379b39b5b560 100644
--- a/deps/npm/node_modules/@npmcli/agent/package.json
+++ b/deps/npm/node_modules/@npmcli/agent/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/agent",
-  "version": "2.1.0",
+  "version": "2.1.1",
   "description": "the http/https agent used by the npm cli",
   "main": "lib/index.js",
   "scripts": {
@@ -35,7 +35,8 @@
     "16.x",
     "18.0.0",
     "18.x"
-  ]
+  ],
+  "npmSpec": "next-9"
   },
   "dependencies": {
     "http-proxy-agent": "^7.0.0",
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
index 020038b409bb17..0981afdae6ece7 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -628,7 +628,7 @@ module.exports = cls => class Reifier extends cls {
     process.emit('time', timer)
     this.addTracker('reify', node.name, node.location)
 
-    const { npmVersion, nodeVersion } = this.options
+    const { npmVersion, nodeVersion, cpu, os } = this.options
     const p = Promise.resolve().then(async () => {
       // when we reify an optional node, check the engine and platform
       // first. be sure to ignore the --force and --engine-strict flags,
@@ -638,7 +638,7 @@ module.exports = cls => class Reifier extends cls {
       // eslint-disable-next-line promise/always-return
       if (node.optional) {
         checkEngine(node.package, npmVersion, nodeVersion, false)
-        checkPlatform(node.package, false)
+        checkPlatform(node.package, false, { cpu, os })
       }
       await this[_checkBins](node)
       await this[_extractOrLink](node)
diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json
index 3b286e782149fc..24a442a7d88500 100644
--- a/deps/npm/node_modules/@npmcli/arborist/package.json
+++ b/deps/npm/node_modules/@npmcli/arborist/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/arborist",
-  "version": "7.0.0",
+  "version": "7.1.0",
   "description": "Manage node_modules trees",
   "dependencies": {
     "@isaacs/string-locale-compare": "^1.1.0",
diff --git a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
index 7f0edc7167a42c..e6b9859dc1dfb9 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
@@ -472,6 +472,28 @@ define('commit-hooks', {
   flatten,
 })
 
+define('cpu', {
+  default: null,
+  type: [null, String],
+  description: `
+    Override CPU architecture of native modules to install.
+    Acceptable values are same as \`cpu\` field of package.json,
+    which comes from \`process.arch\`.
+  `,
+  flatten,
+})
+
+define('os', {
+  default: null,
+  type: [null, String],
+  description: `
+    Override OS of native modules to install.
+    Acceptable values are same as \`os\` field of package.json,
+    which comes from \`process.platform\`.
+  `,
+  flatten,
+})
+
 define('depth', {
   default: null,
   defaultDescription: `
diff --git a/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js b/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js
index 51c7aa7c352cfc..8255a904423911 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js
@@ -18,13 +18,6 @@ const flatten = (obj, flat = {}) => {
       flat[key] = val
     }
   }
-
-  // XXX make this the bin/npm-cli.js file explicitly instead
-  // otherwise using npm programmatically is a bit of a pain.
-  flat.npmBin = require.main ? require.main.filename
-    : /* istanbul ignore next - not configurable property */ undefined
-  flat.nodeBin = process.env.NODE || process.execPath
-
   return flat
 }
diff --git a/deps/npm/node_modules/@npmcli/config/lib/index.js b/deps/npm/node_modules/@npmcli/config/lib/index.js
index e46fe3d2aa2f3d..ad07fcdf51826a 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/index.js
@@ -115,6 +115,7 @@ class Config {
     this.defaults = defaults
 
     this.npmPath = npmPath
+    this.npmBin = join(this.npmPath, 'bin/npm-cli.js')
     this.argv = argv
     this.env = env
     this.execPath = execPath
@@ -231,6 +232,8 @@ class Config {
     for (const { data } of this.data.values()) {
       this.#flatten(data, this.#flatOptions)
     }
+    this.#flatOptions.nodeBin = this.execPath
+    this.#flatOptions.npmBin = this.npmBin
 
     process.emit('timeEnd', 'config:load:flatten')
 
     return this.#flatOptions
diff --git a/deps/npm/node_modules/@npmcli/config/lib/set-envs.js b/deps/npm/node_modules/@npmcli/config/lib/set-envs.js
index 0f5781aaf33959..b6f5a30562ab1b 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/set-envs.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/set-envs.js
@@ -101,10 +101,7 @@ const setEnvs = (config) => {
   if (cliConf['node-options']) {
     env.NODE_OPTIONS = cliConf['node-options']
   }
-
-  if (require.main && require.main.filename) {
-    env.npm_execpath = require.main.filename
-  }
+  env.npm_execpath = config.npmBin
   env.NODE = env.npm_node_execpath = config.execPath
 }
diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json
index 93f2cd097d6f2d..d2e7066b654d1b 100644
--- a/deps/npm/node_modules/@npmcli/config/package.json
+++ b/deps/npm/node_modules/@npmcli/config/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/config",
-  "version": "7.1.0",
+  "version": "7.2.0",
   "files": [
     "bin/",
     "lib/"

[Version-bump and snapshot hunks collapsed for readability: libnpmdiff
(6.0.0 to 6.0.1), libnpmexec (7.0.0 to 7.0.1), libnpmfund (4.1.0 to 4.1.1),
and libnpmpack (6.0.0 to 6.0.1) each move to "@npmcli/arborist": "^7.1.0";
deps/npm/package.json bumps npm from 10.0.0 to 10.1.0 and picks up
@npmcli/arborist ^7.1.0, @npmcli/config ^7.2.0, and the new libnpm*
versions; the tap snapshots (config.js.test.cjs, docs.js.test.cjs) add the
"cpu" and "os" keys (both null) to the config listings and flat options,
document the new sections, extend the install/install-test usage strings
with [--cpu <cpu>] [--os <os>], and record npmBin as
{CWD}/other/bin/npm-cli.js instead of the test file path.]
From a71037ee5618c32c7c9e76d55f7d0640ab7f5e96 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ulises=20Gasc=C3=B3n?=
Date: Sun, 10 Sep 2023 18:41:23 +0000
Subject: [PATCH 125/125] 2023-09-18, Version 20.7.0 (Current)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Notable changes:

crypto:
  * update root certificates to NSS 3.93 (Node.js GitHub Bot) https://github.com/nodejs/node/pull/49341
deps:
  * upgrade npm to 10.1.0 (npm team) https://github.com/nodejs/node/pull/49570
  * upgrade npm to 10.0.0 (npm team) https://github.com/nodejs/node/pull/49423
doc:
  * move and rename loaders section (Geoffrey Booth) https://github.com/nodejs/node/pull/49261
  * add release key for Ulises Gascon (Ulises Gascón) https://github.com/nodejs/node/pull/49196
lib:
  * (SEMVER-MINOR) add api to detect whether source-maps are enabled (翠 / green) https://github.com/nodejs/node/pull/46391
src:
  * support multiple `--env-file` declarations (Yagiz Nizipli) https://github.com/nodejs/node/pull/49542
src,permission:
  * add multiple allow-fs-* flags (Carlos Espa) https://github.com/nodejs/node/pull/49047
test_runner:
  * (SEMVER-MINOR) expose location of tests (Colin Ihrig) https://github.com/nodejs/node/pull/48975

PR-URL: https://github.com/nodejs/node/pull/49592
---
 CHANGELOG.md                    |   3 +-
 doc/api/cli.md                  |   4 +-
 doc/api/module.md               |   6 +-
 doc/api/process.md              |   2 +-
 doc/api/stream.md               |   4 +-
 doc/changelogs/CHANGELOG_V20.md | 144 ++++++++++++++++++++++++++++++++
 src/node_version.h              |   6 +-
 7 files changed, 157 insertions(+), 12 deletions(-)
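The commit message above calls out multiple `--env-file` declarations (#49542). A hedged sketch of the usage (the file names `.env`, `.env.local`, `app.js`, and the `DB_HOST` key are hypothetical; the precedence between files is worth verifying against the PR):

```js
// app.js -- hypothetical. Start with:
//   node --env-file=.env --env-file=.env.local app.js
// Each listed file contributes KEY=value pairs to process.env before app
// code runs; variables already present in the real environment are not
// overwritten (see #49424 in the commit list below).
console.log(process.env.DB_HOST)
```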
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c30394031a0b8a..b6156ad948fc7e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -36,7 +36,8 @@ release.
-<b><a href="doc/changelogs/CHANGELOG_V20.md#20.6.1">20.6.1</a></b><br/>
+<b><a href="doc/changelogs/CHANGELOG_V20.md#20.7.0">20.7.0</a></b><br/>
+<a href="doc/changelogs/CHANGELOG_V20.md#20.6.1">20.6.1</a><br/>
 <a href="doc/changelogs/CHANGELOG_V20.md#20.6.0">20.6.0</a><br/>
 <a href="doc/changelogs/CHANGELOG_V20.md#20.5.1">20.5.1</a><br/>
 <a href="doc/changelogs/CHANGELOG_V20.md#20.5.0">20.5.0</a><br/>
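The next file diff touches the permission-model section of doc/api/cli.md, which relates to the notable change `src,permission: add multiple allow-fs-* flags` (#49047). A hedged sketch of what the multi-flag form looks like (the paths and `server.js` are hypothetical):

```js
// Start with, e.g.:
//   node --experimental-permission \
//     --allow-fs-read=/app/data --allow-fs-read=/app/config \
//     --allow-fs-write=/app/logs server.js
// With #49047 each allow-fs-* flag can be passed multiple times, one path per
// flag. Reads outside the allowed set should fail with ERR_ACCESS_DENIED.
const fs = require('node:fs')

fs.readFile('/app/data/input.txt', 'utf8', (err, text) => {
  if (err) {
    console.error(err.code) // 'ERR_ACCESS_DENIED' if the path is not allowed
    return
  }
  console.log(text)
})
```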
diff --git a/doc/api/cli.md b/doc/api/cli.md
index b8993bc1eb14db..fa269469160304 100644
--- a/doc/api/cli.md
+++ b/doc/api/cli.md
@@ -146,7 +146,7 @@ Error: Access to this API has been restricted
@@ -200,7 +200,7 @@ node --experimental-permission --allow-fs-read=/path/to/index.js index.js
diff --git a/doc/api/module.md b/doc/api/module.md
index e7b94d34a63b18..e5f708aca07493 100644
--- a/doc/api/module.md
+++ b/doc/api/module.md
@@ -150,7 +150,7 @@ import('node:fs').then((esmFS) => {
 > Stability: 1.1 - Active development
@@ -563,7 +563,7 @@ export async function resolve(specifier, context, nextResolve) {
 > Stability: 1 - Experimental
diff --git a/doc/api/stream.md b/doc/api/stream.md
index e00536a1c97324..0132c45ade727a 100644
--- a/doc/api/stream.md
+++ b/doc/api/stream.md
@@ -2013,7 +2013,7 @@ added:
   - v17.4.0
   - v16.14.0
 changes:
-  - version: REPLACEME
+  - version: v20.7.0
     pr-url: https://github.com/nodejs/node/pull/49249
     description: added `highWaterMark` in options.
 -->
@@ -2066,7 +2066,7 @@ added:
   - v17.4.0
   - v16.14.0
 changes:
-  - version: REPLACEME
+  - version: v20.7.0
     pr-url: https://github.com/nodejs/node/pull/49249
     description: added `highWaterMark` in options.
 -->
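The doc/api/stream.md hunks above stamp v20.7.0 on the `highWaterMark` option that #49249 added to the readable `map` operator. A minimal sketch, assuming the option bounds how many completed results the operator may buffer ahead of the consumer (that reading comes from the PR description, not this diff):

```js
const { Readable } = require('node:stream')

// map() runs the callback with bounded concurrency; highWaterMark should let
// the operator hold more finished results while slower items complete.
const upper = Readable.from(['a', 'b', 'c']).map(
  async (chunk) => chunk.toUpperCase(),
  { concurrency: 2, highWaterMark: 16 },
)

;(async () => {
  for await (const chunk of upper) {
    console.log(chunk) // A, B, C
  }
})()
```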
diff --git a/doc/changelogs/CHANGELOG_V20.md b/doc/changelogs/CHANGELOG_V20.md
index dbd3fd45090926..10da1821ac465f 100644
--- a/doc/changelogs/CHANGELOG_V20.md
+++ b/doc/changelogs/CHANGELOG_V20.md
@@ -8,6 +8,7 @@
+<a href="#20.7.0">20.7.0</a><br/>
 <a href="#20.6.1">20.6.1</a><br/>
 <a href="#20.6.0">20.6.0</a><br/>
 <a href="#20.5.1">20.5.1</a><br/>
@@ -44,6 +45,149 @@
 * [io.js](CHANGELOG_IOJS.md)
 * [Archive](CHANGELOG_ARCHIVE.md)
+
+
+## 2023-09-18, Version 20.7.0 (Current), @UlisesGascon
+
+### Notable Changes
+
+* \[[`022f1b70c1`](https://github.com/nodejs/node/commit/022f1b70c1)] - **src**: support multiple `--env-file` declarations (Yagiz Nizipli) [#49542](https://github.com/nodejs/node/pull/49542)
+* \[[`4a1d1cad61`](https://github.com/nodejs/node/commit/4a1d1cad61)] - **crypto**: update root certificates to NSS 3.93 (Node.js GitHub Bot) [#49341](https://github.com/nodejs/node/pull/49341)
+* \[[`a1a65f593c`](https://github.com/nodejs/node/commit/a1a65f593c)] - **deps**: upgrade npm to 10.1.0 (npm team) [#49570](https://github.com/nodejs/node/pull/49570)
+* \[[`6c2480cad9`](https://github.com/nodejs/node/commit/6c2480cad9)] - **(SEMVER-MINOR)** **deps**: upgrade npm to 10.0.0 (npm team) [#49423](https://github.com/nodejs/node/pull/49423)
+* \[[`bef900e56b`](https://github.com/nodejs/node/commit/bef900e56b)] - **doc**: move and rename loaders section (Geoffrey Booth) [#49261](https://github.com/nodejs/node/pull/49261)
+* \[[`db4ce8a593`](https://github.com/nodejs/node/commit/db4ce8a593)] - **doc**: add release key for Ulises Gascon (Ulises Gascón) [#49196](https://github.com/nodejs/node/pull/49196)
+* \[[`11c85ffa98`](https://github.com/nodejs/node/commit/11c85ffa98)] - **(SEMVER-MINOR)** **lib**: add api to detect whether source-maps are enabled (翠 / green) [#46391](https://github.com/nodejs/node/pull/46391)
+* \[[`ec51e25ed7`](https://github.com/nodejs/node/commit/ec51e25ed7)] - **src,permission**: add multiple allow-fs-\* flags (Carlos Espa) [#49047](https://github.com/nodejs/node/pull/49047)
+* \[[`efdc95fbc0`](https://github.com/nodejs/node/commit/efdc95fbc0)] - **(SEMVER-MINOR)** **test\_runner**: expose location of tests (Colin Ihrig) [#48975](https://github.com/nodejs/node/pull/48975)
+
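Among the notable changes above, #46391 adds an API for detecting whether source maps are enabled; in the v20.7.0 docs this surfaces as a read-only boolean on `process` (the property name below is taken from that PR and is worth double-checking against the docs). A minimal sketch:

```js
// Run with:
//   node --enable-source-maps check.js
// versus plain `node check.js`.
console.log(process.sourceMapsEnabled) // true when --enable-source-maps is on

// Libraries that rewrite stack traces can branch on it:
if (process.sourceMapsEnabled) {
  // rely on Node.js applying source maps to Error stacks
} else {
  // fall back to custom source-map handling (e.g. source-map-support)
}
```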
+### Commits
+
+* \[[\`e84515594e\`](https://github.com/nodejs/node/commit/e84515594e)] - **benchmark**: use \`tmpdir.resolve()\` (Livia Medeiros) [#49137](https://github.com/nodejs/node/pull/49137)
+* \[[\`f37444e896\`](https://github.com/nodejs/node/commit/f37444e896)] - **bootstrap**: build code cache from deserialized isolate (Joyee Cheung) [#49099](https://github.com/nodejs/node/pull/49099)
+* \[[\`af6dc1754d\`](https://github.com/nodejs/node/commit/af6dc1754d)] - **bootstrap**: do not generate code cache in an unfinalized isolate (Joyee Cheung) [#49108](https://github.com/nodejs/node/pull/49108)
+* \[[\`cade5716df\`](https://github.com/nodejs/node/commit/cade5716df)] - **build**: add symlink to \`compile_commands.json\` file if needed (Juan José) [#49260](https://github.com/nodejs/node/pull/49260)
+* \[[\`34a2590b05\`](https://github.com/nodejs/node/commit/34a2590b05)] - **build**: expand when we run internet tests (Michael Dawson) [#49218](https://github.com/nodejs/node/pull/49218)
+* \[[\`f637fd46ab\`](https://github.com/nodejs/node/commit/f637fd46ab)] - **build**: fix typo \`libray\` -> \`library\` (configure.py) (michalbiesek) [#49106](https://github.com/nodejs/node/pull/49106)
+* \[[\`ef3d8dd493\`](https://github.com/nodejs/node/commit/ef3d8dd493)] - **crypto**: remove webcrypto EdDSA key checks and properties (Filip Skokan) [#49408](https://github.com/nodejs/node/pull/49408)
+* \[[\`4a1d1cad61\`](https://github.com/nodejs/node/commit/4a1d1cad61)] - **crypto**: update root certificates to NSS 3.93 (Node.js GitHub Bot) [#49341](https://github.com/nodejs/node/pull/49341)
+* \[[\`7eb10a38ea\`](https://github.com/nodejs/node/commit/7eb10a38ea)] - **crypto**: remove getDefaultEncoding() (Tobias Nießen) [#49170](https://github.com/nodejs/node/pull/49170)
+* \[[\`772496c030\`](https://github.com/nodejs/node/commit/772496c030)] - **crypto**: remove default encoding from DiffieHellman (Tobias Nießen) [#49169](https://github.com/nodejs/node/pull/49169)
+* \[[\`c795083232\`](https://github.com/nodejs/node/commit/c795083232)] - **crypto**: remove default encoding from Hash/Hmac (Tobias Nießen) [#49167](https://github.com/nodejs/node/pull/49167)
+* \[[\`08197aa010\`](https://github.com/nodejs/node/commit/08197aa010)] - **crypto**: remove default encoding from sign/verify (Tobias Nießen) [#49145](https://github.com/nodejs/node/pull/49145)
+* \[[\`a1a65f593c\`](https://github.com/nodejs/node/commit/a1a65f593c)] - **deps**: upgrade npm to 10.1.0 (npm team) [#49570](https://github.com/nodejs/node/pull/49570)
+* \[[\`6c2480cad9\`](https://github.com/nodejs/node/commit/6c2480cad9)] - **(SEMVER-MINOR)** **deps**: upgrade npm to 10.0.0 (npm team) [#49423](https://github.com/nodejs/node/pull/49423)
+* \[[\`84195d9584\`](https://github.com/nodejs/node/commit/84195d9584)] - **deps**: add missing thread-common.c in uv.gyp (Santiago Gimeno) [#49410](https://github.com/nodejs/node/pull/49410)
+* \[[\`5b70b68b3d\`](https://github.com/nodejs/node/commit/5b70b68b3d)] - **deps**: V8: cherry-pick eadaef581c29 (Adam Majer) [#49401](https://github.com/nodejs/node/pull/49401)
+* \[[\`fe34d632e8\`](https://github.com/nodejs/node/commit/fe34d632e8)] - **deps**: update zlib to 1.2.13.1-motley-f5fd0ad (Node.js GitHub Bot) [#49252](https://github.com/nodejs/node/pull/49252)
+* \[[\`db4ce8a593\`](https://github.com/nodejs/node/commit/db4ce8a593)] - **doc**: add release key for Ulises Gascon (Ulises Gascón) [#49196](https://github.com/nodejs/node/pull/49196)
+* \[[\`e5f3a694cf\`](https://github.com/nodejs/node/commit/e5f3a694cf)] - **doc**: fix node-api call example (Chengzhong Wu) [#49395](https://github.com/nodejs/node/pull/49395)
+* \[[\`021345a724\`](https://github.com/nodejs/node/commit/021345a724)] - **doc**: add news issue for Diagnostics WG (Michael Dawson) [#49306](https://github.com/nodejs/node/pull/49306)
+* \[[\`f82347266b\`](https://github.com/nodejs/node/commit/f82347266b)] - **doc**: clarify policy expectations (Rafael Gonzaga) [#48947](https://github.com/nodejs/node/pull/48947)
+* \[[\`73cfd9c895\`](https://github.com/nodejs/node/commit/73cfd9c895)] - **doc**: add print results for examples in \`StringDecoder\` (Jungku Lee) [#49326](https://github.com/nodejs/node/pull/49326)
+* \[[\`63ab591416\`](https://github.com/nodejs/node/commit/63ab591416)] - **doc**: update outdated reference to NIST SP 800-131A (Tobias Nießen) [#49316](https://github.com/nodejs/node/pull/49316)
+* \[[\`935dfe2afd\`](https://github.com/nodejs/node/commit/935dfe2afd)] - **doc**: use \`cjs\` as block code's type in \`MockTimers\` (Deokjin Kim) [#49309](https://github.com/nodejs/node/pull/49309)
+* \[[\`7c0cd2fb87\`](https://github.com/nodejs/node/commit/7c0cd2fb87)] - **doc**: update \`options.filter\` description for \`fs.cp\` (Shubham Pandey) [#49289](https://github.com/nodejs/node/pull/49289)
+* \[[\`f72e79ea67\`](https://github.com/nodejs/node/commit/f72e79ea67)] - **doc**: add riscv64 to list of architectures (Stewart X Addison) [#49284](https://github.com/nodejs/node/pull/49284)
+* \[[\`d19c710064\`](https://github.com/nodejs/node/commit/d19c710064)] - **doc**: avoid "not currently recommended" (Tobias Nießen) [#49300](https://github.com/nodejs/node/pull/49300)
\[[`ae656101c0`](https://github.com/nodejs/node/commit/ae656101c0)] - **doc**: update module hooks docs (Geoffrey Booth) [#49265](https://github.com/nodejs/node/pull/49265) +* \[[`fefbdb92f2`](https://github.com/nodejs/node/commit/fefbdb92f2)] - **doc**: modify param description for end(),write() in `StringDecoder` (Jungku Lee) [#49285](https://github.com/nodejs/node/pull/49285) +* \[[`59e66a1ebe`](https://github.com/nodejs/node/commit/59e66a1ebe)] - **doc**: use NODE\_API\_SUPPORTED\_VERSION\_MAX in release doc (Cheng Zhao) [#49268](https://github.com/nodejs/node/pull/49268) +* \[[`ac3b88449b`](https://github.com/nodejs/node/commit/ac3b88449b)] - **doc**: fix typo in `stream.finished` documentation (Antoine du Hamel) [#49271](https://github.com/nodejs/node/pull/49271) +* \[[`7428ebf6c3`](https://github.com/nodejs/node/commit/7428ebf6c3)] - **doc**: update description for `percent_encode` sets in `WHATWG API` (Jungku Lee) [#49258](https://github.com/nodejs/node/pull/49258) +* \[[`bef900e56b`](https://github.com/nodejs/node/commit/bef900e56b)] - **doc**: move and rename loaders section (Geoffrey Booth) [#49261](https://github.com/nodejs/node/pull/49261) +* \[[`a22e0d9696`](https://github.com/nodejs/node/commit/a22e0d9696)] - **doc**: clarify use of Uint8Array for n-api (Fedor Indutny) [#48742](https://github.com/nodejs/node/pull/48742) +* \[[`1704f24cb9`](https://github.com/nodejs/node/commit/1704f24cb9)] - **doc**: add signature for `module.register` (Geoffrey Booth) [#49251](https://github.com/nodejs/node/pull/49251) +* \[[`5a363bb01b`](https://github.com/nodejs/node/commit/5a363bb01b)] - **doc**: caveat unavailability of `import.meta.resolve` in custom loaders (Jacob Smith) [#49242](https://github.com/nodejs/node/pull/49242) +* \[[`8101f2b259`](https://github.com/nodejs/node/commit/8101f2b259)] - **doc**: use same name in the doc as in the code (Hyunjin Kim) [#49216](https://github.com/nodejs/node/pull/49216) +* \[[`edf278d60d`](https://github.com/nodejs/node/commit/edf278d60d)] - **doc**: add notable-change label mention to PR template (Rafael Gonzaga) [#49188](https://github.com/nodejs/node/pull/49188) +* \[[`3df2251a6a`](https://github.com/nodejs/node/commit/3df2251a6a)] - **doc**: add h1 summary to security release process (Rafael Gonzaga) [#49112](https://github.com/nodejs/node/pull/49112) +* \[[`9fcd99a744`](https://github.com/nodejs/node/commit/9fcd99a744)] - **doc**: update to semver-minor releases by default (Rafael Gonzaga) [#49175](https://github.com/nodejs/node/pull/49175) +* \[[`777931f499`](https://github.com/nodejs/node/commit/777931f499)] - **doc**: fix wording in napi\_async\_init (Tobias Nießen) [#49180](https://github.com/nodejs/node/pull/49180) +* \[[`f45c8e10c0`](https://github.com/nodejs/node/commit/f45c8e10c0)] - **doc,test**: add known path resolution issue in permission model (Tobias Nießen) [#49155](https://github.com/nodejs/node/pull/49155) +* \[[`a6cfea3f74`](https://github.com/nodejs/node/commit/a6cfea3f74)] - **esm**: align sync and async load implementations (Antoine du Hamel) [#49152](https://github.com/nodejs/node/pull/49152) +* \[[`9fac310b33`](https://github.com/nodejs/node/commit/9fac310b33)] - **fs**: add the options param description in openAsBlob() (Yeseul Lee) [#49308](https://github.com/nodejs/node/pull/49308) +* \[[`92772a8175`](https://github.com/nodejs/node/commit/92772a8175)] - **fs**: remove redundant code in readableWebStream() (Deokjin Kim) [#49298](https://github.com/nodejs/node/pull/49298) +* 
\[[`88ba79b083`](https://github.com/nodejs/node/commit/88ba79b083)] - **fs**: make sure to write entire buffer (Robert Nagy) [#49211](https://github.com/nodejs/node/pull/49211) +* \[[`11c85ffa98`](https://github.com/nodejs/node/commit/11c85ffa98)] - **(SEMVER-MINOR)** **lib**: add api to detect whether source-maps are enabled (翠 / green) [#46391](https://github.com/nodejs/node/pull/46391) +* \[[`c12711ebfe`](https://github.com/nodejs/node/commit/c12711ebfe)] - **lib**: implement WeakReference on top of JS WeakRef (Joyee Cheung) [#49053](https://github.com/nodejs/node/pull/49053) +* \[[`9a0891f88d`](https://github.com/nodejs/node/commit/9a0891f88d)] - **meta**: bump step-security/harden-runner from 2.5.0 to 2.5.1 (dependabot\[bot]) [#49435](https://github.com/nodejs/node/pull/49435) +* \[[`ae67f41ef1`](https://github.com/nodejs/node/commit/ae67f41ef1)] - **meta**: bump actions/checkout from 3.5.3 to 3.6.0 (dependabot\[bot]) [#49436](https://github.com/nodejs/node/pull/49436) +* \[[`71b4411fb2`](https://github.com/nodejs/node/commit/71b4411fb2)] - **meta**: bump actions/setup-node from 3.7.0 to 3.8.1 (dependabot\[bot]) [#49434](https://github.com/nodejs/node/pull/49434) +* \[[`83b7d3a395`](https://github.com/nodejs/node/commit/83b7d3a395)] - **meta**: remove modules team from CODEOWNERS (Benjamin Gruenbaum) [#49412](https://github.com/nodejs/node/pull/49412) +* \[[`81ff68c45c`](https://github.com/nodejs/node/commit/81ff68c45c)] - **meta**: move one or more collaborators to emeritus (Node.js GitHub Bot) [#49264](https://github.com/nodejs/node/pull/49264) +* \[[`ab975233cc`](https://github.com/nodejs/node/commit/ab975233cc)] - **meta**: mention nodejs/tsc when changing GH templates (Rafael Gonzaga) [#49189](https://github.com/nodejs/node/pull/49189) +* \[[`ceaa5494de`](https://github.com/nodejs/node/commit/ceaa5494de)] - **meta**: add test/reporters to codeowners (Chemi Atlow) [#49186](https://github.com/nodejs/node/pull/49186) +* \[[`de0a51b7cf`](https://github.com/nodejs/node/commit/de0a51b7cf)] - **net**: improve performance of isIPv4 and isIPv6 (Uzlopak) [#49568](https://github.com/nodejs/node/pull/49568) +* \[[`8d0913bf95`](https://github.com/nodejs/node/commit/8d0913bf95)] - **net**: use asserts in JS Socket Stream to catch races in future (Tim Perry) [#49400](https://github.com/nodejs/node/pull/49400) +* \[[`2486836a7d`](https://github.com/nodejs/node/commit/2486836a7d)] - **net**: fix crash due to simultaneous close/shutdown on JS Stream Sockets (Tim Perry) [#49400](https://github.com/nodejs/node/pull/49400) +* \[[`7a808340cd`](https://github.com/nodejs/node/commit/7a808340cd)] - **node-api**: fix compiler warning in node\_api.h (Michael Graeb) [#49103](https://github.com/nodejs/node/pull/49103) +* \[[`30f26a99f4`](https://github.com/nodejs/node/commit/30f26a99f4)] - **permission**: ensure to resolve path when calling mkdtemp (RafaelGSS) [nodejs-private/node-private#440](https://github.com/nodejs-private/node-private/pull/440) +* \[[`5051c75a5b`](https://github.com/nodejs/node/commit/5051c75a5b)] - **policy**: fix path to URL conversion (Antoine du Hamel) [#49133](https://github.com/nodejs/node/pull/49133) +* \[[`173aed4757`](https://github.com/nodejs/node/commit/173aed4757)] - **report**: fix recent coverity warning (Michael Dawson) [#48954](https://github.com/nodejs/node/pull/48954) +* \[[`d7ff78b442`](https://github.com/nodejs/node/commit/d7ff78b442)] - **sea**: generate code cache with deserialized isolate (Joyee Cheung) [#49226](https://github.com/nodejs/node/pull/49226) +* 
\[[`022f1b70c1`](https://github.com/nodejs/node/commit/022f1b70c1)] - **src**: support multiple `--env-file` declarations (Yagiz Nizipli) [#49542](https://github.com/nodejs/node/pull/49542) +* \[[`154b1c2115`](https://github.com/nodejs/node/commit/154b1c2115)] - **src**: don't overwrite environment from .env file (Phil Nash) [#49424](https://github.com/nodejs/node/pull/49424) +* \[[`dc4de1c69b`](https://github.com/nodejs/node/commit/dc4de1c69b)] - **src**: modify code for empty string (pluris) [#49336](https://github.com/nodejs/node/pull/49336) +* \[[`701c46f967`](https://github.com/nodejs/node/commit/701c46f967)] - **src**: remove unused PromiseWrap-related code (Joyee Cheung) [#49335](https://github.com/nodejs/node/pull/49335) +* \[[`4a094dc7af`](https://github.com/nodejs/node/commit/4a094dc7af)] - **src**: rename IsAnyByteSource to IsAnyBufferSource (Tobias Nießen) [#49346](https://github.com/nodejs/node/pull/49346) +* \[[`55d6649175`](https://github.com/nodejs/node/commit/55d6649175)] - **src**: support snapshot deserialization in RAIIIsolate (Joyee Cheung) [#49226](https://github.com/nodejs/node/pull/49226) +* \[[`dc092864ef`](https://github.com/nodejs/node/commit/dc092864ef)] - **src**: remove unused function `GetName()` in node\_perf (Jungku Lee) [#49244](https://github.com/nodejs/node/pull/49244) +* \[[`f2552a410e`](https://github.com/nodejs/node/commit/f2552a410e)] - **src**: use ARES\_SUCCESS instead of 0 (Jungku Lee) [#49048](https://github.com/nodejs/node/pull/49048) +* \[[`4a9ae31519`](https://github.com/nodejs/node/commit/4a9ae31519)] - **src**: add a condition if the argument of `DomainToUnicode` is empty (Jungku Lee) [#49097](https://github.com/nodejs/node/pull/49097) +* \[[`f460362cdf`](https://github.com/nodejs/node/commit/f460362cdf)] - **src**: remove C++ WeakReference implementation (Joyee Cheung) [#49053](https://github.com/nodejs/node/pull/49053) +* \[[`2a35383b3e`](https://github.com/nodejs/node/commit/2a35383b3e)] - **src**: use per-realm GetBindingData() wherever applicable (Joyee Cheung) [#49007](https://github.com/nodejs/node/pull/49007) +* \[[`184bbddcf5`](https://github.com/nodejs/node/commit/184bbddcf5)] - **src**: add per-realm GetBindingData() method (Joyee Cheung) [#49007](https://github.com/nodejs/node/pull/49007) +* \[[`e9946885f9`](https://github.com/nodejs/node/commit/e9946885f9)] - **src**: serialize both BaseObject slots (Joyee Cheung) [#48996](https://github.com/nodejs/node/pull/48996) +* \[[`ec51e25ed7`](https://github.com/nodejs/node/commit/ec51e25ed7)] - **src,permission**: add multiple allow-fs-\* flags (Carlos Espa) [#49047](https://github.com/nodejs/node/pull/49047) +* \[[`8aac95de4b`](https://github.com/nodejs/node/commit/8aac95de4b)] - **stream**: improve tee perf by reduce `ReflectConstruct` usages (Raz Luvaton) [#49546](https://github.com/nodejs/node/pull/49546) +* \[[`0eea7fd8fb`](https://github.com/nodejs/node/commit/0eea7fd8fb)] - **stream**: use Buffer.from when constructor is a Buffer (Matthew Aitken) [#49250](https://github.com/nodejs/node/pull/49250) +* \[[`b961d9bd52`](https://github.com/nodejs/node/commit/b961d9bd52)] - **stream**: add `highWaterMark` for the map operator (Raz Luvaton) [#49249](https://github.com/nodejs/node/pull/49249) +* \[[`ca1384166d`](https://github.com/nodejs/node/commit/ca1384166d)] - **test**: fix warning for comment in embedtest (Jungku Lee) [#49416](https://github.com/nodejs/node/pull/49416) +* \[[`2a35782809`](https://github.com/nodejs/node/commit/2a35782809)] - **test**: simplify 
+* \[[\`6740f3c209\`](https://github.com/nodejs/node/commit/6740f3c209)] - **test**: verify dynamic import call with absolute path strings (Chengzhong Wu) [#49275](https://github.com/nodejs/node/pull/49275)
+* \[[\`6ed47bd8fb\`](https://github.com/nodejs/node/commit/6ed47bd8fb)] - **test**: reduce length in crypto keygen tests (Joyee Cheung) [#49221](https://github.com/nodejs/node/pull/49221)
+* \[[\`4faa30c553\`](https://github.com/nodejs/node/commit/4faa30c553)] - **test**: split JWK async elliptic curve keygen tests (Joyee Cheung) [#49221](https://github.com/nodejs/node/pull/49221)
+* \[[\`e04a2603d8\`](https://github.com/nodejs/node/commit/e04a2603d8)] - **test**: split test-crypto-keygen.js (Joyee Cheung) [#49221](https://github.com/nodejs/node/pull/49221)
+* \[[\`0d23c1d4ce\`](https://github.com/nodejs/node/commit/0d23c1d4ce)] - **test**: rename test-crypto-modp1-error (Tobias Nießen) [#49348](https://github.com/nodejs/node/pull/49348)
+* \[[\`48e41569e2\`](https://github.com/nodejs/node/commit/48e41569e2)] - **test**: migrate message source map tests from Python to JS (Yiyun Lei) [#49238](https://github.com/nodejs/node/pull/49238)
+* \[[\`a11e64e09c\`](https://github.com/nodejs/node/commit/a11e64e09c)] - **test**: fix compiler warning in NodeCryptoEnv (Tobias Nießen) [#49206](https://github.com/nodejs/node/pull/49206)
+* \[[\`345543938f\`](https://github.com/nodejs/node/commit/345543938f)] - **test**: handle EUNATCH (Abdirahim Musse) [#48050](https://github.com/nodejs/node/pull/48050)
+* \[[\`e391f4b197\`](https://github.com/nodejs/node/commit/e391f4b197)] - **test**: use \`tmpdir.resolve()\` (Livia Medeiros) [#49136](https://github.com/nodejs/node/pull/49136)
+* \[[\`910378f93f\`](https://github.com/nodejs/node/commit/910378f93f)] - **test**: reduce flakiness of \`test-esm-loader-hooks\` (Antoine du Hamel) [#49248](https://github.com/nodejs/node/pull/49248)
+* \[[\`4a85f70462\`](https://github.com/nodejs/node/commit/4a85f70462)] - **test**: add spawnSyncAndExit() and spawnSyncAndExitWithoutError() (Joyee Cheung) [#49200](https://github.com/nodejs/node/pull/49200)
+* \[[\`9610008b79\`](https://github.com/nodejs/node/commit/9610008b79)] - **test**: make test-perf-hooks more robust and work with workers (Joyee Cheung) [#49197](https://github.com/nodejs/node/pull/49197)
+* \[[\`dc8fff9a75\`](https://github.com/nodejs/node/commit/dc8fff9a75)] - **test**: use gcUntil() in test-v8-serialize-leak (Joyee Cheung) [#49168](https://github.com/nodejs/node/pull/49168)
+* \[[\`ca9f801332\`](https://github.com/nodejs/node/commit/ca9f801332)] - **test**: make WeakReference tests robust (Joyee Cheung) [#49053](https://github.com/nodejs/node/pull/49053)
+* \[[\`de103a4686\`](https://github.com/nodejs/node/commit/de103a4686)] - **test**: add test for effect of UV\_THREADPOOL\_SIZE (Tobias Nießen) [#49165](https://github.com/nodejs/node/pull/49165)
+* \[[\`47d24f144b\`](https://github.com/nodejs/node/commit/47d24f144b)] - **test**: use expectSyncExit{WithErrors} in snapshot tests (Joyee Cheung) [#49020](https://github.com/nodejs/node/pull/49020)
+* \[[\`c441f5a097\`](https://github.com/nodejs/node/commit/c441f5a097)] - **test**: add expectSyncExitWithoutError() and expectSyncExit() utils (Joyee Cheung) [#49020](https://github.com/nodejs/node/pull/49020)
+* \[[\`4d184b5251\`](https://github.com/nodejs/node/commit/4d184b5251)] - **test**: remove --no-warnings flag in test\_runner fixtures (Raz Luvaton) [#48989](https://github.com/nodejs/node/pull/48989)
+* \[[\`25e967a90b\`](https://github.com/nodejs/node/commit/25e967a90b)] - **test**: reorder test files fixtures for better understanding (Raz Luvaton) [#48787](https://github.com/nodejs/node/pull/48787)
+* \[[\`fac56dbcc0\`](https://github.com/nodejs/node/commit/fac56dbcc0)] - **test,benchmark**: use \`tmpdir.fileURL()\` (Livia Medeiros) [#49138](https://github.com/nodejs/node/pull/49138)
+* \[[\`36763fa532\`](https://github.com/nodejs/node/commit/36763fa532)] - **test\_runner**: preserve original property descriptor (Erick Wendel) [#49433](https://github.com/nodejs/node/pull/49433)
+* \[[\`40e9fcdbea\`](https://github.com/nodejs/node/commit/40e9fcdbea)] - **test\_runner**: add support for setImmediate (Erick Wendel) [#49397](https://github.com/nodejs/node/pull/49397)
+* \[[\`23216f1935\`](https://github.com/nodejs/node/commit/23216f1935)] - **test\_runner**: report covered lines, functions and branches to reporters (Phil Nash) [#49320](https://github.com/nodejs/node/pull/49320)
+* \[[\`283f2806b1\`](https://github.com/nodejs/node/commit/283f2806b1)] - **test\_runner**: expose spec reporter as newable function (Chemi Atlow) [#49184](https://github.com/nodejs/node/pull/49184)
+* \[[\`546ad5f770\`](https://github.com/nodejs/node/commit/546ad5f770)] - **test\_runner**: reland run global after() hook earlier (Colin Ihrig) [#49116](https://github.com/nodejs/node/pull/49116)
+* \[[\`efdc95fbc0\`](https://github.com/nodejs/node/commit/efdc95fbc0)] - **(SEMVER-MINOR)** **test\_runner**: expose location of tests (Colin Ihrig) [#48975](https://github.com/nodejs/node/pull/48975)
+* \[[\`4bc0a8fe99\`](https://github.com/nodejs/node/commit/4bc0a8fe99)] - **test\_runner**: fix global after not failing the tests (Raz Luvaton) [#48913](https://github.com/nodejs/node/pull/48913)
+* \[[\`08738b2664\`](https://github.com/nodejs/node/commit/08738b2664)] - **test\_runner**: fix timeout in \*Each hook failing further tests (Raz Luvaton) [#48925](https://github.com/nodejs/node/pull/48925)
+* \[[\`c2f1830f66\`](https://github.com/nodejs/node/commit/c2f1830f66)] - **test\_runner**: cleanup test timeout abort listener (Raz Luvaton) [#48915](https://github.com/nodejs/node/pull/48915)
+* \[[\`75333f38b2\`](https://github.com/nodejs/node/commit/75333f38b2)] - **test\_runner**: fix global before not called when no global test exists (Raz Luvaton) [#48877](https://github.com/nodejs/node/pull/48877)
+* \[[\`b28b85adf8\`](https://github.com/nodejs/node/commit/b28b85adf8)] - **tls**: remove redundant code in onConnectSecure() (Deokjin Kim) [#49457](https://github.com/nodejs/node/pull/49457)
+* \[[\`83fc4dccbc\`](https://github.com/nodejs/node/commit/83fc4dccbc)] - **tls**: refactor to use validateFunction (Deokjin Kim) [#49422](https://github.com/nodejs/node/pull/49422)
+* \[[\`8949cc79dd\`](https://github.com/nodejs/node/commit/8949cc79dd)] - **tls**: ensure TLS Sockets are closed if the underlying wrap closes (Tim Perry) [#49327](https://github.com/nodejs/node/pull/49327)
+* \[[\`1df56e6f01\`](https://github.com/nodejs/node/commit/1df56e6f01)] - **tools**: update eslint to 8.48.0 (Node.js GitHub Bot) [#49343](https://github.com/nodejs/node/pull/49343)
+* \[[\`ef50ec5b57\`](https://github.com/nodejs/node/commit/ef50ec5b57)] - **tools**: update lint-md-dependencies (Node.js GitHub Bot) [#49342](https://github.com/nodejs/node/pull/49342)
+* \[[\`9a8fb4fc34\`](https://github.com/nodejs/node/commit/9a8fb4fc34)] - **tools**: remove v8\_dump\_build\_config action (Cheng Zhao) [#49301](https://github.com/nodejs/node/pull/49301)
+* \[[\`91b2d4314b\`](https://github.com/nodejs/node/commit/91b2d4314b)] - **tools**: update lint-md-dependencies (Node.js GitHub Bot) [#49253](https://github.com/nodejs/node/pull/49253)
+* \[[\`b51946ebdd\`](https://github.com/nodejs/node/commit/b51946ebdd)] - **tools**: fix github reporter appended multiple times (Moshe Atlow) [#49199](https://github.com/nodejs/node/pull/49199)
+* \[[\`ae40cb1612\`](https://github.com/nodejs/node/commit/ae40cb1612)] - **url**: validate \`pathToFileURL(path)\` argument as string (LiviaMedeiros) [#49161](https://github.com/nodejs/node/pull/49161)
+* \[[\`e787673dcf\`](https://github.com/nodejs/node/commit/e787673dcf)] - **url**: handle unicode hostname if empty (Yagiz Nizipli) [#49396](https://github.com/nodejs/node/pull/49396)
+* \[[\`6ee74be87f\`](https://github.com/nodejs/node/commit/6ee74be87f)] - **vm**: store MicrotaskQueue in ContextifyContext directly (Joyee Cheung) [#48982](https://github.com/nodejs/node/pull/48982)
+* \[[\`0179c6dc8f\`](https://github.com/nodejs/node/commit/0179c6dc8f)] - **worker**: protect against user mutating well-known prototypes (Antoine du Hamel) [#49270](https://github.com/nodejs/node/pull/49270)
+
 
 ## 2023-09-08, Version 20.6.1 (Current), @RafaelGSS
 
diff --git a/src/node_version.h b/src/node_version.h
index 727c3d754959bf..22e482aaf18fd4 100644
--- a/src/node_version.h
+++ b/src/node_version.h
@@ -23,13 +23,13 @@
 #define SRC_NODE_VERSION_H_
 
 #define NODE_MAJOR_VERSION 20
-#define NODE_MINOR_VERSION 6
-#define NODE_PATCH_VERSION 2
+#define NODE_MINOR_VERSION 7
+#define NODE_PATCH_VERSION 0
 
 #define NODE_VERSION_IS_LTS 0
 #define NODE_VERSION_LTS_CODENAME ""
 
-#define NODE_VERSION_IS_RELEASE 0
+#define NODE_VERSION_IS_RELEASE 1
 
 #ifndef NODE_STRINGIFY
 #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)
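Everything in this release sits behind the version bump above, where the NODE_*_VERSION macros become the `20.7.0` that `process.versions.node` reports. A hedged sketch of gating on it at runtime (the comparison helper is illustrative only, not an official API):

```js
// The NODE_MAJOR/MINOR/PATCH macros above surface as process.versions.node
// ('20.7.0' for this release). A coarse gate before relying on 20.7.0
// additions such as the source-map detection property:
const [major, minor] = process.versions.node.split('.').map(Number)

const hasV20_7 = major > 20 || (major === 20 && minor >= 7)
if (hasV20_7) {
  console.log('20.7.0 features available, e.g.:', process.sourceMapsEnabled)
} else {
  console.log('running', process.version, '- 20.7.0 features unavailable')
}
```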