diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 47e420a9457bb8..1a62e3560a23ef 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -11,6 +11,8 @@ # tsc /.github/CODEOWNERS @nodejs/tsc +/.github/PULL_REQUEST_TEMPLATE.md @nodejs/tsc +/.github/ISSUE_TEMPLATE/* @nodejs/tsc /CODE_OF_CONDUCT.md @nodejs/tsc /CONTRIBUTING.md @nodejs/tsc /doc/contributing/*.md @nodejs/tsc @@ -85,16 +87,16 @@ # modules, including loaders -/doc/api/esm.md @nodejs/modules @nodejs/loaders -/doc/api/module.md @nodejs/modules @nodejs/loaders -/doc/api/modules.md @nodejs/modules @nodejs/loaders -/doc/api/packages.md @nodejs/modules @nodejs/loaders -/lib/internal/bootstrap/realm.js @nodejs/modules @nodejs/loaders -/lib/internal/modules/* @nodejs/modules @nodejs/loaders -/lib/internal/process/esm_loader.js @nodejs/modules @nodejs/loaders -/lib/internal/process/execution.js @nodejs/modules @nodejs/loaders -/lib/module.js @nodejs/modules @nodejs/loaders -/src/module_wrap* @nodejs/modules @nodejs/loaders @nodejs/vm +/doc/api/esm.md @nodejs/loaders +/doc/api/module.md @nodejs/loaders +/doc/api/modules.md @nodejs/loaders +/doc/api/packages.md @nodejs/loaders +/lib/internal/bootstrap/realm.js @nodejs/loaders +/lib/internal/modules/* @nodejs/loaders +/lib/internal/process/esm_loader.js @nodejs/loaders +/lib/internal/process/execution.js @nodejs/loaders +/lib/module.js @nodejs/loaders +/src/module_wrap* @nodejs/loaders @nodejs/vm # Node-API @@ -146,6 +148,7 @@ /lib/internal/main/test_runner.js @nodejs/test_runner /lib/internal/test_runner/* @nodejs/test_runner /lib/test.js @nodejs/test_runner +/lib/test/reporters.js @nodejs/test_runner /test/parallel/test-runner-* @nodejs/test_runner # Single Executable Applications diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 7e72cfbd77e972..936c2a06125795 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,6 +10,9 @@ For code changes: 2. Update documentation if relevant. 3. Ensure that `make -j4 test` (UNIX), or `vcbuild test` (Windows) passes. +If you believe this PR should be highlighted in the Node.js CHANGELOG +please add the `notable-change` label. 
+ Developer's Certificate of Origin 1.1 By making a contribution to this project, I certify that: diff --git a/.github/workflows/auto-start-ci.yml b/.github/workflows/auto-start-ci.yml index 03c5f326a06f39..6a45b2d692b94e 100644 --- a/.github/workflows/auto-start-ci.yml +++ b/.github/workflows/auto-start-ci.yml @@ -46,12 +46,12 @@ jobs: if: needs.get-prs-for-ci.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Install Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 273b1fb6d81c6e..7a1789a2f5fa20 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -39,7 +39,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -65,7 +65,7 @@ jobs: needs: build-tarball runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml index 880f24279fcc43..36205e56752d0f 100644 --- a/.github/workflows/build-windows.yml +++ b/.github/workflows/build-windows.yml @@ -38,7 +38,7 @@ jobs: fail-fast: false runs-on: ${{ matrix.windows }} steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml index bad18e9a65cf55..c4836d505804a6 100644 --- a/.github/workflows/commit-lint.yml +++ b/.github/workflows/commit-lint.yml @@ -17,13 +17,13 @@ jobs: run: | echo "plusOne=$((${{ github.event.pull_request.commits }} + 1))" >> $GITHUB_OUTPUT echo "minusOne=$((${{ github.event.pull_request.commits }} - 1))" >> $GITHUB_OUTPUT - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: fetch-depth: ${{ steps.nb-of-commits.outputs.plusOne }} persist-credentials: false - run: git reset HEAD^2 - name: Install Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Validate commit message diff --git a/.github/workflows/commit-queue.yml b/.github/workflows/commit-queue.yml index 12b9c653f7b00f..8cf3978c3f23ef 100644 --- a/.github/workflows/commit-queue.yml +++ b/.github/workflows/commit-queue.yml @@ -58,7 +58,7 @@ jobs: if: needs.get_mergeable_prs.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: 
actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: # Needs the whole git history for ncu to work # See https://github.com/nodejs/node-core-utils/pull/486 @@ -71,7 +71,7 @@ jobs: # Install dependencies - name: Install Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Install node-core-utils diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml index 6a53f7d53f3595..a11350f2377d93 100644 --- a/.github/workflows/coverage-linux-without-intl.yml +++ b/.github/workflows/coverage-linux-without-intl.yml @@ -37,7 +37,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml index 62eb62a0ca1e5e..03300a334eb05f 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -37,7 +37,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/coverage-windows.yml b/.github/workflows/coverage-windows.yml index 331d48f7d09146..0045010df812d5 100644 --- a/.github/workflows/coverage-windows.yml +++ b/.github/workflows/coverage-windows.yml @@ -39,7 +39,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: windows-2022 steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/daily-wpt-fyi.yml b/.github/workflows/daily-wpt-fyi.yml index 7ce42ec0c76a24..37a67296964b46 100644 --- a/.github/workflows/daily-wpt-fyi.yml +++ b/.github/workflows/daily-wpt-fyi.yml @@ -45,7 +45,7 @@ jobs: run: echo "NIGHTLY=$(curl -s https://nodejs.org/download/nightly/index.json | jq -r '[.[] | select(.files[] | contains("linux-x64"))][0].version')" >> $GITHUB_ENV - name: Install Node.js id: setup-node - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NIGHTLY || matrix.node-version }} check-latest: true @@ -57,7 +57,7 @@ jobs: SHORT_SHA=$(node -p 'process.version.split(/-nightly\d{8}/)[1]') echo "NIGHTLY_REF=$(gh api /repos/nodejs/node/commits/$SHORT_SHA --jq '.sha')" >> $GITHUB_ENV - name: Checkout ${{ steps.setup-node.outputs.node-version }} - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false ref: ${{ env.NIGHTLY_REF || steps.setup-node.outputs.node-version }} @@ -73,7 +73,7 @@ jobs: run: rm -rf wpt working-directory: test/fixtures - name: Checkout epochs/daily WPT - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + 
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: repository: web-platform-tests/wpt persist-credentials: false @@ -98,7 +98,7 @@ jobs: run: rm -rf deps/undici - name: Checkout undici if: ${{ env.WPT_REPORT != '' }} - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: repository: nodejs/undici persist-credentials: false diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index 308c8cbc5e6462..d7c688db158a08 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -17,11 +17,11 @@ jobs: # not working on gcc-8 and gcc-9 see https://github.com/nodejs/node/issues/38570 container: gcc:11 steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Environment Information diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 4e7b41907c7cf2..0efc8cda7ce386 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -24,11 +24,11 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Environment Information diff --git a/.github/workflows/find-inactive-collaborators.yml b/.github/workflows/find-inactive-collaborators.yml index 6a1e74b8cbb034..e5b6f2d5d2ffde 100644 --- a/.github/workflows/find-inactive-collaborators.yml +++ b/.github/workflows/find-inactive-collaborators.yml @@ -19,13 +19,13 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: fetch-depth: 0 persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} diff --git a/.github/workflows/find-inactive-tsc.yml b/.github/workflows/find-inactive-tsc.yml index 47cd697598405d..b6c1b990041de8 100644 --- a/.github/workflows/find-inactive-tsc.yml +++ b/.github/workflows/find-inactive-tsc.yml @@ -20,13 +20,13 @@ jobs: steps: - name: Checkout the repo - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: fetch-depth: 0 persist-credentials: false - name: Clone nodejs/TSC repository - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: fetch-depth: 0 path: .tmp @@ -34,7 +34,7 @@ jobs: repository: nodejs/TSC - name: Use Node.js ${{ env.NODE_VERSION }} - uses: 
actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} diff --git a/.github/workflows/license-builder.yml b/.github/workflows/license-builder.yml index 84d03a4ee24e7b..28429d3fcff96e 100644 --- a/.github/workflows/license-builder.yml +++ b/.github/workflows/license-builder.yml @@ -17,7 +17,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - run: ./tools/license-builder.sh # Run the license builder tool diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index 46cb08e5964d44..4770dca2f2834b 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -25,11 +25,11 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Environment Information @@ -40,7 +40,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -55,12 +55,12 @@ jobs: if: ${{ github.event.pull_request && github.event.pull_request.draft == false && github.base_ref == github.event.repository.default_branch }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: fetch-depth: 0 persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -93,11 +93,11 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Environment Information @@ -118,7 +118,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -135,7 +135,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: 
actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Use Python ${{ env.PYTHON_VERSION }} @@ -153,7 +153,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - run: shellcheck -V @@ -163,7 +163,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - uses: mszostok/codeowners-validator@7f3f5e28c6d7b8dfae5731e54ce2272ca384592f @@ -173,7 +173,7 @@ jobs: if: ${{ github.event.pull_request }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: fetch-depth: 2 persist-credentials: false diff --git a/.github/workflows/notify-on-push.yml b/.github/workflows/notify-on-push.yml index 828e4480adfaec..e5ae6301a5c397 100644 --- a/.github/workflows/notify-on-push.yml +++ b/.github/workflows/notify-on-push.yml @@ -34,7 +34,7 @@ jobs: permissions: pull-requests: write steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Check commit message diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 80913e487bd957..8ce22207982083 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -33,12 +33,12 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@cba0d00b1fc9a034e1e642ea0f1103c282990604 # v2.5.0 + uses: step-security/harden-runner@8ca2b8b2ece13480cda6dacd3511b49857a23c09 # v2.5.1 with: egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs - name: Checkout code - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false diff --git a/.github/workflows/test-asan.yml b/.github/workflows/test-asan.yml index 1cc7386e5872e0..1336abbbdca070 100644 --- a/.github/workflows/test-asan.yml +++ b/.github/workflows/test-asan.yml @@ -47,7 +47,7 @@ jobs: CONFIG_FLAGS: --enable-asan ASAN: true steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 1c3113ab6acdd0..7f10457e3ad5f6 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -7,14 +7,22 @@ on: pull_request: types: [opened, synchronize, reopened, ready_for_review] - paths: [test/internet/**] + paths: + - test/internet/** + - internal/dns/** + - lib/dns.js + - lib/net.js push: branches: - main - canary - v[0-9]+.x-staging - v[0-9]+.x - paths: [test/internet/**] + paths: + - test/internet/** + - internal/dns/** + - lib/dns.js + - lib/net.js concurrency: group: ${{ github.workflow }}-${{ 
github.head_ref || github.run_id }} @@ -32,7 +40,7 @@ jobs: if: github.repository == 'nodejs/node' || github.event_name != 'schedule' runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index 6b4af9add89d46..76330f37d05ad7 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -34,7 +34,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index c5dd968cf056f0..c74d3d193bfe4d 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -40,7 +40,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: macos-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/timezone-update.yml b/.github/workflows/timezone-update.yml index 33122d91ef73ad..f48dd8ad0eb3ed 100644 --- a/.github/workflows/timezone-update.yml +++ b/.github/workflows/timezone-update.yml @@ -20,12 +20,12 @@ jobs: steps: - name: Checkout nodejs/node - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Checkout unicode-org/icu-data - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: path: icu-data persist-credentials: false diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index 0ae849738e5487..880586e2879cbe 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -284,7 +284,7 @@ jobs: tail -n1 temp-output | grep "NEW_VERSION=" >> "$GITHUB_ENV" || true rm temp-output steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 if: github.event_name == 'schedule' || inputs.id == 'all' || inputs.id == matrix.id with: persist-credentials: false diff --git a/.github/workflows/update-openssl.yml b/.github/workflows/update-openssl.yml index 591085df1efad7..0586e0b7d5595d 100644 --- a/.github/workflows/update-openssl.yml +++ b/.github/workflows/update-openssl.yml @@ -14,7 +14,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Check and download new OpenSSL version @@ -62,7 +62,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: 
persist-credentials: false ref: v16.x-staging diff --git a/.github/workflows/update-v8.yml b/.github/workflows/update-v8.yml index 28d366de526508..fb123a5b069a72 100644 --- a/.github/workflows/update-v8.yml +++ b/.github/workflows/update-v8.yml @@ -16,7 +16,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 with: persist-credentials: false - name: Cache node modules and update-v8 @@ -30,7 +30,7 @@ jobs: ~/.npm key: ${{ runner.os }}-build-${{ env.cache-name }} - name: Install Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1 with: node-version: ${{ env.NODE_VERSION }} - name: Install node-core-utils diff --git a/CHANGELOG.md b/CHANGELOG.md index c30394031a0b8a..b6156ad948fc7e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -36,7 +36,8 @@ release.
dry-run
dedupe, uninstall, as well as pack and publish.
Note: This is NOT honored by other network related commands, eg dist-tags,
owner, etc.
cpu
Override CPU architecture of native modules to install. Acceptable values
+are same as cpu field of package.json, which comes from process.arch.
os
Override OS of native modules to install. Acceptable values are same as os
+field of package.json, which comes from process.platform.
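A minimal usage sketch of the two new options described above (the flag spellings follow npm's usual --<option> form; the concrete values are only examples of process.arch/process.platform strings):

    # Install dependencies as if on linux/arm64, e.g. when preparing
    # node_modules for a container image from a darwin/x64 host.
    # Accepted values match the "cpu"/"os" fields of package.json.
    npm install --cpu=arm64 --os=linux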
workspace
npm ls promzard in npm's source tree will show:
-npm@9.8.1 /path/to/npm
+npm@10.1.0 /path/to/npm
 └─┬ init-package-json@0.0.4
   └── promzard@0.1.5
diff --git a/deps/npm/docs/output/commands/npm-pkg.html b/deps/npm/docs/output/commands/npm-pkg.html
index 0a0b84107b7587..d60ca0b0f4aac7 100644
--- a/deps/npm/docs/output/commands/npm-pkg.html
+++ b/deps/npm/docs/output/commands/npm-pkg.html
@@ -346,7 +346,6 @@ See Also
- npm install
- npm init
- npm config
-- npm set-script
- workspaces
Note: This command is unaware of workspaces.
9.8.1
+10.1.0
npm is the package manager for the Node JavaScript platform. It puts modules
in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/configuring-npm/package-json.html b/deps/npm/docs/output/configuring-npm/package-json.html
index 64e062223626a4..712708ef406391 100644
--- a/deps/npm/docs/output/configuring-npm/package-json.html
+++ b/deps/npm/docs/output/configuring-npm/package-json.html
@@ -211,8 +211,10 @@
It should look like this:
{
- "url" : "https://github.com/owner/project/issues",
- "email" : "project@hostname.com"
+ "bugs": {
+ "url": "https://github.com/owner/project/issues",
+ "email": "project@hostname.com"
+ }
}
You can specify either one or both values. If you want to provide only a
@@ -359,6 +361,7 @@
README
LICENSE / LICENCE
README & LICENSE can have any case and extension.
Conversely, some files are always ignored:
diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html
index 440c4f29e97f10..5942cf2118fae3 100644
--- a/deps/npm/docs/output/using-npm/config.html
+++ b/deps/npm/docs/output/using-npm/config.html
@@ -142,7 +142,7 @@
[Alphabetical index of config options rendered from config.html, shown before and after this change; the updated index adds the new cpu and os options and no longer lists the deprecated ci-name and tmp options.]
commit-hooks
Run git commit hooks when using the npm version command.
cpu
Override CPU architecture of native modules to install. Acceptable values
+are same as cpu field of package.json, which comes from process.arch.
depth
Infinity if --all is set, otherwise 1
omit-lockfile-registry-resolved
registry dependencies. Subsequent installs will need to resolve tarball
endpoints with the configured registry, likely resulting in a longer install
time.
+os
+
+- Default: null
+- Type: null or String
+
+Override OS of native modules to install. Acceptable values are same as os
+field of package.json, which comes from process.platform.
otp
- Default: null
@@ -1418,17 +1432,6 @@ cert
It is not the path to a certificate file, though you can set a
registry-scoped "certfile" path like
"//other-registry.tld/:certfile=/path/to/cert.pem".
-ci-name
-
-- Default: The name of the current CI system, or null when not on a known CI
-platform.
-- Type: null or String
-- DEPRECATED: This config is deprecated and will not be changeable in future
-version of npm.
-
-The name of a continuous integration system. If not set explicitly, npm will
-detect the current CI environment using the ci-info module.
dev
- Default: false
@@ -1543,17 +1546,6 @@ shrinkwrap
- DEPRECATED: Use the --package-lock setting instead.
Alias for --package-lock
-tmp
-
-- Default: The value returned by the Node.js os.tmpdir() method
-https://nodejs.org/api/os.html#os_os_tmpdir
-- Type: Path
-- DEPRECATED: This setting is no longer used. npm stores temporary files in a
-special location in the cache, and they are managed by cacache.
-
-Historically, the location where temporary files were stored. No longer
-relevant.
See also
- npm config
diff --git a/deps/npm/lib/commands/audit.js b/deps/npm/lib/commands/audit.js
index 500620f2cd01bd..de5483109d598e 100644
--- a/deps/npm/lib/commands/audit.js
+++ b/deps/npm/lib/commands/audit.js
@@ -4,7 +4,7 @@ const localeCompare = require('@isaacs/string-locale-compare')('en')
const npa = require('npm-package-arg')
const pacote = require('pacote')
const pMap = require('p-map')
-const { sigstore } = require('sigstore')
+const tufClient = require('@sigstore/tuf')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const auditError = require('../utils/audit-error.js')
@@ -38,8 +38,8 @@ class VerifySignatures {
throw new Error('found no installed dependencies to audit')
}
- const tuf = await sigstore.tuf.client({
- tufCachePath: this.opts.tufCache,
+ const tuf = await tufClient.initTUF({
+ cachePath: this.opts.tufCache,
retry: this.opts.retry,
timeout: this.opts.timeout,
})
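For context, the VerifySignatures flow patched here backs signature verification in the CLI; a typical invocation (not changed by this patch) is:

    # Verify registry signatures and provenance attestations for the
    # packages installed in the current project.
    npm audit signatures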
diff --git a/deps/npm/lib/commands/install.js b/deps/npm/lib/commands/install.js
index 75f0e2f175b61d..3983c8d26c841b 100644
--- a/deps/npm/lib/commands/install.js
+++ b/deps/npm/lib/commands/install.js
@@ -34,6 +34,8 @@ class Install extends ArboristWorkspaceCmd {
'bin-links',
'fund',
'dry-run',
+ 'cpu',
+ 'os',
...super.params,
]
diff --git a/deps/npm/lib/commands/run-script.js b/deps/npm/lib/commands/run-script.js
index 13efdde750a825..75f00a46b84e9f 100644
--- a/deps/npm/lib/commands/run-script.js
+++ b/deps/npm/lib/commands/run-script.js
@@ -207,24 +207,10 @@ class RunScript extends BaseCommand {
log.error(err)
log.error(` in workspace: ${pkg._id || pkg.name}`)
log.error(` at location: ${workspacePath}`)
-
- const scriptMissing = err.message.startsWith('Missing script')
-
- // avoids exiting with error code in case there's scripts missing
- // in some workspaces since other scripts might have succeeded
- if (!scriptMissing) {
- process.exitCode = 1
- }
-
- return scriptMissing
+ process.exitCode = 1
})
res.push(runResult)
}
-
- // in case **all** tests are missing, then it should exit with error code
- if (res.every(Boolean)) {
- throw new Error(`Missing script: ${args[0]}`)
- }
}
async listWorkspaces (args, filters) {
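With this change, a failing or missing script in any workspace now sets a non-zero exit code. As a usage sketch, callers that want to keep skipping workspaces without the script can pass the existing --if-present flag:

    # Run "test" only in workspaces that define it; --if-present prevents
    # a missing script from being treated as an error.
    npm run test --workspaces --if-present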
diff --git a/deps/npm/lib/commands/search.js b/deps/npm/lib/commands/search.js
index 5fb0a12bce1386..85ff7db2b78840 100644
--- a/deps/npm/lib/commands/search.js
+++ b/deps/npm/lib/commands/search.js
@@ -68,6 +68,10 @@ class Search extends BaseCommand {
let anyOutput = false
class FilterStream extends Minipass {
+ constructor () {
+ super({ objectMode: true })
+ }
+
write (pkg) {
if (filter(pkg, opts.include, opts.exclude)) {
super.write(pkg)
diff --git a/deps/npm/lib/utils/format-search-stream.js b/deps/npm/lib/utils/format-search-stream.js
index 762dea90859d19..ed753c27aabc83 100644
--- a/deps/npm/lib/utils/format-search-stream.js
+++ b/deps/npm/lib/utils/format-search-stream.js
@@ -143,7 +143,7 @@ function highlightSearchTerms (str, terms) {
function normalizePackage (data, opts) {
return {
name: ansiTrim(data.name),
- description: ansiTrim(data.description),
+ description: ansiTrim(data.description ?? ''),
author: data.maintainers.map((m) => `=${ansiTrim(m.username)}`).join(' '),
keywords: Array.isArray(data.keywords)
? data.keywords.map(ansiTrim).join(' ')
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index b6266e1c49ba22..dc455d4a5f23b2 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM-ACCESS" "1" "July 2023" "" ""
+.TH "NPM-ACCESS" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-access\fR - Set access level on published packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index c38b6251f94abe..81807af54a8cdf 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM-ADDUSER" "1" "July 2023" "" ""
+.TH "NPM-ADDUSER" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-adduser\fR - Add a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1
index 35fb73e57860c5..84e580c7e5b17a 100644
--- a/deps/npm/man/man1/npm-audit.1
+++ b/deps/npm/man/man1/npm-audit.1
@@ -1,4 +1,4 @@
-.TH "NPM-AUDIT" "1" "July 2023" "" ""
+.TH "NPM-AUDIT" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-audit\fR - Run a security audit
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index 8fac5248b8a812..bbf9f8d979fd18 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM-BUGS" "1" "July 2023" "" ""
+.TH "NPM-BUGS" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-bugs\fR - Report bugs for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index f16723a031d50b..a58256bf8d4585 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM-CACHE" "1" "July 2023" "" ""
+.TH "NPM-CACHE" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-cache\fR - Manipulates packages cache
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1
index a8c641634a7a76..59b0de021df4e5 100644
--- a/deps/npm/man/man1/npm-ci.1
+++ b/deps/npm/man/man1/npm-ci.1
@@ -1,4 +1,4 @@
-.TH "NPM-CI" "1" "July 2023" "" ""
+.TH "NPM-CI" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-ci\fR - Clean install a project
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1
index fbf5a68626d391..e420854773c247 100644
--- a/deps/npm/man/man1/npm-completion.1
+++ b/deps/npm/man/man1/npm-completion.1
@@ -1,4 +1,4 @@
-.TH "NPM-COMPLETION" "1" "July 2023" "" ""
+.TH "NPM-COMPLETION" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-completion\fR - Tab Completion for npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1
index 77fd28de8f05f9..dba3356299a4f2 100644
--- a/deps/npm/man/man1/npm-config.1
+++ b/deps/npm/man/man1/npm-config.1
@@ -1,4 +1,4 @@
-.TH "NPM-CONFIG" "1" "July 2023" "" ""
+.TH "NPM-CONFIG" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-config\fR - Manage the npm configuration files
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1
index bb97e329c50bce..8dba928a3d78aa 100644
--- a/deps/npm/man/man1/npm-dedupe.1
+++ b/deps/npm/man/man1/npm-dedupe.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEDUPE" "1" "July 2023" "" ""
+.TH "NPM-DEDUPE" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-dedupe\fR - Reduce duplication in the package tree
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1
index 36b97e30f0aa02..85447f708de39d 100644
--- a/deps/npm/man/man1/npm-deprecate.1
+++ b/deps/npm/man/man1/npm-deprecate.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEPRECATE" "1" "July 2023" "" ""
+.TH "NPM-DEPRECATE" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-deprecate\fR - Deprecate a version of a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1
index 99bbc2fe01d0ca..cb21d2b1bfd135 100644
--- a/deps/npm/man/man1/npm-diff.1
+++ b/deps/npm/man/man1/npm-diff.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIFF" "1" "July 2023" "" ""
+.TH "NPM-DIFF" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-diff\fR - The registry diff command
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1
index 16ebcdbf0311d3..ab3c4bec4b6838 100644
--- a/deps/npm/man/man1/npm-dist-tag.1
+++ b/deps/npm/man/man1/npm-dist-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIST-TAG" "1" "July 2023" "" ""
+.TH "NPM-DIST-TAG" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-dist-tag\fR - Modify package distribution tags
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1
index 779b89157c4e81..32492edbf9d8f1 100644
--- a/deps/npm/man/man1/npm-docs.1
+++ b/deps/npm/man/man1/npm-docs.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCS" "1" "July 2023" "" ""
+.TH "NPM-DOCS" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-docs\fR - Open documentation for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1
index a1eef652698146..7bbae5311478b2 100644
--- a/deps/npm/man/man1/npm-doctor.1
+++ b/deps/npm/man/man1/npm-doctor.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCTOR" "1" "July 2023" "" ""
+.TH "NPM-DOCTOR" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-doctor\fR - Check your npm environment
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1
index ee7b76bd9d7dec..97274222f7b744 100644
--- a/deps/npm/man/man1/npm-edit.1
+++ b/deps/npm/man/man1/npm-edit.1
@@ -1,4 +1,4 @@
-.TH "NPM-EDIT" "1" "July 2023" "" ""
+.TH "NPM-EDIT" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-edit\fR - Edit an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1
index 9e6af781c52267..a48b29fd1ab411 100644
--- a/deps/npm/man/man1/npm-exec.1
+++ b/deps/npm/man/man1/npm-exec.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXEC" "1" "July 2023" "" ""
+.TH "NPM-EXEC" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-exec\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1
index 9985e3cb482836..08aa51b4748f9d 100644
--- a/deps/npm/man/man1/npm-explain.1
+++ b/deps/npm/man/man1/npm-explain.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLAIN" "1" "July 2023" "" ""
+.TH "NPM-EXPLAIN" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-explain\fR - Explain installed packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1
index ee3ee6b7d64dc8..bf87dc4c66d3d9 100644
--- a/deps/npm/man/man1/npm-explore.1
+++ b/deps/npm/man/man1/npm-explore.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLORE" "1" "July 2023" "" ""
+.TH "NPM-EXPLORE" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-explore\fR - Browse an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1
index 9d373635de18be..3310201661f1b1 100644
--- a/deps/npm/man/man1/npm-find-dupes.1
+++ b/deps/npm/man/man1/npm-find-dupes.1
@@ -1,4 +1,4 @@
-.TH "NPM-FIND-DUPES" "1" "July 2023" "" ""
+.TH "NPM-FIND-DUPES" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-find-dupes\fR - Find duplication in the package tree
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1
index 794d0431b4ed79..7f70ab3c45c058 100644
--- a/deps/npm/man/man1/npm-fund.1
+++ b/deps/npm/man/man1/npm-fund.1
@@ -1,4 +1,4 @@
-.TH "NPM-FUND" "1" "July 2023" "" ""
+.TH "NPM-FUND" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-fund\fR - Retrieve funding information
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1
index 0f85ec27c96f65..e1bd049796d950 100644
--- a/deps/npm/man/man1/npm-help-search.1
+++ b/deps/npm/man/man1/npm-help-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP-SEARCH" "1" "July 2023" "" ""
+.TH "NPM-HELP-SEARCH" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-help-search\fR - Search npm help documentation
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1
index 9226fac417504f..c7b63df80ca5b2 100644
--- a/deps/npm/man/man1/npm-help.1
+++ b/deps/npm/man/man1/npm-help.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP" "1" "July 2023" "" ""
+.TH "NPM-HELP" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-help\fR - Get help on npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1
index df6ff9f56f0d66..345fa4bf0fa339 100644
--- a/deps/npm/man/man1/npm-hook.1
+++ b/deps/npm/man/man1/npm-hook.1
@@ -1,4 +1,4 @@
-.TH "NPM-HOOK" "1" "July 2023" "" ""
+.TH "NPM-HOOK" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-hook\fR - Manage registry hooks
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1
index 7a6722bea212f7..733d132e56f283 100644
--- a/deps/npm/man/man1/npm-init.1
+++ b/deps/npm/man/man1/npm-init.1
@@ -1,4 +1,4 @@
-.TH "NPM-INIT" "1" "July 2023" "" ""
+.TH "NPM-INIT" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-init\fR - Create a package.json file
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1
index 306c5e3e9b6895..291242bcc5551a 100644
--- a/deps/npm/man/man1/npm-install-ci-test.1
+++ b/deps/npm/man/man1/npm-install-ci-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-CI-TEST" "1" "July 2023" "" ""
+.TH "NPM-INSTALL-CI-TEST" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1
index 47dfcea404dcd3..7a7db04ce9af80 100644
--- a/deps/npm/man/man1/npm-install-test.1
+++ b/deps/npm/man/man1/npm-install-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-TEST" "1" "July 2023" "" ""
+.TH "NPM-INSTALL-TEST" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-install-test\fR - Install package(s) and run tests
.SS "Synopsis"
@@ -223,6 +223,26 @@ Type: Boolean
Indicates that you don't want npm to make any changes and that it should only report what it would have done. This can be passed into any of the commands that modify your local installation, eg, \fBinstall\fR, \fBupdate\fR, \fBdedupe\fR, \fBuninstall\fR, as well as \fBpack\fR and \fBpublish\fR.
.P
Note: This is NOT honored by other network related commands, eg \fBdist-tags\fR, \fBowner\fR, etc.
+.SS "\fBcpu\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override CPU architecture of native modules to install. Acceptable values are same as \fBcpu\fR field of package.json, which comes from \fBprocess.arch\fR.
+.SS "\fBos\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override OS of native modules to install. Acceptable values are same as \fBos\fR field of package.json, which comes from \fBprocess.platform\fR.
.SS "\fBworkspace\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1
index 1e93c6dba476e3..2e55418c2cbdba 100644
--- a/deps/npm/man/man1/npm-install.1
+++ b/deps/npm/man/man1/npm-install.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL" "1" "July 2023" "" ""
+.TH "NPM-INSTALL" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-install\fR - Install a package
.SS "Synopsis"
@@ -585,6 +585,26 @@ Type: Boolean
Indicates that you don't want npm to make any changes and that it should only report what it would have done. This can be passed into any of the commands that modify your local installation, eg, \fBinstall\fR, \fBupdate\fR, \fBdedupe\fR, \fBuninstall\fR, as well as \fBpack\fR and \fBpublish\fR.
.P
Note: This is NOT honored by other network related commands, eg \fBdist-tags\fR, \fBowner\fR, etc.
+.SS "\fBcpu\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override CPU architecture of native modules to install. Acceptable values are same as \fBcpu\fR field of package.json, which comes from \fBprocess.arch\fR.
+.SS "\fBos\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override OS of native modules to install. Acceptable values are same as \fBos\fR field of package.json, which comes from \fBprocess.platform\fR.
.SS "\fBworkspace\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1
index 9494902cf2f18a..d07195da001f1b 100644
--- a/deps/npm/man/man1/npm-link.1
+++ b/deps/npm/man/man1/npm-link.1
@@ -1,4 +1,4 @@
-.TH "NPM-LINK" "1" "July 2023" "" ""
+.TH "NPM-LINK" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-link\fR - Symlink a package folder
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1
index fb07b4981e49a9..54c612b86dd6a9 100644
--- a/deps/npm/man/man1/npm-login.1
+++ b/deps/npm/man/man1/npm-login.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGIN" "1" "July 2023" "" ""
+.TH "NPM-LOGIN" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-login\fR - Login to a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1
index 9ee817a430f1fd..3bab9b35c7866a 100644
--- a/deps/npm/man/man1/npm-logout.1
+++ b/deps/npm/man/man1/npm-logout.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGOUT" "1" "July 2023" "" ""
+.TH "NPM-LOGOUT" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-logout\fR - Log out of the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index af399edb102b6f..7254629646d7a9 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -1,4 +1,4 @@
-.TH "NPM-LS" "1" "July 2023" "" ""
+.TH "NPM-LS" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-ls\fR - List installed packages
.SS "Synopsis"
@@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit
.P
.RS 2
.nf
-npm@9.8.1 /path/to/npm
+npm@10.1.0 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
.fi
diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1
index f4584893ab84da..e1b45b50b6765f 100644
--- a/deps/npm/man/man1/npm-org.1
+++ b/deps/npm/man/man1/npm-org.1
@@ -1,4 +1,4 @@
-.TH "NPM-ORG" "1" "July 2023" "" ""
+.TH "NPM-ORG" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-org\fR - Manage orgs
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1
index 0c5d218eaa3526..887c252ff38778 100644
--- a/deps/npm/man/man1/npm-outdated.1
+++ b/deps/npm/man/man1/npm-outdated.1
@@ -1,4 +1,4 @@
-.TH "NPM-OUTDATED" "1" "July 2023" "" ""
+.TH "NPM-OUTDATED" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-outdated\fR - Check for outdated packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1
index b2d19405e83ca5..01c2050f00068e 100644
--- a/deps/npm/man/man1/npm-owner.1
+++ b/deps/npm/man/man1/npm-owner.1
@@ -1,4 +1,4 @@
-.TH "NPM-OWNER" "1" "July 2023" "" ""
+.TH "NPM-OWNER" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-owner\fR - Manage package owners
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1
index 38869efc2e3f26..a5fc8234152216 100644
--- a/deps/npm/man/man1/npm-pack.1
+++ b/deps/npm/man/man1/npm-pack.1
@@ -1,4 +1,4 @@
-.TH "NPM-PACK" "1" "July 2023" "" ""
+.TH "NPM-PACK" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-pack\fR - Create a tarball from a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1
index fdbc131fba1438..92f76c66dc5a2e 100644
--- a/deps/npm/man/man1/npm-ping.1
+++ b/deps/npm/man/man1/npm-ping.1
@@ -1,4 +1,4 @@
-.TH "NPM-PING" "1" "July 2023" "" ""
+.TH "NPM-PING" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-ping\fR - Ping npm registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1
index 806a5ae62bac31..d13bcd64cef703 100644
--- a/deps/npm/man/man1/npm-pkg.1
+++ b/deps/npm/man/man1/npm-pkg.1
@@ -1,4 +1,4 @@
-.TH "NPM-PKG" "1" "July 2023" "" ""
+.TH "NPM-PKG" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-pkg\fR - Manages your package.json
.SS "Synopsis"
@@ -282,7 +282,5 @@ npm help init
.IP \(bu 4
npm help config
.IP \(bu 4
-npm help set-script
-.IP \(bu 4
npm help workspaces
.RE 0
diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1
index 764e9b9dc31ff4..909e5b709787bc 100644
--- a/deps/npm/man/man1/npm-prefix.1
+++ b/deps/npm/man/man1/npm-prefix.1
@@ -1,4 +1,4 @@
-.TH "NPM-PREFIX" "1" "July 2023" "" ""
+.TH "NPM-PREFIX" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-prefix\fR - Display prefix
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1
index 47c7d2eca05841..4941380ad2ef58 100644
--- a/deps/npm/man/man1/npm-profile.1
+++ b/deps/npm/man/man1/npm-profile.1
@@ -1,4 +1,4 @@
-.TH "NPM-PROFILE" "1" "July 2023" "" ""
+.TH "NPM-PROFILE" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-profile\fR - Change settings on your registry profile
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1
index fd4492f40845ed..eea4ee41da6a23 100644
--- a/deps/npm/man/man1/npm-prune.1
+++ b/deps/npm/man/man1/npm-prune.1
@@ -1,4 +1,4 @@
-.TH "NPM-PRUNE" "1" "July 2023" "" ""
+.TH "NPM-PRUNE" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-prune\fR - Remove extraneous packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1
index 888977f67626f3..b167ed8a60677a 100644
--- a/deps/npm/man/man1/npm-publish.1
+++ b/deps/npm/man/man1/npm-publish.1
@@ -1,4 +1,4 @@
-.TH "NPM-PUBLISH" "1" "July 2023" "" ""
+.TH "NPM-PUBLISH" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-publish\fR - Publish a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1
index e8bda254f3e19c..c5ff8ec3bffd66 100644
--- a/deps/npm/man/man1/npm-query.1
+++ b/deps/npm/man/man1/npm-query.1
@@ -1,4 +1,4 @@
-.TH "NPM-QUERY" "1" "July 2023" "" ""
+.TH "NPM-QUERY" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-query\fR - Dependency selector query
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1
index 4d7644fa5dabaa..d22edac3427f53 100644
--- a/deps/npm/man/man1/npm-rebuild.1
+++ b/deps/npm/man/man1/npm-rebuild.1
@@ -1,4 +1,4 @@
-.TH "NPM-REBUILD" "1" "July 2023" "" ""
+.TH "NPM-REBUILD" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-rebuild\fR - Rebuild a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1
index 233ae1c8def62b..8562592255a21a 100644
--- a/deps/npm/man/man1/npm-repo.1
+++ b/deps/npm/man/man1/npm-repo.1
@@ -1,4 +1,4 @@
-.TH "NPM-REPO" "1" "July 2023" "" ""
+.TH "NPM-REPO" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-repo\fR - Open package repository page in the browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1
index 5df4da83185575..50f86f5aedc763 100644
--- a/deps/npm/man/man1/npm-restart.1
+++ b/deps/npm/man/man1/npm-restart.1
@@ -1,4 +1,4 @@
-.TH "NPM-RESTART" "1" "July 2023" "" ""
+.TH "NPM-RESTART" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-restart\fR - Restart a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1
index 9d7f65b1eaf15e..6d66168523776a 100644
--- a/deps/npm/man/man1/npm-root.1
+++ b/deps/npm/man/man1/npm-root.1
@@ -1,4 +1,4 @@
-.TH "NPM-ROOT" "1" "July 2023" "" ""
+.TH "NPM-ROOT" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-root\fR - Display npm root
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1
index 2b458fc495568e..4a3cf441d72dd7 100644
--- a/deps/npm/man/man1/npm-run-script.1
+++ b/deps/npm/man/man1/npm-run-script.1
@@ -1,4 +1,4 @@
-.TH "NPM-RUN-SCRIPT" "1" "July 2023" "" ""
+.TH "NPM-RUN-SCRIPT" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-run-script\fR - Run arbitrary package scripts
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1
index 30a9e7c0f9371b..b21ea554562f9f 100644
--- a/deps/npm/man/man1/npm-search.1
+++ b/deps/npm/man/man1/npm-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-SEARCH" "1" "July 2023" "" ""
+.TH "NPM-SEARCH" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-search\fR - Search for packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1
index 932c73fa284ff4..7ed46091e5f6ea 100644
--- a/deps/npm/man/man1/npm-shrinkwrap.1
+++ b/deps/npm/man/man1/npm-shrinkwrap.1
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP" "1" "July 2023" "" ""
+.TH "NPM-SHRINKWRAP" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR - Lock down dependency versions for publication
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1
index 83bec1215ce124..4a890b37a86170 100644
--- a/deps/npm/man/man1/npm-star.1
+++ b/deps/npm/man/man1/npm-star.1
@@ -1,4 +1,4 @@
-.TH "NPM-STAR" "1" "July 2023" "" ""
+.TH "NPM-STAR" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-star\fR - Mark your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1
index 7651506c9135ee..de27ac0866449a 100644
--- a/deps/npm/man/man1/npm-stars.1
+++ b/deps/npm/man/man1/npm-stars.1
@@ -1,4 +1,4 @@
-.TH "NPM-STARS" "1" "July 2023" "" ""
+.TH "NPM-STARS" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-stars\fR - View packages marked as favorites
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1
index 6d3fa76cd86681..ac0a839b353884 100644
--- a/deps/npm/man/man1/npm-start.1
+++ b/deps/npm/man/man1/npm-start.1
@@ -1,4 +1,4 @@
-.TH "NPM-START" "1" "July 2023" "" ""
+.TH "NPM-START" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-start\fR - Start a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1
index 54611e36b08633..98f444f0863835 100644
--- a/deps/npm/man/man1/npm-stop.1
+++ b/deps/npm/man/man1/npm-stop.1
@@ -1,4 +1,4 @@
-.TH "NPM-STOP" "1" "July 2023" "" ""
+.TH "NPM-STOP" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-stop\fR - Stop a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1
index 7b806f4412061b..355b25afeb6427 100644
--- a/deps/npm/man/man1/npm-team.1
+++ b/deps/npm/man/man1/npm-team.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEAM" "1" "July 2023" "" ""
+.TH "NPM-TEAM" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-team\fR - Manage organization teams and team memberships
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1
index 5e02ed40cdb14e..e9c3734c91912d 100644
--- a/deps/npm/man/man1/npm-test.1
+++ b/deps/npm/man/man1/npm-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEST" "1" "July 2023" "" ""
+.TH "NPM-TEST" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-test\fR - Test a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1
index 242c82f2feb528..be7912f43ca11b 100644
--- a/deps/npm/man/man1/npm-token.1
+++ b/deps/npm/man/man1/npm-token.1
@@ -1,4 +1,4 @@
-.TH "NPM-TOKEN" "1" "July 2023" "" ""
+.TH "NPM-TOKEN" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-token\fR - Manage your authentication tokens
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1
index d89488ffc91a22..a8a3ec892d8eee 100644
--- a/deps/npm/man/man1/npm-uninstall.1
+++ b/deps/npm/man/man1/npm-uninstall.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNINSTALL" "1" "July 2023" "" ""
+.TH "NPM-UNINSTALL" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-uninstall\fR - Remove a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1
index faa9bd23baf2c9..96d28182b1d468 100644
--- a/deps/npm/man/man1/npm-unpublish.1
+++ b/deps/npm/man/man1/npm-unpublish.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNPUBLISH" "1" "July 2023" "" ""
+.TH "NPM-UNPUBLISH" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-unpublish\fR - Remove a package from the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1
index 157e7f53fcbede..7e9a57782a8e1e 100644
--- a/deps/npm/man/man1/npm-unstar.1
+++ b/deps/npm/man/man1/npm-unstar.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNSTAR" "1" "July 2023" "" ""
+.TH "NPM-UNSTAR" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-unstar\fR - Remove an item from your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1
index c72b717593f356..73608d675a64cf 100644
--- a/deps/npm/man/man1/npm-update.1
+++ b/deps/npm/man/man1/npm-update.1
@@ -1,4 +1,4 @@
-.TH "NPM-UPDATE" "1" "July 2023" "" ""
+.TH "NPM-UPDATE" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-update\fR - Update packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1
index 482727246288a4..3c05ff3809d0c7 100644
--- a/deps/npm/man/man1/npm-version.1
+++ b/deps/npm/man/man1/npm-version.1
@@ -1,4 +1,4 @@
-.TH "NPM-VERSION" "1" "July 2023" "" ""
+.TH "NPM-VERSION" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-version\fR - Bump a package version
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1
index ea6fdfeb518b2d..c1c4d6f199e0ae 100644
--- a/deps/npm/man/man1/npm-view.1
+++ b/deps/npm/man/man1/npm-view.1
@@ -1,4 +1,4 @@
-.TH "NPM-VIEW" "1" "July 2023" "" ""
+.TH "NPM-VIEW" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-view\fR - View registry info
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1
index 799d85fc0275e3..296ee8d5cff88b 100644
--- a/deps/npm/man/man1/npm-whoami.1
+++ b/deps/npm/man/man1/npm-whoami.1
@@ -1,4 +1,4 @@
-.TH "NPM-WHOAMI" "1" "July 2023" "" ""
+.TH "NPM-WHOAMI" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm-whoami\fR - Display npm username
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index cbb25b2aa1a32d..824d4b8719fce3 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "July 2023" "" ""
+.TH "NPM" "1" "September 2023" "" ""
.SH "NAME"
\fBnpm\fR - javascript package manager
.SS "Synopsis"
@@ -12,7 +12,7 @@ npm
Note: This command is unaware of workspaces.
.SS "Version"
.P
-9.8.1
+10.1.0
.SS "Description"
.P
npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently.
diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1
index f1c9b4cbf676ca..d6d0e0d88e0f64 100644
--- a/deps/npm/man/man1/npx.1
+++ b/deps/npm/man/man1/npx.1
@@ -1,4 +1,4 @@
-.TH "NPX" "1" "July 2023" "" ""
+.TH "NPX" "1" "September 2023" "" ""
.SH "NAME"
\fBnpx\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5
index 3661e0bbbab59d..023a0ec693adfd 100644
--- a/deps/npm/man/man5/folders.5
+++ b/deps/npm/man/man5/folders.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "July 2023" "" ""
+.TH "FOLDERS" "5" "September 2023" "" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5
index efbbdccbba07d8..8343669c716873 100644
--- a/deps/npm/man/man5/install.5
+++ b/deps/npm/man/man5/install.5
@@ -1,4 +1,4 @@
-.TH "INSTALL" "5" "July 2023" "" ""
+.TH "INSTALL" "5" "September 2023" "" ""
.SH "NAME"
\fBinstall\fR - Download and install node and npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5
index 3661e0bbbab59d..023a0ec693adfd 100644
--- a/deps/npm/man/man5/npm-global.5
+++ b/deps/npm/man/man5/npm-global.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "July 2023" "" ""
+.TH "FOLDERS" "5" "September 2023" "" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5
index f1e5784ada7682..2e6402de50d69e 100644
--- a/deps/npm/man/man5/npm-json.5
+++ b/deps/npm/man/man5/npm-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "July 2023" "" ""
+.TH "PACKAGE.JSON" "5" "September 2023" "" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
@@ -70,8 +70,10 @@ It should look like this:
.RS 2
.nf
{
- "url" : "https://github.com/owner/project/issues",
- "email" : "project@hostname.com"
+ "bugs": {
+ "url": "https://github.com/owner/project/issues",
+ "email": "project@hostname.com"
+ }
}
.fi
.RE
@@ -254,6 +256,8 @@ Certain files are always included, regardless of settings:
\fBLICENSE\fR / \fBLICENCE\fR
.IP \(bu 4
The file in the "main" field
+.IP \(bu 4
+The file(s) in the "bin" field
.RE 0
.P
diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5
index cf3e37e92253d2..3f0af1837befc3 100644
--- a/deps/npm/man/man5/npm-shrinkwrap-json.5
+++ b/deps/npm/man/man5/npm-shrinkwrap-json.5
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP.JSON" "5" "July 2023" "" ""
+.TH "NPM-SHRINKWRAP.JSON" "5" "September 2023" "" ""
.SH "NAME"
\fBnpm-shrinkwrap.json\fR - A publishable lockfile
.SS "Description"
diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5
index 7b222d3736b02d..60b1794712d0b6 100644
--- a/deps/npm/man/man5/npmrc.5
+++ b/deps/npm/man/man5/npmrc.5
@@ -1,4 +1,4 @@
-.TH "NPMRC" "5" "July 2023" "" ""
+.TH "NPMRC" "5" "September 2023" "" ""
.SH "NAME"
\fBnpmrc\fR - The npm config files
.SS "Description"
diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5
index f1e5784ada7682..2e6402de50d69e 100644
--- a/deps/npm/man/man5/package-json.5
+++ b/deps/npm/man/man5/package-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "July 2023" "" ""
+.TH "PACKAGE.JSON" "5" "September 2023" "" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
@@ -70,8 +70,10 @@ It should look like this:
.RS 2
.nf
{
- "url" : "https://github.com/owner/project/issues",
- "email" : "project@hostname.com"
+ "bugs": {
+ "url": "https://github.com/owner/project/issues",
+ "email": "project@hostname.com"
+ }
}
.fi
.RE
@@ -254,6 +256,8 @@ Certain files are always included, regardless of settings:
\fBLICENSE\fR / \fBLICENCE\fR
.IP \(bu 4
The file in the "main" field
+.IP \(bu 4
+The file(s) in the "bin" field
.RE 0
.P
diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5
index 82435a461b88a6..bbff439d118161 100644
--- a/deps/npm/man/man5/package-lock-json.5
+++ b/deps/npm/man/man5/package-lock-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE-LOCK.JSON" "5" "July 2023" "" ""
+.TH "PACKAGE-LOCK.JSON" "5" "September 2023" "" ""
.SH "NAME"
\fBpackage-lock.json\fR - A manifestation of the manifest
.SS "Description"
diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7
index 3bcd6075cc2235..13acb13c238167 100644
--- a/deps/npm/man/man7/config.7
+++ b/deps/npm/man/man7/config.7
@@ -1,4 +1,4 @@
-.TH "CONFIG" "7" "July 2023" "" ""
+.TH "CONFIG" "7" "September 2023" "" ""
.SH "NAME"
\fBconfig\fR - More than you probably want to know about npm configuration
.SS "Description"
@@ -350,6 +350,16 @@ Type: Boolean
.P
Run git commit hooks when using the \fBnpm version\fR command.
+.SS "\fBcpu\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override CPU architecture of native modules to install. Acceptable values are same as \fBcpu\fR field of package.json, which comes from \fBprocess.arch\fR.
.SS "\fBdepth\fR"
.RS 0
.IP \(bu 4
@@ -1038,6 +1048,16 @@ Type: Boolean
.P
This option causes npm to create lock files without a \fBresolved\fR key for registry dependencies. Subsequent installs will need to resolve tarball endpoints with the configured registry, likely resulting in a longer install time.
+.SS "\fBos\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: null or String
+.RE 0
+
+.P
+Override OS of native modules to install. Acceptable values are same as \fBos\fR field of package.json, which comes from \fBprocess.platform\fR.
.SS "\fBotp\fR"
.RS 0
.IP \(bu 4
@@ -1769,18 +1789,6 @@ cert="-----BEGIN CERTIFICATE-----\[rs]nXXXX\[rs]nXXXX\[rs]n-----END CERTIFICATE-
.RE
.P
It is \fInot\fR the path to a certificate file, though you can set a registry-scoped "certfile" path like "//other-registry.tld/:certfile=/path/to/cert.pem".
-.SS "\fBci-name\fR"
-.RS 0
-.IP \(bu 4
-Default: The name of the current CI system, or \fBnull\fR when not on a known CI platform.
-.IP \(bu 4
-Type: null or String
-.IP \(bu 4
-DEPRECATED: This config is deprecated and will not be changeable in future version of npm.
-.RE 0
-
-.P
-The name of a continuous integration system. If not set explicitly, npm will detect the current CI environment using the \fB\fBci-info\fR\fR \fI\(lahttp://npm.im/ci-info\(ra\fR module.
.SS "\fBdev\fR"
.RS 0
.IP \(bu 4
@@ -1959,18 +1967,6 @@ DEPRECATED: Use the --package-lock setting instead.
.P
Alias for --package-lock
-.SS "\fBtmp\fR"
-.RS 0
-.IP \(bu 4
-Default: The value returned by the Node.js \fBos.tmpdir()\fR method \fI\(lahttps://nodejs.org/api/os.html#os_os_tmpdir\(ra\fR
-.IP \(bu 4
-Type: Path
-.IP \(bu 4
-DEPRECATED: This setting is no longer used. npm stores temporary files in a special location in the cache, and they are managed by \fB\fBcacache\fR\fR \fI\(lahttp://npm.im/cacache\(ra\fR.
-.RE 0
-
-.P
-Historically, the location where temporary files were stored. No longer relevant.
.SS "See also"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7
index 8e557efe93853a..51b2052587fc57 100644
--- a/deps/npm/man/man7/dependency-selectors.7
+++ b/deps/npm/man/man7/dependency-selectors.7
@@ -1,4 +1,4 @@
-.TH "QUERYING" "7" "July 2023" "" ""
+.TH "QUERYING" "7" "September 2023" "" ""
.SH "NAME"
\fBQuerying\fR - Dependency Selector Syntax & Querying
.SS "Description"
diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7
index 788ed4b9d8d457..7902f7258f5776 100644
--- a/deps/npm/man/man7/developers.7
+++ b/deps/npm/man/man7/developers.7
@@ -1,4 +1,4 @@
-.TH "DEVELOPERS" "7" "July 2023" "" ""
+.TH "DEVELOPERS" "7" "September 2023" "" ""
.SH "NAME"
\fBdevelopers\fR - Developer Guide
.SS "Description"
diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7
index 0c96f75c479453..81a09df41eb3ce 100644
--- a/deps/npm/man/man7/logging.7
+++ b/deps/npm/man/man7/logging.7
@@ -1,4 +1,4 @@
-.TH "LOGGING" "7" "July 2023" "" ""
+.TH "LOGGING" "7" "September 2023" "" ""
.SH "NAME"
\fBLogging\fR - Why, What & How We Log
.SS "Description"
diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7
index 2d0ec91b96774d..02fa92a4519625 100644
--- a/deps/npm/man/man7/orgs.7
+++ b/deps/npm/man/man7/orgs.7
@@ -1,4 +1,4 @@
-.TH "ORGS" "7" "July 2023" "" ""
+.TH "ORGS" "7" "September 2023" "" ""
.SH "NAME"
\fBorgs\fR - Working with Teams & Orgs
.SS "Description"
diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7
index 2d02001f93791b..67846da9dbe210 100644
--- a/deps/npm/man/man7/package-spec.7
+++ b/deps/npm/man/man7/package-spec.7
@@ -1,4 +1,4 @@
-.TH "PACKAGE-SPEC" "7" "July 2023" "" ""
+.TH "PACKAGE-SPEC" "7" "September 2023" "" ""
.SH "NAME"
\fBpackage-spec\fR - Package name specifier
.SS "Description"
diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7
index 9b68a2a761543b..f0a8460b65464e 100644
--- a/deps/npm/man/man7/registry.7
+++ b/deps/npm/man/man7/registry.7
@@ -1,4 +1,4 @@
-.TH "REGISTRY" "7" "July 2023" "" ""
+.TH "REGISTRY" "7" "September 2023" "" ""
.SH "NAME"
\fBregistry\fR - The JavaScript Package Registry
.SS "Description"
diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7
index 1ae685b6f126e3..d1e3149c6be142 100644
--- a/deps/npm/man/man7/removal.7
+++ b/deps/npm/man/man7/removal.7
@@ -1,4 +1,4 @@
-.TH "REMOVAL" "7" "July 2023" "" ""
+.TH "REMOVAL" "7" "September 2023" "" ""
.SH "NAME"
\fBremoval\fR - Cleaning the Slate
.SS "Synopsis"
diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7
index 04dc80fd662669..1f2a9565a792c4 100644
--- a/deps/npm/man/man7/scope.7
+++ b/deps/npm/man/man7/scope.7
@@ -1,4 +1,4 @@
-.TH "SCOPE" "7" "July 2023" "" ""
+.TH "SCOPE" "7" "September 2023" "" ""
.SH "NAME"
\fBscope\fR - Scoped packages
.SS "Description"
diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7
index 043b296f90baa5..6fdf8c7a903d15 100644
--- a/deps/npm/man/man7/scripts.7
+++ b/deps/npm/man/man7/scripts.7
@@ -1,4 +1,4 @@
-.TH "SCRIPTS" "7" "July 2023" "" ""
+.TH "SCRIPTS" "7" "September 2023" "" ""
.SH "NAME"
\fBscripts\fR - How npm handles the "scripts" field
.SS "Description"
diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7
index 8f11ac9a9f67f5..cac34f55ad07d4 100644
--- a/deps/npm/man/man7/workspaces.7
+++ b/deps/npm/man/man7/workspaces.7
@@ -1,4 +1,4 @@
-.TH "WORKSPACES" "7" "July 2023" "" ""
+.TH "WORKSPACES" "7" "September 2023" "" ""
.SH "NAME"
\fBworkspaces\fR - Working with workspaces
.SS "Description"
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/@npmcli/agent/lib/agents.js
new file mode 100644
index 00000000000000..7d32768817c18f
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/lib/agents.js
@@ -0,0 +1,199 @@
+'use strict'
+
+const http = require('http')
+const https = require('https')
+const net = require('net')
+const tls = require('tls')
+const { once } = require('events')
+const { createTimeout, abortRace, urlify, appendPort, cacheAgent } = require('./util')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, getProxyType, proxyCache } = require('./proxy.js')
+const Errors = require('./errors.js')
+
+const createAgent = (base, name) => {
+ const SECURE = base === https
+ const SOCKET_TYPE = SECURE ? tls : net
+
+ const agent = class extends base.Agent {
+ #options
+ #timeouts
+ #proxy
+ #socket
+
+ constructor (_options) {
+ const { timeouts, proxy, noProxy, ...options } = normalizeOptions(_options)
+
+ super(options)
+
+ this.#options = options
+ this.#timeouts = timeouts
+ this.#proxy = proxy ? { proxies: getProxyType(proxy), proxy: urlify(proxy), noProxy } : null
+ }
+
+ get proxy () {
+ return this.#proxy ? { url: this.#proxy.proxy } : {}
+ }
+
+ #getProxy (options) {
+ const proxy = this.#proxy
+ ? getProxy(appendPort(`${options.protocol}//${options.host}`, options.port), this.#proxy)
+ : null
+
+ if (!proxy) {
+ return
+ }
+
+ return cacheAgent({
+ key: cacheOptions({
+ ...options,
+ ...this.#options,
+ secure: SECURE,
+ timeouts: this.#timeouts,
+ proxy,
+ }),
+ cache: proxyCache,
+ secure: SECURE,
+ proxies: this.#proxy.proxies,
+ }, proxy, this.#options)
+ }
+
+ #setKeepAlive (socket) {
+ socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
+ socket.setNoDelay(this.keepAlive)
+ }
+
+ #setIdleTimeout (socket, options) {
+ if (this.#timeouts.idle) {
+ socket.setTimeout(this.#timeouts.idle, () => {
+ socket.destroy(new Errors.IdleTimeoutError(options))
+ })
+ }
+ }
+
+ async #proxyConnect (proxy, request, options) {
+ // socks-proxy-agent accepts a dns lookup function
+ options.lookup ??= this.#options.lookup
+
+ // all the proxy agents use this secureEndpoint option to determine
+ // if the proxy should connect over tls or not. we can set it based
+ // on if the HttpAgent or HttpsAgent is used.
+ options.secureEndpoint = SECURE
+
+ const socket = await abortRace([
+ (ac) => createTimeout(this.#timeouts.connection, ac).catch(() => {
+ throw new Errors.ConnectionTimeoutError(options)
+ }),
+ (ac) => proxy.connect(request, options).then((s) => {
+ this.#setKeepAlive(s)
+
+ const connectEvent = SECURE ? 'secureConnect' : 'connect'
+ const connectingEvent = SECURE ? 'secureConnecting' : 'connecting'
+
+ if (!s[connectingEvent]) {
+ return s
+ }
+
+ return abortRace([
+ () => once(s, 'error', ac).then((err) => {
+ throw err
+ }),
+ () => once(s, connectEvent, ac).then(() => s),
+ ], ac)
+ }),
+ ])
+
+ this.#setIdleTimeout(socket, options)
+
+ return socket
+ }
+
+ async connect (request, options) {
+ const proxy = this.#getProxy(options)
+ if (proxy) {
+ return this.#proxyConnect(proxy, request, options)
+ }
+
+ const socket = SOCKET_TYPE.connect(options)
+
+ this.#setKeepAlive(socket)
+
+ await abortRace([
+ (s) => createTimeout(this.#timeouts.connection, s).catch(() => {
+ throw new Errors.ConnectionTimeoutError(options)
+ }),
+ (s) => once(socket, 'error', s).then((err) => {
+ throw err
+ }),
+ (s) => once(socket, 'connect', s),
+ ])
+
+ this.#setIdleTimeout(socket, options)
+
+ return socket
+ }
+
+ addRequest (request, options) {
+ const proxy = this.#getProxy(options)
+ // it would be better to call proxy.addRequest here but this causes the
+ // http-proxy-agent to call its super.addRequest which causes the request
+ // to be added to the agent twice. since we only support 3 agents
+ // currently (see the required agents in proxy.js) we have manually
+ // checked that the only public methods we need to call are called in the
+ // next block. this could change in the future and presumably we would get
+ // failing tests until we have properly called the necessary methods on
+ // each of our proxy agents
+ if (proxy?.setRequestProps) {
+ proxy.setRequestProps(request, options)
+ }
+
+ request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
+
+ const responseTimeout = createTimeout(this.#timeouts.response)
+ if (responseTimeout) {
+ request.once('finish', () => {
+ responseTimeout.start(() => {
+ request.destroy(new Errors.ResponseTimeoutError(request, this.proxy?.url))
+ })
+ })
+ request.once('response', () => {
+ responseTimeout.clear()
+ })
+ }
+
+ const transferTimeout = createTimeout(this.#timeouts.transfer)
+ if (transferTimeout) {
+ request.once('response', (res) => {
+ transferTimeout.start(() => {
+ res.destroy(new Errors.TransferTimeoutError(request, this.proxy?.url))
+ })
+ res.once('close', () => {
+ transferTimeout.clear()
+ })
+ })
+ }
+
+ return super.addRequest(request, options)
+ }
+
+ createSocket (req, options, cb) {
+ return Promise.resolve()
+ .then(() => this.connect(req, options))
+ .then((socket) => {
+ this.#socket = socket
+ return super.createSocket(req, options, cb)
+ }, cb)
+ }
+
+ createConnection () {
+ return this.#socket
+ }
+ }
+
+ Object.defineProperty(agent, 'name', { value: name })
+ return agent
+}
+
+module.exports = {
+ HttpAgent: createAgent(http, 'HttpAgent'),
+ HttpsAgent: createAgent(https, 'HttpsAgent'),
+}
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/@npmcli/agent/lib/dns.js
new file mode 100644
index 00000000000000..3c6946c566d736
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/lib/dns.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const dns = require('dns')
+
+// this is a factory so that each request can have its own opts (i.e. ttl)
+// while still sharing the cache across all requests
+const cache = new LRUCache({ max: 50 })
+
+const getOptions = ({
+ family = 0,
+ hints = dns.ADDRCONFIG,
+ all = false,
+ verbatim = undefined,
+ ttl = 5 * 60 * 1000,
+ lookup = dns.lookup,
+}) => ({
+ // hints and lookup are returned since both are top level properties to (net|tls).connect
+ hints,
+ lookup: (hostname, ...args) => {
+ const callback = args.pop() // callback is always last arg
+ const lookupOptions = args[0] ?? {}
+
+ const options = {
+ family,
+ hints,
+ all,
+ verbatim,
+ ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
+ }
+
+ const key = JSON.stringify({ hostname, ...options })
+
+ if (cache.has(key)) {
+ const cached = cache.get(key)
+ return process.nextTick(callback, null, ...cached)
+ }
+
+ lookup(hostname, options, (err, ...result) => {
+ if (err) {
+ return callback(err)
+ }
+
+ cache.set(key, result, { ttl })
+ return callback(null, ...result)
+ })
+ },
+})
+
+module.exports = {
+ cache,
+ getOptions,
+}
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/@npmcli/agent/lib/errors.js
new file mode 100644
index 00000000000000..f41b4a065d713e
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/lib/errors.js
@@ -0,0 +1,65 @@
+'use strict'
+
+const { appendPort } = require('./util')
+
+class InvalidProxyProtocolError extends Error {
+ constructor (url) {
+ super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
+ this.code = 'EINVALIDPROXY'
+ this.proxy = url
+ }
+}
+
+class ConnectionTimeoutError extends Error {
+ constructor ({ host, port }) {
+ host = appendPort(host, port)
+ super(`Timeout connecting to host \`${host}\``)
+ this.code = 'ECONNECTIONTIMEOUT'
+ this.host = host
+ }
+}
+
+class IdleTimeoutError extends Error {
+ constructor ({ host, port }) {
+ host = appendPort(host, port)
+ super(`Idle timeout reached for host \`${host}\``)
+ this.code = 'EIDLETIMEOUT'
+ this.host = host
+ }
+}
+
+class ResponseTimeoutError extends Error {
+ constructor (request, proxy) {
+ let msg = 'Response timeout '
+ if (proxy) {
+ msg += `from proxy \`${proxy.host}\` `
+ }
+ msg += `connecting to host \`${request.host}\``
+ super(msg)
+ this.code = 'ERESPONSETIMEOUT'
+ this.proxy = proxy
+ this.request = request
+ }
+}
+
+class TransferTimeoutError extends Error {
+ constructor (request, proxy) {
+ let msg = 'Transfer timeout '
+ if (proxy) {
+ msg += `from proxy \`${proxy.host}\` `
+ }
+ msg += `for \`${request.host}\``
+ super(msg)
+ this.code = 'ETRANSFERTIMEOUT'
+ this.proxy = proxy
+ this.request = request
+ }
+}
+
+module.exports = {
+ InvalidProxyProtocolError,
+ ConnectionTimeoutError,
+ IdleTimeoutError,
+ ResponseTimeoutError,
+ TransferTimeoutError,
+}
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/@npmcli/agent/lib/index.js
new file mode 100644
index 00000000000000..2cd69390ea77e9
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/lib/index.js
@@ -0,0 +1,46 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const { urlify, cacheAgent } = require('./util')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, proxyCache } = require('./proxy.js')
+const dns = require('./dns.js')
+const { HttpAgent, HttpsAgent } = require('./agents.js')
+
+const agentCache = new LRUCache({ max: 20 })
+
+const getAgent = (url, { agent: _agent, proxy: _proxy, noProxy, ..._options } = {}) => {
+ // false has meaning so this can't be a simple truthiness check
+ if (_agent != null) {
+ return _agent
+ }
+
+ url = urlify(url)
+
+ const secure = url.protocol === 'https:'
+ const proxy = getProxy(url, { proxy: _proxy, noProxy })
+ const options = { ...normalizeOptions(_options), proxy }
+
+ return cacheAgent({
+ key: cacheOptions({ ...options, secure }),
+ cache: agentCache,
+ secure,
+ proxies: [HttpAgent, HttpsAgent],
+ }, options)
+}
+
+module.exports = {
+ getAgent,
+ HttpAgent,
+ HttpsAgent,
+ cache: {
+ proxy: proxyCache,
+ agent: agentCache,
+ dns: dns.cache,
+ clear: () => {
+ proxyCache.clear()
+ agentCache.clear()
+ dns.cache.clear()
+ },
+ },
+}
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/@npmcli/agent/lib/options.js
new file mode 100644
index 00000000000000..cd87c09d6a25ad
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/lib/options.js
@@ -0,0 +1,74 @@
+'use strict'
+
+const dns = require('./dns')
+const { createKey } = require('./util')
+
+const normalizeOptions = (opts) => {
+ const family = parseInt(opts.family ?? '0', 10)
+ const keepAlive = opts.keepAlive ?? true
+
+ const normalized = {
+ // nodejs http agent options. these are all the defaults
+ // but kept here to increase the likelihood of cache hits
+ // https://nodejs.org/api/http.html#new-agentoptions
+ keepAliveMsecs: keepAlive ? 1000 : undefined,
+ maxSockets: opts.maxSockets ?? 15,
+ maxTotalSockets: Infinity,
+ maxFreeSockets: keepAlive ? 256 : undefined,
+ scheduling: 'fifo',
+ // then spread the rest of the options
+ ...opts,
+ // we already set these to their defaults that we want
+ family,
+ keepAlive,
+ // our custom timeout options
+ timeouts: {
+ // the standard timeout option is mapped to our idle timeout
+ // and then deleted below
+ idle: opts.timeout ?? 0,
+ connection: 0,
+ response: 0,
+ transfer: 0,
+ ...opts.timeouts,
+ },
+ // get the dns options that go at the top level of socket connection
+ ...dns.getOptions({ family, ...opts.dns }),
+ }
+
+ // remove timeout since we already used it to set our own idle timeout
+ delete normalized.timeout
+
+ return normalized
+}
+
+const cacheOptions = (options) => {
+ const { secure } = options
+ return createKey({
+ secure: !!secure,
+ // socket connect options
+ family: options.family,
+ hints: options.hints,
+ localAddress: options.localAddress,
+ // tls specific connect options
+ strictSsl: secure ? !!options.rejectUnauthorized : false,
+ ca: secure ? options.ca : null,
+ cert: secure ? options.cert : null,
+ key: secure ? options.key : null,
+ // http agent options
+ keepAlive: options.keepAlive,
+ keepAliveMsecs: options.keepAliveMsecs,
+ maxSockets: options.maxSockets,
+ maxTotalSockets: options.maxTotalSockets,
+ maxFreeSockets: options.maxFreeSockets,
+ scheduling: options.scheduling,
+ // timeout options
+ timeouts: options.timeouts,
+ // proxy
+ proxy: options.proxy,
+ })
+}
+
+module.exports = {
+ normalizeOptions,
+ cacheOptions,
+}
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/@npmcli/agent/lib/proxy.js
new file mode 100644
index 00000000000000..babedad45ff99f
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/lib/proxy.js
@@ -0,0 +1,88 @@
+'use strict'
+
+const { HttpProxyAgent } = require('http-proxy-agent')
+const { HttpsProxyAgent } = require('https-proxy-agent')
+const { SocksProxyAgent } = require('socks-proxy-agent')
+const { LRUCache } = require('lru-cache')
+const { InvalidProxyProtocolError } = require('./errors.js')
+const { urlify } = require('./util.js')
+
+const PROXY_CACHE = new LRUCache({ max: 20 })
+
+const PROXY_ENV = (() => {
+ const keys = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
+ const values = {}
+ for (let [key, value] of Object.entries(process.env)) {
+ key = key.toLowerCase()
+ if (keys.has(key)) {
+ values[key] = value
+ }
+ }
+ return values
+})()
+
+const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
+
+const getProxyType = (url) => {
+ url = urlify(url)
+
+ const protocol = url.protocol.slice(0, -1)
+ if (SOCKS_PROTOCOLS.has(protocol)) {
+ return [SocksProxyAgent]
+ }
+ if (protocol === 'https' || protocol === 'http') {
+ return [HttpProxyAgent, HttpsProxyAgent]
+ }
+
+ throw new InvalidProxyProtocolError(url)
+}
+
+const isNoProxy = (url, noProxy) => {
+ if (typeof noProxy === 'string') {
+ noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
+ }
+
+ if (!noProxy || !noProxy.length) {
+ return false
+ }
+
+ const hostSegments = url.hostname.split('.').reverse()
+
+ return noProxy.some((no) => {
+ const noSegments = no.split('.').filter(Boolean).reverse()
+ if (!noSegments.length) {
+ return false
+ }
+
+ for (let i = 0; i < noSegments.length; i++) {
+ if (hostSegments[i] !== noSegments[i]) {
+ return false
+ }
+ }
+
+ return true
+ })
+}
+
+const getProxy = (url, {
+ proxy = PROXY_ENV.https_proxy,
+ noProxy = PROXY_ENV.no_proxy,
+}) => {
+ url = urlify(url)
+
+ if (!proxy && url.protocol !== 'https:') {
+ proxy = PROXY_ENV.http_proxy || PROXY_ENV.proxy
+ }
+
+ if (!proxy || isNoProxy(url, noProxy)) {
+ return null
+ }
+
+ return urlify(proxy)
+}
+
+module.exports = {
+ getProxyType,
+ getProxy,
+ proxyCache: PROXY_CACHE,
+}
diff --git a/deps/npm/node_modules/@npmcli/agent/lib/util.js b/deps/npm/node_modules/@npmcli/agent/lib/util.js
new file mode 100644
index 00000000000000..6d42a2e202c1f9
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/lib/util.js
@@ -0,0 +1,84 @@
+'use strict'
+
+const timers = require('timers/promises')
+
+const createKey = (obj) => {
+ let key = ''
+ const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0])
+ for (let [k, v] of sorted) {
+ if (v == null) {
+ v = 'null'
+ } else if (v instanceof URL) {
+ v = v.toString()
+ } else if (typeof v === 'object') {
+ v = createKey(v)
+ }
+ key += `${k}:${v}:`
+ }
+ return key
+}
+
+const createTimeout = (delay, signal) => {
+ if (!delay) {
+ return signal ? new Promise(() => {}) : null
+ }
+
+ if (!signal) {
+ let timeout
+ return {
+ start: (cb) => (timeout = setTimeout(cb, delay)),
+ clear: () => clearTimeout(timeout),
+ }
+ }
+
+ return timers.setTimeout(delay, null, signal)
+ .then(() => {
+ throw new Error()
+ }).catch((err) => {
+ if (err.name === 'AbortError') {
+ return
+ }
+ throw err
+ })
+}
+
+const abortRace = async (promises, ac = new AbortController()) => {
+ let res
+ try {
+ res = await Promise.race(promises.map((p) => p(ac)))
+ ac.abort()
+ } catch (err) {
+ ac.abort()
+ throw err
+ }
+ return res
+}
+
+const urlify = (url) => typeof url === 'string' ? new URL(url) : url
+
+const appendPort = (host, port) => {
+ // istanbul ignore next
+ if (port) {
+ host += `:${port}`
+ }
+ return host
+}
+
+const cacheAgent = ({ key, cache, secure, proxies }, ...args) => {
+ if (cache.has(key)) {
+ return cache.get(key)
+ }
+ const Ctor = (secure ? proxies[1] : proxies[0]) ?? proxies[0]
+ const agent = new Ctor(...args)
+ cache.set(key, agent)
+ return agent
+}
+
+module.exports = {
+ createKey,
+ createTimeout,
+ abortRace,
+ urlify,
+ cacheAgent,
+ appendPort,
+}
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js
new file mode 100644
index 00000000000000..ef3f92022d455d
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/helpers.js
@@ -0,0 +1,66 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.req = exports.json = exports.toBuffer = void 0;
+const http = __importStar(require("http"));
+const https = __importStar(require("https"));
+async function toBuffer(stream) {
+ let length = 0;
+ const chunks = [];
+ for await (const chunk of stream) {
+ length += chunk.length;
+ chunks.push(chunk);
+ }
+ return Buffer.concat(chunks, length);
+}
+exports.toBuffer = toBuffer;
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+async function json(stream) {
+ const buf = await toBuffer(stream);
+ const str = buf.toString('utf8');
+ try {
+ return JSON.parse(str);
+ }
+ catch (_err) {
+ const err = _err;
+ err.message += ` (input: ${str})`;
+ throw err;
+ }
+}
+exports.json = json;
+function req(url, opts = {}) {
+ const href = typeof url === 'string' ? url : url.href;
+ const req = (href.startsWith('https:') ? https : http).request(url, opts);
+ const promise = new Promise((resolve, reject) => {
+ req
+ .once('response', resolve)
+ .once('error', reject)
+ .end();
+ });
+ req.then = promise.then.bind(promise);
+ return req;
+}
+exports.req = req;
+//# sourceMappingURL=helpers.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js
new file mode 100644
index 00000000000000..7bafc8c68604f3
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/dist/index.js
@@ -0,0 +1,112 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Agent = void 0;
+const http = __importStar(require("http"));
+__exportStar(require("./helpers"), exports);
+const INTERNAL = Symbol('AgentBaseInternalState');
+class Agent extends http.Agent {
+ constructor(opts) {
+ super(opts);
+ this[INTERNAL] = {};
+ }
+ /**
+ * Determine whether this is an `http` or `https` request.
+ */
+ isSecureEndpoint(options) {
+ if (options) {
+ // First check the `secureEndpoint` property explicitly, since this
+ // means that a parent `Agent` is "passing through" to this instance.
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ if (typeof options.secureEndpoint === 'boolean') {
+ return options.secureEndpoint;
+ }
+ // If no explicit `secure` endpoint, check if `protocol` property is
+ // set. This will usually be the case since using a full string URL
+ // or `URL` instance should be the most common usage.
+ if (typeof options.protocol === 'string') {
+ return options.protocol === 'https:';
+ }
+ }
+ // Finally, if no `protocol` property was set, then fall back to
+ // checking the stack trace of the current call stack, and try to
+ // detect the "https" module.
+ const { stack } = new Error();
+ if (typeof stack !== 'string')
+ return false;
+ return stack
+ .split('\n')
+ .some((l) => l.indexOf('(https.js:') !== -1 ||
+ l.indexOf('node:https:') !== -1);
+ }
+ createSocket(req, options, cb) {
+ const connectOpts = {
+ ...options,
+ secureEndpoint: this.isSecureEndpoint(options),
+ };
+ Promise.resolve()
+ .then(() => this.connect(req, connectOpts))
+ .then((socket) => {
+ if (socket instanceof http.Agent) {
+ // @ts-expect-error `addRequest()` isn't defined in `@types/node`
+ return socket.addRequest(req, connectOpts);
+ }
+ this[INTERNAL].currentSocket = socket;
+ // @ts-expect-error `createSocket()` isn't defined in `@types/node`
+ super.createSocket(req, options, cb);
+ }, cb);
+ }
+ createConnection() {
+ const socket = this[INTERNAL].currentSocket;
+ this[INTERNAL].currentSocket = undefined;
+ if (!socket) {
+ throw new Error('No socket was returned in the `connect()` function');
+ }
+ return socket;
+ }
+ get defaultPort() {
+ return (this[INTERNAL].defaultPort ??
+ (this.protocol === 'https:' ? 443 : 80));
+ }
+ set defaultPort(v) {
+ if (this[INTERNAL]) {
+ this[INTERNAL].defaultPort = v;
+ }
+ }
+ get protocol() {
+ return (this[INTERNAL].protocol ??
+ (this.isSecureEndpoint() ? 'https:' : 'http:'));
+ }
+ set protocol(v) {
+ if (this[INTERNAL]) {
+ this[INTERNAL].protocol = v;
+ }
+ }
+}
+exports.Agent = Agent;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/package.json b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/package.json
new file mode 100644
index 00000000000000..7178f4983f4fb9
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/agent-base/package.json
@@ -0,0 +1,49 @@
+{
+ "name": "agent-base",
+ "version": "7.1.0",
+ "description": "Turn a function into an `http.Agent` instance",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/TooTallNate/proxy-agents.git",
+ "directory": "packages/agent-base"
+ },
+ "keywords": [
+ "http",
+ "agent",
+ "base",
+ "barebones",
+ "https"
+ ],
+ "author": "Nathan Rajlich (http://n8.io/)",
+ "license": "MIT",
+ "dependencies": {
+ "debug": "^4.3.4"
+ },
+ "devDependencies": {
+ "@types/debug": "^4.1.7",
+ "@types/jest": "^29.5.1",
+ "@types/node": "^14.18.45",
+ "@types/semver": "^7.3.13",
+ "@types/ws": "^6.0.4",
+ "async-listen": "^3.0.0",
+ "jest": "^29.5.0",
+ "ts-jest": "^29.1.0",
+ "typescript": "^5.0.4",
+ "ws": "^3.3.3",
+ "tsconfig": "0.0.0"
+ },
+ "engines": {
+ "node": ">= 14"
+ },
+ "scripts": {
+ "build": "tsc",
+ "test": "jest --env node --verbose --bail",
+ "lint": "eslint . --ext .ts",
+ "pack": "node ../../scripts/pack.mjs"
+ }
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/depd/LICENSE b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE
similarity index 92%
rename from deps/npm/node_modules/depd/LICENSE
rename to deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE
index 248de7af2bd16c..aad14057fad570 100644
--- a/deps/npm/node_modules/depd/LICENSE
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/LICENSE
@@ -1,6 +1,9 @@
+License
+-------
+
(The MIT License)
-Copyright (c) 2014-2018 Douglas Christopher Wilson
+Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js
new file mode 100644
index 00000000000000..4a7daf6156f941
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/dist/index.js
@@ -0,0 +1,147 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HttpProxyAgent = void 0;
+const net = __importStar(require("net"));
+const tls = __importStar(require("tls"));
+const debug_1 = __importDefault(require("debug"));
+const events_1 = require("events");
+const agent_base_1 = require("agent-base");
+const debug = (0, debug_1.default)('http-proxy-agent');
+/**
+ * The `HttpProxyAgent` implements an HTTP Agent subclass that connects
+ * to the specified "HTTP proxy server" in order to proxy HTTP requests.
+ */
+class HttpProxyAgent extends agent_base_1.Agent {
+ constructor(proxy, opts) {
+ super(opts);
+ this.proxy = typeof proxy === 'string' ? new URL(proxy) : proxy;
+ this.proxyHeaders = opts?.headers ?? {};
+ debug('Creating new HttpProxyAgent instance: %o', this.proxy.href);
+ // Trim off the brackets from IPv6 addresses
+ const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, '');
+ const port = this.proxy.port
+ ? parseInt(this.proxy.port, 10)
+ : this.proxy.protocol === 'https:'
+ ? 443
+ : 80;
+ this.connectOpts = {
+ ...(opts ? omit(opts, 'headers') : null),
+ host,
+ port,
+ };
+ }
+ addRequest(req, opts) {
+ req._header = null;
+ this.setRequestProps(req, opts);
+ // @ts-expect-error `addRequest()` isn't defined in `@types/node`
+ super.addRequest(req, opts);
+ }
+ setRequestProps(req, opts) {
+ const { proxy } = this;
+ const protocol = opts.secureEndpoint ? 'https:' : 'http:';
+ const hostname = req.getHeader('host') || 'localhost';
+ const base = `${protocol}//${hostname}`;
+ const url = new URL(req.path, base);
+ if (opts.port !== 80) {
+ url.port = String(opts.port);
+ }
+ // Change the `http.ClientRequest` instance's "path" field
+ // to the absolute path of the URL that will be requested.
+ req.path = String(url);
+ // Inject the `Proxy-Authorization` header if necessary.
+ const headers = typeof this.proxyHeaders === 'function'
+ ? this.proxyHeaders()
+ : { ...this.proxyHeaders };
+ if (proxy.username || proxy.password) {
+ const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;
+ headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`;
+ }
+ if (!headers['Proxy-Connection']) {
+ headers['Proxy-Connection'] = this.keepAlive
+ ? 'Keep-Alive'
+ : 'close';
+ }
+ for (const name of Object.keys(headers)) {
+ const value = headers[name];
+ if (value) {
+ req.setHeader(name, value);
+ }
+ }
+ }
+ async connect(req, opts) {
+ req._header = null;
+ if (!req.path.includes('://')) {
+ this.setRequestProps(req, opts);
+ }
+ // At this point, the http ClientRequest's internal `_header` field
+ // might have already been set. If this is the case then we'll need
+ // to re-generate the string since we just changed the `req.path`.
+ let first;
+ let endOfHeaders;
+ debug('Regenerating stored HTTP header string for request');
+ req._implicitHeader();
+ if (req.outputData && req.outputData.length > 0) {
+ debug('Patching connection write() output buffer with updated header');
+ first = req.outputData[0].data;
+ endOfHeaders = first.indexOf('\r\n\r\n') + 4;
+ req.outputData[0].data =
+ req._header + first.substring(endOfHeaders);
+ debug('Output buffer: %o', req.outputData[0].data);
+ }
+ // Create a socket connection to the proxy server.
+ let socket;
+ if (this.proxy.protocol === 'https:') {
+ debug('Creating `tls.Socket`: %o', this.connectOpts);
+ socket = tls.connect(this.connectOpts);
+ }
+ else {
+ debug('Creating `net.Socket`: %o', this.connectOpts);
+ socket = net.connect(this.connectOpts);
+ }
+ // Wait for the socket's `connect` event, so that this `callback()`
+ // function throws instead of the `http` request machinery. This is
+ // important for i.e. `PacProxyAgent` which determines a failed proxy
+ // connection via the `callback()` function throwing.
+ await (0, events_1.once)(socket, 'connect');
+ return socket;
+ }
+}
+HttpProxyAgent.protocols = ['http', 'https'];
+exports.HttpProxyAgent = HttpProxyAgent;
+function omit(obj, ...keys) {
+ const ret = {};
+ let key;
+ for (key in obj) {
+ if (!keys.includes(key)) {
+ ret[key] = obj[key];
+ }
+ }
+ return ret;
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json
new file mode 100644
index 00000000000000..08c650cbb22aa4
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/http-proxy-agent/package.json
@@ -0,0 +1,47 @@
+{
+ "name": "http-proxy-agent",
+ "version": "7.0.0",
+ "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/TooTallNate/proxy-agents.git",
+ "directory": "packages/http-proxy-agent"
+ },
+ "keywords": [
+ "http",
+ "proxy",
+ "endpoint",
+ "agent"
+ ],
+ "author": "Nathan Rajlich (http://n8.io/)",
+ "license": "MIT",
+ "dependencies": {
+ "agent-base": "^7.1.0",
+ "debug": "^4.3.4"
+ },
+ "devDependencies": {
+ "@types/debug": "^4.1.7",
+ "@types/jest": "^29.5.1",
+ "@types/node": "^14.18.45",
+ "async-listen": "^3.0.0",
+ "jest": "^29.5.0",
+ "ts-jest": "^29.1.0",
+ "typescript": "^5.0.4",
+ "proxy": "2.1.1",
+ "tsconfig": "0.0.0"
+ },
+ "engines": {
+ "node": ">= 14"
+ },
+ "scripts": {
+ "build": "tsc",
+ "test": "jest --env node --verbose --bail",
+ "lint": "eslint . --ext .ts",
+ "pack": "node ../../scripts/pack.mjs"
+ }
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js
new file mode 100644
index 00000000000000..e3bbfe632c454d
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/index.js
@@ -0,0 +1,170 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HttpsProxyAgent = void 0;
+const net = __importStar(require("net"));
+const tls = __importStar(require("tls"));
+const assert_1 = __importDefault(require("assert"));
+const debug_1 = __importDefault(require("debug"));
+const agent_base_1 = require("agent-base");
+const parse_proxy_response_1 = require("./parse-proxy-response");
+const debug = (0, debug_1.default)('https-proxy-agent');
+/**
+ * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to
+ * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests.
+ *
+ * Outgoing HTTP requests are first tunneled through the proxy server using the
+ * `CONNECT` HTTP request method to establish a connection to the proxy server,
+ * and then the proxy server connects to the destination target and issues the
+ * HTTP request from the proxy server.
+ *
+ * `https:` requests have their socket connection upgraded to TLS once
+ * the connection to the proxy server has been established.
+ */
+class HttpsProxyAgent extends agent_base_1.Agent {
+ constructor(proxy, opts) {
+ super(opts);
+ this.options = { path: undefined };
+ this.proxy = typeof proxy === 'string' ? new URL(proxy) : proxy;
+ this.proxyHeaders = opts?.headers ?? {};
+ debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href);
+ // Trim off the brackets from IPv6 addresses
+ const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, '');
+ const port = this.proxy.port
+ ? parseInt(this.proxy.port, 10)
+ : this.proxy.protocol === 'https:'
+ ? 443
+ : 80;
+ this.connectOpts = {
+ // Attempt to negotiate http/1.1 for proxy servers that support http/2
+ ALPNProtocols: ['http/1.1'],
+ ...(opts ? omit(opts, 'headers') : null),
+ host,
+ port,
+ };
+ }
+ /**
+ * Called when the node-core HTTP client library is creating a
+ * new HTTP request.
+ */
+ async connect(req, opts) {
+ const { proxy } = this;
+ if (!opts.host) {
+ throw new TypeError('No "host" provided');
+ }
+ // Create a socket connection to the proxy server.
+ let socket;
+ if (proxy.protocol === 'https:') {
+ debug('Creating `tls.Socket`: %o', this.connectOpts);
+ socket = tls.connect(this.connectOpts);
+ }
+ else {
+ debug('Creating `net.Socket`: %o', this.connectOpts);
+ socket = net.connect(this.connectOpts);
+ }
+ const headers = typeof this.proxyHeaders === 'function'
+ ? this.proxyHeaders()
+ : { ...this.proxyHeaders };
+ const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host;
+ let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`;
+ // Inject the `Proxy-Authorization` header if necessary.
+ if (proxy.username || proxy.password) {
+ const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;
+ headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`;
+ }
+ headers.Host = `${host}:${opts.port}`;
+ if (!headers['Proxy-Connection']) {
+ headers['Proxy-Connection'] = this.keepAlive
+ ? 'Keep-Alive'
+ : 'close';
+ }
+ for (const name of Object.keys(headers)) {
+ payload += `${name}: ${headers[name]}\r\n`;
+ }
+ const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket);
+ socket.write(`${payload}\r\n`);
+ const { connect, buffered } = await proxyResponsePromise;
+ req.emit('proxyConnect', connect);
+ this.emit('proxyConnect', connect, req);
+ if (connect.statusCode === 200) {
+ req.once('socket', resume);
+ if (opts.secureEndpoint) {
+ // The proxy is connecting to a TLS server, so upgrade
+ // this socket connection to a TLS connection.
+ debug('Upgrading socket connection to TLS');
+ const servername = opts.servername || opts.host;
+ return tls.connect({
+ ...omit(opts, 'host', 'path', 'port'),
+ socket,
+ servername: net.isIP(servername) ? undefined : servername,
+ });
+ }
+ return socket;
+ }
+ // Some other status code that's not 200... need to re-play the HTTP
+ // header "data" events onto the socket once the HTTP machinery is
+ // attached so that the node core `http` can parse and handle the
+ // error status code.
+ // Close the original socket, and a new "fake" socket is returned
+ // instead, so that the proxy doesn't get the HTTP request
+ // written to it (which may contain `Authorization` headers or other
+ // sensitive data).
+ //
+ // See: https://hackerone.com/reports/541502
+ socket.destroy();
+ const fakeSocket = new net.Socket({ writable: false });
+ fakeSocket.readable = true;
+ // Need to wait for the "socket" event to re-play the "data" events.
+ req.once('socket', (s) => {
+ debug('Replaying proxy buffer for failed request');
+ (0, assert_1.default)(s.listenerCount('data') > 0);
+ // Replay the "buffered" Buffer onto the fake `socket`, since at
+ // this point the HTTP module machinery has been hooked up for
+ // the user.
+ s.push(buffered);
+ s.push(null);
+ });
+ return fakeSocket;
+ }
+}
+HttpsProxyAgent.protocols = ['http', 'https'];
+exports.HttpsProxyAgent = HttpsProxyAgent;
+function resume(socket) {
+ socket.resume();
+}
+function omit(obj, ...keys) {
+ const ret = {};
+ let key;
+ for (key in obj) {
+ if (!keys.includes(key)) {
+ ret[key] = obj[key];
+ }
+ }
+ return ret;
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js
new file mode 100644
index 00000000000000..a28f1d811805f8
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/dist/parse-proxy-response.js
@@ -0,0 +1,98 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseProxyResponse = void 0;
+const debug_1 = __importDefault(require("debug"));
+const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response');
+function parseProxyResponse(socket) {
+ return new Promise((resolve, reject) => {
+ // we need to buffer any HTTP traffic that happens with the proxy before we get
+ // the CONNECT response, so that if the response is anything other than an "200"
+ // response code, then we can re-play the "data" events on the socket once the
+ // HTTP parser is hooked up...
+ let buffersLength = 0;
+ const buffers = [];
+ function read() {
+ const b = socket.read();
+ if (b)
+ ondata(b);
+ else
+ socket.once('readable', read);
+ }
+ function cleanup() {
+ socket.removeListener('end', onend);
+ socket.removeListener('error', onerror);
+ socket.removeListener('readable', read);
+ }
+ function onend() {
+ cleanup();
+ debug('onend');
+ reject(new Error('Proxy connection ended before receiving CONNECT response'));
+ }
+ function onerror(err) {
+ cleanup();
+ debug('onerror %o', err);
+ reject(err);
+ }
+ function ondata(b) {
+ buffers.push(b);
+ buffersLength += b.length;
+ const buffered = Buffer.concat(buffers, buffersLength);
+ const endOfHeaders = buffered.indexOf('\r\n\r\n');
+ if (endOfHeaders === -1) {
+ // keep buffering
+ debug('have not received end of HTTP headers yet...');
+ read();
+ return;
+ }
+ const headerParts = buffered.slice(0, endOfHeaders).toString('ascii').split('\r\n');
+ const firstLine = headerParts.shift();
+ if (!firstLine) {
+ socket.destroy();
+ return reject(new Error('No header received from proxy CONNECT response'));
+ }
+ const firstLineParts = firstLine.split(' ');
+ const statusCode = +firstLineParts[1];
+ const statusText = firstLineParts.slice(2).join(' ');
+ const headers = {};
+ for (const header of headerParts) {
+ if (!header)
+ continue;
+ const firstColon = header.indexOf(':');
+ if (firstColon === -1) {
+ socket.destroy();
+ return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`));
+ }
+ const key = header.slice(0, firstColon).toLowerCase();
+ const value = header.slice(firstColon + 1).trimStart();
+ const current = headers[key];
+ if (typeof current === 'string') {
+ headers[key] = [current, value];
+ }
+ else if (Array.isArray(current)) {
+ current.push(value);
+ }
+ else {
+ headers[key] = value;
+ }
+ }
+ debug('got proxy server response: %o %o', firstLine, headers);
+ cleanup();
+ resolve({
+ connect: {
+ statusCode,
+ statusText,
+ headers,
+ },
+ buffered,
+ });
+ }
+ socket.on('error', onerror);
+ socket.on('end', onend);
+ read();
+ });
+}
+exports.parseProxyResponse = parseProxyResponse;
+//# sourceMappingURL=parse-proxy-response.js.map
\ No newline at end of file
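
parseProxyResponse buffers socket data until the blank line that terminates the CONNECT response headers, then splits the status line from the header fields and resolves with both the parsed response and the raw buffered bytes (so they can be replayed on failure). A standalone sketch of the same parsing idea, not the module's API:

// Sketch of parsing an HTTP CONNECT response from a buffered chunk.
function parseConnectResponse(buffered) {
  const endOfHeaders = buffered.indexOf('\r\n\r\n');
  if (endOfHeaders === -1) return null; // incomplete: keep buffering

  const lines = buffered.slice(0, endOfHeaders).toString('ascii').split('\r\n');
  const [, code, ...reason] = lines.shift().split(' ');
  const headers = {};
  for (const line of lines) {
    const colon = line.indexOf(':');
    headers[line.slice(0, colon).toLowerCase()] = line.slice(colon + 1).trimStart();
  }
  return { statusCode: +code, statusText: reason.join(' '), headers };
}

// parseConnectResponse(Buffer.from('HTTP/1.1 407 Proxy Authentication Required\r\nProxy-Authenticate: Basic\r\n\r\n'))
//   -> { statusCode: 407, statusText: 'Proxy Authentication Required',
//        headers: { 'proxy-authenticate': 'Basic' } }
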
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json
new file mode 100644
index 00000000000000..fc5f988d3b02bf
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/https-proxy-agent/package.json
@@ -0,0 +1,50 @@
+{
+ "name": "https-proxy-agent",
+ "version": "7.0.1",
+ "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/TooTallNate/proxy-agents.git",
+ "directory": "packages/https-proxy-agent"
+ },
+ "keywords": [
+ "https",
+ "proxy",
+ "endpoint",
+ "agent"
+ ],
+ "author": "Nathan Rajlich (http://n8.io/)",
+ "license": "MIT",
+ "dependencies": {
+ "agent-base": "^7.0.2",
+ "debug": "4"
+ },
+ "devDependencies": {
+ "@types/async-retry": "^1.4.5",
+ "@types/debug": "4",
+ "@types/jest": "^29.5.1",
+ "@types/node": "^14.18.45",
+ "async-listen": "^3.0.0",
+ "async-retry": "^1.3.3",
+ "jest": "^29.5.0",
+ "ts-jest": "^29.1.0",
+ "typescript": "^5.0.4",
+ "proxy": "2.1.1",
+ "tsconfig": "0.0.0"
+ },
+ "engines": {
+ "node": ">= 14"
+ },
+ "scripts": {
+ "build": "tsc",
+ "test": "jest --env node --verbose --bail test/test.ts",
+ "test-e2e": "jest --env node --verbose --bail test/e2e.test.ts",
+ "lint": "eslint --ext .ts",
+ "pack": "node ../../scripts/pack.mjs"
+ }
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js b/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js
new file mode 100644
index 00000000000000..8189e014c13a0d
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/dist/index.js
@@ -0,0 +1,181 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SocksProxyAgent = void 0;
+const socks_1 = require("socks");
+const agent_base_1 = require("agent-base");
+const debug_1 = __importDefault(require("debug"));
+const dns = __importStar(require("dns"));
+const net = __importStar(require("net"));
+const tls = __importStar(require("tls"));
+const debug = (0, debug_1.default)('socks-proxy-agent');
+function parseSocksURL(url) {
+ let lookup = false;
+ let type = 5;
+ const host = url.hostname;
+ // From RFC 1928, Section 3: https://tools.ietf.org/html/rfc1928#section-3
+ // "The SOCKS service is conventionally located on TCP port 1080"
+ const port = parseInt(url.port, 10) || 1080;
+ // figure out if we want socks v4 or v5, based on the "protocol" used.
+ // Defaults to 5.
+ switch (url.protocol.replace(':', '')) {
+ case 'socks4':
+ lookup = true;
+ type = 4;
+ break;
+ // pass through
+ case 'socks4a':
+ type = 4;
+ break;
+ case 'socks5':
+ lookup = true;
+ type = 5;
+ break;
+ // pass through
+ case 'socks': // no version specified, default to 5h
+ type = 5;
+ break;
+ case 'socks5h':
+ type = 5;
+ break;
+ default:
+ throw new TypeError(`A "socks" protocol must be specified! Got: ${String(url.protocol)}`);
+ }
+ const proxy = {
+ host,
+ port,
+ type,
+ };
+ if (url.username) {
+ Object.defineProperty(proxy, 'userId', {
+ value: decodeURIComponent(url.username),
+ enumerable: false,
+ });
+ }
+ if (url.password != null) {
+ Object.defineProperty(proxy, 'password', {
+ value: decodeURIComponent(url.password),
+ enumerable: false,
+ });
+ }
+ return { lookup, proxy };
+}
+class SocksProxyAgent extends agent_base_1.Agent {
+ constructor(uri, opts) {
+ super(opts);
+ const url = typeof uri === 'string' ? new URL(uri) : uri;
+ const { proxy, lookup } = parseSocksURL(url);
+ this.shouldLookup = lookup;
+ this.proxy = proxy;
+ this.timeout = opts?.timeout ?? null;
+ }
+ /**
+ * Initiates a SOCKS connection to the specified SOCKS proxy server,
+ * which in turn connects to the specified remote host and port.
+ */
+ async connect(req, opts) {
+ const { shouldLookup, proxy, timeout } = this;
+ if (!opts.host) {
+ throw new Error('No `host` defined!');
+ }
+ let { host } = opts;
+ const { port, lookup: lookupFn = dns.lookup } = opts;
+ if (shouldLookup) {
+ // Client-side DNS resolution for "4" and "5" socks proxy versions.
+ host = await new Promise((resolve, reject) => {
+ // Use the request's custom lookup, if one was configured:
+ lookupFn(host, {}, (err, res) => {
+ if (err) {
+ reject(err);
+ }
+ else {
+ resolve(res);
+ }
+ });
+ });
+ }
+ const socksOpts = {
+ proxy,
+ destination: {
+ host,
+ port: typeof port === 'number' ? port : parseInt(port, 10),
+ },
+ command: 'connect',
+ timeout: timeout ?? undefined,
+ };
+ const cleanup = (tlsSocket) => {
+ req.destroy();
+ socket.destroy();
+ if (tlsSocket)
+ tlsSocket.destroy();
+ };
+ debug('Creating socks proxy connection: %o', socksOpts);
+ const { socket } = await socks_1.SocksClient.createConnection(socksOpts);
+ debug('Successfully created socks proxy connection');
+ if (timeout !== null) {
+ socket.setTimeout(timeout);
+ socket.on('timeout', () => cleanup());
+ }
+ if (opts.secureEndpoint) {
+ // The proxy is connecting to a TLS server, so upgrade
+ // this socket connection to a TLS connection.
+ debug('Upgrading socket connection to TLS');
+ const servername = opts.servername || opts.host;
+ const tlsSocket = tls.connect({
+ ...omit(opts, 'host', 'path', 'port'),
+ socket,
+ servername: net.isIP(servername) ? undefined : servername,
+ });
+ tlsSocket.once('error', (error) => {
+ debug('Socket TLS error', error.message);
+ cleanup(tlsSocket);
+ });
+ return tlsSocket;
+ }
+ return socket;
+ }
+}
+SocksProxyAgent.protocols = [
+ 'socks',
+ 'socks4',
+ 'socks4a',
+ 'socks5',
+ 'socks5h',
+];
+exports.SocksProxyAgent = SocksProxyAgent;
+function omit(obj, ...keys) {
+ const ret = {};
+ let key;
+ for (key in obj) {
+ if (!keys.includes(key)) {
+ ret[key] = obj[key];
+ }
+ }
+ return ret;
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
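
parseSocksURL maps the URL scheme to a SOCKS protocol version and decides where DNS resolution happens: `socks4` and `socks5` resolve hostnames locally before handing them to the proxy, while `socks4a`, `socks5h`, and bare `socks` let the proxy resolve them. A short usage sketch with a placeholder proxy address:

const https = require('https');
const { SocksProxyAgent } = require('socks-proxy-agent');

// 'socks5h' defers DNS resolution to the proxy; 'socks5' resolves locally first.
// The address below is a placeholder for a real SOCKS proxy.
const agent = new SocksProxyAgent('socks5h://127.0.0.1:1080');

https.get('https://registry.npmjs.org/', { agent }, (res) => {
  console.log('status:', res.statusCode);
  res.resume();
});
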
diff --git a/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json b/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json
new file mode 100644
index 00000000000000..a6c7c0741641a1
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/node_modules/socks-proxy-agent/package.json
@@ -0,0 +1,142 @@
+{
+ "name": "socks-proxy-agent",
+ "version": "8.0.1",
+ "description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "author": {
+ "email": "nathan@tootallnate.net",
+ "name": "Nathan Rajlich",
+ "url": "http://n8.io/"
+ },
+ "contributors": [
+ {
+ "name": "Kiko Beats",
+ "email": "josefrancisco.verdu@gmail.com"
+ },
+ {
+ "name": "Josh Glazebrook",
+ "email": "josh@joshglazebrook.com"
+ },
+ {
+ "name": "talmobi",
+ "email": "talmobi@users.noreply.github.com"
+ },
+ {
+ "name": "Indospace.io",
+ "email": "justin@indospace.io"
+ },
+ {
+ "name": "Kilian von Pflugk",
+ "email": "github@jumoog.io"
+ },
+ {
+ "name": "Kyle",
+ "email": "admin@hk1229.cn"
+ },
+ {
+ "name": "Matheus Fernandes",
+ "email": "matheus.frndes@gmail.com"
+ },
+ {
+ "name": "Ricky Miller",
+ "email": "richardkazuomiller@gmail.com"
+ },
+ {
+ "name": "Shantanu Sharma",
+ "email": "shantanu34@outlook.com"
+ },
+ {
+ "name": "Tim Perry",
+ "email": "pimterry@gmail.com"
+ },
+ {
+ "name": "Vadim Baryshev",
+ "email": "vadimbaryshev@gmail.com"
+ },
+ {
+ "name": "jigu",
+ "email": "luo1257857309@gmail.com"
+ },
+ {
+ "name": "Alba Mendez",
+ "email": "me@jmendeth.com"
+ },
+ {
+ "name": "Дмитрий Гуденков",
+ "email": "Dimangud@rambler.ru"
+ },
+ {
+ "name": "Andrei Bitca",
+ "email": "63638922+andrei-bitca-dc@users.noreply.github.com"
+ },
+ {
+ "name": "Andrew Casey",
+ "email": "amcasey@users.noreply.github.com"
+ },
+ {
+ "name": "Brandon Ros",
+ "email": "brandonros1@gmail.com"
+ },
+ {
+ "name": "Dang Duy Thanh",
+ "email": "thanhdd.it@gmail.com"
+ },
+ {
+ "name": "Dimitar Nestorov",
+ "email": "8790386+dimitarnestorov@users.noreply.github.com"
+ }
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/TooTallNate/proxy-agents.git",
+ "directory": "packages/socks-proxy-agent"
+ },
+ "keywords": [
+ "agent",
+ "http",
+ "https",
+ "proxy",
+ "socks",
+ "socks4",
+ "socks4a",
+ "socks5",
+ "socks5h"
+ ],
+ "dependencies": {
+ "agent-base": "^7.0.1",
+ "debug": "^4.3.4",
+ "socks": "^2.7.1"
+ },
+ "devDependencies": {
+ "@types/async-retry": "^1.4.5",
+ "@types/debug": "^4.1.7",
+ "@types/dns2": "^2.0.3",
+ "@types/jest": "^29.5.1",
+ "@types/node": "^14.18.45",
+ "async-listen": "^2.1.0",
+ "async-retry": "^1.3.3",
+ "cacheable-lookup": "^6.1.0",
+ "dns2": "^2.1.0",
+ "jest": "^29.5.0",
+ "socksv5": "github:TooTallNate/socksv5#fix/dstSock-close-event",
+ "ts-jest": "^29.1.0",
+ "typescript": "^5.0.4",
+ "tsconfig": "0.0.0",
+ "proxy": "2.0.1"
+ },
+ "engines": {
+ "node": ">= 14"
+ },
+ "license": "MIT",
+ "scripts": {
+ "build": "tsc",
+ "test": "jest --env node --verbose --bail test/test.ts",
+ "test-e2e": "jest --env node --verbose --bail test/e2e.test.ts",
+ "lint": "eslint . --ext .ts",
+ "pack": "node ../../scripts/pack.mjs"
+ }
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/@npmcli/agent/package.json
new file mode 100644
index 00000000000000..32379b39b5b560
--- /dev/null
+++ b/deps/npm/node_modules/@npmcli/agent/package.json
@@ -0,0 +1,65 @@
+{
+ "name": "@npmcli/agent",
+ "version": "2.1.1",
+ "description": "the http/https agent used by the npm cli",
+ "main": "lib/index.js",
+ "scripts": {
+ "gencerts": "bash scripts/create-cert.sh",
+ "test": "tap",
+ "lint": "eslint \"**/*.js\"",
+ "postlint": "template-oss-check",
+ "template-oss-apply": "template-oss-apply --force",
+ "lintfix": "npm run lint -- --fix",
+ "snap": "tap",
+ "posttest": "npm run lint"
+ },
+ "author": "GitHub Inc.",
+ "license": "ISC",
+ "bugs": {
+ "url": "https://github.com/npm/agent/issues"
+ },
+ "homepage": "https://github.com/npm/agent#readme",
+ "files": [
+ "bin/",
+ "lib/"
+ ],
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ },
+ "templateOSS": {
+ "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+ "version": "4.18.0",
+ "publish": "true",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ],
+ "npmSpec": "next-9"
+ },
+ "dependencies": {
+ "http-proxy-agent": "^7.0.0",
+ "https-proxy-agent": "^7.0.1",
+ "lru-cache": "^10.0.1",
+ "socks-proxy-agent": "^8.0.1"
+ },
+ "devDependencies": {
+ "@npmcli/eslint-config": "^4.0.0",
+ "@npmcli/template-oss": "4.18.0",
+ "minipass-fetch": "^3.0.3",
+ "nock": "^13.2.7",
+ "simple-socks": "^2.2.2",
+ "tap": "^16.3.0"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/npm/agent.git"
+ },
+ "tap": {
+ "nyc-arg": [
+ "--exclude",
+ "tap-snapshots/**"
+ ]
+ }
+}
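
@npmcli/agent wires these proxy agents into npm's HTTP layer. The sketch below is only an illustration of the underlying idea (picking an agent class from the proxy URL's protocol), not the package's actual API:

// Illustrative only: selecting a proxy agent class by proxy protocol.
const { HttpProxyAgent } = require('http-proxy-agent');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { SocksProxyAgent } = require('socks-proxy-agent');

function agentForProxy(proxyUrl, { secureEndpoint = true } = {}) {
  const { protocol } = new URL(proxyUrl);
  if (protocol.startsWith('socks')) {
    return new SocksProxyAgent(proxyUrl);
  }
  // For http:// and https:// proxies the choice depends on whether the
  // *destination* is HTTPS (CONNECT tunnel) or plain HTTP.
  return secureEndpoint ? new HttpsProxyAgent(proxyUrl) : new HttpProxyAgent(proxyUrl);
}
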
diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
index 020038b409bb17..0981afdae6ece7 100644
--- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -628,7 +628,7 @@ module.exports = cls => class Reifier extends cls {
process.emit('time', timer)
this.addTracker('reify', node.name, node.location)
- const { npmVersion, nodeVersion } = this.options
+ const { npmVersion, nodeVersion, cpu, os } = this.options
const p = Promise.resolve().then(async () => {
// when we reify an optional node, check the engine and platform
// first. be sure to ignore the --force and --engine-strict flags,
@@ -638,7 +638,7 @@ module.exports = cls => class Reifier extends cls {
// eslint-disable-next-line promise/always-return
if (node.optional) {
checkEngine(node.package, npmVersion, nodeVersion, false)
- checkPlatform(node.package, false)
+ checkPlatform(node.package, false, { cpu, os })
}
await this[_checkBins](node)
await this[_extractOrLink](node)
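
Reification now forwards the new `cpu` and `os` config values so optional dependencies are checked against an overridden platform instead of the running one. The real matching lives in npm-install-checks; the following is only a rough sketch of that kind of check:

// Rough sketch of package.json os/cpu matching with optional overrides.
// Allowlist-only; the real npm-install-checks logic also handles "!name" exclusions.
function platformOk(pkg, { os, cpu } = {}) {
  const wantOs = os || process.platform;
  const wantCpu = cpu || process.arch;
  const matches = (list, value) =>
    !Array.isArray(list) || list.length === 0 || list.includes(value);
  return matches(pkg.os, wantOs) && matches(pkg.cpu, wantCpu);
}

// platformOk({ os: ['linux'], cpu: ['arm64'] }, { os: 'linux', cpu: 'arm64' }) === true
// platformOk({ os: ['darwin'] }, { os: 'linux' })                              === false
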
diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json
index a9ec27bacb0035..24a442a7d88500 100644
--- a/deps/npm/node_modules/@npmcli/arborist/package.json
+++ b/deps/npm/node_modules/@npmcli/arborist/package.json
@@ -1,39 +1,39 @@
{
"name": "@npmcli/arborist",
- "version": "6.3.0",
+ "version": "7.1.0",
"description": "Manage node_modules trees",
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
"@npmcli/fs": "^3.1.0",
"@npmcli/installed-package-contents": "^2.0.2",
"@npmcli/map-workspaces": "^3.0.2",
- "@npmcli/metavuln-calculator": "^5.0.0",
+ "@npmcli/metavuln-calculator": "^7.0.0",
"@npmcli/name-from-folder": "^2.0.0",
"@npmcli/node-gyp": "^3.0.0",
- "@npmcli/package-json": "^4.0.0",
+ "@npmcli/package-json": "^5.0.0",
"@npmcli/query": "^3.0.0",
- "@npmcli/run-script": "^6.0.0",
+ "@npmcli/run-script": "^7.0.1",
"bin-links": "^4.0.1",
- "cacache": "^17.0.4",
+ "cacache": "^18.0.0",
"common-ancestor-path": "^1.0.1",
- "hosted-git-info": "^6.1.1",
+ "hosted-git-info": "^7.0.0",
"json-parse-even-better-errors": "^3.0.0",
"json-stringify-nice": "^1.1.4",
"minimatch": "^9.0.0",
"nopt": "^7.0.0",
- "npm-install-checks": "^6.0.0",
- "npm-package-arg": "^10.1.0",
- "npm-pick-manifest": "^8.0.1",
- "npm-registry-fetch": "^14.0.3",
+ "npm-install-checks": "^6.2.0",
+ "npm-package-arg": "^11.0.0",
+ "npm-pick-manifest": "^9.0.0",
+ "npm-registry-fetch": "^16.0.0",
"npmlog": "^7.0.1",
- "pacote": "^15.0.8",
+ "pacote": "^17.0.4",
"parse-conflict-json": "^3.0.0",
"proc-log": "^3.0.0",
"promise-all-reject-late": "^1.0.0",
"promise-call-limit": "^1.0.2",
"read-package-json-fast": "^3.0.2",
"semver": "^7.3.7",
- "ssri": "^10.0.1",
+ "ssri": "^10.0.5",
"treeverse": "^3.0.0",
"walk-up-path": "^3.0.1"
},
@@ -42,8 +42,8 @@
"@npmcli/template-oss": "4.18.0",
"benchmark": "^2.1.4",
"minify-registry-metadata": "^3.0.0",
- "nock": "^13.3.0",
- "tap": "^16.3.4",
+ "nock": "^13.3.3",
+ "tap": "^16.3.8",
"tar-stream": "^3.0.0",
"tcompare": "^5.0.6"
},
@@ -79,7 +79,6 @@
"test-env": [
"LC_ALL=sk"
],
- "color": 1,
"timeout": "360",
"nyc-arg": [
"--exclude",
@@ -87,11 +86,17 @@
]
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
}
}
diff --git a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
index fe5cafa1922d9b..e6b9859dc1dfb9 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/definitions/definitions.js
@@ -64,7 +64,7 @@ const editor = process.env.EDITOR ||
const shell = isWindows ? process.env.ComSpec || 'cmd'
: process.env.SHELL || 'sh'
-const { tmpdir, networkInterfaces } = require('os')
+const { networkInterfaces } = require('os')
const getLocalAddresses = () => {
try {
return Object.values(networkInterfaces()).map(
@@ -429,24 +429,6 @@ define('cert', {
flatten,
})
-define('ci-name', {
- default: ciInfo.name ? ciInfo.name.toLowerCase().split(' ').join('-') : null,
- defaultDescription: `
- The name of the current CI system, or \`null\` when not on a known CI
- platform.
- `,
- type: [null, String],
- deprecated: `
- This config is deprecated and will not be changeable in future version of npm.
- `,
- description: `
- The name of a continuous integration system. If not set explicitly, npm
- will detect the current CI environment using the
- [\`ci-info\`](http://npm.im/ci-info) module.
- `,
- flatten,
-})
-
define('cidr', {
default: null,
type: [null, String, Array],
@@ -490,6 +472,28 @@ define('commit-hooks', {
flatten,
})
+define('cpu', {
+ default: null,
+ type: [null, String],
+ description: `
+ Override CPU architecture of native modules to install.
+ Acceptable values are the same as the \`cpu\` field of package.json,
+ which comes from \`process.arch\`.
+ `,
+ flatten,
+})
+
+define('os', {
+ default: null,
+ type: [null, String],
+ description: `
+ Override OS of native modules to install.
+ Acceptable values are the same as the \`os\` field of package.json,
+ which comes from \`process.platform\`.
+ `,
+ flatten,
+})
+
define('depth', {
default: null,
defaultDescription: `
@@ -2127,24 +2131,6 @@ define('timing', {
`,
})
-define('tmp', {
- default: tmpdir(),
- defaultDescription: `
- The value returned by the Node.js \`os.tmpdir()\` method
-
- `,
- type: path,
- deprecated: `
- This setting is no longer used. npm stores temporary files in a special
- location in the cache, and they are managed by
- [\`cacache\`](http://npm.im/cacache).
- `,
- description: `
- Historically, the location where temporary files were stored. No longer
- relevant.
- `,
-})
-
define('umask', {
default: 0,
type: Umask,
@@ -2222,7 +2208,7 @@ define('user-agent', {
`,
flatten (key, obj, flatOptions) {
const value = obj[key]
- const ciName = obj['ci-name']
+ const ciName = ciInfo.name?.toLowerCase().split(' ').join('-') || null
let inWorkspaces = false
if (obj.workspaces || obj.workspace && obj.workspace.length) {
inWorkspaces = true
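
With the deprecated `ci-name` option removed, the user-agent string now derives the CI name directly from ci-info at flatten time, exactly as in the expression above. The derivation on its own:

const ciInfo = require('ci-info');

// e.g. 'github-actions' on GitHub Actions, null outside of a known CI system.
const ciName = ciInfo.name ? ciInfo.name.toLowerCase().split(' ').join('-') : null;
console.log(ciName);
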
diff --git a/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js b/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js
index 748f306bd2ce34..8255a904423911 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/definitions/index.js
@@ -18,16 +18,6 @@ const flatten = (obj, flat = {}) => {
flat[key] = val
}
}
-
- // XXX make this the bin/npm-cli.js file explicitly instead
- // otherwise using npm programmatically is a bit of a pain.
- flat.npmBin = require.main ? require.main.filename
- : /* istanbul ignore next - not configurable property */ undefined
- flat.nodeBin = process.env.NODE || process.execPath
-
- // XXX should this be sha512? is it even relevant?
- flat.hashAlgorithm = 'sha1'
-
return flat
}
diff --git a/deps/npm/node_modules/@npmcli/config/lib/index.js b/deps/npm/node_modules/@npmcli/config/lib/index.js
index 0e19d32e3f8b45..ad07fcdf51826a 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/index.js
@@ -115,6 +115,7 @@ class Config {
this.defaults = defaults
this.npmPath = npmPath
+ this.npmBin = join(this.npmPath, 'bin/npm-cli.js')
this.argv = argv
this.env = env
this.execPath = execPath
@@ -231,6 +232,8 @@ class Config {
for (const { data } of this.data.values()) {
this.#flatten(data, this.#flatOptions)
}
+ this.#flatOptions.nodeBin = this.execPath
+ this.#flatOptions.npmBin = this.npmBin
process.emit('timeEnd', 'config:load:flatten')
return this.#flatOptions
@@ -322,10 +325,6 @@ class Config {
const { data } = this.data.get('default')
- // the metrics-registry defaults to the current resolved value of
- // the registry, unless overridden somewhere else.
- settableGetter(data, 'metrics-registry', () => this.#get('registry'))
-
// if the prefix is set on cli, env, or userconfig, then we need to
// default the globalconfig file to that location, instead of the default
// global prefix. It's weird that `npm get globalconfig --prefix=/foo`
@@ -614,7 +613,15 @@ class Config {
process.emit('time', 'config:load:file:' + file)
// only catch the error from readFile, not from the loadObject call
await readFile(file, 'utf8').then(
- data => this.#loadObject(ini.parse(data), type, file),
+ data => {
+ const parsedConfig = ini.parse(data)
+ if (type === 'project' && parsedConfig.prefix) {
+ // Log error if prefix is mentioned in project .npmrc
+ /* eslint-disable-next-line max-len */
+ log.error('config', `prefix cannot be changed from project config: ${file}.`)
+ }
+ return this.#loadObject(parsedConfig, type, file)
+ },
er => this.#loadObject(null, type, file, er)
)
process.emit('timeEnd', 'config:load:file:' + file)
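
Config file loading now parses each file with `ini` and, for the project-level `.npmrc`, logs an error when it tries to set `prefix`, since the prefix cannot be changed from project config. A standalone sketch of that check (file path and logger are placeholders):

const { readFile } = require('fs/promises');
const ini = require('ini');

// Placeholder path and logger; the real loader threads these through Config.
async function warnOnProjectPrefix(file = './.npmrc', log = console) {
  const parsed = ini.parse(await readFile(file, 'utf8'));
  if (parsed.prefix) {
    log.error(`prefix cannot be changed from project config: ${file}.`);
  }
  return parsed;
}
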
diff --git a/deps/npm/node_modules/@npmcli/config/lib/set-envs.js b/deps/npm/node_modules/@npmcli/config/lib/set-envs.js
index 0f5781aaf33959..b6f5a30562ab1b 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/set-envs.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/set-envs.js
@@ -101,10 +101,7 @@ const setEnvs = (config) => {
if (cliConf['node-options']) {
env.NODE_OPTIONS = cliConf['node-options']
}
-
- if (require.main && require.main.filename) {
- env.npm_execpath = require.main.filename
- }
+ env.npm_execpath = config.npmBin
env.NODE = env.npm_node_execpath = config.execPath
}
diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json
index 76d193ba23ec4c..d2e7066b654d1b 100644
--- a/deps/npm/node_modules/@npmcli/config/package.json
+++ b/deps/npm/node_modules/@npmcli/config/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/config",
- "version": "6.2.1",
+ "version": "7.2.0",
"files": [
"bin/",
"lib/"
@@ -33,7 +33,7 @@
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/mock-globals": "^1.0.0",
"@npmcli/template-oss": "4.18.0",
- "tap": "^16.3.4"
+ "tap": "^16.3.8"
},
"dependencies": {
"@npmcli/map-workspaces": "^3.0.2",
diff --git a/deps/npm/node_modules/@npmcli/git/lib/revs.js b/deps/npm/node_modules/@npmcli/git/lib/revs.js
index ee72370d5b7eca..ca14837de1b876 100644
--- a/deps/npm/node_modules/@npmcli/git/lib/revs.js
+++ b/deps/npm/node_modules/@npmcli/git/lib/revs.js
@@ -1,8 +1,8 @@
const pinflight = require('promise-inflight')
const spawn = require('./spawn.js')
-const LRU = require('lru-cache')
+const { LRUCache } = require('lru-cache')
-const revsCache = new LRU({
+const revsCache = new LRUCache({
max: 100,
ttl: 5 * 60 * 1000,
})
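
The lru-cache major bump changes how the class is imported: newer releases expose it as the named `LRUCache` export, which is the only change this file needs. Updated usage in isolation:

const { LRUCache } = require('lru-cache');

const revsCache = new LRUCache({
  max: 100,            // keep at most 100 entries
  ttl: 5 * 60 * 1000,  // entries expire after five minutes
});

revsCache.set('https://github.com/npm/cli.git', { versions: {} });
console.log(revsCache.get('https://github.com/npm/cli.git'));
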
diff --git a/deps/npm/node_modules/@npmcli/git/package.json b/deps/npm/node_modules/@npmcli/git/package.json
index eeba1c0415788c..6ab037d841cc34 100644
--- a/deps/npm/node_modules/@npmcli/git/package.json
+++ b/deps/npm/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/git",
- "version": "4.1.0",
+ "version": "5.0.3",
"main": "lib/index.js",
"files": [
"bin/",
@@ -31,27 +31,33 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.15.1",
- "npm-package-arg": "^10.0.0",
+ "@npmcli/template-oss": "4.18.0",
+ "npm-package-arg": "^11.0.0",
"slash": "^3.0.0",
"tap": "^16.0.1"
},
"dependencies": {
- "@npmcli/promise-spawn": "^6.0.0",
- "lru-cache": "^7.4.4",
- "npm-pick-manifest": "^8.0.0",
+ "@npmcli/promise-spawn": "^7.0.0",
+ "lru-cache": "^10.0.1",
+ "npm-pick-manifest": "^9.0.0",
"proc-log": "^3.0.0",
"promise-inflight": "^1.0.1",
"promise-retry": "^2.0.1",
"semver": "^7.3.5",
- "which": "^3.0.0"
+ "which": "^4.0.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.15.1",
- "publish": true
+ "version": "4.18.0",
+ "publish": true,
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
}
}
diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json
index 18ebb68c4bfd1a..4d0af031d54148 100644
--- a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json
+++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/metavuln-calculator",
- "version": "5.0.1",
+ "version": "7.0.0",
"main": "lib/index.js",
"files": [
"bin/",
@@ -34,22 +34,28 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.13.0",
+ "@npmcli/template-oss": "4.18.0",
"require-inject": "^1.4.4",
"tap": "^16.0.1"
},
"dependencies": {
- "cacache": "^17.0.0",
+ "cacache": "^18.0.0",
"json-parse-even-better-errors": "^3.0.0",
- "pacote": "^15.0.0",
+ "pacote": "^17.0.0",
"semver": "^7.3.5"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.13.0",
- "publish": "true"
+ "version": "4.18.0",
+ "publish": "true",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
}
}
diff --git a/deps/npm/node_modules/@npmcli/package-json/package.json b/deps/npm/node_modules/@npmcli/package-json/package.json
index 33215b638db6ee..ab320e8695ca3d 100644
--- a/deps/npm/node_modules/@npmcli/package-json/package.json
+++ b/deps/npm/node_modules/@npmcli/package-json/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/package-json",
- "version": "4.0.1",
+ "version": "5.0.0",
"description": "Programmatic API to update package.json",
"main": "lib/index.js",
"files": [
@@ -25,17 +25,17 @@
"license": "ISC",
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.17.0",
+ "@npmcli/template-oss": "4.18.0",
"read-package-json": "^6.0.4",
"read-package-json-fast": "^3.0.2",
"tap": "^16.0.1"
},
"dependencies": {
- "@npmcli/git": "^4.1.0",
+ "@npmcli/git": "^5.0.0",
"glob": "^10.2.2",
- "hosted-git-info": "^6.1.1",
+ "hosted-git-info": "^7.0.0",
"json-parse-even-better-errors": "^3.0.0",
- "normalize-package-data": "^5.0.0",
+ "normalize-package-data": "^6.0.0",
"proc-log": "^3.0.0",
"semver": "^7.5.3"
},
@@ -44,12 +44,18 @@
"url": "https://github.com/npm/package-json.git"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.17.0",
- "publish": "true"
+ "version": "4.18.0",
+ "publish": "true",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/package.json b/deps/npm/node_modules/@npmcli/promise-spawn/package.json
index 2080d9f5be9f04..ffd89f1083341c 100644
--- a/deps/npm/node_modules/@npmcli/promise-spawn/package.json
+++ b/deps/npm/node_modules/@npmcli/promise-spawn/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/promise-spawn",
- "version": "6.0.2",
+ "version": "7.0.0",
"files": [
"bin/",
"lib/"
@@ -32,19 +32,25 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.11.0",
- "minipass": "^4.0.0",
+ "@npmcli/template-oss": "4.18.0",
"spawk": "^1.7.1",
"tap": "^16.0.1"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.11.0"
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ],
+ "version": "4.18.0",
+ "publish": true
},
"dependencies": {
- "which": "^3.0.0"
+ "which": "^4.0.0"
}
}
diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json
index 38f6f72fa6ad90..7e7d2561571566 100644
--- a/deps/npm/node_modules/@npmcli/run-script/package.json
+++ b/deps/npm/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/run-script",
- "version": "6.0.2",
+ "version": "7.0.1",
"description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
"author": "GitHub Inc.",
"license": "ISC",
@@ -16,16 +16,16 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.15.1",
+ "@npmcli/template-oss": "4.18.0",
"require-inject": "^1.4.4",
"tap": "^16.0.1"
},
"dependencies": {
"@npmcli/node-gyp": "^3.0.0",
- "@npmcli/promise-spawn": "^6.0.0",
+ "@npmcli/promise-spawn": "^7.0.0",
"node-gyp": "^9.0.0",
"read-package-json-fast": "^3.0.0",
- "which": "^3.0.0"
+ "which": "^4.0.0"
},
"files": [
"bin/",
@@ -37,11 +37,17 @@
"url": "https://github.com/npm/run-script.git"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.15.1",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ],
+ "version": "4.18.0",
"publish": "true"
},
"tap": {
diff --git a/deps/npm/node_modules/@sigstore/bundle/LICENSE b/deps/npm/node_modules/@sigstore/bundle/LICENSE
new file mode 100644
index 00000000000000..e9e7c1679a09df
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/bundle/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/build.js b/deps/npm/node_modules/@sigstore/bundle/dist/build.js
new file mode 100644
index 00000000000000..6990f5451a2d33
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/bundle/dist/build.js
@@ -0,0 +1,89 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const bundle_1 = require("./bundle");
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(options) {
+ return {
+ mediaType: bundle_1.BUNDLE_V02_MEDIA_TYPE,
+ content: {
+ $case: 'messageSignature',
+ messageSignature: {
+ messageDigest: {
+ algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
+ digest: options.digest,
+ },
+ signature: options.signature,
+ },
+ },
+ verificationMaterial: toVerificationMaterial(options),
+ };
+}
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(options) {
+ return {
+ mediaType: bundle_1.BUNDLE_V02_MEDIA_TYPE,
+ content: {
+ $case: 'dsseEnvelope',
+ dsseEnvelope: toEnvelope(options),
+ },
+ verificationMaterial: toVerificationMaterial(options),
+ };
+}
+exports.toDSSEBundle = toDSSEBundle;
+function toEnvelope(options) {
+ return {
+ payloadType: options.artifactType,
+ payload: options.artifact,
+ signatures: [toSignature(options)],
+ };
+}
+function toSignature(options) {
+ return {
+ keyid: options.keyHint || '',
+ sig: options.signature,
+ };
+}
+// Verification material
+function toVerificationMaterial(options) {
+ return {
+ content: toKeyContent(options),
+ tlogEntries: [],
+ timestampVerificationData: { rfc3161Timestamps: [] },
+ };
+}
+function toKeyContent(options) {
+ if (options.certificate) {
+ return {
+ $case: 'x509CertificateChain',
+ x509CertificateChain: {
+ certificates: [{ rawBytes: options.certificate }],
+ },
+ };
+ }
+ else {
+ return {
+ $case: 'publicKey',
+ publicKey: {
+ hint: options.keyHint || '',
+ },
+ };
+ }
+}
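
toMessageSignatureBundle and toDSSEBundle assemble the two bundle shapes from one options object: a digest/signature (or artifact payload) plus either a signing certificate or a public-key hint. A hedged sketch of the message-signature builder with dummy bytes (real values come from an actual signing flow):

const crypto = require('crypto');
const { toMessageSignatureBundle } = require('@sigstore/bundle');

const artifact = Buffer.from('hello world');
const bundle = toMessageSignatureBundle({
  digest: crypto.createHash('sha256').update(artifact).digest(),
  signature: Buffer.from('not-a-real-signature'),
  certificate: Buffer.from('not-a-real-der-certificate'),
});

console.log(bundle.mediaType);                           // ...bundle+json;version=0.2
console.log(bundle.content.$case);                       // 'messageSignature'
console.log(bundle.verificationMaterial.content.$case);  // 'x509CertificateChain'
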
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/bundle.js b/deps/npm/node_modules/@sigstore/bundle/dist/bundle.js
new file mode 100644
index 00000000000000..8c01e2d19c5ecb
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/bundle/dist/bundle.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleWithDsseEnvelope = exports.isBundleWithMessageSignature = exports.isBundleWithPublicKey = exports.isBundleWithCertificateChain = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
+exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
+exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2';
+// Type guards for bundle variants.
+function isBundleWithCertificateChain(b) {
+ return b.verificationMaterial.content.$case === 'x509CertificateChain';
+}
+exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
+function isBundleWithPublicKey(b) {
+ return b.verificationMaterial.content.$case === 'publicKey';
+}
+exports.isBundleWithPublicKey = isBundleWithPublicKey;
+function isBundleWithMessageSignature(b) {
+ return b.content.$case === 'messageSignature';
+}
+exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
+function isBundleWithDsseEnvelope(b) {
+ return b.content.$case === 'dsseEnvelope';
+}
+exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
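
The type guards simply discriminate on the `$case` tags, letting callers narrow a bundle before touching variant-specific fields, for example:

const { isBundleWithDsseEnvelope, isBundleWithMessageSignature } = require('@sigstore/bundle');

// `bundle` would come from bundleFromJSON or one of the builders above.
function describeContent(bundle) {
  if (isBundleWithDsseEnvelope(bundle)) {
    return `DSSE envelope (${bundle.content.dsseEnvelope.payloadType})`;
  }
  if (isBundleWithMessageSignature(bundle)) {
    return 'raw message signature';
  }
  return 'unknown bundle content';
}
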
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/error.js b/deps/npm/node_modules/@sigstore/bundle/dist/error.js
new file mode 100644
index 00000000000000..f84295323b812e
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/bundle/dist/error.js
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ValidationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class ValidationError extends Error {
+ constructor(message, fields) {
+ super(message);
+ this.fields = fields;
+ }
+}
+exports.ValidationError = ValidationError;
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/index.js b/deps/npm/node_modules/@sigstore/bundle/dist/index.js
new file mode 100644
index 00000000000000..b016a16d11cc00
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/bundle/dist/index.js
@@ -0,0 +1,40 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var build_1 = require("./build");
+Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } });
+Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } });
+var bundle_1 = require("./bundle");
+Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } });
+Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } });
+Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } });
+Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } });
+Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
+var serialized_1 = require("./serialized");
+Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } });
+Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } });
+Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } });
+Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } });
+var validate_1 = require("./validate");
+Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } });
+Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } });
+Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } });
+Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } });
diff --git a/deps/npm/node_modules/@sigstore/bundle/dist/serialized.js b/deps/npm/node_modules/@sigstore/bundle/dist/serialized.js
new file mode 100644
index 00000000000000..f1073358cacfd7
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/bundle/dist/serialized.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const validate_1 = require("./validate");
+const bundleFromJSON = (obj) => {
+ const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
+ (0, validate_1.assertBundle)(bundle);
+ return bundle;
+};
+exports.bundleFromJSON = bundleFromJSON;
+const bundleToJSON = (bundle) => {
+ return protobuf_specs_1.Bundle.toJSON(bundle);
+};
+exports.bundleToJSON = bundleToJSON;
+const envelopeFromJSON = (obj) => {
+ return protobuf_specs_1.Envelope.fromJSON(obj);
+};
+exports.envelopeFromJSON = envelopeFromJSON;
+const envelopeToJSON = (envelope) => {
+ return protobuf_specs_1.Envelope.toJSON(envelope);
+};
+exports.envelopeToJSON = envelopeToJSON;
diff --git a/deps/npm/node_modules/sigstore/dist/identity/provider.js b/deps/npm/node_modules/@sigstore/bundle/dist/utility.js
similarity index 100%
rename from deps/npm/node_modules/sigstore/dist/identity/provider.js
rename to deps/npm/node_modules/@sigstore/bundle/dist/utility.js
diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js b/deps/npm/node_modules/@sigstore/bundle/dist/validate.js
similarity index 53%
rename from deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js
rename to deps/npm/node_modules/@sigstore/bundle/dist/validate.js
index a19d8ad3ec7021..015b6dfc58dd73 100644
--- a/deps/npm/node_modules/sigstore/dist/types/sigstore/validate.js
+++ b/deps/npm/node_modules/@sigstore/bundle/dist/validate.js
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.assertValidBundle = void 0;
+exports.assertBundleLatest = exports.isBundleV01 = exports.assertBundleV01 = exports.assertBundle = void 0;
/*
Copyright 2023 The Sigstore Authors.
@@ -16,13 +16,19 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
-const error_1 = require("../../error");
+const bundle_1 = require("./bundle");
+const error_1 = require("./error");
// Performs basic validation of a Sigstore bundle to ensure that all required
// fields are populated. This is not a complete validation of the bundle, but
// rather a check that the bundle is in a valid state to be processed by the
// rest of the code.
-function assertValidBundle(b) {
+function assertBundle(b) {
const invalidValues = [];
+ // Media type validation
+ if (b.mediaType === undefined ||
+ !b.mediaType.startsWith('application/vnd.dev.sigstore.bundle+json;version=')) {
+ invalidValues.push('mediaType');
+ }
// Content-related validation
if (b.content === undefined) {
invalidValues.push('content');
@@ -80,9 +86,75 @@ function assertValidBundle(b) {
break;
}
}
+ if (b.verificationMaterial.tlogEntries === undefined) {
+ invalidValues.push('verificationMaterial.tlogEntries');
+ }
+ else {
+ if (b.verificationMaterial.tlogEntries.length > 0) {
+ b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+ if (entry.logId === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`);
+ }
+ if (entry.kindVersion === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`);
+ }
+ });
+ }
+ }
+ }
+ if (invalidValues.length > 0) {
+ throw new error_1.ValidationError('invalid bundle', invalidValues);
+ }
+}
+exports.assertBundle = assertBundle;
+// Asserts that the given bundle conforms to the v0.1 bundle format.
+function assertBundleV01(b) {
+ const invalidValues = [];
+ if (b.mediaType && b.mediaType !== bundle_1.BUNDLE_V01_MEDIA_TYPE) {
+ invalidValues.push('mediaType');
+ }
+ if (b.verificationMaterial &&
+ b.verificationMaterial.tlogEntries?.length > 0) {
+ b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+ if (entry.inclusionPromise === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`);
+ }
+ });
+ }
+ if (invalidValues.length > 0) {
+ throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues);
+ }
+}
+exports.assertBundleV01 = assertBundleV01;
+// Type guard to determine if Bundle is a v0.1 bundle.
+function isBundleV01(b) {
+ try {
+ assertBundleV01(b);
+ return true;
+ }
+ catch (e) {
+ return false;
+ }
+}
+exports.isBundleV01 = isBundleV01;
+// Asserts that the given bundle conforms to the newest (0.2) bundle format.
+function assertBundleLatest(b) {
+ const invalidValues = [];
+ if (b.verificationMaterial &&
+ b.verificationMaterial.tlogEntries?.length > 0) {
+ b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+ if (entry.inclusionProof === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`);
+ }
+ else {
+ if (entry.inclusionProof.checkpoint === undefined) {
+ invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`);
+ }
+ }
+ });
}
if (invalidValues.length > 0) {
- throw new error_1.ValidationError(`invalid/missing bundle values: ${invalidValues.join(', ')}`);
+ throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues);
}
}
-exports.assertValidBundle = assertValidBundle;
+exports.assertBundleLatest = assertBundleLatest;
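
The validators are layered: assertBundle checks the fields every bundle needs, assertBundleV01 and assertBundleLatest add the version-specific requirements (an inclusionPromise for v0.1, an inclusionProof with a checkpoint for v0.2), and isBundleV01 is just assertBundleV01 wrapped in a try/catch. A sketch of how a consumer might branch on the bundle version:

const { bundleFromJSON, isBundleV01, assertBundleLatest } = require('@sigstore/bundle');

// `json` is a parsed Sigstore bundle object from wherever it was persisted.
function loadBundle(json) {
  const bundle = bundleFromJSON(json); // runs assertBundle internally
  if (isBundleV01(bundle)) {
    return { bundle, version: '0.1' };
  }
  assertBundleLatest(bundle); // throws ValidationError listing the offending fields
  return { bundle, version: '0.2' };
}
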
diff --git a/deps/npm/node_modules/@sigstore/bundle/package.json b/deps/npm/node_modules/@sigstore/bundle/package.json
new file mode 100644
index 00000000000000..7e26efa11a21de
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/bundle/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "@sigstore/bundle",
+ "version": "2.1.0",
+ "description": "Sigstore bundle type",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "clean": "shx rm -rf dist *.tsbuildinfo",
+ "build": "tsc --build",
+ "test": "jest"
+ },
+ "files": [
+ "dist",
+ "store"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/sigstore-js.git"
+ },
+ "bugs": {
+ "url": "https://github.com/sigstore/sigstore-js/issues"
+ },
+ "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme",
+ "publishConfig": {
+ "provenance": true
+ },
+ "dependencies": {
+ "@sigstore/protobuf-specs": "^0.2.1"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
index 715bb1aa5b57d5..0c367a8384454c 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -44,7 +44,7 @@ exports.Signature = {
return obj;
},
};
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
@@ -60,11 +60,11 @@ var globalThis = (() => {
throw "Unable to locate global object";
})();
function bytesFromBase64(b64) {
- if (globalThis.Buffer) {
- return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
}
else {
- const bin = globalThis.atob(b64);
+ const bin = tsProtoGlobalThis.atob(b64);
const arr = new Uint8Array(bin.length);
for (let i = 0; i < bin.length; ++i) {
arr[i] = bin.charCodeAt(i);
@@ -73,15 +73,15 @@ function bytesFromBase64(b64) {
}
}
function base64FromBytes(arr) {
- if (globalThis.Buffer) {
- return globalThis.Buffer.from(arr).toString("base64");
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
}
else {
const bin = [];
arr.forEach((byte) => {
bin.push(String.fromCharCode(byte));
});
- return globalThis.btoa(bin.join(""));
+ return tsProtoGlobalThis.btoa(bin.join(""));
}
}
function isSet(value) {
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 00000000000000..073093b8371a8f
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,185 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+function createBaseCloudEvent() {
+ return { id: "", source: "", specVersion: "", type: "", attributes: {}, data: undefined };
+}
+exports.CloudEvent = {
+ fromJSON(object) {
+ return {
+ id: isSet(object.id) ? String(object.id) : "",
+ source: isSet(object.source) ? String(object.source) : "",
+ specVersion: isSet(object.specVersion) ? String(object.specVersion) : "",
+ type: isSet(object.type) ? String(object.type) : "",
+ attributes: isObject(object.attributes)
+ ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+ acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+ return acc;
+ }, {})
+ : {},
+ data: isSet(object.binaryData)
+ ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+ : isSet(object.textData)
+ ? { $case: "textData", textData: String(object.textData) }
+ : isSet(object.protoData)
+ ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.id !== undefined && (obj.id = message.id);
+ message.source !== undefined && (obj.source = message.source);
+ message.specVersion !== undefined && (obj.specVersion = message.specVersion);
+ message.type !== undefined && (obj.type = message.type);
+ obj.attributes = {};
+ if (message.attributes) {
+ Object.entries(message.attributes).forEach(([k, v]) => {
+ obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+ });
+ }
+ message.data?.$case === "binaryData" &&
+ (obj.binaryData = message.data?.binaryData !== undefined ? base64FromBytes(message.data?.binaryData) : undefined);
+ message.data?.$case === "textData" && (obj.textData = message.data?.textData);
+ message.data?.$case === "protoData" &&
+ (obj.protoData = message.data?.protoData ? any_1.Any.toJSON(message.data?.protoData) : undefined);
+ return obj;
+ },
+};
+function createBaseCloudEvent_AttributesEntry() {
+ return { key: "", value: undefined };
+}
+exports.CloudEvent_AttributesEntry = {
+ fromJSON(object) {
+ return {
+ key: isSet(object.key) ? String(object.key) : "",
+ value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.key !== undefined && (obj.key = message.key);
+ message.value !== undefined &&
+ (obj.value = message.value ? exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value) : undefined);
+ return obj;
+ },
+};
+function createBaseCloudEvent_CloudEventAttributeValue() {
+ return { attr: undefined };
+}
+exports.CloudEvent_CloudEventAttributeValue = {
+ fromJSON(object) {
+ return {
+ attr: isSet(object.ceBoolean)
+ ? { $case: "ceBoolean", ceBoolean: Boolean(object.ceBoolean) }
+ : isSet(object.ceInteger)
+ ? { $case: "ceInteger", ceInteger: Number(object.ceInteger) }
+ : isSet(object.ceString)
+ ? { $case: "ceString", ceString: String(object.ceString) }
+ : isSet(object.ceBytes)
+ ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+ : isSet(object.ceUri)
+ ? { $case: "ceUri", ceUri: String(object.ceUri) }
+ : isSet(object.ceUriRef)
+ ? { $case: "ceUriRef", ceUriRef: String(object.ceUriRef) }
+ : isSet(object.ceTimestamp)
+ ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+ : undefined,
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.attr?.$case === "ceBoolean" && (obj.ceBoolean = message.attr?.ceBoolean);
+ message.attr?.$case === "ceInteger" && (obj.ceInteger = Math.round(message.attr?.ceInteger));
+ message.attr?.$case === "ceString" && (obj.ceString = message.attr?.ceString);
+ message.attr?.$case === "ceBytes" &&
+ (obj.ceBytes = message.attr?.ceBytes !== undefined ? base64FromBytes(message.attr?.ceBytes) : undefined);
+ message.attr?.$case === "ceUri" && (obj.ceUri = message.attr?.ceUri);
+ message.attr?.$case === "ceUriRef" && (obj.ceUriRef = message.attr?.ceUriRef);
+ message.attr?.$case === "ceTimestamp" && (obj.ceTimestamp = message.attr?.ceTimestamp.toISOString());
+ return obj;
+ },
+};
+function createBaseCloudEventBatch() {
+ return { events: [] };
+}
+exports.CloudEventBatch = {
+ fromJSON(object) {
+ return { events: Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [] };
+ },
+ toJSON(message) {
+ const obj = {};
+ if (message.events) {
+ obj.events = message.events.map((e) => e ? exports.CloudEvent.toJSON(e) : undefined);
+ }
+ else {
+ obj.events = [];
+ }
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function fromTimestamp(t) {
+ let millis = Number(t.seconds) * 1000;
+ millis += t.nanos / 1000000;
+ return new Date(millis);
+}
+function fromJsonTimestamp(o) {
+ if (o instanceof Date) {
+ return o;
+ }
+ else if (typeof o === "string") {
+ return new Date(o);
+ }
+ else {
+ return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+ }
+}
+function isObject(value) {
+ return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
index f9b57cccdc3d3b..da627499ad7659 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -77,7 +77,7 @@ function fieldBehaviorFromJSON(object) {
case "UNORDERED_LIST":
return FieldBehavior.UNORDERED_LIST;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
}
}
exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
@@ -98,11 +98,11 @@ function fieldBehaviorToJSON(object) {
case FieldBehavior.UNORDERED_LIST:
return "UNORDERED_LIST";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
}
}
exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 00000000000000..6b3f3c97a66476
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,65 @@
+"use strict";
+/* eslint-disable */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+function createBaseAny() {
+ return { typeUrl: "", value: Buffer.alloc(0) };
+}
+exports.Any = {
+ fromJSON(object) {
+ return {
+ typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "",
+ value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+ };
+ },
+ toJSON(message) {
+ const obj = {};
+ message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl);
+ message.value !== undefined &&
+ (obj.value = base64FromBytes(message.value !== undefined ? message.value : Buffer.alloc(0)));
+ return obj;
+ },
+};
+var tsProtoGlobalThis = (() => {
+ if (typeof globalThis !== "undefined") {
+ return globalThis;
+ }
+ if (typeof self !== "undefined") {
+ return self;
+ }
+ if (typeof window !== "undefined") {
+ return window;
+ }
+ if (typeof global !== "undefined") {
+ return global;
+ }
+ throw "Unable to locate global object";
+})();
+function bytesFromBase64(b64) {
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
+ }
+ else {
+ const bin = tsProtoGlobalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+function base64FromBytes(arr) {
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
+ }
+ else {
+ const bin = [];
+ arr.forEach((byte) => {
+ bin.push(String.fromCharCode(byte));
+ });
+ return tsProtoGlobalThis.btoa(bin.join(""));
+ }
+}
+function isSet(value) {
+ return value !== null && value !== undefined;
+}
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
index b8cfc86ab99aad..d429aac8460436 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -102,7 +102,7 @@ function fieldDescriptorProto_TypeFromJSON(object) {
case "TYPE_SINT64":
return FieldDescriptorProto_Type.TYPE_SINT64;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
}
}
exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
@@ -145,7 +145,7 @@ function fieldDescriptorProto_TypeToJSON(object) {
case FieldDescriptorProto_Type.TYPE_SINT64:
return "TYPE_SINT64";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
}
}
exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
@@ -168,7 +168,7 @@ function fieldDescriptorProto_LabelFromJSON(object) {
case "LABEL_REPEATED":
return FieldDescriptorProto_Label.LABEL_REPEATED;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
}
}
exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
@@ -181,7 +181,7 @@ function fieldDescriptorProto_LabelToJSON(object) {
case FieldDescriptorProto_Label.LABEL_REPEATED:
return "LABEL_REPEATED";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
}
}
exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
@@ -207,7 +207,7 @@ function fileOptions_OptimizeModeFromJSON(object) {
case "LITE_RUNTIME":
return FileOptions_OptimizeMode.LITE_RUNTIME;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
}
}
exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
@@ -220,7 +220,7 @@ function fileOptions_OptimizeModeToJSON(object) {
case FileOptions_OptimizeMode.LITE_RUNTIME:
return "LITE_RUNTIME";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
}
}
exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
@@ -243,7 +243,7 @@ function fieldOptions_CTypeFromJSON(object) {
case "STRING_PIECE":
return FieldOptions_CType.STRING_PIECE;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
}
}
exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
@@ -256,7 +256,7 @@ function fieldOptions_CTypeToJSON(object) {
case FieldOptions_CType.STRING_PIECE:
return "STRING_PIECE";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
}
}
exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
@@ -281,7 +281,7 @@ function fieldOptions_JSTypeFromJSON(object) {
case "JS_NUMBER":
return FieldOptions_JSType.JS_NUMBER;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
}
}
exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
@@ -294,7 +294,7 @@ function fieldOptions_JSTypeToJSON(object) {
case FieldOptions_JSType.JS_NUMBER:
return "JS_NUMBER";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
}
}
exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
@@ -323,7 +323,7 @@ function methodOptions_IdempotencyLevelFromJSON(object) {
case "IDEMPOTENT":
return MethodOptions_IdempotencyLevel.IDEMPOTENT;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
}
}
exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
@@ -336,7 +336,7 @@ function methodOptions_IdempotencyLevelToJSON(object) {
case MethodOptions_IdempotencyLevel.IDEMPOTENT:
return "IDEMPOTENT";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
}
}
exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
@@ -1263,7 +1263,7 @@ exports.GeneratedCodeInfo_Annotation = {
return obj;
},
};
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
@@ -1279,11 +1279,11 @@ var globalThis = (() => {
throw "Unable to locate global object";
})();
function bytesFromBase64(b64) {
- if (globalThis.Buffer) {
- return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
}
else {
- const bin = globalThis.atob(b64);
+ const bin = tsProtoGlobalThis.atob(b64);
const arr = new Uint8Array(bin.length);
for (let i = 0; i < bin.length; ++i) {
arr[i] = bin.charCodeAt(i);
@@ -1292,15 +1292,15 @@ function bytesFromBase64(b64) {
}
}
function base64FromBytes(arr) {
- if (globalThis.Buffer) {
- return globalThis.Buffer.from(arr).toString("base64");
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
}
else {
const bin = [];
arr.forEach((byte) => {
bin.push(String.fromCharCode(byte));
});
- return globalThis.btoa(bin.join(""));
+ return tsProtoGlobalThis.btoa(bin.join(""));
}
}
function isSet(value) {
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
index 63ace8db580cc8..bcd654e9154b92 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -26,7 +26,7 @@ function hashAlgorithmFromJSON(object) {
case "SHA2_256":
return HashAlgorithm.SHA2_256;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
}
}
exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
@@ -37,7 +37,7 @@ function hashAlgorithmToJSON(object) {
case HashAlgorithm.SHA2_256:
return "SHA2_256";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
}
}
exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
@@ -92,7 +92,7 @@ function publicKeyDetailsFromJSON(object) {
case "PKIX_ED25519":
return PublicKeyDetails.PKIX_ED25519;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
}
}
exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
@@ -115,7 +115,7 @@ function publicKeyDetailsToJSON(object) {
case PublicKeyDetails.PKIX_ED25519:
return "PKIX_ED25519";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
}
}
exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
@@ -146,7 +146,7 @@ function subjectAlternativeNameTypeFromJSON(object) {
case "OTHER_NAME":
return SubjectAlternativeNameType.OTHER_NAME;
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
}
}
exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
@@ -161,7 +161,7 @@ function subjectAlternativeNameTypeToJSON(object) {
case SubjectAlternativeNameType.OTHER_NAME:
return "OTHER_NAME";
default:
- throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+ throw new tsProtoGlobalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
}
}
exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
@@ -396,7 +396,7 @@ exports.TimeRange = {
return obj;
},
};
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
@@ -412,11 +412,11 @@ var globalThis = (() => {
throw "Unable to locate global object";
})();
function bytesFromBase64(b64) {
- if (globalThis.Buffer) {
- return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
}
else {
- const bin = globalThis.atob(b64);
+ const bin = tsProtoGlobalThis.atob(b64);
const arr = new Uint8Array(bin.length);
for (let i = 0; i < bin.length; ++i) {
arr[i] = bin.charCodeAt(i);
@@ -425,15 +425,15 @@ function bytesFromBase64(b64) {
}
}
function base64FromBytes(arr) {
- if (globalThis.Buffer) {
- return globalThis.Buffer.from(arr).toString("base64");
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
}
else {
const bin = [];
arr.forEach((byte) => {
bin.push(String.fromCharCode(byte));
});
- return globalThis.btoa(bin.join(""));
+ return tsProtoGlobalThis.btoa(bin.join(""));
}
}
function fromTimestamp(t) {
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
index bffc7700edbec1..398193b2075a70 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -122,7 +122,7 @@ exports.TransparencyLogEntry = {
return obj;
},
};
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
@@ -138,11 +138,11 @@ var globalThis = (() => {
throw "Unable to locate global object";
})();
function bytesFromBase64(b64) {
- if (globalThis.Buffer) {
- return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
}
else {
- const bin = globalThis.atob(b64);
+ const bin = tsProtoGlobalThis.atob(b64);
const arr = new Uint8Array(bin.length);
for (let i = 0; i < bin.length; ++i) {
arr[i] = bin.charCodeAt(i);
@@ -151,15 +151,15 @@ function bytesFromBase64(b64) {
}
}
function base64FromBytes(arr) {
- if (globalThis.Buffer) {
- return globalThis.Buffer.from(arr).toString("base64");
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
}
else {
const bin = [];
arr.forEach((byte) => {
bin.push(String.fromCharCode(byte));
});
- return globalThis.btoa(bin.join(""));
+ return tsProtoGlobalThis.btoa(bin.join(""));
}
}
function isSet(value) {
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
index b99a305ba53172..8a72b897618697 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -228,7 +228,7 @@ exports.Input = {
return obj;
},
};
-var globalThis = (() => {
+var tsProtoGlobalThis = (() => {
if (typeof globalThis !== "undefined") {
return globalThis;
}
@@ -244,11 +244,11 @@ var globalThis = (() => {
throw "Unable to locate global object";
})();
function bytesFromBase64(b64) {
- if (globalThis.Buffer) {
- return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ if (tsProtoGlobalThis.Buffer) {
+ return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64"));
}
else {
- const bin = globalThis.atob(b64);
+ const bin = tsProtoGlobalThis.atob(b64);
const arr = new Uint8Array(bin.length);
for (let i = 0; i < bin.length; ++i) {
arr[i] = bin.charCodeAt(i);
@@ -257,15 +257,15 @@ function bytesFromBase64(b64) {
}
}
function base64FromBytes(arr) {
- if (globalThis.Buffer) {
- return globalThis.Buffer.from(arr).toString("base64");
+ if (tsProtoGlobalThis.Buffer) {
+ return tsProtoGlobalThis.Buffer.from(arr).toString("base64");
}
else {
const bin = [];
arr.forEach((byte) => {
bin.push(String.fromCharCode(byte));
});
- return globalThis.btoa(bin.join(""));
+ return tsProtoGlobalThis.btoa(bin.join(""));
}
}
function isSet(value) {
diff --git a/deps/npm/node_modules/@sigstore/protobuf-specs/package.json b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json
index 7cb4aa9c5364ff..450abb157f31ab 100644
--- a/deps/npm/node_modules/@sigstore/protobuf-specs/package.json
+++ b/deps/npm/node_modules/@sigstore/protobuf-specs/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/protobuf-specs",
- "version": "0.1.0",
+ "version": "0.2.1",
"description": "code-signing for npm packages",
"main": "dist/index.js",
"types": "dist/index.d.ts",
diff --git a/deps/npm/node_modules/@sigstore/sign/LICENSE b/deps/npm/node_modules/@sigstore/sign/LICENSE
new file mode 100644
index 00000000000000..e9e7c1679a09df
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js
new file mode 100644
index 00000000000000..61d5eba4568a35
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/base.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BaseBundleBuilder = void 0;
+// BaseBundleBuilder is a base class for BundleBuilder implementations. It
+// provides the basic workflow for signing and witnessing an artifact.
+// Subclasses must implement the `package` method to assemble a valid bundle
+// with the generated signature and verification material.
+class BaseBundleBuilder {
+ constructor(options) {
+ this.signer = options.signer;
+ this.witnesses = options.witnesses;
+ }
+ // Executes the signing/witnessing process for the given artifact.
+ async create(artifact) {
+ const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob));
+ const bundle = await this.package(artifact, signature);
+ // Invoke all of the witnesses in parallel
+ const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key))));
+ // Collect the verification material from all of the witnesses
+ const tlogEntryList = [];
+ const timestampList = [];
+ verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => {
+ tlogEntryList.push(...(tlogEntries ?? []));
+ timestampList.push(...(rfc3161Timestamps ?? []));
+ });
+ // Merge the collected verification material into the bundle
+ bundle.verificationMaterial.tlogEntries = tlogEntryList;
+ bundle.verificationMaterial.timestampVerificationData = {
+ rfc3161Timestamps: timestampList,
+ };
+ return bundle;
+ }
+ // Override this function to apply any pre-signing transformations to the
+ // artifact. The returned buffer will be signed by the signer. The default
+ // implementation simply returns the artifact data.
+ async prepare(artifact) {
+ return artifact.data;
+ }
+}
+exports.BaseBundleBuilder = BaseBundleBuilder;
+// Extracts the public key from a KeyMaterial. Returns either the public key
+// or the certificate, depending on the type of key material.
+function publicKey(key) {
+ switch (key.$case) {
+ case 'publicKey':
+ return key.publicKey;
+ case 'x509Certificate':
+ return key.certificate;
+ }
+}
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js
new file mode 100644
index 00000000000000..f01aac252b304a
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js
@@ -0,0 +1,70 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDSSEBundle = exports.toMessageSignatureBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const sigstore = __importStar(require("@sigstore/bundle"));
+const util_1 = require("../util");
+// Helper functions for assembling the parts of a Sigstore bundle
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(artifact, signature) {
+ const digest = util_1.crypto.hash(artifact.data);
+ return sigstore.toMessageSignatureBundle({
+ digest,
+ signature: signature.signature,
+ certificate: signature.key.$case === 'x509Certificate'
+ ? util_1.pem.toDER(signature.key.certificate)
+ : undefined,
+ keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+ });
+}
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(artifact, signature) {
+ return sigstore.toDSSEBundle({
+ artifact: artifact.data,
+ artifactType: artifact.type,
+ signature: signature.signature,
+ certificate: signature.key.$case === 'x509Certificate'
+ ? util_1.pem.toDER(signature.key.certificate)
+ : undefined,
+ keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+ });
+}
+exports.toDSSEBundle = toDSSEBundle;
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js
new file mode 100644
index 00000000000000..486d289aea38cb
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js
@@ -0,0 +1,45 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSEBundleBuilder = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../util");
+const base_1 = require("./base");
+const bundle_1 = require("./bundle");
+// BundleBuilder implementation for DSSE-wrapped attestations
+class DSSEBundleBuilder extends base_1.BaseBundleBuilder {
+ constructor(options) {
+ super(options);
+ }
+ // DSSE requires the artifact to be pre-encoded with the payload type
+ // before the signature is generated.
+ async prepare(artifact) {
+ const a = artifactDefaults(artifact);
+ return util_1.dsse.preAuthEncoding(a.type, a.data);
+ }
+ // Packages the artifact and signature into a DSSE bundle
+ async package(artifact, signature) {
+ return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature);
+ }
+}
+exports.DSSEBundleBuilder = DSSEBundleBuilder;
+// Defaults the artifact type to an empty string if not provided
+function artifactDefaults(artifact) {
+ return {
+ ...artifact,
+ type: artifact.type ?? '',
+ };
+}
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js
new file mode 100644
index 00000000000000..d67c8c324a4f04
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/index.js
@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+var dsse_1 = require("./dsse");
+Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } });
+var message_1 = require("./message");
+Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } });
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js b/deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js
new file mode 100644
index 00000000000000..e3991f42bab939
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/bundler/message.js
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureBundleBuilder = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const base_1 = require("./base");
+const bundle_1 = require("./bundle");
+// BundleBuilder implementation for raw message signatures
+class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder {
+ constructor(options) {
+ super(options);
+ }
+ async package(artifact, signature) {
+ return (0, bundle_1.toMessageSignatureBundle)(artifact, signature);
+ }
+}
+exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder;
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/error.js b/deps/npm/node_modules/@sigstore/sign/dist/error.js
new file mode 100644
index 00000000000000..d57e4567fb89ee
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/error.js
@@ -0,0 +1,39 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.internalError = exports.InternalError = void 0;
+const error_1 = require("./external/error");
+class InternalError extends Error {
+ constructor({ code, message, cause, }) {
+ super(message);
+ this.name = this.constructor.name;
+ this.cause = cause;
+ this.code = code;
+ }
+}
+exports.InternalError = InternalError;
+function internalError(err, code, message) {
+ if (err instanceof error_1.HTTPError) {
+ message += ` - ${err.message}`;
+ }
+ throw new InternalError({
+ code: code,
+ message: message,
+ cause: err,
+ });
+}
+exports.internalError = internalError;
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/external/error.js b/deps/npm/node_modules/@sigstore/sign/dist/external/error.js
new file mode 100644
index 00000000000000..0dad92ea69414f
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/external/error.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.checkStatus = exports.HTTPError = void 0;
+class HTTPError extends Error {
+ constructor({ status, message, location, }) {
+ super(`(${status}) ${message}`);
+ this.statusCode = status;
+ this.location = location;
+ }
+}
+exports.HTTPError = HTTPError;
+const checkStatus = async (response) => {
+ if (response.ok) {
+ return response;
+ }
+ else {
+ let message = response.statusText;
+ const location = response.headers?.get('Location') || undefined;
+ const contentType = response.headers?.get('Content-Type');
+ // If response type is JSON, try to parse the body for a message
+ if (contentType?.includes('application/json')) {
+ try {
+ await response.json().then((body) => {
+ message = body.message;
+ });
+ }
+ catch (e) {
+ // ignore
+ }
+ }
+ throw new HTTPError({
+ status: response.status,
+ message: message,
+ location: location,
+ });
+ }
+};
+exports.checkStatus = checkStatus;
diff --git a/deps/npm/node_modules/sigstore/dist/external/fulcio.js b/deps/npm/node_modules/@sigstore/sign/dist/external/fulcio.js
similarity index 95%
rename from deps/npm/node_modules/sigstore/dist/external/fulcio.js
rename to deps/npm/node_modules/@sigstore/sign/dist/external/fulcio.js
index aeb48d58d8d83e..f00b62e147cd7f 100644
--- a/deps/npm/node_modules/sigstore/dist/external/fulcio.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/external/fulcio.js
@@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.Fulcio = void 0;
/*
-Copyright 2022 The Sigstore Authors.
+Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -43,7 +43,7 @@ class Fulcio {
method: 'POST',
body: JSON.stringify(request),
});
- (0, error_1.checkStatus)(response);
+ await (0, error_1.checkStatus)(response);
const data = await response.json();
return data;
}
diff --git a/deps/npm/node_modules/sigstore/dist/external/rekor.js b/deps/npm/node_modules/@sigstore/sign/dist/external/rekor.js
similarity index 94%
rename from deps/npm/node_modules/sigstore/dist/external/rekor.js
rename to deps/npm/node_modules/@sigstore/sign/dist/external/rekor.js
index b6bbeb6f207934..6f6cb96cc9c5cb 100644
--- a/deps/npm/node_modules/sigstore/dist/external/rekor.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/external/rekor.js
@@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.Rekor = void 0;
/*
-Copyright 2022 The Sigstore Authors.
+Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -49,7 +49,7 @@ class Rekor {
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(propsedEntry),
});
- (0, error_1.checkStatus)(response);
+ await (0, error_1.checkStatus)(response);
const data = await response.json();
return entryFromResponse(data);
}
@@ -61,7 +61,7 @@ class Rekor {
async getEntry(uuid) {
const url = `${this.baseUrl}/api/v1/log/entries/${uuid}`;
const response = await this.fetch(url);
- (0, error_1.checkStatus)(response);
+ await (0, error_1.checkStatus)(response);
const data = await response.json();
return entryFromResponse(data);
}
@@ -77,7 +77,7 @@ class Rekor {
body: JSON.stringify(opts),
headers: { 'Content-Type': 'application/json' },
});
- (0, error_1.checkStatus)(response);
+ await (0, error_1.checkStatus)(response);
const data = await response.json();
return data;
}
@@ -93,7 +93,7 @@ class Rekor {
body: JSON.stringify(opts),
headers: { 'Content-Type': 'application/json' },
});
- (0, error_1.checkStatus)(response);
+ await (0, error_1.checkStatus)(response);
const rawData = await response.json();
const data = rawData.map((d) => entryFromResponse(d));
return data;
diff --git a/deps/npm/node_modules/sigstore/dist/external/tsa.js b/deps/npm/node_modules/@sigstore/sign/dist/external/tsa.js
similarity index 97%
rename from deps/npm/node_modules/sigstore/dist/external/tsa.js
rename to deps/npm/node_modules/@sigstore/sign/dist/external/tsa.js
index 5277d7d3f97071..252c14f2d32d87 100644
--- a/deps/npm/node_modules/sigstore/dist/external/tsa.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/external/tsa.js
@@ -40,7 +40,7 @@ class TimestampAuthority {
method: 'POST',
body: JSON.stringify(request),
});
- (0, error_1.checkStatus)(response);
+ await (0, error_1.checkStatus)(response);
return response.buffer();
}
}
diff --git a/deps/npm/node_modules/sigstore/dist/identity/ci.js b/deps/npm/node_modules/@sigstore/sign/dist/identity/ci.js
similarity index 90%
rename from deps/npm/node_modules/sigstore/dist/identity/ci.js
rename to deps/npm/node_modules/@sigstore/sign/dist/identity/ci.js
index 0f01e1baaec57d..d79133952b605b 100644
--- a/deps/npm/node_modules/sigstore/dist/identity/ci.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/identity/ci.js
@@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.CIContextProvider = void 0;
/*
-Copyright 2022 The Sigstore Authors.
+Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -20,7 +20,6 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const util_1 = require("../util");
// Collection of all the CI-specific providers we have implemented
const providers = [getGHAToken, getEnv];
/**
@@ -29,15 +28,14 @@ const providers = [getGHAToken, getEnv];
* one that resolves.
*/
class CIContextProvider {
- constructor(audience) {
+ /* istanbul ignore next */
+ constructor(audience = 'sigstore') {
this.audience = audience;
}
// Invoke all registered ProviderFuncs and return the value of whichever one
// resolves first.
async getToken() {
- return util_1.promise
- .promiseAny(providers.map((getToken) => getToken(this.audience)))
- .catch(() => Promise.reject('CI: no tokens available'));
+ return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available'));
}
}
exports.CIContextProvider = CIContextProvider;
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/identity/index.js b/deps/npm/node_modules/@sigstore/sign/dist/identity/index.js
new file mode 100644
index 00000000000000..1c1223b443fab6
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/identity/index.js
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CIContextProvider = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var ci_1 = require("./ci");
+Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } });
diff --git a/deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.js b/deps/npm/node_modules/@sigstore/sign/dist/identity/provider.js
similarity index 100%
rename from deps/npm/node_modules/sigstore/dist/types/sigstore/serialized.js
rename to deps/npm/node_modules/@sigstore/sign/dist/identity/provider.js
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/index.js b/deps/npm/node_modules/@sigstore/sign/dist/index.js
new file mode 100644
index 00000000000000..383b76083361b9
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/index.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+var bundler_1 = require("./bundler");
+Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } });
+Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } });
+var identity_1 = require("./identity");
+Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } });
+var signer_1 = require("./signer");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } });
+var witness_1 = require("./witness");
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } });
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } });
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } });
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
new file mode 100644
index 00000000000000..81b421eabadb2e
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
@@ -0,0 +1,60 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CAClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const fulcio_1 = require("../../external/fulcio");
+class CAClient {
+ constructor(options) {
+ this.fulcio = new fulcio_1.Fulcio({
+ baseURL: options.fulcioBaseURL,
+ retry: options.retry,
+ timeout: options.timeout,
+ });
+ }
+ async createSigningCertificate(identityToken, publicKey, challenge) {
+ const request = toCertificateRequest(identityToken, publicKey, challenge);
+ try {
+ const resp = await this.fulcio.createSigningCertificate(request);
+ // Account for the fact that the response may contain either a
+ // signedCertificateEmbeddedSct or a signedCertificateDetachedSct.
+ const cert = resp.signedCertificateEmbeddedSct
+ ? resp.signedCertificateEmbeddedSct
+ : resp.signedCertificateDetachedSct;
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ return cert.chain.certificates;
+ }
+ catch (err) {
+ (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate');
+ }
+ }
+}
+exports.CAClient = CAClient;
+function toCertificateRequest(identityToken, publicKey, challenge) {
+ return {
+ credentials: {
+ oidcIdentityToken: identityToken,
+ },
+ publicKeyRequest: {
+ publicKey: {
+ algorithm: 'ECDSA',
+ content: publicKey,
+ },
+ proofOfPossession: challenge.toString('base64'),
+ },
+ };
+}
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
new file mode 100644
index 00000000000000..481aa5c3579a27
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
@@ -0,0 +1,45 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.EphemeralSigner = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+const EC_KEYPAIR_TYPE = 'ec';
+const P256_CURVE = 'P-256';
+// Signer implementation which uses an ephemeral keypair to sign artifacts.
+// The private key lives only in memory and is tied to the lifetime of the
+// EphemeralSigner instance.
+class EphemeralSigner {
+ constructor() {
+ this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
+ namedCurve: P256_CURVE,
+ });
+ }
+ async sign(data) {
+ const signature = crypto_1.default.sign(null, data, this.keypair.privateKey);
+ const publicKey = this.keypair.publicKey
+ .export({ format: 'pem', type: 'spki' })
+ .toString('ascii');
+ return {
+ signature: signature,
+ key: { $case: 'publicKey', publicKey },
+ };
+ }
+}
+exports.EphemeralSigner = EphemeralSigner;
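Aside: the same ephemeral-keypair flow in plain Node crypto, illustrative only. The class above passes null as the digest and relies on Node's key-dependent default; the sketch names 'sha256' explicitly for clarity:

    const crypto = require('crypto');

    // Ephemeral P-256 keypair; the private key only ever lives in process memory.
    const { privateKey, publicKey } = crypto.generateKeyPairSync('ec', { namedCurve: 'P-256' });

    const data = Buffer.from('artifact bytes');
    const signature = crypto.sign('sha256', data, privateKey);
    const pem = publicKey.export({ format: 'pem', type: 'spki' }).toString('ascii');

    // The exported SPKI PEM is what gets bound to the signing certificate request.
    console.log(crypto.verify('sha256', data, crypto.createPublicKey(pem), signature)); // true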
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
new file mode 100644
index 00000000000000..89a432548d2b42
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
@@ -0,0 +1,87 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const util_1 = require("../../util");
+const ca_1 = require("./ca");
+const ephemeral_1 = require("./ephemeral");
+exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
+// Signer implementation which can be used to decorate another signer
+// with a Fulcio-issued signing certificate for the signer's public key.
+// Must be instantiated with an identity provider which can provide a JWT
+// which represents the identity to be bound to the signing certificate.
+class FulcioSigner {
+ constructor(options) {
+ this.ca = new ca_1.CAClient({
+ ...options,
+ fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL,
+ });
+ this.identityProvider = options.identityProvider;
+ this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner();
+ }
+ async sign(data) {
+ // Retrieve identity token from the supplied identity provider
+ const identityToken = await this.getIdentityToken();
+ // Extract challenge claim from OIDC token
+ let subject;
+ try {
+ subject = util_1.oidc.extractJWTSubject(identityToken);
+ }
+ catch (err) {
+ throw new error_1.InternalError({
+ code: 'IDENTITY_TOKEN_PARSE_ERROR',
+ message: `invalid identity token: ${identityToken}`,
+ cause: err,
+ });
+ }
+ // Construct challenge value by signing the subject claim
+ const challenge = await this.keyHolder.sign(Buffer.from(subject));
+ if (challenge.key.$case !== 'publicKey') {
+ throw new error_1.InternalError({
+ code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
+ message: 'unexpected format for signing key',
+ });
+ }
+ // Create signing certificate
+ const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature);
+ // Generate artifact signature
+ const signature = await this.keyHolder.sign(data);
+ // Specifically returning only the first certificate in the chain
+ // as the key.
+ return {
+ signature: signature.signature,
+ key: {
+ $case: 'x509Certificate',
+ certificate: certificates[0],
+ },
+ };
+ }
+ async getIdentityToken() {
+ try {
+ return await this.identityProvider.getToken();
+ }
+ catch (err) {
+ throw new error_1.InternalError({
+ code: 'IDENTITY_TOKEN_READ_ERROR',
+ message: 'error retrieving identity token',
+ cause: err,
+ });
+ }
+ }
+}
+exports.FulcioSigner = FulcioSigner;
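Aside: a hedged sketch of how the new signer is wired together, using only the option names visible above (fulcioBaseURL, identityProvider, keyHolder). Whether callers construct it directly rather than through the higher-level sigstore package is an assumption here:

    const { FulcioSigner, CIContextProvider, DEFAULT_FULCIO_URL } = require('@sigstore/sign');

    const signer = new FulcioSigner({
      fulcioBaseURL: DEFAULT_FULCIO_URL,
      identityProvider: new CIContextProvider('sigstore'),
      // keyHolder defaults to the EphemeralSigner shown earlier
    });

    signer.sign(Buffer.from('payload')).then((result) => {
      // result.key.$case === 'x509Certificate'; result.signature holds the raw signature
      console.log(result.key.certificate);
    });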
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/signer/index.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/index.js
new file mode 100644
index 00000000000000..06ec9dbe72fe14
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/index.js
@@ -0,0 +1,21 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var fulcio_1 = require("./fulcio");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } });
diff --git a/deps/npm/node_modules/sigstore/bin/sigstore.js b/deps/npm/node_modules/@sigstore/sign/dist/signer/signer.js
old mode 100755
new mode 100644
similarity index 82%
rename from deps/npm/node_modules/sigstore/bin/sigstore.js
rename to deps/npm/node_modules/@sigstore/sign/dist/signer/signer.js
index a07b7bdc1af95a..b92c54183375d9
--- a/deps/npm/node_modules/sigstore/bin/sigstore.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/signer/signer.js
@@ -1,6 +1,6 @@
-#!/usr/bin/env node
+"use strict";
/*
-Copyright 2022 The Sigstore Authors.
+Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -14,4 +14,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
-require('../dist/cli').processArgv();
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js b/deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js
new file mode 100644
index 00000000000000..c8ad2e549bdc68
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/types/fetch.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/crypto.js b/deps/npm/node_modules/@sigstore/sign/dist/util/crypto.js
new file mode 100644
index 00000000000000..11aad2fb6ff8b0
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/util/crypto.js
@@ -0,0 +1,27 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.hash = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+const SHA256_ALGORITHM = 'sha256';
+function hash(data, algorithm = SHA256_ALGORITHM) {
+ return crypto_1.default.createHash(algorithm).update(data).digest();
+}
+exports.hash = hash;
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/dsse.js b/deps/npm/node_modules/@sigstore/sign/dist/util/dsse.js
new file mode 100644
index 00000000000000..befcdbdc14ec81
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/util/dsse.js
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.preAuthEncoding = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PAE_PREFIX = 'DSSEv1';
+// DSSE Pre-Authentication Encoding
+function preAuthEncoding(payloadType, payload) {
+ const prefix = Buffer.from(`${PAE_PREFIX} ${payloadType.length} ${payloadType} ${payload.length} `, 'ascii');
+ return Buffer.concat([prefix, payload]);
+}
+exports.preAuthEncoding = preAuthEncoding;
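Aside: a worked example of the pre-authentication encoding above (illustrative; the deep require path mirrors the file layout in this patch and is not a documented entry point):

    const { preAuthEncoding } = require('@sigstore/sign/dist/util/dsse');

    const payloadType = 'application/vnd.in-toto+json';  // 28 characters
    const payload = Buffer.from('{"foo":"bar"}');         // 13 bytes

    console.log(preAuthEncoding(payloadType, payload).toString('ascii'));
    // DSSEv1 28 application/vnd.in-toto+json 13 {"foo":"bar"}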
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/encoding.js b/deps/npm/node_modules/@sigstore/sign/dist/util/encoding.js
new file mode 100644
index 00000000000000..b020ac4d6ecd42
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/util/encoding.js
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.base64Decode = exports.base64Encode = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const BASE64_ENCODING = 'base64';
+const UTF8_ENCODING = 'utf-8';
+function base64Encode(str) {
+ return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING);
+}
+exports.base64Encode = base64Encode;
+function base64Decode(str) {
+ return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING);
+}
+exports.base64Decode = base64Decode;
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/index.js b/deps/npm/node_modules/@sigstore/sign/dist/util/index.js
new file mode 100644
index 00000000000000..567e5dbf6e04c7
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/util/index.js
@@ -0,0 +1,48 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ua = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+exports.crypto = __importStar(require("./crypto"));
+exports.dsse = __importStar(require("./dsse"));
+exports.encoding = __importStar(require("./encoding"));
+exports.json = __importStar(require("./json"));
+exports.oidc = __importStar(require("./oidc"));
+exports.pem = __importStar(require("./pem"));
+exports.ua = __importStar(require("./ua"));
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/json.js b/deps/npm/node_modules/@sigstore/sign/dist/util/json.js
new file mode 100644
index 00000000000000..69176ad731eb78
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/util/json.js
@@ -0,0 +1,61 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.canonicalize = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+// JSON canonicalization per https://github.com/cyberphone/json-canonicalization
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function canonicalize(object) {
+ let buffer = '';
+ if (object === null || typeof object !== 'object' || object.toJSON != null) {
+ // Primitives or toJSONable objects
+ buffer += JSON.stringify(object);
+ }
+ else if (Array.isArray(object)) {
+ // Array - maintain element order
+ buffer += '[';
+ let first = true;
+ object.forEach((element) => {
+ if (!first) {
+ buffer += ',';
+ }
+ first = false;
+ // recursive call
+ buffer += canonicalize(element);
+ });
+ buffer += ']';
+ }
+ else {
+ // Object - Sort properties before serializing
+ buffer += '{';
+ let first = true;
+ Object.keys(object)
+ .sort()
+ .forEach((property) => {
+ if (!first) {
+ buffer += ',';
+ }
+ first = false;
+ buffer += JSON.stringify(property);
+ buffer += ':';
+ // recursive call
+ buffer += canonicalize(object[property]);
+ });
+ buffer += '}';
+ }
+ return buffer;
+}
+exports.canonicalize = canonicalize;
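Aside: usage sketch for the canonicalizer above. Keys are sorted recursively while array order is preserved, so logically equal objects serialize (and therefore hash) identically. The deep require path mirrors this patch's layout:

    const { canonicalize } = require('@sigstore/sign/dist/util/json');

    console.log(canonicalize({ b: 1, a: { d: [2, 1], c: null } }));
    // {"a":{"c":null,"d":[2,1]},"b":1}
    console.log(canonicalize({ a: { c: null, d: [2, 1] }, b: 1 }));
    // same output, so the SHA-256 over it matches too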
diff --git a/deps/npm/node_modules/sigstore/dist/util/oidc.js b/deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js
similarity index 98%
rename from deps/npm/node_modules/sigstore/dist/util/oidc.js
rename to deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js
index 05af90d09ae684..8b49f3bbe84401 100644
--- a/deps/npm/node_modules/sigstore/dist/util/oidc.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/util/oidc.js
@@ -25,7 +25,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.extractJWTSubject = void 0;
/*
-Copyright 2022 The Sigstore Authors.
+Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/util/pem.js b/deps/npm/node_modules/@sigstore/sign/dist/util/pem.js
new file mode 100644
index 00000000000000..36eeebd2052f5e
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/util/pem.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDER = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PEM_HEADER = /-----BEGIN (.*)-----/;
+const PEM_FOOTER = /-----END (.*)-----/;
+function toDER(certificate) {
+ const lines = certificate
+ .split('\n')
+ .map((line) => line.match(PEM_HEADER) || line.match(PEM_FOOTER) ? '' : line);
+ return Buffer.from(lines.join(''), 'base64');
+}
+exports.toDER = toDER;
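Aside: what toDER above does with a PEM block (illustrative; deep require path mirrors this patch):

    const { toDER } = require('@sigstore/sign/dist/util/pem');

    const pem = ['-----BEGIN CERTIFICATE-----', 'AQID', '-----END CERTIFICATE-----'].join('\n');
    console.log(toDER(pem)); // <Buffer 01 02 03> -- armor lines dropped, base64 body decoded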
diff --git a/deps/npm/node_modules/sigstore/dist/util/ua.js b/deps/npm/node_modules/@sigstore/sign/dist/util/ua.js
similarity index 97%
rename from deps/npm/node_modules/sigstore/dist/util/ua.js
rename to deps/npm/node_modules/@sigstore/sign/dist/util/ua.js
index 6db6b5a2723db2..c142330eb8338c 100644
--- a/deps/npm/node_modules/sigstore/dist/util/ua.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/util/ua.js
@@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.getUserAgent = void 0;
/*
-Copyright 2022 The Sigstore Authors.
+Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/index.js
new file mode 100644
index 00000000000000..e200d0638350bb
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/index.js
@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var tlog_1 = require("./tlog");
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } });
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } });
+var tsa_1 = require("./tsa");
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } });
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js
similarity index 54%
rename from deps/npm/node_modules/sigstore/dist/tlog/index.js
rename to deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js
index 7f5f531983b37d..22c895f2ca7edd 100644
--- a/deps/npm/node_modules/sigstore/dist/tlog/index.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js
@@ -2,7 +2,7 @@
Object.defineProperty(exports, "__esModule", { value: true });
exports.TLogClient = void 0;
/*
-Copyright 2022 The Sigstore Authors.
+Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -16,52 +16,38 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
-const error_1 = require("../error");
-const external_1 = require("../external");
-const format_1 = require("./format");
+const error_1 = require("../../error");
+const error_2 = require("../../external/error");
+const rekor_1 = require("../../external/rekor");
class TLogClient {
constructor(options) {
- this.rekor = new external_1.Rekor({
+ this.fetchOnConflict = options.fetchOnConflict ?? false;
+ this.rekor = new rekor_1.Rekor({
baseURL: options.rekorBaseURL,
retry: options.retry,
timeout: options.timeout,
});
}
- async createMessageSignatureEntry(digest, sigMaterial, options = {}) {
- const proposedEntry = (0, format_1.toProposedHashedRekordEntry)(digest, sigMaterial);
- return this.createEntry(proposedEntry, options.fetchOnConflict);
- }
- async createDSSEEntry(envelope, sigMaterial, options = {}) {
- const proposedEntry = (0, format_1.toProposedIntotoEntry)(envelope, sigMaterial);
- return this.createEntry(proposedEntry, options.fetchOnConflict);
- }
- async createEntry(proposedEntry, fetchOnConflict = false) {
+ async createEntry(proposedEntry) {
let entry;
try {
entry = await this.rekor.createEntry(proposedEntry);
}
catch (err) {
// If the entry already exists, fetch it (if enabled)
- if (entryExistsError(err) && fetchOnConflict) {
+ if (entryExistsError(err) && this.fetchOnConflict) {
// Grab the UUID of the existing entry from the location header
+ /* istanbul ignore next */
const uuid = err.location.split('/').pop() || '';
try {
entry = await this.rekor.getEntry(uuid);
}
catch (err) {
- throw new error_1.InternalError({
- code: 'TLOG_FETCH_ENTRY_ERROR',
- message: 'error fetching tlog entry',
- cause: err,
- });
+ (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry');
}
}
else {
- throw new error_1.InternalError({
- code: 'TLOG_CREATE_ENTRY_ERROR',
- message: 'error creating tlog entry',
- cause: err,
- });
+ (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry');
}
}
return entry;
@@ -69,7 +55,7 @@ class TLogClient {
}
exports.TLogClient = TLogClient;
function entryExistsError(value) {
- return (value instanceof external_1.HTTPError &&
+ return (value instanceof error_2.HTTPError &&
value.statusCode === 409 &&
value.location !== undefined);
}
diff --git a/deps/npm/node_modules/sigstore/dist/tlog/format.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
similarity index 52%
rename from deps/npm/node_modules/sigstore/dist/tlog/format.js
rename to deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
index b0eae95098af01..f6c165380ba45d 100644
--- a/deps/npm/node_modules/sigstore/dist/tlog/format.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
@@ -1,30 +1,46 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.toProposedIntotoEntry = exports.toProposedHashedRekordEntry = exports.toProposedDSSEEntry = void 0;
-const sigstore_1 = require("../types/sigstore");
-const util_1 = require("../util");
-const DEFAULT_DSSE_API_VERSION = '0.0.1';
-const DEFAULT_HASHEDREKORD_API_VERSION = '0.0.1';
-const DEFAULT_INTOTO_API_VERSION = '0.0.2';
-// Returns a properly formatted Rekor "dsse" entry for the given DSSE
-// envelope and signature
-function toProposedDSSEEntry(envelope, signature, apiVersion = DEFAULT_DSSE_API_VERSION) {
- switch (apiVersion) {
- case '0.0.1':
- return toProposedDSSEV001Entry(envelope, signature);
- default:
- throw new Error(`Unsupported dsse kind API version: ${apiVersion}`);
+exports.toProposedEntry = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("@sigstore/bundle");
+const util_1 = require("../../util");
+function toProposedEntry(content, publicKey,
+// TODO: Remove this parameter once have completely switched to 'dsse' entries
+entryType = 'intoto') {
+ switch (content.$case) {
+ case 'dsseEnvelope':
+ // TODO: Remove this conditional once have completely switched to 'dsse' entries
+ if (entryType === 'dsse') {
+ return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
+ }
+ return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
+ case 'messageSignature':
+ return toProposedHashedRekordEntry(content.messageSignature, publicKey);
}
}
-exports.toProposedDSSEEntry = toProposedDSSEEntry;
+exports.toProposedEntry = toProposedEntry;
// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
// and signature
-function toProposedHashedRekordEntry(digest, signature) {
- const hexDigest = digest.toString('hex');
- const b64Signature = signature.signature.toString('base64');
- const b64Key = util_1.encoding.base64Encode(toPublicKey(signature));
+function toProposedHashedRekordEntry(messageSignature, publicKey) {
+ const hexDigest = messageSignature.messageDigest.digest.toString('hex');
+ const b64Signature = messageSignature.signature.toString('base64');
+ const b64Key = util_1.encoding.base64Encode(publicKey);
return {
- apiVersion: DEFAULT_HASHEDREKORD_API_VERSION,
+ apiVersion: '0.0.1',
kind: 'hashedrekord',
spec: {
data: {
@@ -42,61 +58,55 @@ function toProposedHashedRekordEntry(digest, signature) {
},
};
}
-exports.toProposedHashedRekordEntry = toProposedHashedRekordEntry;
-// Returns a properly formatted Rekor "intoto" entry for the given DSSE
-// envelope and signature
-function toProposedIntotoEntry(envelope, signature, apiVersion = DEFAULT_INTOTO_API_VERSION) {
- switch (apiVersion) {
- case '0.0.2':
- return toProposedIntotoV002Entry(envelope, signature);
- default:
- throw new Error(`Unsupported intoto kind API version: ${apiVersion}`);
- }
-}
-exports.toProposedIntotoEntry = toProposedIntotoEntry;
-function toProposedDSSEV001Entry(envelope, signature) {
+// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope
+// and signature
+function toProposedDSSEEntry(envelope, publicKey) {
+ const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope));
+ const encodedKey = util_1.encoding.base64Encode(publicKey);
return {
apiVersion: '0.0.1',
kind: 'dsse',
spec: {
proposedContent: {
- envelope: JSON.stringify(sigstore_1.Envelope.toJSON(envelope)),
- verifiers: [util_1.encoding.base64Encode(toPublicKey(signature))],
+ envelope: envelopeJSON,
+ verifiers: [encodedKey],
},
},
};
}
-function toProposedIntotoV002Entry(envelope, signature) {
+// Returns a properly formatted Rekor "intoto" entry for the given DSSE
+// envelope and signature
+function toProposedIntotoEntry(envelope, publicKey) {
// Calculate the value for the payloadHash field in the Rekor entry
const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex');
// Calculate the value for the hash field in the Rekor entry
- const envelopeHash = calculateDSSEHash(envelope, signature);
+ const envelopeHash = calculateDSSEHash(envelope, publicKey);
// Collect values for re-creating the DSSE envelope.
// Double-encode payload and signature cause that's what Rekor expects
const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
const keyid = envelope.signatures[0].keyid;
- const publicKey = util_1.encoding.base64Encode(toPublicKey(signature));
+ const encodedKey = util_1.encoding.base64Encode(publicKey);
// Create the envelope portion of the entry. Note the inclusion of the
// publicKey in the signature struct is not a standard part of a DSSE
// envelope, but is required by Rekor.
- const dsseEnv = {
+ const dsse = {
payloadType: envelope.payloadType,
payload: payload,
- signatures: [{ sig, publicKey }],
+ signatures: [{ sig, publicKey: encodedKey }],
};
// If the keyid is an empty string, Rekor seems to remove it altogether. We
// need to do the same here so that we can properly recreate the entry for
// verification.
if (keyid.length > 0) {
- dsseEnv.signatures[0].keyid = keyid;
+ dsse.signatures[0].keyid = keyid;
}
return {
apiVersion: '0.0.2',
kind: 'intoto',
spec: {
content: {
- envelope: dsseEnv,
+ envelope: dsse,
hash: { algorithm: 'sha256', value: envelopeHash },
payloadHash: { algorithm: 'sha256', value: payloadHash },
},
@@ -110,25 +120,17 @@ function toProposedIntotoV002Entry(envelope, signature) {
// * signature is base64 encoded (only the first signature is used)
// * keyid is included ONLY if it is NOT an empty string
// * The resulting JSON is canonicalized and hashed to a hex string
-function calculateDSSEHash(envelope, signature) {
- const dsseEnv = {
+function calculateDSSEHash(envelope, publicKey) {
+ const dsse = {
payloadType: envelope.payloadType,
payload: envelope.payload.toString('base64'),
signatures: [
- {
- sig: envelope.signatures[0].sig.toString('base64'),
- publicKey: toPublicKey(signature),
- },
+ { sig: envelope.signatures[0].sig.toString('base64'), publicKey },
],
};
// If the keyid is an empty string, Rekor seems to remove it altogether.
if (envelope.signatures[0].keyid.length > 0) {
- dsseEnv.signatures[0].keyid = envelope.signatures[0].keyid;
+ dsse.signatures[0].keyid = envelope.signatures[0].keyid;
}
- return util_1.crypto.hash(util_1.json.canonicalize(dsseEnv)).toString('hex');
-}
-function toPublicKey(signature) {
- return signature.certificates
- ? signature.certificates[0]
- : signature.key.value;
+ return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex');
}
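Aside: the "double-encode" comment in the intoto hunk above in concrete terms. The DSSE envelope already carries the payload as base64, and the Rekor intoto entry expects that string base64-encoded once more (illustrative sketch, not part of the patch):

    const payload = Buffer.from('{"hello":"world"}');
    const inEnvelope = payload.toString('base64');                        // envelope.payload, base64
    const forRekor = Buffer.from(inEnvelope, 'utf-8').toString('base64'); // what the entry stores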
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js
new file mode 100644
index 00000000000000..1f098df85390cf
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js
@@ -0,0 +1,81 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../../util");
+const client_1 = require("./client");
+const entry_1 = require("./entry");
+exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
+class RekorWitness {
+ constructor(options) {
+ this.tlog = new client_1.TLogClient({
+ ...options,
+ rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL,
+ });
+ }
+ async testify(content, publicKey) {
+ const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey);
+ const entry = await this.tlog.createEntry(proposedEntry);
+ return toTransparencyLogEntry(entry);
+ }
+}
+exports.RekorWitness = RekorWitness;
+function toTransparencyLogEntry(entry) {
+ const logID = Buffer.from(entry.logID, 'hex');
+ // Parse entry body so we can extract the kind and version.
+ const bodyJSON = util_1.encoding.base64Decode(entry.body);
+ const entryBody = JSON.parse(bodyJSON);
+ const promise = entry?.verification?.signedEntryTimestamp
+ ? inclusionPromise(entry.verification.signedEntryTimestamp)
+ : undefined;
+ const proof = entry?.verification?.inclusionProof
+ ? inclusionProof(entry.verification.inclusionProof)
+ : undefined;
+ const tlogEntry = {
+ logIndex: entry.logIndex.toString(),
+ logId: {
+ keyId: logID,
+ },
+ integratedTime: entry.integratedTime.toString(),
+ kindVersion: {
+ kind: entryBody.kind,
+ version: entryBody.apiVersion,
+ },
+ inclusionPromise: promise,
+ inclusionProof: proof,
+ canonicalizedBody: Buffer.from(entry.body, 'base64'),
+ };
+ return {
+ tlogEntries: [tlogEntry],
+ };
+}
+function inclusionPromise(promise) {
+ return {
+ signedEntryTimestamp: Buffer.from(promise, 'base64'),
+ };
+}
+function inclusionProof(proof) {
+ return {
+ logIndex: proof.logIndex.toString(),
+ treeSize: proof.treeSize.toString(),
+ rootHash: Buffer.from(proof.rootHash, 'hex'),
+ hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')),
+ checkpoint: {
+ envelope: proof.checkpoint,
+ },
+ };
+}
diff --git a/deps/npm/node_modules/sigstore/dist/tsa/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js
similarity index 74%
rename from deps/npm/node_modules/sigstore/dist/tsa/index.js
rename to deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js
index 4951b24a93f4fe..a334deb00b7756 100644
--- a/deps/npm/node_modules/sigstore/dist/tsa/index.js
+++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js
@@ -2,7 +2,7 @@
Object.defineProperty(exports, "__esModule", { value: true });
exports.TSAClient = void 0;
/*
-Copyright 2022 The Sigstore Authors.
+Copyright 2023 The Sigstore Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -16,12 +16,12 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
-const error_1 = require("../error");
-const external_1 = require("../external");
-const util_1 = require("../util");
+const error_1 = require("../../error");
+const tsa_1 = require("../../external/tsa");
+const util_1 = require("../../util");
class TSAClient {
constructor(options) {
- this.tsa = new external_1.TimestampAuthority({
+ this.tsa = new tsa_1.TimestampAuthority({
baseURL: options.tsaBaseURL,
retry: options.retry,
timeout: options.timeout,
@@ -36,11 +36,7 @@ class TSAClient {
return await this.tsa.createTimestamp(request);
}
catch (err) {
- throw new error_1.InternalError({
- code: 'TSA_CREATE_TIMESTAMP_ERROR',
- message: 'error creating timestamp',
- cause: err,
- });
+ (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp');
}
}
}
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js
new file mode 100644
index 00000000000000..d4f5c7c859d106
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js
@@ -0,0 +1,44 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const client_1 = require("./client");
+class TSAWitness {
+ constructor(options) {
+ this.tsa = new client_1.TSAClient({
+ tsaBaseURL: options.tsaBaseURL,
+ retry: options.retry,
+ timeout: options.timeout,
+ });
+ }
+ async testify(content) {
+ const signature = extractSignature(content);
+ const timestamp = await this.tsa.createTimestamp(signature);
+ return {
+ rfc3161Timestamps: [{ signedTimestamp: timestamp }],
+ };
+ }
+}
+exports.TSAWitness = TSAWitness;
+function extractSignature(content) {
+ switch (content.$case) {
+ case 'dsseEnvelope':
+ return content.dsseEnvelope.signatures[0].sig;
+ case 'messageSignature':
+ return content.messageSignature.signature;
+ }
+}
diff --git a/deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js b/deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js
new file mode 100644
index 00000000000000..c8ad2e549bdc68
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/dist/witness/witness.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/deps/npm/node_modules/@sigstore/sign/package.json b/deps/npm/node_modules/@sigstore/sign/package.json
new file mode 100644
index 00000000000000..cd8dc14412e4da
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/package.json
@@ -0,0 +1,42 @@
+{
+ "name": "@sigstore/sign",
+ "version": "2.1.0",
+ "description": "Sigstore signing library",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "scripts": {
+ "clean": "shx rm -rf dist *.tsbuildinfo",
+ "build": "tsc --build",
+ "test": "jest"
+ },
+ "files": [
+ "dist"
+ ],
+ "author": "bdehamer@github.com",
+ "license": "Apache-2.0",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sigstore/sigstore-js.git"
+ },
+ "bugs": {
+ "url": "https://github.com/sigstore/sigstore-js/issues"
+ },
+ "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme",
+ "publishConfig": {
+ "provenance": true
+ },
+ "devDependencies": {
+ "@sigstore/jest": "^0.0.0",
+ "@sigstore/mock": "^0.4.0",
+ "@sigstore/rekor-types": "^2.0.0",
+ "@types/make-fetch-happen": "^10.0.0"
+ },
+ "dependencies": {
+ "@sigstore/bundle": "^2.1.0",
+ "@sigstore/protobuf-specs": "^0.2.1",
+ "make-fetch-happen": "^13.0.0"
+ },
+ "engines": {
+ "node": "^16.14.0 || >=18.0.0"
+ }
+}
diff --git a/deps/npm/node_modules/@sigstore/tuf/dist/client.js b/deps/npm/node_modules/@sigstore/tuf/dist/client.js
index 08d6b61840909f..797346d39e6202 100644
--- a/deps/npm/node_modules/@sigstore/tuf/dist/client.js
+++ b/deps/npm/node_modules/@sigstore/tuf/dist/client.js
@@ -76,21 +76,8 @@ function initClient(cachePath, remote, options) {
const baseURL = remote.mirror;
const config = {
fetchTimeout: options.timeout,
+ fetchRetry: options.retry,
};
- // tuf-js only supports a number for fetchRetries so we have to
- // convert the boolean and object options to a number.
- /* istanbul ignore if */
- if (typeof options.retry !== 'undefined') {
- if (typeof options.retry === 'number') {
- config.fetchRetries = options.retry;
- }
- else if (typeof options.retry === 'object') {
- config.fetchRetries = options.retry.retries;
- }
- else if (options.retry === true) {
- config.fetchRetries = 1;
- }
- }
return new tuf_js_1.Updater({
metadataBaseUrl: baseURL,
targetBaseUrl: `${baseURL}/targets`,
diff --git a/deps/npm/node_modules/@sigstore/tuf/package.json b/deps/npm/node_modules/@sigstore/tuf/package.json
index 286d481a4d39fc..a655d52a0407a3 100644
--- a/deps/npm/node_modules/@sigstore/tuf/package.json
+++ b/deps/npm/node_modules/@sigstore/tuf/package.json
@@ -1,6 +1,6 @@
{
"name": "@sigstore/tuf",
- "version": "1.0.2",
+ "version": "2.1.0",
"description": "Client for the Sigstore TUF repository",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -28,14 +28,14 @@
},
"devDependencies": {
"@sigstore/jest": "^0.0.0",
- "@tufjs/repo-mock": "^1.1.0",
+ "@tufjs/repo-mock": "^2.0.0",
"@types/make-fetch-happen": "^10.0.0"
},
"dependencies": {
- "@sigstore/protobuf-specs": "^0.1.0",
- "tuf-js": "^1.1.7"
+ "@sigstore/protobuf-specs": "^0.2.1",
+ "tuf-js": "^2.1.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
}
}
diff --git a/deps/npm/node_modules/@tufjs/canonical-json/package.json b/deps/npm/node_modules/@tufjs/canonical-json/package.json
index 688c9b93c3a4e6..886c0c3969225a 100644
--- a/deps/npm/node_modules/@tufjs/canonical-json/package.json
+++ b/deps/npm/node_modules/@tufjs/canonical-json/package.json
@@ -1,6 +1,6 @@
{
"name": "@tufjs/canonical-json",
- "version": "1.0.0",
+ "version": "2.0.0",
"description": "OLPC JSON canonicalization",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
@@ -19,7 +19,7 @@
"type": "git",
"url": "git+https://github.com/theupdateframework/tuf-js.git"
},
- "homepage": "https://github.com/theupdateframework/tuf-js/packages/canonical-json#readme",
+ "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/canonical-json#readme",
"bugs": {
"url": "https://github.com/theupdateframework/tuf-js/issues"
},
@@ -29,11 +29,7 @@
"scripts": {
"test": "jest"
},
- "devDependencies": {
- "@types/node": "^18.14.1",
- "typescript": "^4.9.5"
- },
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
}
}
diff --git a/deps/npm/node_modules/@tufjs/models/dist/base.js b/deps/npm/node_modules/@tufjs/models/dist/base.js
index d89a089c330922..259f6799c13a0d 100644
--- a/deps/npm/node_modules/@tufjs/models/dist/base.js
+++ b/deps/npm/node_modules/@tufjs/models/dist/base.js
@@ -14,7 +14,7 @@ var MetadataKind;
MetadataKind["Timestamp"] = "timestamp";
MetadataKind["Snapshot"] = "snapshot";
MetadataKind["Targets"] = "targets";
-})(MetadataKind = exports.MetadataKind || (exports.MetadataKind = {}));
+})(MetadataKind || (exports.MetadataKind = MetadataKind = {}));
function isMetadataKind(value) {
return (typeof value === 'string' &&
Object.values(MetadataKind).includes(value));
diff --git a/deps/npm/node_modules/@tufjs/models/package.json b/deps/npm/node_modules/@tufjs/models/package.json
index 6711ee0dababca..60368242ab556a 100644
--- a/deps/npm/node_modules/@tufjs/models/package.json
+++ b/deps/npm/node_modules/@tufjs/models/package.json
@@ -1,6 +1,6 @@
{
"name": "@tufjs/models",
- "version": "1.0.4",
+ "version": "2.0.0",
"description": "TUF metadata models",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -27,15 +27,11 @@
"url": "https://github.com/theupdateframework/tuf-js/issues"
},
"homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme",
- "devDependencies": {
- "@types/node": "^18.16.3",
- "typescript": "^5.0.4"
- },
"dependencies": {
- "@tufjs/canonical-json": "1.0.0",
- "minimatch": "^9.0.0"
+ "@tufjs/canonical-json": "2.0.0",
+ "minimatch": "^9.0.3"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
}
}
diff --git a/deps/npm/node_modules/agentkeepalive/lib/agent.js b/deps/npm/node_modules/agentkeepalive/lib/agent.js
index a7065b5e5d1ad3..8bd354effa05ec 100644
--- a/deps/npm/node_modules/agentkeepalive/lib/agent.js
+++ b/deps/npm/node_modules/agentkeepalive/lib/agent.js
@@ -2,8 +2,7 @@
const OriginalAgent = require('http').Agent;
const ms = require('humanize-ms');
-const debug = require('debug')('agentkeepalive');
-const deprecate = require('depd')('agentkeepalive');
+const debug = require('util').debuglog('agentkeepalive');
const {
INIT_SOCKET,
CURRENT_ID,
@@ -27,6 +26,10 @@ if (majorVersion >= 11 && majorVersion <= 12) {
defaultTimeoutListenerCount = 3;
}
+function deprecate(message) {
+ console.log('[agentkeepalive:deprecated] %s', message);
+}
+
class Agent extends OriginalAgent {
constructor(options) {
options = options || {};
@@ -230,6 +233,7 @@ class Agent extends OriginalAgent {
const newSocket = super.createConnection(options, onNewCreate);
if (newSocket) onNewCreate(null, newSocket);
+ return newSocket;
}
get statusChanged() {
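Aside: the agentkeepalive changes above drop the external debug and depd dependencies in favour of Node built-ins. util.debuglog only prints when NODE_DEBUG names the section, which is why the cov script in the package.json change further down switches from DEBUG= to NODE_DEBUG=. A minimal sketch:

    const debug = require('util').debuglog('agentkeepalive');

    debug('sock[%d] created', 42); // printed only when NODE_DEBUG=agentkeepalive is set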
diff --git a/deps/npm/node_modules/agentkeepalive/lib/https_agent.js b/deps/npm/node_modules/agentkeepalive/lib/https_agent.js
index 73f529d65e7ffe..344fb32cadd862 100644
--- a/deps/npm/node_modules/agentkeepalive/lib/https_agent.js
+++ b/deps/npm/node_modules/agentkeepalive/lib/https_agent.js
@@ -25,8 +25,8 @@ class HttpsAgent extends HttpAgent {
};
}
- createConnection(options) {
- const socket = this[CREATE_HTTPS_CONNECTION](options);
+ createConnection(options, oncreate) {
+ const socket = this[CREATE_HTTPS_CONNECTION](options, oncreate);
this[INIT_SOCKET](socket, options);
return socket;
}
diff --git a/deps/npm/node_modules/agentkeepalive/package.json b/deps/npm/node_modules/agentkeepalive/package.json
index 3115fee69a0416..d8e9aa7160d0b3 100644
--- a/deps/npm/node_modules/agentkeepalive/package.json
+++ b/deps/npm/node_modules/agentkeepalive/package.json
@@ -1,6 +1,6 @@
{
"name": "agentkeepalive",
- "version": "4.3.0",
+ "version": "4.5.0",
"description": "Missing keepalive http.Agent",
"main": "index.js",
"browser": "browser.js",
@@ -14,7 +14,7 @@
"contributor": "git-contributor",
"test": "npm run lint && egg-bin test --full-trace",
"test-local": "egg-bin test --full-trace",
- "cov": "cross-env DEBUG=agentkeepalive egg-bin cov --full-trace",
+ "cov": "cross-env NODE_DEBUG=agentkeepalive egg-bin cov --full-trace",
"ci": "npm run lint && npm run cov",
"lint": "eslint lib test index.js"
},
@@ -35,8 +35,6 @@
"HttpsAgent"
],
"dependencies": {
- "debug": "^4.1.0",
- "depd": "^2.0.0",
"humanize-ms": "^1.2.1"
},
"devDependencies": {
diff --git a/deps/npm/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/cacache/lib/memoization.js
index 0ff604a479c9c1..2ecc60912e4563 100644
--- a/deps/npm/node_modules/cacache/lib/memoization.js
+++ b/deps/npm/node_modules/cacache/lib/memoization.js
@@ -1,8 +1,8 @@
'use strict'
-const LRU = require('lru-cache')
+const { LRUCache } = require('lru-cache')
-const MEMOIZED = new LRU({
+const MEMOIZED = new LRUCache({
max: 500,
maxSize: 50 * 1024 * 1024, // 50MB
ttl: 3 * 60 * 1000, // 3 minutes
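Aside: as the change above shows, the lru-cache class is now accessed as a named export. A minimal sketch of the same cache shape; the sizeCalculation callback and entry shape are illustrative, not cacache's actual ones:

    const { LRUCache } = require('lru-cache');

    const cache = new LRUCache({
      max: 500,
      maxSize: 50 * 1024 * 1024,   // 50MB
      ttl: 3 * 60 * 1000,          // 3 minutes
      sizeCalculation: (entry) => entry.size || 1,
    });

    cache.set('some-key', { data: Buffer.from('blob'), size: 4 });
    console.log(cache.get('some-key'));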
diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json
index a6f6f9bdfc4654..1b14bf4bd14904 100644
--- a/deps/npm/node_modules/cacache/package.json
+++ b/deps/npm/node_modules/cacache/package.json
@@ -1,6 +1,6 @@
{
"name": "cacache",
- "version": "17.1.3",
+ "version": "18.0.0",
"cache-version": {
"content": "2",
"index": "5"
@@ -48,8 +48,8 @@
"@npmcli/fs": "^3.1.0",
"fs-minipass": "^3.0.0",
"glob": "^10.2.2",
- "lru-cache": "^7.7.1",
- "minipass": "^5.0.0",
+ "lru-cache": "^10.0.1",
+ "minipass": "^7.0.3",
"minipass-collect": "^1.0.2",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
@@ -60,17 +60,23 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.15.1",
+ "@npmcli/template-oss": "4.18.0",
"tap": "^16.0.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"windowsCI": false,
- "version": "4.15.1",
- "publish": "true"
+ "version": "4.18.0",
+ "publish": "true",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"author": "GitHub Inc.",
"tap": {
diff --git a/deps/npm/node_modules/depd/History.md b/deps/npm/node_modules/depd/History.md
deleted file mode 100644
index cd9ebaaa9963f7..00000000000000
--- a/deps/npm/node_modules/depd/History.md
+++ /dev/null
@@ -1,103 +0,0 @@
-2.0.0 / 2018-10-26
-==================
-
- * Drop support for Node.js 0.6
- * Replace internal `eval` usage with `Function` constructor
- * Use instance methods on `process` to check for listeners
-
-1.1.2 / 2018-01-11
-==================
-
- * perf: remove argument reassignment
- * Support Node.js 0.6 to 9.x
-
-1.1.1 / 2017-07-27
-==================
-
- * Remove unnecessary `Buffer` loading
- * Support Node.js 0.6 to 8.x
-
-1.1.0 / 2015-09-14
-==================
-
- * Enable strict mode in more places
- * Support io.js 3.x
- * Support io.js 2.x
- * Support web browser loading
- - Requires bundler like Browserify or webpack
-
-1.0.1 / 2015-04-07
-==================
-
- * Fix `TypeError`s when under `'use strict'` code
- * Fix useless type name on auto-generated messages
- * Support io.js 1.x
- * Support Node.js 0.12
-
-1.0.0 / 2014-09-17
-==================
-
- * No changes
-
-0.4.5 / 2014-09-09
-==================
-
- * Improve call speed to functions using the function wrapper
- * Support Node.js 0.6
-
-0.4.4 / 2014-07-27
-==================
-
- * Work-around v8 generating empty stack traces
-
-0.4.3 / 2014-07-26
-==================
-
- * Fix exception when global `Error.stackTraceLimit` is too low
-
-0.4.2 / 2014-07-19
-==================
-
- * Correct call site for wrapped functions and properties
-
-0.4.1 / 2014-07-19
-==================
-
- * Improve automatic message generation for function properties
-
-0.4.0 / 2014-07-19
-==================
-
- * Add `TRACE_DEPRECATION` environment variable
- * Remove non-standard grey color from color output
- * Support `--no-deprecation` argument
- * Support `--trace-deprecation` argument
- * Support `deprecate.property(fn, prop, message)`
-
-0.3.0 / 2014-06-16
-==================
-
- * Add `NO_DEPRECATION` environment variable
-
-0.2.0 / 2014-06-15
-==================
-
- * Add `deprecate.property(obj, prop, message)`
- * Remove `supports-color` dependency for node.js 0.8
-
-0.1.0 / 2014-06-15
-==================
-
- * Add `deprecate.function(fn, message)`
- * Add `process.on('deprecation', fn)` emitter
- * Automatically generate message when omitted from `deprecate()`
-
-0.0.1 / 2014-06-15
-==================
-
- * Fix warning for dynamic calls at singe call site
-
-0.0.0 / 2014-06-15
-==================
-
- * Initial implementation
diff --git a/deps/npm/node_modules/depd/index.js b/deps/npm/node_modules/depd/index.js
deleted file mode 100644
index 1bf2fcfdeffc98..00000000000000
--- a/deps/npm/node_modules/depd/index.js
+++ /dev/null
@@ -1,538 +0,0 @@
-/*!
- * depd
- * Copyright(c) 2014-2018 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-/**
- * Module dependencies.
- */
-
-var relative = require('path').relative
-
-/**
- * Module exports.
- */
-
-module.exports = depd
-
-/**
- * Get the path to base files on.
- */
-
-var basePath = process.cwd()
-
-/**
- * Determine if namespace is contained in the string.
- */
-
-function containsNamespace (str, namespace) {
- var vals = str.split(/[ ,]+/)
- var ns = String(namespace).toLowerCase()
-
- for (var i = 0; i < vals.length; i++) {
- var val = vals[i]
-
- // namespace contained
- if (val && (val === '*' || val.toLowerCase() === ns)) {
- return true
- }
- }
-
- return false
-}
-
-/**
- * Convert a data descriptor to accessor descriptor.
- */
-
-function convertDataDescriptorToAccessor (obj, prop, message) {
- var descriptor = Object.getOwnPropertyDescriptor(obj, prop)
- var value = descriptor.value
-
- descriptor.get = function getter () { return value }
-
- if (descriptor.writable) {
- descriptor.set = function setter (val) { return (value = val) }
- }
-
- delete descriptor.value
- delete descriptor.writable
-
- Object.defineProperty(obj, prop, descriptor)
-
- return descriptor
-}
-
-/**
- * Create arguments string to keep arity.
- */
-
-function createArgumentsString (arity) {
- var str = ''
-
- for (var i = 0; i < arity; i++) {
- str += ', arg' + i
- }
-
- return str.substr(2)
-}
-
-/**
- * Create stack string from stack.
- */
-
-function createStackString (stack) {
- var str = this.name + ': ' + this.namespace
-
- if (this.message) {
- str += ' deprecated ' + this.message
- }
-
- for (var i = 0; i < stack.length; i++) {
- str += '\n at ' + stack[i].toString()
- }
-
- return str
-}
-
-/**
- * Create deprecate for namespace in caller.
- */
-
-function depd (namespace) {
- if (!namespace) {
- throw new TypeError('argument namespace is required')
- }
-
- var stack = getStack()
- var site = callSiteLocation(stack[1])
- var file = site[0]
-
- function deprecate (message) {
- // call to self as log
- log.call(deprecate, message)
- }
-
- deprecate._file = file
- deprecate._ignored = isignored(namespace)
- deprecate._namespace = namespace
- deprecate._traced = istraced(namespace)
- deprecate._warned = Object.create(null)
-
- deprecate.function = wrapfunction
- deprecate.property = wrapproperty
-
- return deprecate
-}
-
-/**
- * Determine if event emitter has listeners of a given type.
- *
- * The way to do this check is done three different ways in Node.js >= 0.8
- * so this consolidates them into a minimal set using instance methods.
- *
- * @param {EventEmitter} emitter
- * @param {string} type
- * @returns {boolean}
- * @private
- */
-
-function eehaslisteners (emitter, type) {
- var count = typeof emitter.listenerCount !== 'function'
- ? emitter.listeners(type).length
- : emitter.listenerCount(type)
-
- return count > 0
-}
-
-/**
- * Determine if namespace is ignored.
- */
-
-function isignored (namespace) {
- if (process.noDeprecation) {
- // --no-deprecation support
- return true
- }
-
- var str = process.env.NO_DEPRECATION || ''
-
- // namespace ignored
- return containsNamespace(str, namespace)
-}
-
-/**
- * Determine if namespace is traced.
- */
-
-function istraced (namespace) {
- if (process.traceDeprecation) {
- // --trace-deprecation support
- return true
- }
-
- var str = process.env.TRACE_DEPRECATION || ''
-
- // namespace traced
- return containsNamespace(str, namespace)
-}
-
-/**
- * Display deprecation message.
- */
-
-function log (message, site) {
- var haslisteners = eehaslisteners(process, 'deprecation')
-
- // abort early if no destination
- if (!haslisteners && this._ignored) {
- return
- }
-
- var caller
- var callFile
- var callSite
- var depSite
- var i = 0
- var seen = false
- var stack = getStack()
- var file = this._file
-
- if (site) {
- // provided site
- depSite = site
- callSite = callSiteLocation(stack[1])
- callSite.name = depSite.name
- file = callSite[0]
- } else {
- // get call site
- i = 2
- depSite = callSiteLocation(stack[i])
- callSite = depSite
- }
-
- // get caller of deprecated thing in relation to file
- for (; i < stack.length; i++) {
- caller = callSiteLocation(stack[i])
- callFile = caller[0]
-
- if (callFile === file) {
- seen = true
- } else if (callFile === this._file) {
- file = this._file
- } else if (seen) {
- break
- }
- }
-
- var key = caller
- ? depSite.join(':') + '__' + caller.join(':')
- : undefined
-
- if (key !== undefined && key in this._warned) {
- // already warned
- return
- }
-
- this._warned[key] = true
-
- // generate automatic message from call site
- var msg = message
- if (!msg) {
- msg = callSite === depSite || !callSite.name
- ? defaultMessage(depSite)
- : defaultMessage(callSite)
- }
-
- // emit deprecation if listeners exist
- if (haslisteners) {
- var err = DeprecationError(this._namespace, msg, stack.slice(i))
- process.emit('deprecation', err)
- return
- }
-
- // format and write message
- var format = process.stderr.isTTY
- ? formatColor
- : formatPlain
- var output = format.call(this, msg, caller, stack.slice(i))
- process.stderr.write(output + '\n', 'utf8')
-}
-
-/**
- * Get call site location as array.
- */
-
-function callSiteLocation (callSite) {
- var file = callSite.getFileName() || ''
- var line = callSite.getLineNumber()
- var colm = callSite.getColumnNumber()
-
- if (callSite.isEval()) {
- file = callSite.getEvalOrigin() + ', ' + file
- }
-
- var site = [file, line, colm]
-
- site.callSite = callSite
- site.name = callSite.getFunctionName()
-
- return site
-}
-
-/**
- * Generate a default message from the site.
- */
-
-function defaultMessage (site) {
- var callSite = site.callSite
- var funcName = site.name
-
- // make useful anonymous name
- if (!funcName) {
- funcName = ''
- }
-
- var context = callSite.getThis()
- var typeName = context && callSite.getTypeName()
-
- // ignore useless type name
- if (typeName === 'Object') {
- typeName = undefined
- }
-
- // make useful type name
- if (typeName === 'Function') {
- typeName = context.name || typeName
- }
-
- return typeName && callSite.getMethodName()
- ? typeName + '.' + funcName
- : funcName
-}
-
-/**
- * Format deprecation message without color.
- */
-
-function formatPlain (msg, caller, stack) {
- var timestamp = new Date().toUTCString()
-
- var formatted = timestamp +
- ' ' + this._namespace +
- ' deprecated ' + msg
-
- // add stack trace
- if (this._traced) {
- for (var i = 0; i < stack.length; i++) {
- formatted += '\n at ' + stack[i].toString()
- }
-
- return formatted
- }
-
- if (caller) {
- formatted += ' at ' + formatLocation(caller)
- }
-
- return formatted
-}
-
-/**
- * Format deprecation message with color.
- */
-
-function formatColor (msg, caller, stack) {
- var formatted = '\x1b[36;1m' + this._namespace + '\x1b[22;39m' + // bold cyan
- ' \x1b[33;1mdeprecated\x1b[22;39m' + // bold yellow
- ' \x1b[0m' + msg + '\x1b[39m' // reset
-
- // add stack trace
- if (this._traced) {
- for (var i = 0; i < stack.length; i++) {
- formatted += '\n \x1b[36mat ' + stack[i].toString() + '\x1b[39m' // cyan
- }
-
- return formatted
- }
-
- if (caller) {
- formatted += ' \x1b[36m' + formatLocation(caller) + '\x1b[39m' // cyan
- }
-
- return formatted
-}
-
-/**
- * Format call site location.
- */
-
-function formatLocation (callSite) {
- return relative(basePath, callSite[0]) +
- ':' + callSite[1] +
- ':' + callSite[2]
-}
-
-/**
- * Get the stack as array of call sites.
- */
-
-function getStack () {
- var limit = Error.stackTraceLimit
- var obj = {}
- var prep = Error.prepareStackTrace
-
- Error.prepareStackTrace = prepareObjectStackTrace
- Error.stackTraceLimit = Math.max(10, limit)
-
- // capture the stack
- Error.captureStackTrace(obj)
-
- // slice this function off the top
- var stack = obj.stack.slice(1)
-
- Error.prepareStackTrace = prep
- Error.stackTraceLimit = limit
-
- return stack
-}
-
-/**
- * Capture call site stack from v8.
- */
-
-function prepareObjectStackTrace (obj, stack) {
- return stack
-}
-
-/**
- * Return a wrapped function in a deprecation message.
- */
-
-function wrapfunction (fn, message) {
- if (typeof fn !== 'function') {
- throw new TypeError('argument fn must be a function')
- }
-
- var args = createArgumentsString(fn.length)
- var stack = getStack()
- var site = callSiteLocation(stack[1])
-
- site.name = fn.name
-
- // eslint-disable-next-line no-new-func
- var deprecatedfn = new Function('fn', 'log', 'deprecate', 'message', 'site',
- '"use strict"\n' +
- 'return function (' + args + ') {' +
- 'log.call(deprecate, message, site)\n' +
- 'return fn.apply(this, arguments)\n' +
- '}')(fn, log, this, message, site)
-
- return deprecatedfn
-}
-
-/**
- * Wrap property in a deprecation message.
- */
-
-function wrapproperty (obj, prop, message) {
- if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) {
- throw new TypeError('argument obj must be object')
- }
-
- var descriptor = Object.getOwnPropertyDescriptor(obj, prop)
-
- if (!descriptor) {
- throw new TypeError('must call property on owner object')
- }
-
- if (!descriptor.configurable) {
- throw new TypeError('property must be configurable')
- }
-
- var deprecate = this
- var stack = getStack()
- var site = callSiteLocation(stack[1])
-
- // set site name
- site.name = prop
-
- // convert data descriptor
- if ('value' in descriptor) {
- descriptor = convertDataDescriptorToAccessor(obj, prop, message)
- }
-
- var get = descriptor.get
- var set = descriptor.set
-
- // wrap getter
- if (typeof get === 'function') {
- descriptor.get = function getter () {
- log.call(deprecate, message, site)
- return get.apply(this, arguments)
- }
- }
-
- // wrap setter
- if (typeof set === 'function') {
- descriptor.set = function setter () {
- log.call(deprecate, message, site)
- return set.apply(this, arguments)
- }
- }
-
- Object.defineProperty(obj, prop, descriptor)
-}
-
-/**
- * Create DeprecationError for deprecation
- */
-
-function DeprecationError (namespace, message, stack) {
- var error = new Error()
- var stackString
-
- Object.defineProperty(error, 'constructor', {
- value: DeprecationError
- })
-
- Object.defineProperty(error, 'message', {
- configurable: true,
- enumerable: false,
- value: message,
- writable: true
- })
-
- Object.defineProperty(error, 'name', {
- enumerable: false,
- configurable: true,
- value: 'DeprecationError',
- writable: true
- })
-
- Object.defineProperty(error, 'namespace', {
- configurable: true,
- enumerable: false,
- value: namespace,
- writable: true
- })
-
- Object.defineProperty(error, 'stack', {
- configurable: true,
- enumerable: false,
- get: function () {
- if (stackString !== undefined) {
- return stackString
- }
-
- // prepare stack trace
- return (stackString = createStackString.call(this, stack))
- },
- set: function setter (val) {
- stackString = val
- }
- })
-
- return error
-}
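
The `index.js` removed above is the entire runtime of depd: a `depd(namespace)` factory whose returned `deprecate` function writes namespaced warnings, `deprecate.function()` / `deprecate.property()` wrappers, per-call-site deduplication, and output control via `NO_DEPRECATION`, `TRACE_DEPRECATION`, and a `process.on('deprecation')` listener. The following is a minimal usage sketch of that removed API, added here purely for orientation; the module name and messages are illustrative and not part of this diff.

```js
// Minimal sketch of the depd API implemented by the deleted index.js above.
const depd = require('depd')
const deprecate = depd('my-module')     // namespace shown in every warning

// If a 'deprecation' listener is attached, warnings are emitted as
// DeprecationError objects instead of being written to stderr.
process.on('deprecation', err => console.error(err.namespace, err.message))

// Direct warning, reported once per calling site.
deprecate('oldHelper() is going away')

// Wrap a function: warns on use, then forwards to the original.
function oldHelper () { return 'ok' }
exports.oldHelper = deprecate.function(oldHelper, 'use newHelper() instead')

// Wrap a property: warns when its getter or setter is used.
exports.settings = { legacyFlag: true }
deprecate.property(exports.settings, 'legacyFlag', 'legacyFlag is deprecated')
```
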
diff --git a/deps/npm/node_modules/depd/lib/browser/index.js b/deps/npm/node_modules/depd/lib/browser/index.js
deleted file mode 100644
index 6be45cc20b33f2..00000000000000
--- a/deps/npm/node_modules/depd/lib/browser/index.js
+++ /dev/null
@@ -1,77 +0,0 @@
-/*!
- * depd
- * Copyright(c) 2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict'
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = depd
-
-/**
- * Create deprecate for namespace in caller.
- */
-
-function depd (namespace) {
- if (!namespace) {
- throw new TypeError('argument namespace is required')
- }
-
- function deprecate (message) {
- // no-op in browser
- }
-
- deprecate._file = undefined
- deprecate._ignored = true
- deprecate._namespace = namespace
- deprecate._traced = false
- deprecate._warned = Object.create(null)
-
- deprecate.function = wrapfunction
- deprecate.property = wrapproperty
-
- return deprecate
-}
-
-/**
- * Return a wrapped function in a deprecation message.
- *
- * This is a no-op version of the wrapper, which does nothing but call
- * validation.
- */
-
-function wrapfunction (fn, message) {
- if (typeof fn !== 'function') {
- throw new TypeError('argument fn must be a function')
- }
-
- return fn
-}
-
-/**
- * Wrap property in a deprecation message.
- *
- * This is a no-op version of the wrapper, which does nothing but call
- * validation.
- */
-
-function wrapproperty (obj, prop, message) {
- if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) {
- throw new TypeError('argument obj must be object')
- }
-
- var descriptor = Object.getOwnPropertyDescriptor(obj, prop)
-
- if (!descriptor) {
- throw new TypeError('must call property on owner object')
- }
-
- if (!descriptor.configurable) {
- throw new TypeError('property must be configurable')
- }
-}
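
The browser build deleted above keeps depd's call signatures but strips all logging: `deprecate()` is a no-op, `deprecate.function()` validates and returns the original function unchanged, and `deprecate.property()` only validates the property descriptor. A small sketch of that behavior, assuming a bundler resolves `require('depd')` to this file via the `browser` field in the package.json shown just below:

```js
// Sketch: behavior of the deleted browser shim when bundled for the web.
const depd = require('depd')            // resolves to lib/browser/index.js in browser bundles
const deprecate = depd('my-module')

deprecate('prints nothing: the browser build is silent')

const wrapped = deprecate.function(function legacy () { return 42 }, 'use modern()')
console.log(wrapped())                  // 42 -- the original function comes back unwrapped

const settings = { legacyFlag: true }
deprecate.property(settings, 'legacyFlag', 'deprecated')   // validates only; adds no warning
```
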
diff --git a/deps/npm/node_modules/depd/package.json b/deps/npm/node_modules/depd/package.json
deleted file mode 100644
index 3857e199184a0a..00000000000000
--- a/deps/npm/node_modules/depd/package.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "name": "depd",
- "description": "Deprecate all the things",
- "version": "2.0.0",
- "author": "Douglas Christopher Wilson ",
- "license": "MIT",
- "keywords": [
- "deprecate",
- "deprecated"
- ],
- "repository": "dougwilson/nodejs-depd",
- "browser": "lib/browser/index.js",
- "devDependencies": {
- "benchmark": "2.1.4",
- "beautify-benchmark": "0.2.4",
- "eslint": "5.7.0",
- "eslint-config-standard": "12.0.0",
- "eslint-plugin-import": "2.14.0",
- "eslint-plugin-markdown": "1.0.0-beta.7",
- "eslint-plugin-node": "7.0.1",
- "eslint-plugin-promise": "4.0.1",
- "eslint-plugin-standard": "4.0.0",
- "istanbul": "0.4.5",
- "mocha": "5.2.0",
- "safe-buffer": "5.1.2",
- "uid-safe": "2.1.5"
- },
- "files": [
- "lib/",
- "History.md",
- "LICENSE",
- "index.js",
- "Readme.md"
- ],
- "engines": {
- "node": ">= 0.8"
- },
- "scripts": {
- "bench": "node benchmark/index.js",
- "lint": "eslint --plugin markdown --ext js,md .",
- "test": "mocha --reporter spec --bail test/",
- "test-ci": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter spec test/ && istanbul report lcovonly text-summary",
- "test-cov": "istanbul cover --print=none node_modules/mocha/bin/_mocha -- --reporter dot test/ && istanbul report lcov text-summary"
- }
-}
diff --git a/deps/npm/node_modules/fs-minipass/package.json b/deps/npm/node_modules/fs-minipass/package.json
index 3d1fa3dbc11e46..e501e6474294d8 100644
--- a/deps/npm/node_modules/fs-minipass/package.json
+++ b/deps/npm/node_modules/fs-minipass/package.json
@@ -1,6 +1,6 @@
{
"name": "fs-minipass",
- "version": "3.0.2",
+ "version": "3.0.3",
"main": "lib/index.js",
"scripts": {
"test": "tap",
@@ -24,11 +24,11 @@
"homepage": "https://github.com/npm/fs-minipass#readme",
"description": "fs read and write streams based on minipass",
"dependencies": {
- "minipass": "^5.0.0"
+ "minipass": "^7.0.3"
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.1",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"mutate-fs": "^2.1.1",
"tap": "^16.3.2"
},
@@ -48,7 +48,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"publish": "true"
}
}
diff --git a/deps/npm/node_modules/glob/README.md b/deps/npm/node_modules/glob/README.md
index 892013baae771c..1bde1494664d4d 100644
--- a/deps/npm/node_modules/glob/README.md
+++ b/deps/npm/node_modules/glob/README.md
@@ -55,7 +55,7 @@ const filesStream = globStream(['**/*.dat', 'logs/**/*.log'])
// construct a Glob object if you wanna do it that way, which
// allows for much faster walks if you have to look in the same
// folder multiple times.
-const g = new Glob('**/foo')
+const g = new Glob('**/foo', {})
// glob objects are async iterators, can also do globIterate() or
// g.iterate(), same deal
for await (const file of g) {
@@ -358,6 +358,8 @@ An object that can perform glob pattern traversals.
### `const g = new Glob(pattern: string | string[], options: GlobOptions)`
+Options object is required.
+
See full options descriptions below.
Note that a previous `Glob` object can be passed as the
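
The README addition above records the accompanying API change in the glob 10.3.3 vendored here: the second argument to `new Glob()` is now mandatory, even if it is an empty object (the runtime guard appears in the `glob.js` hunk further down, which throws `TypeError('glob options required')`). A hedged sketch of what this means for callers; the pattern is illustrative and the snippet assumes it runs as an ES module:

```js
// Sketch only: the options object is now required by the Glob constructor.
import { Glob } from 'glob'

const g = new Glob('**/*.md', {})   // ok: explicit (empty) options object
for await (const file of g) {       // Glob instances are async iterators
  console.log(file)
}

// new Glob('**/*.md')              // would now throw: TypeError('glob options required')
```
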
diff --git a/deps/npm/node_modules/glob/dist/cjs/package.json b/deps/npm/node_modules/glob/dist/cjs/package.json
index 44b67c307f1c85..c15df94a3582bf 100644
--- a/deps/npm/node_modules/glob/dist/cjs/package.json
+++ b/deps/npm/node_modules/glob/dist/cjs/package.json
@@ -1,4 +1,4 @@
{
- "version": "10.2.7",
+ "version": "10.3.3",
"type": "commonjs"
}
diff --git a/deps/npm/node_modules/glob/dist/cjs/src/bin.js b/deps/npm/node_modules/glob/dist/cjs/src/bin.js
index 733358c7365be8..4a8a88f2734d2e 100755
--- a/deps/npm/node_modules/glob/dist/cjs/src/bin.js
+++ b/deps/npm/node_modules/glob/dist/cjs/src/bin.js
@@ -4,10 +4,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
const foreground_child_1 = require("foreground-child");
const fs_1 = require("fs");
const jackspeak_1 = require("jackspeak");
-const index_js_1 = require("./index.js");
const package_json_1 = require("../package.json");
+const index_js_1 = require("./index.js");
const j = (0, jackspeak_1.jack)({
- usage: 'glob [options] [<pattern> [<pattern> ...]]'
+ usage: 'glob [options] [<pattern> [<pattern> ...]]',
})
.description(`
Glob v${package_json_1.version}
@@ -22,6 +22,14 @@ const j = (0, jackspeak_1.jack)({
description: `Run the command provided, passing the glob expression
matches as arguments.`,
},
+})
+ .opt({
+ default: {
+ short: 'p',
+ hint: 'pattern',
+ description: `If no positional arguments are provided, glob will use
+ this pattern`,
+ },
})
.flag({
all: {
@@ -214,8 +222,10 @@ try {
console.log(j.usage());
process.exit(0);
}
- if (positionals.length === 0)
+ if (positionals.length === 0 && !values.default)
throw 'No patterns provided';
+ if (positionals.length === 0 && values.default)
+ positionals.push(values.default);
const patterns = values.all
? positionals
: positionals.filter(p => !(0, fs_1.existsSync)(p));
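
The `bin.js` hunk above adds a `default` option (short flag `p`, hint `pattern`) and a fallback: when no positional patterns are supplied, the value of that option is pushed onto `positionals`. A minimal sketch of that fallback, where `values` stands in for the parsed CLI options and the pattern string is purely illustrative (not part of the diff):

```js
// Mirrors the fallback added to bin.js above.
const values = { default: 'src/**/*.js' }   // e.g. invoked with -p 'src/**/*.js'
const positionals = []                       // no positional patterns were given

if (positionals.length === 0 && !values.default)
  throw 'No patterns provided'               // the CLI throws a bare string; its catch block prints it
if (positionals.length === 0 && values.default)
  positionals.push(values.default)

console.log(positionals)                     // ['src/**/*.js']
```
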
diff --git a/deps/npm/node_modules/glob/dist/cjs/src/bin.js.map b/deps/npm/node_modules/glob/dist/cjs/src/bin.js.map
index abd2aa47d82d3c..e189acfd01b1a7 100644
--- a/deps/npm/node_modules/glob/dist/cjs/src/bin.js.map
+++ b/deps/npm/node_modules/glob/dist/cjs/src/bin.js.map
@@ -1 +1 @@
-{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AACA,uDAAkD;AAClD,2BAA+B;AAC/B,yCAAgC;AAChC,yCAAuC;AACvC,kDAAyC;AAEzC,MAAM,CAAC,GAAG,IAAA,gBAAI,EAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,sBAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;sCACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QACjE,QAAQ,EAAE,CAAC,CAAC,EAAE,CACZ,IAAI,GAAG,CAAC;YACN,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;KACZ;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;CACF,CAAC;KACD,IAAI,CAAC;IACJ,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI;IACF,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC;QAAE,MAAM,sBAAsB,CAAA;IAC1D,MAAM,QAAQ,GAAG,MAAM,CAAC,GAAG;QACzB,CAAC,CAAC,WAAW;QACb,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IAC3C,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IACxE,MAAM,MAAM,GAAG,IAAA,qBAAU,EAAC,QAAQ,EAAE;QAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,WAAW,EAAE,MAAM,CAAC,cAAc,CAAC;QACnC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,SAAS,EAAE,MAAM,CAAC,YAAY,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC,WAAW,CAAC;QAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,QAAQ,EAAE,MAAM,CAAC,QAAuC;QACxD,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;KACpB,CAAC,CAAA;IAEF,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAA;IACtB,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,
CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;KACvC;SAAM;QACL,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACvC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,IAAA,kCAAe,EAAC,GAAG,EAAE,OAAO,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;KACvE;CACF;AAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;CAChB","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { globStream } from './index.js'\nimport { version } from '../package.json'\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]'\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .flag({\n all: {\n short: 'A',\n description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC maths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. 
That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character. If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validate: v =>\n new Set([\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ]).has(v),\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n })\n .flag({\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n if (values.help) {\n console.log(j.usage())\n process.exit(0)\n }\n if (positionals.length === 0) throw 'No patterns provided'\n const patterns = values.all\n ? positionals\n : positionals.filter(p => !existsSync(p))\n const matches = values.all ? 
[] : positionals.filter(p => existsSync(p))\n const stream = globStream(patterns, {\n absolute: values.absolute,\n cwd: values.cwd,\n dot: values.dot,\n dotRelative: values['dot-relative'],\n follow: values.follow,\n ignore: values.ignore,\n mark: values.mark,\n matchBase: values['match-base'],\n maxDepth: values['max-depth'],\n nobrace: values.nobrace,\n nocase: values.nocase,\n nodir: values.nodir,\n noext: values.noext,\n noglobstar: values.noglobstar,\n platform: values.platform as undefined | NodeJS.Platform,\n realpath: values.realpath,\n root: values.root,\n stat: values.stat,\n debug: values.debug,\n posix: values.posix,\n })\n\n const cmd = values.cmd\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n stream.on('data', f => matches.push(f))\n stream.on('end', () => foregroundChild(cmd, matches, { shell: true }))\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? e.message : String(e))\n process.exit(1)\n}\n"]}
\ No newline at end of file
+{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AACA,uDAAkD;AAClD,2BAA+B;AAC/B,yCAAgC;AAChC,kDAAyC;AACzC,yCAAuC;AAEvC,MAAM,CAAC,GAAG,IAAA,gBAAI,EAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,sBAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,GAAG,CAAC;IACH,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;iCACc;KAC5B;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;sCACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QACjE,QAAQ,EAAE,CAAC,CAAC,EAAE,CACZ,IAAI,GAAG,CAAC;YACN,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;KACZ;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;CACF,CAAC;KACD,IAAI,CAAC;IACJ,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI;IACF,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;QAC7C,MAAM,sBAAsB,CAAA;IAC9B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO;QAC5C,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;IAClC,MAAM,QAAQ,GAAG,MAAM,CAAC,GAAG;QACzB,CAAC,CAAC,WAAW;QACb,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IAC3C,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IACxE,MAAM,MAAM,GAAG,IAAA,qBAAU,EAAC,QAAQ,EAAE;QAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,WAAW,EAAE,MAAM,CAAC,cAAc,CAAC;QACnC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,SAAS,EAAE,MAAM,CAAC,YAAY,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC,WAAW,CAAC;QAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,QAAQ,EAAE,MAAM,CAAC,QAAuC;QACxD,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK,EAAE,MAAM,CAAC
,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;KACpB,CAAC,CAAA;IAEF,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAA;IACtB,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;KACvC;SAAM;QACL,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACvC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,IAAA,kCAAe,EAAC,GAAG,EAAE,OAAO,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;KACvE;CACF;AAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;CAChB","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { version } from '../package.json'\nimport { globStream } from './index.js'\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]',\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .opt({\n default: {\n short: 'p',\n hint: 'pattern',\n description: `If no positional arguments are provided, glob will use\n this pattern`,\n },\n })\n .flag({\n all: {\n short: 'A',\n description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC maths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. 
This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character. If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. 
Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validate: v =>\n new Set([\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ]).has(v),\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n })\n .flag({\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n if (values.help) {\n console.log(j.usage())\n process.exit(0)\n }\n if (positionals.length === 0 && !values.default)\n throw 'No patterns provided'\n if (positionals.length === 0 && values.default)\n positionals.push(values.default)\n const patterns = values.all\n ? positionals\n : positionals.filter(p => !existsSync(p))\n const matches = values.all ? [] : positionals.filter(p => existsSync(p))\n const stream = globStream(patterns, {\n absolute: values.absolute,\n cwd: values.cwd,\n dot: values.dot,\n dotRelative: values['dot-relative'],\n follow: values.follow,\n ignore: values.ignore,\n mark: values.mark,\n matchBase: values['match-base'],\n maxDepth: values['max-depth'],\n nobrace: values.nobrace,\n nocase: values.nocase,\n nodir: values.nodir,\n noext: values.noext,\n noglobstar: values.noglobstar,\n platform: values.platform as undefined | NodeJS.Platform,\n realpath: values.realpath,\n root: values.root,\n stat: values.stat,\n debug: values.debug,\n posix: values.posix,\n })\n\n const cmd = values.cmd\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n stream.on('data', f => matches.push(f))\n stream.on('end', () => foregroundChild(cmd, matches, { shell: true }))\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? e.message : String(e))\n process.exit(1)\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/cjs/src/glob.d.ts.map b/deps/npm/node_modules/glob/dist/cjs/src/glob.d.ts.map
index b0ea3b71e222ad..6353d8b3c47126 100644
--- a/deps/npm/node_modules/glob/dist/cjs/src/glob.d.ts.map
+++ b/deps/npm/node_modules/glob/dist/cjs/src/glob.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAqHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC
,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"}
\ No newline at end of file
+{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAwHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC
,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/cjs/src/glob.js b/deps/npm/node_modules/glob/dist/cjs/src/glob.js
index e7ad4deb980d30..eb37c6b9a6601e 100644
--- a/deps/npm/node_modules/glob/dist/cjs/src/glob.js
+++ b/deps/npm/node_modules/glob/dist/cjs/src/glob.js
@@ -62,6 +62,10 @@ class Glob {
* again.
*/
constructor(pattern, opts) {
+ /* c8 ignore start */
+ if (!opts)
+ throw new TypeError('glob options required');
+ /* c8 ignore stop */
this.withFileTypes = !!opts.withFileTypes;
this.signal = opts.signal;
this.follow = !!opts.follow;
diff --git a/deps/npm/node_modules/glob/dist/cjs/src/glob.js.map b/deps/npm/node_modules/glob/dist/cjs/src/glob.js.map
index bf6fb4d0f0b724..7a7a9b28627480 100644
--- a/deps/npm/node_modules/glob/dist/cjs/src/glob.js.map
+++ b/deps/npm/node_modules/glob/dist/cjs/src/glob.js.map
@@ -1 +1 @@
-{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";;;AAAA,yCAAuD;AAEvD,6CAOoB;AACpB,6BAAmC;AAEnC,6CAAsC;AACtC,2CAAoD;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAa,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,IAAA,mBAAa,EAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,6BAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,8BAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,6BAAe;wBACjB,CAAC,CAAC,wBAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE
,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,qBAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,oBAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF;AAlQD,oBAkQC","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { 
GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. 
Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. 
If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? 
false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]}
\ No newline at end of file
+{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";;;AAAA,yCAAuD;AAEvD,6CAOoB;AACpB,6BAAmC;AAEnC,6CAAsC;AACtC,2CAAoD;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAa,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,IAAA,mBAAa,EAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,6BAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,8BAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,6BAAe;wBACjB,CAAC,CAAC
,wBAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,qBAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,oBAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF;AArQD,oBAqQC","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { 
IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/mjs/glob.d.ts.map b/deps/npm/node_modules/glob/dist/mjs/glob.d.ts.map
index b06e4633443c87..d45258ac24a580 100644
--- a/deps/npm/node_modules/glob/dist/mjs/glob.d.ts.map
+++ b/deps/npm/node_modules/glob/dist/mjs/glob.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAqHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MA
AM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"}
\ No newline at end of file
+{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAwHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MA
AM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/mjs/glob.js b/deps/npm/node_modules/glob/dist/mjs/glob.js
index f158065746e586..8ff26154427be9 100644
--- a/deps/npm/node_modules/glob/dist/mjs/glob.js
+++ b/deps/npm/node_modules/glob/dist/mjs/glob.js
@@ -59,6 +59,10 @@ export class Glob {
* again.
*/
constructor(pattern, opts) {
+ /* c8 ignore start */
+ if (!opts)
+ throw new TypeError('glob options required');
+ /* c8 ignore stop */
this.withFileTypes = !!opts.withFileTypes;
this.signal = opts.signal;
this.follow = !!opts.follow;
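(Hedged sketch, assuming the public `glob` package API; this illustrates the effect of the guard added to the Glob constructor in the hunk above, where a missing options argument now fails fast with a clear message instead of a property access on undefined.)

    import { Glob } from 'glob'

    // ok: an explicit (possibly empty) options object is provided
    const g = new Glob('**/*.js', {})

    // with the guard above, this now throws TypeError('glob options required')
    // rather than "Cannot read properties of undefined"
    new Glob('**/*.js')
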
diff --git a/deps/npm/node_modules/glob/dist/mjs/glob.js.map b/deps/npm/node_modules/glob/dist/mjs/glob.js.map
index 93eb61df16f5ca..94558c1d2c66a4 100644
--- a/deps/npm/node_modules/glob/dist/mjs/glob.js.map
+++ b/deps/npm/node_modules/glob/dist/mjs/glob.js.map
@@ -1 +1 @@
-{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AAEvD,OAAO,EAGL,UAAU,EACV,gBAAgB,EAChB,eAAe,EACf,eAAe,GAChB,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAA;AAEnC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAM,OAAO,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,eAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B
,CAAC,CAAC,gBAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,eAAe;wBACjB,CAAC,CAAC,UAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,OAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n 
PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]}
\ No newline at end of file
+{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AAEvD,OAAO,EAGL,UAAU,EACV,gBAAgB,EAChB,eAAe,EACf,eAAe,GAChB,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAA;AAEnC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAM,OAAO,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,C
AAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,eAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,gBAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,eAAe;wBACjB,CAAC,CAAC,UAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,OAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n 
FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/mjs/package.json b/deps/npm/node_modules/glob/dist/mjs/package.json
index ac4c42f81fbd84..5cc80943d565b7 100644
--- a/deps/npm/node_modules/glob/dist/mjs/package.json
+++ b/deps/npm/node_modules/glob/dist/mjs/package.json
@@ -1,4 +1,4 @@
{
- "version": "10.2.7",
+ "version": "10.3.3",
"type": "module"
}
diff --git a/deps/npm/node_modules/glob/package.json b/deps/npm/node_modules/glob/package.json
index ba9732c0f6de59..2d25985d2bbb5d 100644
--- a/deps/npm/node_modules/glob/package.json
+++ b/deps/npm/node_modules/glob/package.json
@@ -2,7 +2,7 @@
"author": "Isaac Z. Schlueter (https://blog.izs.me/)",
"name": "glob",
"description": "the most correct and second fastest glob implementation in JavaScript",
- "version": "10.2.7",
+ "version": "10.3.3",
"bin": "./dist/cjs/src/bin.js",
"repository": {
"type": "git",
@@ -62,11 +62,11 @@
"foreground-child": "^3.1.0",
"jackspeak": "^2.0.3",
"minimatch": "^9.0.1",
- "minipass": "^5.0.0 || ^6.0.2",
- "path-scurry": "^1.7.0"
+ "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0",
+ "path-scurry": "^1.10.1"
},
"devDependencies": {
- "@types/node": "^20.2.1",
+ "@types/node": "^20.3.2",
"@types/tap": "^15.0.7",
"c8": "^7.12.0",
"memfs": "^3.4.13",
diff --git a/deps/npm/node_modules/hosted-git-info/lib/index.js b/deps/npm/node_modules/hosted-git-info/lib/index.js
index a7339c217e9a33..0c9d0b08c866b5 100644
--- a/deps/npm/node_modules/hosted-git-info/lib/index.js
+++ b/deps/npm/node_modules/hosted-git-info/lib/index.js
@@ -1,11 +1,11 @@
'use strict'
-const LRU = require('lru-cache')
+const { LRUCache } = require('lru-cache')
const hosts = require('./hosts.js')
const fromUrl = require('./from-url.js')
const parseUrl = require('./parse-url.js')
-const cache = new LRU({ max: 1000 })
+const cache = new LRUCache({ max: 1000 })
class GitHost {
constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
diff --git a/deps/npm/node_modules/hosted-git-info/package.json b/deps/npm/node_modules/hosted-git-info/package.json
index 612259948afe73..262a6c20fcf00b 100644
--- a/deps/npm/node_modules/hosted-git-info/package.json
+++ b/deps/npm/node_modules/hosted-git-info/package.json
@@ -1,6 +1,6 @@
{
"name": "hosted-git-info",
- "version": "6.1.1",
+ "version": "7.0.0",
"description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
"main": "./lib/index.js",
"repository": {
@@ -30,11 +30,11 @@
"template-oss-apply": "template-oss-apply --force"
},
"dependencies": {
- "lru-cache": "^7.5.1"
+ "lru-cache": "^10.0.1"
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.7.1",
+ "@npmcli/template-oss": "4.18.0",
"tap": "^16.0.1"
},
"files": [
@@ -42,7 +42,7 @@
"lib/"
],
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"tap": {
"color": 1,
@@ -54,6 +54,13 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.7.1"
+ "version": "4.18.0",
+ "publish": "true",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
}
}
diff --git a/deps/npm/node_modules/init-package-json/package.json b/deps/npm/node_modules/init-package-json/package.json
index e2cb1fe25ebba7..a164169a74df3c 100644
--- a/deps/npm/node_modules/init-package-json/package.json
+++ b/deps/npm/node_modules/init-package-json/package.json
@@ -1,6 +1,6 @@
{
"name": "init-package-json",
- "version": "5.0.0",
+ "version": "6.0.0",
"main": "lib/init-package-json.js",
"scripts": {
"test": "tap",
@@ -19,22 +19,22 @@
"license": "ISC",
"description": "A node module to get your node module started",
"dependencies": {
- "npm-package-arg": "^10.0.0",
+ "npm-package-arg": "^11.0.0",
"promzard": "^1.0.0",
"read": "^2.0.0",
- "read-package-json": "^6.0.0",
+ "read-package-json": "^7.0.0",
"semver": "^7.3.5",
"validate-npm-package-license": "^3.0.4",
"validate-npm-package-name": "^5.0.0"
},
"devDependencies": {
- "@npmcli/config": "^6.0.0",
+ "@npmcli/config": "^7.0.0",
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.11.3",
+ "@npmcli/template-oss": "4.18.0",
"tap": "^16.0.1"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"tap": {
"statements": 95,
@@ -63,6 +63,13 @@
],
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.11.3"
+ "version": "4.18.0",
+ "publish": true,
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
}
}
diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json
index 713cf8c264c986..9185e364a37600 100644
--- a/deps/npm/node_modules/libnpmaccess/package.json
+++ b/deps/npm/node_modules/libnpmaccess/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmaccess",
- "version": "7.0.2",
+ "version": "8.0.0",
"description": "programmatic library for `npm access` commands",
"author": "GitHub Inc.",
"license": "ISC",
@@ -18,8 +18,8 @@
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/mock-registry": "^1.0.0",
"@npmcli/template-oss": "4.18.0",
- "nock": "^13.3.0",
- "tap": "^16.3.4"
+ "nock": "^13.3.3",
+ "tap": "^16.3.8"
},
"repository": {
"type": "git",
@@ -29,11 +29,11 @@
"bugs": "https://github.com/npm/libnpmaccess/issues",
"homepage": "https://npmjs.com/package/libnpmaccess",
"dependencies": {
- "npm-package-arg": "^10.1.0",
- "npm-registry-fetch": "^14.0.3"
+ "npm-package-arg": "^11.0.0",
+ "npm-registry-fetch": "^16.0.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"files": [
"bin/",
@@ -42,7 +42,13 @@
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json
index ce6eb3531b32ed..9e8a7b62949bc1 100644
--- a/deps/npm/node_modules/libnpmdiff/package.json
+++ b/deps/npm/node_modules/libnpmdiff/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmdiff",
- "version": "5.0.19",
+ "version": "6.0.1",
"description": "The registry diff",
"repository": {
"type": "git",
@@ -13,7 +13,7 @@
"lib/"
],
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"keywords": [
"npm",
@@ -43,23 +43,29 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.18.0",
- "tap": "^16.3.4"
+ "tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^6.3.0",
+ "@npmcli/arborist": "^7.1.0",
"@npmcli/disparity-colors": "^3.0.0",
"@npmcli/installed-package-contents": "^2.0.2",
"binary-extensions": "^2.2.0",
"diff": "^5.1.0",
"minimatch": "^9.0.0",
- "npm-package-arg": "^10.1.0",
- "pacote": "^15.0.8",
+ "npm-package-arg": "^11.0.0",
+ "pacote": "^17.0.4",
"tar": "^6.1.13"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json
index 9b86b81a998ef7..5e49fe5264a3ae 100644
--- a/deps/npm/node_modules/libnpmexec/package.json
+++ b/deps/npm/node_modules/libnpmexec/package.json
@@ -1,13 +1,13 @@
{
"name": "libnpmexec",
- "version": "6.0.3",
+ "version": "7.0.1",
"files": [
"bin/",
"lib/"
],
"main": "lib/index.js",
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"description": "npm exec (npx) programmatic API",
"repository": {
@@ -56,15 +56,15 @@
"chalk": "^5.2.0",
"just-extend": "^6.2.0",
"just-safe-set": "^4.2.1",
- "tap": "^16.3.4"
+ "tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^6.3.0",
- "@npmcli/run-script": "^6.0.0",
+ "@npmcli/arborist": "^7.1.0",
+ "@npmcli/run-script": "^7.0.1",
"ci-info": "^3.7.1",
- "npm-package-arg": "^10.1.0",
+ "npm-package-arg": "^11.0.0",
"npmlog": "^7.0.1",
- "pacote": "^15.0.8",
+ "pacote": "^17.0.4",
"proc-log": "^3.0.0",
"read": "^2.0.0",
"read-package-json-fast": "^3.0.2",
@@ -74,6 +74,12 @@
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
}
}
diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json
index 0c863c2f92203a..b20bfec92346b6 100644
--- a/deps/npm/node_modules/libnpmfund/package.json
+++ b/deps/npm/node_modules/libnpmfund/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmfund",
- "version": "4.0.19",
+ "version": "4.1.1",
"main": "lib/index.js",
"files": [
"bin/",
@@ -42,10 +42,10 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.18.0",
- "tap": "^16.3.4"
+ "tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^6.3.0"
+ "@npmcli/arborist": "^7.1.0"
},
"engines": {
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json
index 05b34dda75c416..a45f37652a804b 100644
--- a/deps/npm/node_modules/libnpmhook/package.json
+++ b/deps/npm/node_modules/libnpmhook/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmhook",
- "version": "9.0.3",
+ "version": "10.0.0",
"description": "programmatic API for managing npm registry hooks",
"main": "lib/index.js",
"files": [
@@ -31,21 +31,27 @@
"license": "ISC",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^14.0.3"
+ "npm-registry-fetch": "^16.0.0"
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.18.0",
- "nock": "^13.3.0",
- "tap": "^16.3.4"
+ "nock": "^13.3.3",
+ "tap": "^16.3.8"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json
index 675d03b5b2437a..f1964bca7eeb9b 100644
--- a/deps/npm/node_modules/libnpmorg/package.json
+++ b/deps/npm/node_modules/libnpmorg/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmorg",
- "version": "5.0.4",
+ "version": "6.0.0",
"description": "Programmatic api for `npm org` commands",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -29,9 +29,9 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.18.0",
- "minipass": "^5.0.0",
- "nock": "^13.3.0",
- "tap": "^16.3.4"
+ "minipass": "^7.0.3",
+ "nock": "^13.3.3",
+ "tap": "^16.3.8"
},
"repository": {
"type": "git",
@@ -42,15 +42,21 @@
"homepage": "https://npmjs.com/package/libnpmorg",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^14.0.3"
+ "npm-registry-fetch": "^16.0.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json
index d8861c337c4d99..88a80e95226183 100644
--- a/deps/npm/node_modules/libnpmpack/package.json
+++ b/deps/npm/node_modules/libnpmpack/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpack",
- "version": "5.0.19",
+ "version": "6.0.1",
"description": "Programmatic API for the bits behind npm pack",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -24,9 +24,9 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.18.0",
- "nock": "^13.3.0",
+ "nock": "^13.3.3",
"spawk": "^1.7.1",
- "tap": "^16.3.4"
+ "tap": "^16.3.8"
},
"repository": {
"type": "git",
@@ -36,18 +36,24 @@
"bugs": "https://github.com/npm/libnpmpack/issues",
"homepage": "https://npmjs.com/package/libnpmpack",
"dependencies": {
- "@npmcli/arborist": "^6.3.0",
- "@npmcli/run-script": "^6.0.0",
- "npm-package-arg": "^10.1.0",
- "pacote": "^15.0.8"
+ "@npmcli/arborist": "^7.1.0",
+ "@npmcli/run-script": "^7.0.1",
+ "npm-package-arg": "^11.0.0",
+ "pacote": "^17.0.4"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/libnpmpublish/lib/provenance.js b/deps/npm/node_modules/libnpmpublish/lib/provenance.js
index 398db1b4cd4671..45fe963d5f36f2 100644
--- a/deps/npm/node_modules/libnpmpublish/lib/provenance.js
+++ b/deps/npm/node_modules/libnpmpublish/lib/provenance.js
@@ -1,4 +1,4 @@
-const { sigstore } = require('sigstore')
+const sigstore = require('sigstore')
const { readFile } = require('fs/promises')
const ci = require('ci-info')
const { env } = process
diff --git a/deps/npm/node_modules/libnpmpublish/lib/publish.js b/deps/npm/node_modules/libnpmpublish/lib/publish.js
index 554eb9bec46f8c..b0ef782a166c66 100644
--- a/deps/npm/node_modules/libnpmpublish/lib/publish.js
+++ b/deps/npm/node_modules/libnpmpublish/lib/publish.js
@@ -50,42 +50,16 @@ Remove the 'private' field from the package.json to publish it.`),
opts
)
- try {
- const res = await npmFetch(spec.escapedName, {
- ...opts,
- method: 'PUT',
- body: metadata,
- ignoreBody: true,
- })
- if (transparencyLogUrl) {
- res.transparencyLogUrl = transparencyLogUrl
- }
- return res
- } catch (err) {
- if (err.code !== 'E409') {
- throw err
- }
- // if E409, we attempt exactly ONE retry, to protect us
- // against malicious activity like trying to publish
- // a bunch of new versions of a package at the same time
- // and/or spamming the registry
- const current = await npmFetch.json(spec.escapedName, {
- ...opts,
- query: { write: true },
- })
- const newMetadata = patchMetadata(current, metadata)
- const res = await npmFetch(spec.escapedName, {
- ...opts,
- method: 'PUT',
- body: newMetadata,
- ignoreBody: true,
- })
- /* istanbul ignore next */
- if (transparencyLogUrl) {
- res.transparencyLogUrl = transparencyLogUrl
- }
- return res
+ const res = await npmFetch(spec.escapedName, {
+ ...opts,
+ method: 'PUT',
+ body: metadata,
+ ignoreBody: true,
+ })
+ if (transparencyLogUrl) {
+ res.transparencyLogUrl = transparencyLogUrl
}
+ return res
}
const patchManifest = (_manifest, opts) => {
@@ -195,51 +169,6 @@ const buildMetadata = async (registry, manifest, tarballData, spec, opts) => {
}
}
-const patchMetadata = (current, newData) => {
- const curVers = Object.keys(current.versions || {})
- .map(v => semver.clean(v, true))
- .concat(Object.keys(current.time || {})
- .map(v => semver.valid(v, true) && semver.clean(v, true))
- .filter(v => v))
-
- const newVersion = Object.keys(newData.versions)[0]
-
- if (curVers.indexOf(newVersion) !== -1) {
- const { name: pkgid, version } = newData
- throw Object.assign(
- new Error(
- `Cannot publish ${pkgid}@${version} over existing version.`
- ), {
- code: 'EPUBLISHCONFLICT',
- pkgid,
- version,
- })
- }
-
- current.versions = current.versions || {}
- current.versions[newVersion] = newData.versions[newVersion]
- for (const i in newData) {
- switch (i) {
- // objects that copy over the new stuffs
- case 'dist-tags':
- case 'versions':
- case '_attachments':
- for (const j in newData[i]) {
- current[i] = current[i] || {}
- current[i][j] = newData[i][j]
- }
- break
-
- // copy
- default:
- current[i] = newData[i]
- break
- }
- }
-
- return current
-}
-
// Check that all the prereqs are met for provenance generation
const ensureProvenanceGeneration = async (registry, spec, opts) => {
if (ciInfo.GITHUB_ACTIONS) {
diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json
index 7c7533a82c735f..3dcaf98e84782d 100644
--- a/deps/npm/node_modules/libnpmpublish/package.json
+++ b/deps/npm/node_modules/libnpmpublish/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpublish",
- "version": "7.5.0",
+ "version": "9.0.0",
"description": "Programmatic API for the bits behind npm publish and unpublish",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -27,9 +27,8 @@
"@npmcli/mock-globals": "^1.0.0",
"@npmcli/mock-registry": "^1.0.0",
"@npmcli/template-oss": "4.18.0",
- "lodash.clonedeep": "^4.5.0",
- "nock": "^13.3.0",
- "tap": "^16.3.4"
+ "nock": "^13.3.3",
+ "tap": "^16.3.8"
},
"repository": {
"type": "git",
@@ -40,21 +39,27 @@
"homepage": "https://npmjs.com/package/libnpmpublish",
"dependencies": {
"ci-info": "^3.6.1",
- "normalize-package-data": "^5.0.0",
- "npm-package-arg": "^10.1.0",
- "npm-registry-fetch": "^14.0.3",
+ "normalize-package-data": "^6.0.0",
+ "npm-package-arg": "^11.0.0",
+ "npm-registry-fetch": "^16.0.0",
"proc-log": "^3.0.0",
"semver": "^7.3.7",
- "sigstore": "^1.4.0",
- "ssri": "^10.0.1"
+ "sigstore": "^2.1.0",
+ "ssri": "^10.0.5"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json
index 32cb1f21b64221..9bd45de5f62cdb 100644
--- a/deps/npm/node_modules/libnpmsearch/package.json
+++ b/deps/npm/node_modules/libnpmsearch/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmsearch",
- "version": "6.0.2",
+ "version": "7.0.0",
"description": "Programmatic API for searching in npm and compatible registries.",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -27,8 +27,8 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.18.0",
- "nock": "^13.3.0",
- "tap": "^16.3.4"
+ "nock": "^13.3.3",
+ "tap": "^16.3.8"
},
"repository": {
"type": "git",
@@ -38,15 +38,21 @@
"bugs": "https://github.com/npm/libnpmsearch/issues",
"homepage": "https://npmjs.com/package/libnpmsearch",
"dependencies": {
- "npm-registry-fetch": "^14.0.3"
+ "npm-registry-fetch": "^16.0.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json
index 33a77095fe8489..ca153ac301bf45 100644
--- a/deps/npm/node_modules/libnpmteam/package.json
+++ b/deps/npm/node_modules/libnpmteam/package.json
@@ -1,7 +1,7 @@
{
"name": "libnpmteam",
"description": "npm Team management APIs",
- "version": "5.0.3",
+ "version": "6.0.0",
"author": "GitHub Inc.",
"license": "ISC",
"main": "lib/index.js",
@@ -17,8 +17,8 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.18.0",
- "nock": "^13.3.0",
- "tap": "^16.3.4"
+ "nock": "^13.3.3",
+ "tap": "^16.3.8"
},
"repository": {
"type": "git",
@@ -32,15 +32,21 @@
"homepage": "https://npmjs.com/package/libnpmteam",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^14.0.3"
+ "npm-registry-fetch": "^16.0.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
},
"tap": {
"nyc-arg": [
diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json
index 469f9c2bc00d67..6f3a5bd0b5155a 100644
--- a/deps/npm/node_modules/libnpmversion/package.json
+++ b/deps/npm/node_modules/libnpmversion/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmversion",
- "version": "4.0.2",
+ "version": "5.0.0",
"main": "lib/index.js",
"files": [
"bin/",
@@ -34,21 +34,27 @@
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.18.0",
"require-inject": "^1.4.4",
- "tap": "^16.3.4"
+ "tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/git": "^4.0.1",
- "@npmcli/run-script": "^6.0.0",
+ "@npmcli/git": "^5.0.3",
+ "@npmcli/run-script": "^7.0.1",
"json-parse-even-better-errors": "^3.0.0",
"proc-log": "^3.0.0",
"semver": "^7.3.7"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.18.0",
- "content": "../../scripts/template-oss/index.js"
+ "content": "../../scripts/template-oss/index.js",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ]
}
}
diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js b/deps/npm/node_modules/lru-cache/dist/cjs/index.js
similarity index 98%
rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js
rename to deps/npm/node_modules/lru-cache/dist/cjs/index.js
index e6c4f909292b3f..1d1f23a55ec4b4 100644
--- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js
+++ b/deps/npm/node_modules/lru-cache/dist/cjs/index.js
@@ -837,6 +837,15 @@ class LRUCache {
if (v !== oldVal) {
if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
oldVal.__abortController.abort(new Error('replaced'));
+ const { __staleWhileFetching: s } = oldVal;
+ if (s !== undefined && !noDisposeOnSet) {
+ if (this.#hasDispose) {
+ this.#dispose?.(s, k, 'set');
+ }
+ if (this.#hasDisposeAfter) {
+ this.#disposed?.push([s, k, 'set']);
+ }
+ }
}
else if (!noDisposeOnSet) {
if (this.#hasDispose) {
@@ -1090,7 +1099,7 @@ class LRUCache {
const pcall = (res, rej) => {
const fmp = this.#fetchMethod?.(k, v, fetchOpts);
if (fmp && fmp instanceof Promise) {
- fmp.then(v => res(v), rej);
+ fmp.then(v => res(v === undefined ? undefined : v), rej);
}
// ignored, we go until we finish, regardless.
// defer check until we are actually aborting,
@@ -1098,7 +1107,7 @@ class LRUCache {
ac.signal.addEventListener('abort', () => {
if (!options.ignoreFetchAbort ||
options.allowStaleOnFetchAbort) {
- res();
+ res(undefined);
// when it eventually resolves, update the cache.
if (options.allowStaleOnFetchAbort) {
res = v => cb(v, true);
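The hunks above change how lru-cache handles a key that is overwritten while a background fetch() for it is still in flight: the previously cached value (held as __staleWhileFetching) is now passed to dispose/disposeAfter with reason 'set' instead of being dropped silently, and the fetch promise now resolves explicitly with undefined on abort. A small, hedged usage sketch of that scenario; the timings and key names are illustrative only.

const { LRUCache } = require('lru-cache')

const cache = new LRUCache({
  max: 10,
  ttl: 5,             // entries go stale quickly so fetch() refreshes in the background
  allowStale: true,
  dispose: (value, key, reason) => {
    // with the patch above, this now also fires for the stale value of a key
    // that gets set() while its background refresh is still pending
    console.log(`disposed ${key}=${value} (${reason})`)
  },
  fetchMethod: async (key) => {
    await new Promise(resolve => setTimeout(resolve, 50)) // simulate a slow refresh
    return `fresh-${key}`
  },
})

cache.set('a', 'old')
setTimeout(() => {
  cache.fetch('a').catch(() => {})  // starts a background refresh of the stale entry
  cache.set('a', 'new')             // replaces it mid-fetch; expect dispose('old', 'a', 'set')
}, 10)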
diff --git a/deps/npm/node_modules/lru-cache/dist/cjs/index.min.js b/deps/npm/node_modules/lru-cache/dist/cjs/index.min.js
new file mode 100644
index 00000000000000..8d34a03041d25e
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/dist/cjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C;
+//# sourceMappingURL=index.min.js.map
diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json b/deps/npm/node_modules/lru-cache/dist/cjs/package.json
similarity index 100%
rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json
rename to deps/npm/node_modules/lru-cache/dist/cjs/package.json
diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js b/deps/npm/node_modules/lru-cache/dist/mjs/index.js
similarity index 98%
rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js
rename to deps/npm/node_modules/lru-cache/dist/mjs/index.js
index 1d8a36931a45a8..79025471782531 100644
--- a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js
+++ b/deps/npm/node_modules/lru-cache/dist/mjs/index.js
@@ -834,6 +834,15 @@ export class LRUCache {
if (v !== oldVal) {
if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
oldVal.__abortController.abort(new Error('replaced'));
+ const { __staleWhileFetching: s } = oldVal;
+ if (s !== undefined && !noDisposeOnSet) {
+ if (this.#hasDispose) {
+ this.#dispose?.(s, k, 'set');
+ }
+ if (this.#hasDisposeAfter) {
+ this.#disposed?.push([s, k, 'set']);
+ }
+ }
}
else if (!noDisposeOnSet) {
if (this.#hasDispose) {
@@ -1087,7 +1096,7 @@ export class LRUCache {
const pcall = (res, rej) => {
const fmp = this.#fetchMethod?.(k, v, fetchOpts);
if (fmp && fmp instanceof Promise) {
- fmp.then(v => res(v), rej);
+ fmp.then(v => res(v === undefined ? undefined : v), rej);
}
// ignored, we go until we finish, regardless.
// defer check until we are actually aborting,
@@ -1095,7 +1104,7 @@ export class LRUCache {
ac.signal.addEventListener('abort', () => {
if (!options.ignoreFetchAbort ||
options.allowStaleOnFetchAbort) {
- res();
+ res(undefined);
// when it eventually resolves, update the cache.
if (options.allowStaleOnFetchAbort) {
res = v => cb(v, true);
diff --git a/deps/npm/node_modules/lru-cache/dist/mjs/index.min.js b/deps/npm/node_modules/lru-cache/dist/mjs/index.min.js
new file mode 100644
index 00000000000000..5a16b3940d6df9
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/dist/mjs/index.min.js
@@ -0,0 +1,2 @@
+var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: 
"+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else 
r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return 
l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json b/deps/npm/node_modules/lru-cache/dist/mjs/package.json
similarity index 100%
rename from deps/npm/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json
rename to deps/npm/node_modules/lru-cache/dist/mjs/package.json
diff --git a/deps/npm/node_modules/lru-cache/package.json b/deps/npm/node_modules/lru-cache/package.json
index 9684991727e7a2..bae4a04839d1f7 100644
--- a/deps/npm/node_modules/lru-cache/package.json
+++ b/deps/npm/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
- "version": "7.18.3",
+ "version": "10.0.1",
"author": "Isaac Z. Schlueter ",
"keywords": [
"mru",
@@ -11,60 +11,74 @@
"sideEffects": false,
"scripts": {
"build": "npm run prepare",
+ "preprepare": "rm -rf dist",
+ "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+ "postprepare": "bash fixup.sh",
"pretest": "npm run prepare",
"presnap": "npm run prepare",
- "prepare": "node ./scripts/transpile-to-esm.js",
- "size": "size-limit",
- "test": "tap",
- "snap": "tap",
+ "test": "c8 tap",
+ "snap": "c8 tap",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags",
"format": "prettier --write .",
- "typedoc": "typedoc ./index.d.ts"
+ "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
+ "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+ "prebenchmark": "npm run prepare",
+ "benchmark": "make -C benchmark",
+ "preprofile": "npm run prepare",
+ "profile": "make -C benchmark profile"
},
- "type": "commonjs",
- "main": "./index.js",
- "module": "./index.mjs",
- "types": "./index.d.ts",
+ "main": "./dist/cjs/index.js",
+ "module": "./dist/mjs/index.js",
"exports": {
- ".": {
+ "./min": {
"import": {
- "types": "./index.d.ts",
- "default": "./index.mjs"
+ "types": "./dist/mjs/index.d.ts",
+ "default": "./dist/mjs/index.min.js"
},
"require": {
- "types": "./index.d.ts",
- "default": "./index.js"
+ "types": "./dist/cjs/index.d.ts",
+ "default": "./dist/cjs/index.min.js"
}
},
- "./package.json": "./package.json"
+ ".": {
+ "import": {
+ "types": "./dist/mjs/index.d.ts",
+ "default": "./dist/mjs/index.js"
+ },
+ "require": {
+ "types": "./dist/cjs/index.d.ts",
+ "default": "./dist/cjs/index.js"
+ }
+ }
},
"repository": "git://github.com/isaacs/node-lru-cache.git",
"devDependencies": {
"@size-limit/preset-small-lib": "^7.0.8",
- "@types/node": "^17.0.31",
+ "@types/node": "^20.2.5",
"@types/tap": "^15.0.6",
"benchmark": "^2.1.4",
"c8": "^7.11.2",
"clock-mock": "^1.0.6",
+ "esbuild": "^0.17.11",
"eslint-config-prettier": "^8.5.0",
+ "marked": "^4.2.12",
+ "mkdirp": "^2.1.5",
"prettier": "^2.6.2",
"size-limit": "^7.0.8",
"tap": "^16.3.4",
- "ts-node": "^10.7.0",
+ "ts-node": "^10.9.1",
"tslib": "^2.4.0",
- "typedoc": "^0.23.24",
- "typescript": "^4.6.4"
+ "typedoc": "^0.24.6",
+ "typescript": "^5.0.4"
},
"license": "ISC",
"files": [
- "index.js",
- "index.mjs",
- "index.d.ts"
+ "dist"
],
"engines": {
- "node": ">=12"
+ "node": "14 || >=16.14"
},
"prettier": {
"semi": false,
@@ -78,19 +92,17 @@
"endOfLine": "lf"
},
"tap": {
- "nyc-arg": [
- "--include=index.js"
- ],
+ "coverage": false,
"node-arg": [
"--expose-gc",
- "--require",
+ "-r",
"ts-node/register"
],
"ts": false
},
"size-limit": [
{
- "path": "./index.js"
+ "path": "./dist/mjs/index.js"
}
]
}
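With the reworked exports map above, the package now ships pre-built dist/cjs and dist/mjs bundles, and the minified builds added earlier in this diff are reachable through the new "./min" subpath. Roughly how a consumer resolves them:

// CommonJS: the "." export resolves require() to ./dist/cjs/index.js
const { LRUCache } = require('lru-cache')

// ESM:      import { LRUCache } from 'lru-cache'      -> ./dist/mjs/index.js
// Minified: import { LRUCache } from 'lru-cache/min'  -> ./dist/mjs/index.min.js

const cache = new LRUCache({ max: 100 })
cache.set('answer', 42)
console.log(cache.get('answer')) // 42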
diff --git a/deps/npm/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/make-fetch-happen/lib/remote.js
index bdbcc79cad908d..2aef9f8f969b00 100644
--- a/deps/npm/node_modules/make-fetch-happen/lib/remote.js
+++ b/deps/npm/node_modules/make-fetch-happen/lib/remote.js
@@ -4,7 +4,7 @@ const promiseRetry = require('promise-retry')
const ssri = require('ssri')
const CachingMinipassPipeline = require('./pipeline.js')
-const getAgent = require('./agent.js')
+const { getAgent } = require('@npmcli/agent')
const pkg = require('../package.json')
const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
@@ -14,9 +14,15 @@ const RETRY_ERRORS = [
'ECONNREFUSED', // remote host refused to open connection
'EADDRINUSE', // failed to bind to a local port (proxy?)
'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
- 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive
+ // from @npmcli/agent
+ 'ECONNECTIONTIMEOUT',
+ 'EIDLETIMEOUT',
+ 'ERESPONSETIMEOUT',
+ 'ETRANSFERTIMEOUT',
// Known codes we do NOT retry on:
// ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
+ // EINVALIDPROXY // invalid protocol from @npmcli/agent
+ // EINVALIDRESPONSE // invalid status code from @npmcli/agent
]
const RETRY_TYPES = [
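For context on how that list is used: remote.js wraps its request in promise-retry (visible in the hunk header above) and retries only when the error code is one of the RETRY_ERRORS. A simplified, hypothetical sketch of that pattern follows; fetchWithRetry and doFetch are illustrative names, not the module's internals.

const promiseRetry = require('promise-retry')

// Subset of the codes shown in the diff, including the new @npmcli/agent
// timeout codes; codes like ENOTFOUND are deliberately absent (not retried).
const RETRY_ERRORS = [
  'ECONNREFUSED', 'EADDRINUSE', 'ETIMEDOUT',
  'ECONNECTIONTIMEOUT', 'EIDLETIMEOUT', 'ERESPONSETIMEOUT', 'ETRANSFERTIMEOUT',
]

const fetchWithRetry = (doFetch, retryOpts = { retries: 2 }) =>
  promiseRetry((retry, attempt) =>
    doFetch().catch(err => {
      if (RETRY_ERRORS.includes(err.code)) {
        return retry(err) // transient network failure: try again
      }
      throw err // anything else fails immediately
    }), retryOpts)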
diff --git a/deps/npm/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/make-fetch-happen/package.json
index fd415dc9966faa..a874ace6d1d472 100644
--- a/deps/npm/node_modules/make-fetch-happen/package.json
+++ b/deps/npm/node_modules/make-fetch-happen/package.json
@@ -1,6 +1,6 @@
{
"name": "make-fetch-happen",
- "version": "11.1.1",
+ "version": "13.0.0",
"description": "Opinionated, caching, retrying fetch client",
"main": "lib/index.js",
"files": [
@@ -33,32 +33,28 @@
"author": "GitHub Inc.",
"license": "ISC",
"dependencies": {
- "agentkeepalive": "^4.2.1",
- "cacache": "^17.0.0",
+ "@npmcli/agent": "^2.0.0",
+ "cacache": "^18.0.0",
"http-cache-semantics": "^4.1.1",
- "http-proxy-agent": "^5.0.0",
- "https-proxy-agent": "^5.0.0",
"is-lambda": "^1.0.1",
- "lru-cache": "^7.7.1",
- "minipass": "^5.0.0",
+ "minipass": "^7.0.2",
"minipass-fetch": "^3.0.0",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
"negotiator": "^0.6.3",
"promise-retry": "^2.0.1",
- "socks-proxy-agent": "^7.0.0",
"ssri": "^10.0.0"
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"nock": "^13.2.4",
"safe-buffer": "^5.2.1",
"standard-version": "^9.3.2",
"tap": "^16.0.0"
},
"engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+ "node": "^16.14.0 || >=18.0.0"
},
"tap": {
"color": 1,
@@ -72,7 +68,13 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "ciVersions": [
+ "16.14.0",
+ "16.x",
+ "18.0.0",
+ "18.x"
+ ],
+ "version": "4.18.0",
"publish": "true"
}
}
diff --git a/deps/npm/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/minipass-fetch/package.json
index 78024317d8be4d..581275ba27d4ff 100644
--- a/deps/npm/node_modules/minipass-fetch/package.json
+++ b/deps/npm/node_modules/minipass-fetch/package.json
@@ -1,6 +1,6 @@
{
"name": "minipass-fetch",
- "version": "3.0.3",
+ "version": "3.0.4",
"description": "An implementation of window.fetch in Node.js using Minipass streams",
"license": "MIT",
"main": "lib/index.js",
@@ -24,7 +24,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.14.1",
+ "@npmcli/template-oss": "4.18.0",
"@ungap/url-search-params": "^0.2.2",
"abort-controller": "^3.0.0",
"abortcontroller-polyfill": "~1.7.3",
@@ -36,7 +36,7 @@
"tap": "^16.0.0"
},
"dependencies": {
- "minipass": "^5.0.0",
+ "minipass": "^7.0.3",
"minipass-sized": "^1.0.3",
"minizlib": "^2.1.2"
},
@@ -63,7 +63,7 @@
"author": "GitHub Inc.",
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.14.1",
+ "version": "4.18.0",
"publish": "true"
}
}
diff --git a/deps/npm/node_modules/minipass/dist/cjs/index.js b/deps/npm/node_modules/minipass/dist/cjs/index.js
new file mode 100644
index 00000000000000..b6cdae8eb514b8
--- /dev/null
+++ b/deps/npm/node_modules/minipass/dist/cjs/index.js
@@ -0,0 +1,1028 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+ ? process
+ : {
+ stdout: null,
+ stderr: null,
+ };
+const events_1 = require("events");
+const stream_1 = __importDefault(require("stream"));
+const string_decoder_1 = require("string_decoder");
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+ typeof s === 'object' &&
+ (s instanceof Minipass ||
+ s instanceof stream_1.default ||
+ (0, exports.isReadable)(s) ||
+ (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+ typeof s === 'object' &&
+ s instanceof events_1.EventEmitter &&
+ typeof s.pipe === 'function' &&
+ // node core Writable streams have a pipe() method, but it throws
+ s.pipe !== stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+ typeof s === 'object' &&
+ s instanceof events_1.EventEmitter &&
+ typeof s.write === 'function' &&
+ typeof s.end === 'function';
+exports.isWritable = isWritable;
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+ (!!b &&
+ typeof b === 'object' &&
+ b.constructor &&
+ b.constructor.name === 'ArrayBuffer' &&
+ b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+ src;
+ dest;
+ opts;
+ ondrain;
+ constructor(src, dest, opts) {
+ this.src = src;
+ this.dest = dest;
+ this.opts = opts;
+ this.ondrain = () => src[RESUME]();
+ this.dest.on('drain', this.ondrain);
+ }
+ unpipe() {
+ this.dest.removeListener('drain', this.ondrain);
+ }
+ // only here for the prototype
+ /* c8 ignore start */
+ proxyErrors(_er) { }
+ /* c8 ignore stop */
+ end() {
+ this.unpipe();
+ if (this.opts.end)
+ this.dest.end();
+ }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+ unpipe() {
+ this.src.removeListener('error', this.proxyErrors);
+ super.unpipe();
+ }
+ constructor(src, dest, opts) {
+ super(src, dest, opts);
+ this.proxyErrors = er => dest.emit('error', er);
+ src.on('error', this.proxyErrors);
+ }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends events_1.EventEmitter {
+ [FLOWING] = false;
+ [PAUSED] = false;
+ [PIPES] = [];
+ [BUFFER] = [];
+ [OBJECTMODE];
+ [ENCODING];
+ [ASYNC];
+ [DECODER];
+ [EOF] = false;
+ [EMITTED_END] = false;
+ [EMITTING_END] = false;
+ [CLOSED] = false;
+ [EMITTED_ERROR] = null;
+ [BUFFERLENGTH] = 0;
+ [DESTROYED] = false;
+ [SIGNAL];
+ [ABORTED] = false;
+ [DATALISTENERS] = 0;
+ [DISCARDED] = false;
+ /**
+ * true if the stream can be written
+ */
+ writable = true;
+ /**
+ * true if the stream can be read
+ */
+ readable = true;
+ /**
+ * If `RType` is Buffer, then options do not need to be provided.
+ * Otherwise, an options object must be provided to specify either
+ * {@link Minipass.SharedOptions.objectMode} or
+ * {@link Minipass.SharedOptions.encoding}, as appropriate.
+ */
+ constructor(...args) {
+ const options = (args[0] ||
+ {});
+ super();
+ if (options.objectMode && typeof options.encoding === 'string') {
+ throw new TypeError('Encoding and objectMode may not be used together');
+ }
+ if (isObjectModeOptions(options)) {
+ this[OBJECTMODE] = true;
+ this[ENCODING] = null;
+ }
+ else if (isEncodingOptions(options)) {
+ this[ENCODING] = options.encoding;
+ this[OBJECTMODE] = false;
+ }
+ else {
+ this[OBJECTMODE] = false;
+ this[ENCODING] = null;
+ }
+ this[ASYNC] = !!options.async;
+ this[DECODER] = this[ENCODING]
+ ? new string_decoder_1.StringDecoder(this[ENCODING])
+ : null;
+ //@ts-ignore - private option for debugging and testing
+ if (options && options.debugExposeBuffer === true) {
+ Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+ }
+ //@ts-ignore - private option for debugging and testing
+ if (options && options.debugExposePipes === true) {
+ Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+ }
+ const { signal } = options;
+ if (signal) {
+ this[SIGNAL] = signal;
+ if (signal.aborted) {
+ this[ABORT]();
+ }
+ else {
+ signal.addEventListener('abort', () => this[ABORT]());
+ }
+ }
+ }
+ /**
+ * The amount of data stored in the buffer waiting to be read.
+ *
+ * For Buffer strings, this will be the total byte length.
+ * For string encoding streams, this will be the string character length,
+ * according to JavaScript's `string.length` logic.
+ * For objectMode streams, this is a count of the items waiting to be
+ * emitted.
+ */
+ get bufferLength() {
+ return this[BUFFERLENGTH];
+ }
+ /**
+ * The `BufferEncoding` currently in use, or `null`
+ */
+ get encoding() {
+ return this[ENCODING];
+ }
+ /**
+ * @deprecated - This is a read only property
+ */
+ set encoding(_enc) {
+ throw new Error('Encoding must be set at instantiation time');
+ }
+ /**
+ * @deprecated - Encoding may only be set at instantiation time
+ */
+ setEncoding(_enc) {
+ throw new Error('Encoding must be set at instantiation time');
+ }
+ /**
+ * True if this is an objectMode stream
+ */
+ get objectMode() {
+ return this[OBJECTMODE];
+ }
+ /**
+ * @deprecated - This is a read-only property
+ */
+ set objectMode(_om) {
+ throw new Error('objectMode must be set at instantiation time');
+ }
+ /**
+ * true if this is an async stream
+ */
+ get ['async']() {
+ return this[ASYNC];
+ }
+ /**
+ * Set to true to make this stream async.
+ *
+ * Once set, it cannot be unset, as this would potentially cause incorrect
+ * behavior. Ie, a sync stream can be made async, but an async stream
+ * cannot be safely made sync.
+ */
+ set ['async'](a) {
+ this[ASYNC] = this[ASYNC] || !!a;
+ }
+ // drop everything and get out of the flow completely
+ [ABORT]() {
+ this[ABORTED] = true;
+ this.emit('abort', this[SIGNAL]?.reason);
+ this.destroy(this[SIGNAL]?.reason);
+ }
+ /**
+ * True if the stream has been aborted.
+ */
+ get aborted() {
+ return this[ABORTED];
+ }
+ /**
+ * No-op setter. Stream aborted status is set via the AbortSignal provided
+ * in the constructor options.
+ */
+ set aborted(_) { }
+ write(chunk, encoding, cb) {
+ if (this[ABORTED])
+ return false;
+ if (this[EOF])
+ throw new Error('write after end');
+ if (this[DESTROYED]) {
+ this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+ return true;
+ }
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = 'utf8';
+ }
+ if (!encoding)
+ encoding = 'utf8';
+ const fn = this[ASYNC] ? defer : nodefer;
+ // convert array buffers and typed array views into buffers
+ // at some point in the future, we may want to do the opposite!
+ // leave strings and buffers as-is
+ // anything is only allowed if in object mode, so throw
+ if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+ if (isArrayBufferView(chunk)) {
+ //@ts-ignore - sinful unsafe type changing
+ chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+ }
+ else if (isArrayBufferLike(chunk)) {
+ //@ts-ignore - sinful unsafe type changing
+ chunk = Buffer.from(chunk);
+ }
+ else if (typeof chunk !== 'string') {
+ throw new Error('Non-contiguous data written to non-objectMode stream');
+ }
+ }
+ // handle object mode up front, since it's simpler
+ // this yields better performance, fewer checks later.
+ if (this[OBJECTMODE]) {
+ // maybe impossible?
+ /* c8 ignore start */
+ if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+ this[FLUSH](true);
+ /* c8 ignore stop */
+ if (this[FLOWING])
+ this.emit('data', chunk);
+ else
+ this[BUFFERPUSH](chunk);
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ // at this point the chunk is a buffer or string
+ // if it's empty, don't buffer it up or send it to the decoder
+ if (!chunk.length) {
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ // fast-path writing strings of same encoding to a stream with
+ // an empty buffer, skipping the buffer/decoder dance
+ if (typeof chunk === 'string' &&
+ // unless it is a string already ready for us to use
+ !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+ //@ts-ignore - sinful unsafe type change
+ chunk = Buffer.from(chunk, encoding);
+ }
+ if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+ //@ts-ignore - sinful unsafe type change
+ chunk = this[DECODER].write(chunk);
+ }
+ // Note: flushing CAN potentially switch us into not-flowing mode
+ if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+ this[FLUSH](true);
+ if (this[FLOWING])
+ this.emit('data', chunk);
+ else
+ this[BUFFERPUSH](chunk);
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ /**
+ * Low-level explicit read method.
+ *
+ * In objectMode, the argument is ignored, and one item is returned if
+ * available.
+ *
+ * `n` is the number of bytes (or in the case of encoding streams,
+ * characters) to consume. If `n` is not provided, then the entire buffer
+ * is returned, or `null` is returned if no data is available.
+ *
+ * If `n` is greater than the amount of data in the internal buffer,
+ * then `null` is returned.
+ */
+ read(n) {
+ if (this[DESTROYED])
+ return null;
+ this[DISCARDED] = false;
+ if (this[BUFFERLENGTH] === 0 ||
+ n === 0 ||
+ (n && n > this[BUFFERLENGTH])) {
+ this[MAYBE_EMIT_END]();
+ return null;
+ }
+ if (this[OBJECTMODE])
+ n = null;
+ if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+ // not object mode, so if we have an encoding, then RType is string
+ // otherwise, must be Buffer
+ this[BUFFER] = [
+ (this[ENCODING]
+ ? this[BUFFER].join('')
+ : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+ ];
+ }
+ const ret = this[READ](n || null, this[BUFFER][0]);
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ [READ](n, chunk) {
+ if (this[OBJECTMODE])
+ this[BUFFERSHIFT]();
+ else {
+ const c = chunk;
+ if (n === c.length || n === null)
+ this[BUFFERSHIFT]();
+ else if (typeof c === 'string') {
+ this[BUFFER][0] = c.slice(n);
+ chunk = c.slice(0, n);
+ this[BUFFERLENGTH] -= n;
+ }
+ else {
+ this[BUFFER][0] = c.subarray(n);
+ chunk = c.subarray(0, n);
+ this[BUFFERLENGTH] -= n;
+ }
+ }
+ this.emit('data', chunk);
+ if (!this[BUFFER].length && !this[EOF])
+ this.emit('drain');
+ return chunk;
+ }
+ end(chunk, encoding, cb) {
+ if (typeof chunk === 'function') {
+ cb = chunk;
+ chunk = undefined;
+ }
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = 'utf8';
+ }
+ if (chunk !== undefined)
+ this.write(chunk, encoding);
+ if (cb)
+ this.once('end', cb);
+ this[EOF] = true;
+ this.writable = false;
+ // if we haven't written anything, then go ahead and emit,
+ // even if we're not reading.
+ // we'll re-emit if a new 'end' listener is added anyway.
+ // This makes MP more suitable to write-only use cases.
+ if (this[FLOWING] || !this[PAUSED])
+ this[MAYBE_EMIT_END]();
+ return this;
+ }
+ // don't let the internal resume be overwritten
+ [RESUME]() {
+ if (this[DESTROYED])
+ return;
+ if (!this[DATALISTENERS] && !this[PIPES].length) {
+ this[DISCARDED] = true;
+ }
+ this[PAUSED] = false;
+ this[FLOWING] = true;
+ this.emit('resume');
+ if (this[BUFFER].length)
+ this[FLUSH]();
+ else if (this[EOF])
+ this[MAYBE_EMIT_END]();
+ else
+ this.emit('drain');
+ }
+ /**
+ * Resume the stream if it is currently in a paused state
+ *
+ * If called when there are no pipe destinations or `data` event listeners,
+ * this will place the stream in a "discarded" state, where all data will
+ * be thrown away. The discarded state is removed if a pipe destination or
+ * data handler is added, if pause() is called, or if any synchronous or
+ * asynchronous iteration is started.
+ */
+ resume() {
+ return this[RESUME]();
+ }
+ /**
+ * Pause the stream
+ */
+ pause() {
+ this[FLOWING] = false;
+ this[PAUSED] = true;
+ this[DISCARDED] = false;
+ }
+ /**
+ * true if the stream has been forcibly destroyed
+ */
+ get destroyed() {
+ return this[DESTROYED];
+ }
+ /**
+ * true if the stream is currently in a flowing state, meaning that
+ * any writes will be immediately emitted.
+ */
+ get flowing() {
+ return this[FLOWING];
+ }
+ /**
+ * true if the stream is currently in a paused state
+ */
+ get paused() {
+ return this[PAUSED];
+ }
+ [BUFFERPUSH](chunk) {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] += 1;
+ else
+ this[BUFFERLENGTH] += chunk.length;
+ this[BUFFER].push(chunk);
+ }
+ [BUFFERSHIFT]() {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] -= 1;
+ else
+ this[BUFFERLENGTH] -= this[BUFFER][0].length;
+ return this[BUFFER].shift();
+ }
+ [FLUSH](noDrain = false) {
+ do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+ this[BUFFER].length);
+ if (!noDrain && !this[BUFFER].length && !this[EOF])
+ this.emit('drain');
+ }
+ [FLUSHCHUNK](chunk) {
+ this.emit('data', chunk);
+ return this[FLOWING];
+ }
+ /**
+ * Pipe all data emitted by this stream into the destination provided.
+ *
+ * Triggers the flow of data.
+ */
+ pipe(dest, opts) {
+ if (this[DESTROYED])
+ return dest;
+ this[DISCARDED] = false;
+ const ended = this[EMITTED_END];
+ opts = opts || {};
+ if (dest === proc.stdout || dest === proc.stderr)
+ opts.end = false;
+ else
+ opts.end = opts.end !== false;
+ opts.proxyErrors = !!opts.proxyErrors;
+ // piping an ended stream ends immediately
+ if (ended) {
+ if (opts.end)
+ dest.end();
+ }
+ else {
+ // "as" here just ignores the WType, which pipes don't care about,
+ // since they're only consuming from us, and writing to the dest
+ this[PIPES].push(!opts.proxyErrors
+ ? new Pipe(this, dest, opts)
+ : new PipeProxyErrors(this, dest, opts));
+ if (this[ASYNC])
+ defer(() => this[RESUME]());
+ else
+ this[RESUME]();
+ }
+ return dest;
+ }
+ /**
+ * Fully unhook a piped destination stream.
+ *
+ * If the destination stream was the only consumer of this stream (ie,
+ * there are no other piped destinations or `'data'` event listeners)
+ * then the flow of data will stop until there is another consumer or
+ * {@link Minipass#resume} is explicitly called.
+ */
+ unpipe(dest) {
+ const p = this[PIPES].find(p => p.dest === dest);
+ if (p) {
+ if (this[PIPES].length === 1) {
+ if (this[FLOWING] && this[DATALISTENERS] === 0) {
+ this[FLOWING] = false;
+ }
+ this[PIPES] = [];
+ }
+ else
+ this[PIPES].splice(this[PIPES].indexOf(p), 1);
+ p.unpipe();
+ }
+ }
+ /**
+ * Alias for {@link Minipass#on}
+ */
+ addListener(ev, handler) {
+ return this.on(ev, handler);
+ }
+ /**
+ * Mostly identical to `EventEmitter.on`, with the following
+ * behavior differences to prevent data loss and unnecessary hangs:
+ *
+ * - Adding a 'data' event handler will trigger the flow of data
+ *
+ * - Adding a 'readable' event handler when there is data waiting to be read
+ * will cause 'readable' to be emitted immediately.
+ *
+ * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+ * already passed will cause the event to be emitted immediately and all
+ * handlers removed.
+ *
+ * - Adding an 'error' event handler after an error has been emitted will
+ * cause the event to be re-emitted immediately with the error previously
+ * raised.
+ */
+ on(ev, handler) {
+ const ret = super.on(ev, handler);
+ if (ev === 'data') {
+ this[DISCARDED] = false;
+ this[DATALISTENERS]++;
+ if (!this[PIPES].length && !this[FLOWING]) {
+ this[RESUME]();
+ }
+ }
+ else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+ super.emit('readable');
+ }
+ else if (isEndish(ev) && this[EMITTED_END]) {
+ super.emit(ev);
+ this.removeAllListeners(ev);
+ }
+ else if (ev === 'error' && this[EMITTED_ERROR]) {
+ const h = handler;
+ if (this[ASYNC])
+ defer(() => h.call(this, this[EMITTED_ERROR]));
+ else
+ h.call(this, this[EMITTED_ERROR]);
+ }
+ return ret;
+ }
+ /**
+ * Alias for {@link Minipass#off}
+ */
+ removeListener(ev, handler) {
+ return this.off(ev, handler);
+ }
+ /**
+ * Mostly identical to `EventEmitter.off`
+ *
+ * If a 'data' event handler is removed, and it was the last consumer
+ * (ie, there are no pipe destinations or other 'data' event listeners),
+ * then the flow of data will stop until there is another consumer or
+ * {@link Minipass#resume} is explicitly called.
+ */
+ off(ev, handler) {
+ const ret = super.off(ev, handler);
+ // if we previously had listeners, and now we don't, and we don't
+ // have any pipes, then stop the flow, unless it's been explicitly
+ // put in a discarded flowing state via stream.resume().
+ if (ev === 'data') {
+ this[DATALISTENERS] = this.listeners('data').length;
+ if (this[DATALISTENERS] === 0 &&
+ !this[DISCARDED] &&
+ !this[PIPES].length) {
+ this[FLOWING] = false;
+ }
+ }
+ return ret;
+ }
+ /**
+ * Mostly identical to `EventEmitter.removeAllListeners`
+ *
+ * If all 'data' event handlers are removed, and they were the last consumer
+ * (ie, there are no pipe destinations), then the flow of data will stop
+ * until there is another consumer or {@link Minipass#resume} is explicitly
+ * called.
+ */
+ removeAllListeners(ev) {
+ const ret = super.removeAllListeners(ev);
+ if (ev === 'data' || ev === undefined) {
+ this[DATALISTENERS] = 0;
+ if (!this[DISCARDED] && !this[PIPES].length) {
+ this[FLOWING] = false;
+ }
+ }
+ return ret;
+ }
+ /**
+ * true if the 'end' event has been emitted
+ */
+ get emittedEnd() {
+ return this[EMITTED_END];
+ }
+ [MAYBE_EMIT_END]() {
+ if (!this[EMITTING_END] &&
+ !this[EMITTED_END] &&
+ !this[DESTROYED] &&
+ this[BUFFER].length === 0 &&
+ this[EOF]) {
+ this[EMITTING_END] = true;
+ this.emit('end');
+ this.emit('prefinish');
+ this.emit('finish');
+ if (this[CLOSED])
+ this.emit('close');
+ this[EMITTING_END] = false;
+ }
+ }
+ /**
+ * Mostly identical to `EventEmitter.emit`, with the following
+ * behavior differences to prevent data loss and unnecessary hangs:
+ *
+ * If the stream has been destroyed, and the event is something other
+ * than 'close' or 'error', then `false` is returned and no handlers
+ * are called.
+ *
+ * If the event is 'end', and has already been emitted, then the event
+ * is ignored. If the stream is in a paused or non-flowing state, then
+ * the event will be deferred until data flow resumes. If the stream is
+ * async, then handlers will be called on the next tick rather than
+ * immediately.
+ *
+ * If the event is 'close', and 'end' has not yet been emitted, then
+ * the event will be deferred until after 'end' is emitted.
+ *
+ * If the event is 'error', and an AbortSignal was provided for the stream,
+ * and there are no listeners, then the event is ignored, matching the
+ * behavior of node core streams in the presence of an AbortSignal.
+ *
+ * If the event is 'finish' or 'prefinish', then all listeners will be
+ * removed after emitting the event, to prevent double-firing.
+ */
+ emit(ev, ...args) {
+ const data = args[0];
+ // error and close are only events allowed after calling destroy()
+ if (ev !== 'error' &&
+ ev !== 'close' &&
+ ev !== DESTROYED &&
+ this[DESTROYED]) {
+ return false;
+ }
+ else if (ev === 'data') {
+ return !this[OBJECTMODE] && !data
+ ? false
+ : this[ASYNC]
+ ? (defer(() => this[EMITDATA](data)), true)
+ : this[EMITDATA](data);
+ }
+ else if (ev === 'end') {
+ return this[EMITEND]();
+ }
+ else if (ev === 'close') {
+ this[CLOSED] = true;
+ // don't emit close before 'end' and 'finish'
+ if (!this[EMITTED_END] && !this[DESTROYED])
+ return false;
+ const ret = super.emit('close');
+ this.removeAllListeners('close');
+ return ret;
+ }
+ else if (ev === 'error') {
+ this[EMITTED_ERROR] = data;
+ super.emit(ERROR, data);
+ const ret = !this[SIGNAL] || this.listeners('error').length
+ ? super.emit('error', data)
+ : false;
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ else if (ev === 'resume') {
+ const ret = super.emit('resume');
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ else if (ev === 'finish' || ev === 'prefinish') {
+ const ret = super.emit(ev);
+ this.removeAllListeners(ev);
+ return ret;
+ }
+ // Some other unknown event
+ const ret = super.emit(ev, ...args);
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ [EMITDATA](data) {
+ for (const p of this[PIPES]) {
+ if (p.dest.write(data) === false)
+ this.pause();
+ }
+ const ret = this[DISCARDED] ? false : super.emit('data', data);
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ [EMITEND]() {
+ if (this[EMITTED_END])
+ return false;
+ this[EMITTED_END] = true;
+ this.readable = false;
+ return this[ASYNC]
+ ? (defer(() => this[EMITEND2]()), true)
+ : this[EMITEND2]();
+ }
+ [EMITEND2]() {
+ if (this[DECODER]) {
+ const data = this[DECODER].end();
+ if (data) {
+ for (const p of this[PIPES]) {
+ p.dest.write(data);
+ }
+ if (!this[DISCARDED])
+ super.emit('data', data);
+ }
+ }
+ for (const p of this[PIPES]) {
+ p.end();
+ }
+ const ret = super.emit('end');
+ this.removeAllListeners('end');
+ return ret;
+ }
+ /**
+ * Return a Promise that resolves to an array of all emitted data once
+ * the stream ends.
+ */
+ async collect() {
+ const buf = Object.assign([], {
+ dataLength: 0,
+ });
+ if (!this[OBJECTMODE])
+ buf.dataLength = 0;
+ // set the promise first, in case an error is raised
+ // by triggering the flow here.
+ const p = this.promise();
+ this.on('data', c => {
+ buf.push(c);
+ if (!this[OBJECTMODE])
+ buf.dataLength += c.length;
+ });
+ await p;
+ return buf;
+ }
+ /**
+ * Return a Promise that resolves to the concatenation of all emitted data
+ * once the stream ends.
+ *
+ * Not allowed on objectMode streams.
+ */
+ async concat() {
+ if (this[OBJECTMODE]) {
+ throw new Error('cannot concat in objectMode');
+ }
+ const buf = await this.collect();
+ return (this[ENCODING]
+ ? buf.join('')
+ : Buffer.concat(buf, buf.dataLength));
+ }
+ /**
+ * Return a void Promise that resolves once the stream ends.
+ */
+ async promise() {
+ return new Promise((resolve, reject) => {
+ this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+ this.on('error', er => reject(er));
+ this.on('end', () => resolve());
+ });
+ }
+ /**
+ * Asynchronous `for await of` iteration.
+ *
+ * This will continue emitting all chunks until the stream terminates.
+ */
+ [Symbol.asyncIterator]() {
+ // set this up front, in case the consumer doesn't call next()
+ // right away.
+ this[DISCARDED] = false;
+ let stopped = false;
+ const stop = async () => {
+ this.pause();
+ stopped = true;
+ return { value: undefined, done: true };
+ };
+ const next = () => {
+ if (stopped)
+ return stop();
+ const res = this.read();
+ if (res !== null)
+ return Promise.resolve({ done: false, value: res });
+ if (this[EOF])
+ return stop();
+ let resolve;
+ let reject;
+ const onerr = (er) => {
+ this.off('data', ondata);
+ this.off('end', onend);
+ this.off(DESTROYED, ondestroy);
+ stop();
+ reject(er);
+ };
+ const ondata = (value) => {
+ this.off('error', onerr);
+ this.off('end', onend);
+ this.off(DESTROYED, ondestroy);
+ this.pause();
+ resolve({ value, done: !!this[EOF] });
+ };
+ const onend = () => {
+ this.off('error', onerr);
+ this.off('data', ondata);
+ this.off(DESTROYED, ondestroy);
+ stop();
+ resolve({ done: true, value: undefined });
+ };
+ const ondestroy = () => onerr(new Error('stream destroyed'));
+ return new Promise((res, rej) => {
+ reject = rej;
+ resolve = res;
+ this.once(DESTROYED, ondestroy);
+ this.once('error', onerr);
+ this.once('end', onend);
+ this.once('data', ondata);
+ });
+ };
+ return {
+ next,
+ throw: stop,
+ return: stop,
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ };
+ }
+ /**
+ * Synchronous `for of` iteration.
+ *
+ * The iteration will terminate when the internal buffer runs out, even
+ * if the stream has not yet terminated.
+ */
+ [Symbol.iterator]() {
+ // set this up front, in case the consumer doesn't call next()
+ // right away.
+ this[DISCARDED] = false;
+ let stopped = false;
+ const stop = () => {
+ this.pause();
+ this.off(ERROR, stop);
+ this.off(DESTROYED, stop);
+ this.off('end', stop);
+ stopped = true;
+ return { done: true, value: undefined };
+ };
+ const next = () => {
+ if (stopped)
+ return stop();
+ const value = this.read();
+ return value === null ? stop() : { done: false, value };
+ };
+ this.once('end', stop);
+ this.once(ERROR, stop);
+ this.once(DESTROYED, stop);
+ return {
+ next,
+ throw: stop,
+ return: stop,
+ [Symbol.iterator]() {
+ return this;
+ },
+ };
+ }
+ /**
+ * Destroy a stream, preventing it from being used for any further purpose.
+ *
+ * If the stream has a `close()` method, then it will be called on
+ * destruction.
+ *
+ * After destruction, any attempt to write data, read data, or emit most
+ * events will be ignored.
+ *
+ * If an error argument is provided, then it will be emitted in an
+ * 'error' event.
+ */
+ destroy(er) {
+ if (this[DESTROYED]) {
+ if (er)
+ this.emit('error', er);
+ else
+ this.emit(DESTROYED);
+ return this;
+ }
+ this[DESTROYED] = true;
+ this[DISCARDED] = true;
+ // throw away all buffered data, it's never coming out
+ this[BUFFER].length = 0;
+ this[BUFFERLENGTH] = 0;
+ const wc = this;
+ if (typeof wc.close === 'function' && !this[CLOSED])
+ wc.close();
+ if (er)
+ this.emit('error', er);
+ // if no error to emit, still reject pending promises
+ else
+ this.emit(DESTROYED);
+ return this;
+ }
+ /**
+ * Alias for {@link isStream}
+ *
+ * Former export location, maintained for backwards compatibility.
+ *
+ * @deprecated
+ */
+ static get isStream() {
+ return exports.isStream;
+ }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
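The doc comments in the CommonJS build above describe Minipass's collect(), concat(), promise(), and iteration helpers. A minimal usage sketch of that API, for illustration only:

const { Minipass } = require('minipass')

async function demo () {
  // encoding streams emit strings; concat() joins every emitted chunk
  const mp = new Minipass({ encoding: 'utf8' })
  mp.write('hello, ')
  mp.end('world')
  console.log(await mp.concat()) // 'hello, world'

  // objectMode streams count items rather than bytes and support iteration
  const om = new Minipass({ objectMode: true })
  om.write({ a: 1 })
  om.end({ b: 2 })
  for await (const item of om) {
    console.log(item) // { a: 1 }, then { b: 2 }
  }
}

demo().catch(console.error)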
diff --git a/deps/npm/node_modules/minipass/dist/cjs/package.json b/deps/npm/node_modules/minipass/dist/cjs/package.json
new file mode 100644
index 00000000000000..5bbefffbabee39
--- /dev/null
+++ b/deps/npm/node_modules/minipass/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+ "type": "commonjs"
+}
diff --git a/deps/npm/node_modules/minipass/dist/mjs/index.js b/deps/npm/node_modules/minipass/dist/mjs/index.js
new file mode 100644
index 00000000000000..b65fafbae43a4e
--- /dev/null
+++ b/deps/npm/node_modules/minipass/dist/mjs/index.js
@@ -0,0 +1,1018 @@
+const proc = typeof process === 'object' && process
+ ? process
+ : {
+ stdout: null,
+ stderr: null,
+ };
+import { EventEmitter } from 'events';
+import Stream from 'stream';
+import { StringDecoder } from 'string_decoder';
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+export const isStream = (s) => !!s &&
+ typeof s === 'object' &&
+ (s instanceof Minipass ||
+ s instanceof Stream ||
+ isReadable(s) ||
+ isWritable(s));
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+export const isReadable = (s) => !!s &&
+ typeof s === 'object' &&
+ s instanceof EventEmitter &&
+ typeof s.pipe === 'function' &&
+ // node core Writable streams have a pipe() method, but it throws
+ s.pipe !== Stream.Writable.prototype.pipe;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+export const isWritable = (s) => !!s &&
+ typeof s === 'object' &&
+ s instanceof EventEmitter &&
+ typeof s.write === 'function' &&
+ typeof s.end === 'function';
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+ (!!b &&
+ typeof b === 'object' &&
+ b.constructor &&
+ b.constructor.name === 'ArrayBuffer' &&
+ b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+ src;
+ dest;
+ opts;
+ ondrain;
+ constructor(src, dest, opts) {
+ this.src = src;
+ this.dest = dest;
+ this.opts = opts;
+ this.ondrain = () => src[RESUME]();
+ this.dest.on('drain', this.ondrain);
+ }
+ unpipe() {
+ this.dest.removeListener('drain', this.ondrain);
+ }
+ // only here for the prototype
+ /* c8 ignore start */
+ proxyErrors(_er) { }
+ /* c8 ignore stop */
+ end() {
+ this.unpipe();
+ if (this.opts.end)
+ this.dest.end();
+ }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+ unpipe() {
+ this.src.removeListener('error', this.proxyErrors);
+ super.unpipe();
+ }
+ constructor(src, dest, opts) {
+ super(src, dest, opts);
+ this.proxyErrors = er => dest.emit('error', er);
+ src.on('error', this.proxyErrors);
+ }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+export class Minipass extends EventEmitter {
+ [FLOWING] = false;
+ [PAUSED] = false;
+ [PIPES] = [];
+ [BUFFER] = [];
+ [OBJECTMODE];
+ [ENCODING];
+ [ASYNC];
+ [DECODER];
+ [EOF] = false;
+ [EMITTED_END] = false;
+ [EMITTING_END] = false;
+ [CLOSED] = false;
+ [EMITTED_ERROR] = null;
+ [BUFFERLENGTH] = 0;
+ [DESTROYED] = false;
+ [SIGNAL];
+ [ABORTED] = false;
+ [DATALISTENERS] = 0;
+ [DISCARDED] = false;
+ /**
+ * true if the stream can be written
+ */
+ writable = true;
+ /**
+ * true if the stream can be read
+ */
+ readable = true;
+ /**
+ * If `RType` is Buffer, then options do not need to be provided.
+ * Otherwise, an options object must be provided to specify either
+ * {@link Minipass.SharedOptions.objectMode} or
+ * {@link Minipass.SharedOptions.encoding}, as appropriate.
+ */
+ constructor(...args) {
+ const options = (args[0] ||
+ {});
+ super();
+ if (options.objectMode && typeof options.encoding === 'string') {
+ throw new TypeError('Encoding and objectMode may not be used together');
+ }
+ if (isObjectModeOptions(options)) {
+ this[OBJECTMODE] = true;
+ this[ENCODING] = null;
+ }
+ else if (isEncodingOptions(options)) {
+ this[ENCODING] = options.encoding;
+ this[OBJECTMODE] = false;
+ }
+ else {
+ this[OBJECTMODE] = false;
+ this[ENCODING] = null;
+ }
+ this[ASYNC] = !!options.async;
+ this[DECODER] = this[ENCODING]
+ ? new StringDecoder(this[ENCODING])
+ : null;
+ //@ts-ignore - private option for debugging and testing
+ if (options && options.debugExposeBuffer === true) {
+ Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+ }
+ //@ts-ignore - private option for debugging and testing
+ if (options && options.debugExposePipes === true) {
+ Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+ }
+ const { signal } = options;
+ if (signal) {
+ this[SIGNAL] = signal;
+ if (signal.aborted) {
+ this[ABORT]();
+ }
+ else {
+ signal.addEventListener('abort', () => this[ABORT]());
+ }
+ }
+ }
+ /**
+ * The amount of data stored in the buffer waiting to be read.
+ *
+ * For Buffer streams, this will be the total byte length.
+ * For string encoding streams, this will be the string character length,
+ * according to JavaScript's `string.length` logic.
+ * For objectMode streams, this is a count of the items waiting to be
+ * emitted.
+ */
+ get bufferLength() {
+ return this[BUFFERLENGTH];
+ }
+ /**
+ * The `BufferEncoding` currently in use, or `null`
+ */
+ get encoding() {
+ return this[ENCODING];
+ }
+ /**
+ * @deprecated - This is a read only property
+ */
+ set encoding(_enc) {
+ throw new Error('Encoding must be set at instantiation time');
+ }
+ /**
+ * @deprecated - Encoding may only be set at instantiation time
+ */
+ setEncoding(_enc) {
+ throw new Error('Encoding must be set at instantiation time');
+ }
+ /**
+ * True if this is an objectMode stream
+ */
+ get objectMode() {
+ return this[OBJECTMODE];
+ }
+ /**
+ * @deprecated - This is a read-only property
+ */
+ set objectMode(_om) {
+ throw new Error('objectMode must be set at instantiation time');
+ }
+ /**
+ * true if this is an async stream
+ */
+ get ['async']() {
+ return this[ASYNC];
+ }
+ /**
+ * Set to true to make this stream async.
+ *
+ * Once set, it cannot be unset, as this would potentially cause incorrect
+ * behavior. That is, a sync stream can be made async, but an async stream
+ * cannot be safely made sync.
+ */
+ set ['async'](a) {
+ this[ASYNC] = this[ASYNC] || !!a;
+ }
+ // drop everything and get out of the flow completely
+ [ABORT]() {
+ this[ABORTED] = true;
+ this.emit('abort', this[SIGNAL]?.reason);
+ this.destroy(this[SIGNAL]?.reason);
+ }
+ /**
+ * True if the stream has been aborted.
+ */
+ get aborted() {
+ return this[ABORTED];
+ }
+ /**
+ * No-op setter. Stream aborted status is set via the AbortSignal provided
+ * in the constructor options.
+ */
+ set aborted(_) { }
+ write(chunk, encoding, cb) {
+ if (this[ABORTED])
+ return false;
+ if (this[EOF])
+ throw new Error('write after end');
+ if (this[DESTROYED]) {
+ this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+ return true;
+ }
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = 'utf8';
+ }
+ if (!encoding)
+ encoding = 'utf8';
+ const fn = this[ASYNC] ? defer : nodefer;
+ // convert array buffers and typed array views into buffers
+ // at some point in the future, we may want to do the opposite!
+ // leave strings and buffers as-is
+ // anything else is only allowed in objectMode, so throw
+ if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+ if (isArrayBufferView(chunk)) {
+ //@ts-ignore - sinful unsafe type changing
+ chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+ }
+ else if (isArrayBufferLike(chunk)) {
+ //@ts-ignore - sinful unsafe type changing
+ chunk = Buffer.from(chunk);
+ }
+ else if (typeof chunk !== 'string') {
+ throw new Error('Non-contiguous data written to non-objectMode stream');
+ }
+ }
+ // handle object mode up front, since it's simpler
+ // this yields better performance, fewer checks later.
+ if (this[OBJECTMODE]) {
+ // maybe impossible?
+ /* c8 ignore start */
+ if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+ this[FLUSH](true);
+ /* c8 ignore stop */
+ if (this[FLOWING])
+ this.emit('data', chunk);
+ else
+ this[BUFFERPUSH](chunk);
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ // at this point the chunk is a buffer or string
+ // if it's empty, don't buffer it up or send it to the decoder
+ if (!chunk.length) {
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ // fast-path writing strings of same encoding to a stream with
+ // an empty buffer, skipping the buffer/decoder dance
+ if (typeof chunk === 'string' &&
+ // unless it is a string already ready for us to use
+ !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+ //@ts-ignore - sinful unsafe type change
+ chunk = Buffer.from(chunk, encoding);
+ }
+ if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+ //@ts-ignore - sinful unsafe type change
+ chunk = this[DECODER].write(chunk);
+ }
+ // Note: flushing CAN potentially switch us into not-flowing mode
+ if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+ this[FLUSH](true);
+ if (this[FLOWING])
+ this.emit('data', chunk);
+ else
+ this[BUFFERPUSH](chunk);
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable');
+ if (cb)
+ fn(cb);
+ return this[FLOWING];
+ }
+ /**
+ * Low-level explicit read method.
+ *
+ * In objectMode, the argument is ignored, and one item is returned if
+ * available.
+ *
+ * `n` is the number of bytes (or in the case of encoding streams,
+ * characters) to consume. If `n` is not provided, then the entire buffer
+ * is returned, or `null` is returned if no data is available.
+ *
+ * If `n` is greater than the amount of data in the internal buffer,
+ * then `null` is returned.
+ */
+ read(n) {
+ if (this[DESTROYED])
+ return null;
+ this[DISCARDED] = false;
+ if (this[BUFFERLENGTH] === 0 ||
+ n === 0 ||
+ (n && n > this[BUFFERLENGTH])) {
+ this[MAYBE_EMIT_END]();
+ return null;
+ }
+ if (this[OBJECTMODE])
+ n = null;
+ if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+ // not object mode, so if we have an encoding, then RType is string
+ // otherwise, must be Buffer
+ this[BUFFER] = [
+ (this[ENCODING]
+ ? this[BUFFER].join('')
+ : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+ ];
+ }
+ const ret = this[READ](n || null, this[BUFFER][0]);
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ [READ](n, chunk) {
+ if (this[OBJECTMODE])
+ this[BUFFERSHIFT]();
+ else {
+ const c = chunk;
+ if (n === c.length || n === null)
+ this[BUFFERSHIFT]();
+ else if (typeof c === 'string') {
+ this[BUFFER][0] = c.slice(n);
+ chunk = c.slice(0, n);
+ this[BUFFERLENGTH] -= n;
+ }
+ else {
+ this[BUFFER][0] = c.subarray(n);
+ chunk = c.subarray(0, n);
+ this[BUFFERLENGTH] -= n;
+ }
+ }
+ this.emit('data', chunk);
+ if (!this[BUFFER].length && !this[EOF])
+ this.emit('drain');
+ return chunk;
+ }
+ end(chunk, encoding, cb) {
+ if (typeof chunk === 'function') {
+ cb = chunk;
+ chunk = undefined;
+ }
+ if (typeof encoding === 'function') {
+ cb = encoding;
+ encoding = 'utf8';
+ }
+ if (chunk !== undefined)
+ this.write(chunk, encoding);
+ if (cb)
+ this.once('end', cb);
+ this[EOF] = true;
+ this.writable = false;
+ // if we haven't written anything, then go ahead and emit,
+ // even if we're not reading.
+ // we'll re-emit if a new 'end' listener is added anyway.
+ // This makes MP more suitable to write-only use cases.
+ if (this[FLOWING] || !this[PAUSED])
+ this[MAYBE_EMIT_END]();
+ return this;
+ }
+ // don't let the internal resume be overwritten
+ [RESUME]() {
+ if (this[DESTROYED])
+ return;
+ if (!this[DATALISTENERS] && !this[PIPES].length) {
+ this[DISCARDED] = true;
+ }
+ this[PAUSED] = false;
+ this[FLOWING] = true;
+ this.emit('resume');
+ if (this[BUFFER].length)
+ this[FLUSH]();
+ else if (this[EOF])
+ this[MAYBE_EMIT_END]();
+ else
+ this.emit('drain');
+ }
+ /**
+ * Resume the stream if it is currently in a paused state
+ *
+ * If called when there are no pipe destinations or `data` event listeners,
+ * this will place the stream in a "discarded" state, where all data will
+ * be thrown away. The discarded state is removed if a pipe destination or
+ * data handler is added, if pause() is called, or if any synchronous or
+ * asynchronous iteration is started.
+ */
+ resume() {
+ return this[RESUME]();
+ }
+ /**
+ * Pause the stream
+ */
+ pause() {
+ this[FLOWING] = false;
+ this[PAUSED] = true;
+ this[DISCARDED] = false;
+ }
+ /**
+ * true if the stream has been forcibly destroyed
+ */
+ get destroyed() {
+ return this[DESTROYED];
+ }
+ /**
+ * true if the stream is currently in a flowing state, meaning that
+ * any writes will be immediately emitted.
+ */
+ get flowing() {
+ return this[FLOWING];
+ }
+ /**
+ * true if the stream is currently in a paused state
+ */
+ get paused() {
+ return this[PAUSED];
+ }
+ [BUFFERPUSH](chunk) {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] += 1;
+ else
+ this[BUFFERLENGTH] += chunk.length;
+ this[BUFFER].push(chunk);
+ }
+ [BUFFERSHIFT]() {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] -= 1;
+ else
+ this[BUFFERLENGTH] -= this[BUFFER][0].length;
+ return this[BUFFER].shift();
+ }
+ [FLUSH](noDrain = false) {
+ do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+ this[BUFFER].length);
+ if (!noDrain && !this[BUFFER].length && !this[EOF])
+ this.emit('drain');
+ }
+ [FLUSHCHUNK](chunk) {
+ this.emit('data', chunk);
+ return this[FLOWING];
+ }
+ /**
+ * Pipe all data emitted by this stream into the destination provided.
+ *
+ * Triggers the flow of data.
+ */
+ pipe(dest, opts) {
+ if (this[DESTROYED])
+ return dest;
+ this[DISCARDED] = false;
+ const ended = this[EMITTED_END];
+ opts = opts || {};
+ if (dest === proc.stdout || dest === proc.stderr)
+ opts.end = false;
+ else
+ opts.end = opts.end !== false;
+ opts.proxyErrors = !!opts.proxyErrors;
+ // piping an ended stream ends immediately
+ if (ended) {
+ if (opts.end)
+ dest.end();
+ }
+ else {
+ // "as" here just ignores the WType, which pipes don't care about,
+ // since they're only consuming from us, and writing to the dest
+ this[PIPES].push(!opts.proxyErrors
+ ? new Pipe(this, dest, opts)
+ : new PipeProxyErrors(this, dest, opts));
+ if (this[ASYNC])
+ defer(() => this[RESUME]());
+ else
+ this[RESUME]();
+ }
+ return dest;
+ }
+ /**
+ * Fully unhook a piped destination stream.
+ *
+ * If the destination stream was the only consumer of this stream (ie,
+ * there are no other piped destinations or `'data'` event listeners)
+ * then the flow of data will stop until there is another consumer or
+ * {@link Minipass#resume} is explicitly called.
+ */
+ unpipe(dest) {
+ const p = this[PIPES].find(p => p.dest === dest);
+ if (p) {
+ if (this[PIPES].length === 1) {
+ if (this[FLOWING] && this[DATALISTENERS] === 0) {
+ this[FLOWING] = false;
+ }
+ this[PIPES] = [];
+ }
+ else
+ this[PIPES].splice(this[PIPES].indexOf(p), 1);
+ p.unpipe();
+ }
+ }
+ /**
+ * Alias for {@link Minipass#on}
+ */
+ addListener(ev, handler) {
+ return this.on(ev, handler);
+ }
+ /**
+ * Mostly identical to `EventEmitter.on`, with the following
+ * behavior differences to prevent data loss and unnecessary hangs:
+ *
+ * - Adding a 'data' event handler will trigger the flow of data
+ *
+ * - Adding a 'readable' event handler when there is data waiting to be read
+ * will cause 'readable' to be emitted immediately.
+ *
+ * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+ * already passed will cause the event to be emitted immediately and all
+ * handlers removed.
+ *
+ * - Adding an 'error' event handler after an error has been emitted will
+ * cause the event to be re-emitted immediately with the error previously
+ * raised.
+ */
+ on(ev, handler) {
+ const ret = super.on(ev, handler);
+ if (ev === 'data') {
+ this[DISCARDED] = false;
+ this[DATALISTENERS]++;
+ if (!this[PIPES].length && !this[FLOWING]) {
+ this[RESUME]();
+ }
+ }
+ else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+ super.emit('readable');
+ }
+ else if (isEndish(ev) && this[EMITTED_END]) {
+ super.emit(ev);
+ this.removeAllListeners(ev);
+ }
+ else if (ev === 'error' && this[EMITTED_ERROR]) {
+ const h = handler;
+ if (this[ASYNC])
+ defer(() => h.call(this, this[EMITTED_ERROR]));
+ else
+ h.call(this, this[EMITTED_ERROR]);
+ }
+ return ret;
+ }
+ /**
+ * Alias for {@link Minipass#off}
+ */
+ removeListener(ev, handler) {
+ return this.off(ev, handler);
+ }
+ /**
+ * Mostly identical to `EventEmitter.off`
+ *
+ * If a 'data' event handler is removed, and it was the last consumer
+ * (ie, there are no pipe destinations or other 'data' event listeners),
+ * then the flow of data will stop until there is another consumer or
+ * {@link Minipass#resume} is explicitly called.
+ */
+ off(ev, handler) {
+ const ret = super.off(ev, handler);
+ // if we previously had listeners, and now we don't, and we don't
+ // have any pipes, then stop the flow, unless it's been explicitly
+ // put in a discarded flowing state via stream.resume().
+ if (ev === 'data') {
+ this[DATALISTENERS] = this.listeners('data').length;
+ if (this[DATALISTENERS] === 0 &&
+ !this[DISCARDED] &&
+ !this[PIPES].length) {
+ this[FLOWING] = false;
+ }
+ }
+ return ret;
+ }
+ /**
+ * Mostly identical to `EventEmitter.removeAllListeners`
+ *
+ * If all 'data' event handlers are removed, and they were the last consumer
+ * (ie, there are no pipe destinations), then the flow of data will stop
+ * until there is another consumer or {@link Minipass#resume} is explicitly
+ * called.
+ */
+ removeAllListeners(ev) {
+ const ret = super.removeAllListeners(ev);
+ if (ev === 'data' || ev === undefined) {
+ this[DATALISTENERS] = 0;
+ if (!this[DISCARDED] && !this[PIPES].length) {
+ this[FLOWING] = false;
+ }
+ }
+ return ret;
+ }
+ /**
+ * true if the 'end' event has been emitted
+ */
+ get emittedEnd() {
+ return this[EMITTED_END];
+ }
+ [MAYBE_EMIT_END]() {
+ if (!this[EMITTING_END] &&
+ !this[EMITTED_END] &&
+ !this[DESTROYED] &&
+ this[BUFFER].length === 0 &&
+ this[EOF]) {
+ this[EMITTING_END] = true;
+ this.emit('end');
+ this.emit('prefinish');
+ this.emit('finish');
+ if (this[CLOSED])
+ this.emit('close');
+ this[EMITTING_END] = false;
+ }
+ }
+ /**
+ * Mostly identical to `EventEmitter.emit`, with the following
+ * behavior differences to prevent data loss and unnecessary hangs:
+ *
+ * If the stream has been destroyed, and the event is something other
+ * than 'close' or 'error', then `false` is returned and no handlers
+ * are called.
+ *
+ * If the event is 'end', and has already been emitted, then the event
+ * is ignored. If the stream is in a paused or non-flowing state, then
+ * the event will be deferred until data flow resumes. If the stream is
+ * async, then handlers will be called on the next tick rather than
+ * immediately.
+ *
+ * If the event is 'close', and 'end' has not yet been emitted, then
+ * the event will be deferred until after 'end' is emitted.
+ *
+ * If the event is 'error', and an AbortSignal was provided for the stream,
+ * and there are no listeners, then the event is ignored, matching the
+ * behavior of node core streams in the presence of an AbortSignal.
+ *
+ * If the event is 'finish' or 'prefinish', then all listeners will be
+ * removed after emitting the event, to prevent double-firing.
+ */
+ emit(ev, ...args) {
+ const data = args[0];
+ // error and close are only events allowed after calling destroy()
+ if (ev !== 'error' &&
+ ev !== 'close' &&
+ ev !== DESTROYED &&
+ this[DESTROYED]) {
+ return false;
+ }
+ else if (ev === 'data') {
+ return !this[OBJECTMODE] && !data
+ ? false
+ : this[ASYNC]
+ ? (defer(() => this[EMITDATA](data)), true)
+ : this[EMITDATA](data);
+ }
+ else if (ev === 'end') {
+ return this[EMITEND]();
+ }
+ else if (ev === 'close') {
+ this[CLOSED] = true;
+ // don't emit close before 'end' and 'finish'
+ if (!this[EMITTED_END] && !this[DESTROYED])
+ return false;
+ const ret = super.emit('close');
+ this.removeAllListeners('close');
+ return ret;
+ }
+ else if (ev === 'error') {
+ this[EMITTED_ERROR] = data;
+ super.emit(ERROR, data);
+ const ret = !this[SIGNAL] || this.listeners('error').length
+ ? super.emit('error', data)
+ : false;
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ else if (ev === 'resume') {
+ const ret = super.emit('resume');
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ else if (ev === 'finish' || ev === 'prefinish') {
+ const ret = super.emit(ev);
+ this.removeAllListeners(ev);
+ return ret;
+ }
+ // Some other unknown event
+ const ret = super.emit(ev, ...args);
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ [EMITDATA](data) {
+ for (const p of this[PIPES]) {
+ if (p.dest.write(data) === false)
+ this.pause();
+ }
+ const ret = this[DISCARDED] ? false : super.emit('data', data);
+ this[MAYBE_EMIT_END]();
+ return ret;
+ }
+ [EMITEND]() {
+ if (this[EMITTED_END])
+ return false;
+ this[EMITTED_END] = true;
+ this.readable = false;
+ return this[ASYNC]
+ ? (defer(() => this[EMITEND2]()), true)
+ : this[EMITEND2]();
+ }
+ [EMITEND2]() {
+ if (this[DECODER]) {
+ const data = this[DECODER].end();
+ if (data) {
+ for (const p of this[PIPES]) {
+ p.dest.write(data);
+ }
+ if (!this[DISCARDED])
+ super.emit('data', data);
+ }
+ }
+ for (const p of this[PIPES]) {
+ p.end();
+ }
+ const ret = super.emit('end');
+ this.removeAllListeners('end');
+ return ret;
+ }
+ /**
+ * Return a Promise that resolves to an array of all emitted data once
+ * the stream ends.
+ */
+ async collect() {
+ const buf = Object.assign([], {
+ dataLength: 0,
+ });
+ if (!this[OBJECTMODE])
+ buf.dataLength = 0;
+ // set the promise first, in case an error is raised
+ // by triggering the flow here.
+ const p = this.promise();
+ this.on('data', c => {
+ buf.push(c);
+ if (!this[OBJECTMODE])
+ buf.dataLength += c.length;
+ });
+ await p;
+ return buf;
+ }
+ /**
+ * Return a Promise that resolves to the concatenation of all emitted data
+ * once the stream ends.
+ *
+ * Not allowed on objectMode streams.
+ */
+ async concat() {
+ if (this[OBJECTMODE]) {
+ throw new Error('cannot concat in objectMode');
+ }
+ const buf = await this.collect();
+ return (this[ENCODING]
+ ? buf.join('')
+ : Buffer.concat(buf, buf.dataLength));
+ }
+ /**
+ * Return a void Promise that resolves once the stream ends.
+ */
+ async promise() {
+ return new Promise((resolve, reject) => {
+ this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+ this.on('error', er => reject(er));
+ this.on('end', () => resolve());
+ });
+ }
+ /**
+ * Asynchronous `for await of` iteration.
+ *
+ * This will continue emitting all chunks until the stream terminates.
+ */
+ [Symbol.asyncIterator]() {
+ // set this up front, in case the consumer doesn't call next()
+ // right away.
+ this[DISCARDED] = false;
+ let stopped = false;
+ const stop = async () => {
+ this.pause();
+ stopped = true;
+ return { value: undefined, done: true };
+ };
+ const next = () => {
+ if (stopped)
+ return stop();
+ const res = this.read();
+ if (res !== null)
+ return Promise.resolve({ done: false, value: res });
+ if (this[EOF])
+ return stop();
+ let resolve;
+ let reject;
+ const onerr = (er) => {
+ this.off('data', ondata);
+ this.off('end', onend);
+ this.off(DESTROYED, ondestroy);
+ stop();
+ reject(er);
+ };
+ const ondata = (value) => {
+ this.off('error', onerr);
+ this.off('end', onend);
+ this.off(DESTROYED, ondestroy);
+ this.pause();
+ resolve({ value, done: !!this[EOF] });
+ };
+ const onend = () => {
+ this.off('error', onerr);
+ this.off('data', ondata);
+ this.off(DESTROYED, ondestroy);
+ stop();
+ resolve({ done: true, value: undefined });
+ };
+ const ondestroy = () => onerr(new Error('stream destroyed'));
+ return new Promise((res, rej) => {
+ reject = rej;
+ resolve = res;
+ this.once(DESTROYED, ondestroy);
+ this.once('error', onerr);
+ this.once('end', onend);
+ this.once('data', ondata);
+ });
+ };
+ return {
+ next,
+ throw: stop,
+ return: stop,
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ };
+ }
+ /**
+ * Synchronous `for of` iteration.
+ *
+ * The iteration will terminate when the internal buffer runs out, even
+ * if the stream has not yet terminated.
+ */
+ [Symbol.iterator]() {
+ // set this up front, in case the consumer doesn't call next()
+ // right away.
+ this[DISCARDED] = false;
+ let stopped = false;
+ const stop = () => {
+ this.pause();
+ this.off(ERROR, stop);
+ this.off(DESTROYED, stop);
+ this.off('end', stop);
+ stopped = true;
+ return { done: true, value: undefined };
+ };
+ const next = () => {
+ if (stopped)
+ return stop();
+ const value = this.read();
+ return value === null ? stop() : { done: false, value };
+ };
+ this.once('end', stop);
+ this.once(ERROR, stop);
+ this.once(DESTROYED, stop);
+ return {
+ next,
+ throw: stop,
+ return: stop,
+ [Symbol.iterator]() {
+ return this;
+ },
+ };
+ }
+ /**
+ * Destroy a stream, preventing it from being used for any further purpose.
+ *
+ * If the stream has a `close()` method, then it will be called on
+ * destruction.
+ *
+ * After destruction, any attempt to write data, read data, or emit most
+ * events will be ignored.
+ *
+ * If an error argument is provided, then it will be emitted in an
+ * 'error' event.
+ */
+ destroy(er) {
+ if (this[DESTROYED]) {
+ if (er)
+ this.emit('error', er);
+ else
+ this.emit(DESTROYED);
+ return this;
+ }
+ this[DESTROYED] = true;
+ this[DISCARDED] = true;
+ // throw away all buffered data, it's never coming out
+ this[BUFFER].length = 0;
+ this[BUFFERLENGTH] = 0;
+ const wc = this;
+ if (typeof wc.close === 'function' && !this[CLOSED])
+ wc.close();
+ if (er)
+ this.emit('error', er);
+ // if no error to emit, still reject pending promises
+ else
+ this.emit(DESTROYED);
+ return this;
+ }
+ /**
+ * Alias for {@link isStream}
+ *
+ * Former export location, maintained for backwards compatibility.
+ *
+ * @deprecated
+ */
+ static get isStream() {
+ return isStream;
+ }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
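The ESM build is the same implementation compiled for module consumers; the abort handling documented above ties an AbortSignal to the stream's lifecycle. A small sketch, assuming a plain AbortController:

import { Minipass } from 'minipass'

const ac = new AbortController()
const mp = new Minipass({ signal: ac.signal })

// 'abort' fires with the signal's reason, then the stream is destroyed;
// with a signal attached, the resulting 'error' is ignored if unhandled
mp.on('abort', reason => console.error('stream aborted:', reason))
ac.abort()
console.log(mp.aborted) // true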
diff --git a/deps/npm/node_modules/minipass/dist/mjs/package.json b/deps/npm/node_modules/minipass/dist/mjs/package.json
new file mode 100644
index 00000000000000..3dbc1ca591c055
--- /dev/null
+++ b/deps/npm/node_modules/minipass/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+ "type": "module"
+}
diff --git a/deps/npm/node_modules/minipass/package.json b/deps/npm/node_modules/minipass/package.json
index 0e20e988047f23..6faaa247a5bc66 100644
--- a/deps/npm/node_modules/minipass/package.json
+++ b/deps/npm/node_modules/minipass/package.json
@@ -1,70 +1,52 @@
{
"name": "minipass",
- "version": "5.0.0",
+ "version": "7.0.3",
"description": "minimal implementation of a PassThrough stream",
- "main": "./index.js",
- "module": "./index.mjs",
- "types": "./index.d.ts",
+ "main": "./dist/cjs/index.js",
+ "module": "./dist/mjs/index.js",
+ "types": "./dist/cjs/index.js",
"exports": {
".": {
"import": {
- "types": "./index.d.ts",
- "default": "./index.mjs"
+ "types": "./dist/mjs/index.d.ts",
+ "default": "./dist/mjs/index.js"
},
"require": {
- "types": "./index.d.ts",
- "default": "./index.js"
+ "types": "./dist/cjs/index.d.ts",
+ "default": "./dist/cjs/index.js"
}
},
"./package.json": "./package.json"
},
- "devDependencies": {
- "@types/node": "^17.0.41",
- "end-of-stream": "^1.4.0",
- "node-abort-controller": "^3.1.1",
- "prettier": "^2.6.2",
- "tap": "^16.2.0",
- "through2": "^2.0.3",
- "ts-node": "^10.8.1",
- "typedoc": "^0.23.24",
- "typescript": "^4.7.3"
- },
+ "files": [
+ "dist"
+ ],
"scripts": {
- "pretest": "npm run prepare",
- "presnap": "npm run prepare",
- "prepare": "node ./scripts/transpile-to-esm.js",
- "snap": "tap",
- "test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
- "postpublish": "git push origin --follow-tags",
- "typedoc": "typedoc ./index.d.ts",
- "format": "prettier --write . --loglevel warn"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/minipass.git"
+ "prepublishOnly": "git push origin --follow-tags",
+ "preprepare": "rm -rf dist",
+ "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
+ "pretest": "npm run prepare",
+ "presnap": "npm run prepare",
+ "test": "c8 tap",
+ "snap": "c8 tap",
+ "format": "prettier --write . --loglevel warn",
+ "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
},
- "keywords": [
- "passthrough",
- "stream"
- ],
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "ISC",
- "files": [
- "index.d.ts",
- "index.js",
- "index.mjs"
- ],
"tap": {
- "check-coverage": true
- },
- "engines": {
- "node": ">=8"
+ "coverage": false,
+ "node-arg": [
+ "--enable-source-maps",
+ "--no-warnings",
+ "--loader",
+ "ts-node/esm"
+ ],
+ "ts": false
},
"prettier": {
"semi": false,
- "printWidth": 80,
+ "printWidth": 75,
"tabWidth": 2,
"useTabs": false,
"singleQuote": true,
@@ -72,5 +54,29 @@
"bracketSameLine": true,
"arrowParens": "avoid",
"endOfLine": "lf"
+ },
+ "devDependencies": {
+ "@types/node": "^20.1.2",
+ "@types/tap": "^15.0.8",
+ "c8": "^7.13.0",
+ "prettier": "^2.6.2",
+ "tap": "^16.3.0",
+ "ts-node": "^10.9.1",
+ "typedoc": "^0.24.8",
+ "typescript": "^5.1.3",
+ "end-of-stream": "^1.4.0",
+ "node-abort-controller": "^3.1.1",
+ "sync-content": "^1.0.2",
+ "through2": "^2.0.3"
+ },
+ "repository": "https://github.com/isaacs/minipass",
+ "keywords": [
+ "passthrough",
+ "stream"
+ ],
+ "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
+ "license": "ISC",
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
}
}
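The reworked exports map routes `require` to the CommonJS build and `import` to the ESM build, each with its own declaration files. A consumer-side sketch:

// resolves ./dist/cjs/index.js via the "require" condition
const { Minipass } = require('minipass')

// resolves ./dist/mjs/index.js via the "import" condition
// import { Minipass } from 'minipass'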
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md
new file mode 100644
index 00000000000000..8d28acf866d932
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js
new file mode 100644
index 00000000000000..ad5a76a4f73f26
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+ const sri = ssri.parse(integrity, { single: true })
+ // contentPath is the *strongest* algo given
+ return path.join(
+ contentDir(cache),
+ sri.algorithm,
+ ...hashToSegments(sri.hexDigest())
+ )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+ return path.join(cache, `content-v${contentVer}`)
+}
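contentPath() above nests content under content-v2 by algorithm and a segmented hex digest (two characters, two characters, remainder, via hash-to-segments). A sketch of the equivalent layout, with placeholder inputs:

'use strict'
const path = require('path')

// mirrors contentPath(): <cache>/content-v2/<algorithm>/<2 chars>/<2 chars>/<rest>
// the cache directory and digest passed in are placeholders
function exampleContentPath (cache, algorithm, hexDigest) {
  return path.join(
    cache,
    'content-v2',
    algorithm,
    hexDigest.slice(0, 2),
    hexDigest.slice(2, 4),
    hexDigest.slice(4)
  )
}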
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js
new file mode 100644
index 00000000000000..f41b539df65dce
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,166 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+ const { size } = opts
+ const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+ // get size
+ const stat = await fs.stat(cpath)
+ return { stat, cpath, sri }
+ })
+ if (typeof size === 'number' && stat.size !== size) {
+ throw sizeError(size, stat.size)
+ }
+
+ if (stat.size > MAX_SINGLE_READ_SIZE) {
+ return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+ }
+
+ const data = await fs.readFile(cpath, { encoding: null })
+ if (!ssri.checkData(data, sri)) {
+ throw integrityError(sri, cpath)
+ }
+
+ return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+ stream.push(
+ new fsm.ReadStream(cpath, {
+ size,
+ readSize: MAX_SINGLE_READ_SIZE,
+ }),
+ ssri.integrityStream({
+ integrity: sri,
+ size,
+ })
+ )
+ return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+ const { size } = opts
+ const stream = new Pipeline()
+ // Set all this up to run on the stream and then just return the stream
+ Promise.resolve().then(async () => {
+ const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+ // just stat to ensure it exists
+ const stat = await fs.stat(cpath)
+ return { stat, cpath, sri }
+ })
+ if (typeof size === 'number' && size !== stat.size) {
+ return stream.emit('error', sizeError(size, stat.size))
+ }
+
+ return readPipeline(cpath, stat.size, sri, stream)
+ }).catch(err => stream.emit('error', err))
+
+ return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+ return withContentSri(cache, integrity, (cpath, sri) => {
+ return fs.copyFile(cpath, dest)
+ })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+ if (!integrity) {
+ return false
+ }
+
+ try {
+ return await withContentSri(cache, integrity, async (cpath, sri) => {
+ const stat = await fs.stat(cpath)
+ return { size: stat.size, sri, stat }
+ })
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return false
+ }
+
+ if (err.code === 'EPERM') {
+ /* istanbul ignore else */
+ if (process.platform !== 'win32') {
+ throw err
+ } else {
+ return false
+ }
+ }
+ }
+}
+
+async function withContentSri (cache, integrity, fn) {
+ const sri = ssri.parse(integrity)
+ // If `integrity` has multiple entries, pick the first digest
+ // with available local data.
+ const algo = sri.pickAlgorithm()
+ const digests = sri[algo]
+
+ if (digests.length <= 1) {
+ const cpath = contentPath(cache, digests[0])
+ return fn(cpath, digests[0])
+ } else {
+ // Can't use race here because a generic error can happen before
+ // a ENOENT error, and can happen before a valid result
+ const results = await Promise.all(digests.map(async (meta) => {
+ try {
+ return await withContentSri(cache, meta, fn)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return Object.assign(
+ new Error('No matching content found for ' + sri.toString()),
+ { code: 'ENOENT' }
+ )
+ }
+ return err
+ }
+ }))
+ // Return the first non error if it is found
+ const result = results.find((r) => !(r instanceof Error))
+ if (result) {
+ return result
+ }
+
+ // Throw the No matching content found error
+ const enoentError = results.find((r) => r.code === 'ENOENT')
+ if (enoentError) {
+ throw enoentError
+ }
+
+ // Throw generic error
+ throw results.find((r) => r instanceof Error)
+ }
+}
+
+function sizeError (expected, found) {
+ /* eslint-disable-next-line max-len */
+ const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+ err.expected = expected
+ err.found = found
+ err.code = 'EBADSIZE'
+ return err
+}
+
+function integrityError (sri, path) {
+ const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+ err.code = 'EINTEGRITY'
+ err.sri = sri
+ err.path = path
+ return err
+}
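read() above verifies small entries in memory and switches to a verifying pipeline past MAX_SINGLE_READ_SIZE, while readStream() always returns a stream whose integrity and size failures surface as 'error' events. An illustrative caller, with placeholder cache path and integrity:

'use strict'
const read = require('./lib/content/read')

async function loadEntry (cache, integrity) {
  // resolves with verified content, or rejects with ENOENT, EBADSIZE, or EINTEGRITY
  return read(cache, integrity)
}

function streamEntry (cache, integrity, dest) {
  // failures are emitted on the returned stream rather than thrown
  return read.stream(cache, integrity)
    .on('error', err => console.error(err.code, err.message))
    .pipe(dest)
}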
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 00000000000000..ce58d679e4cb25
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+ const content = await hasContent(cache, integrity)
+ // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+ if (content && content.sri) {
+ await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+ return true
+ } else {
+ return false
+ }
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js
new file mode 100644
index 00000000000000..71461465812878
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,205 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in process so we don't duplicate
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+ const { algorithms, size, integrity } = opts
+
+ if (typeof size === 'number' && data.length !== size) {
+ throw sizeError(size, data.length)
+ }
+
+ const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+ if (integrity && !ssri.checkData(data, integrity, opts)) {
+ throw checksumError(integrity, sri)
+ }
+
+ for (const algo in sri) {
+ const tmp = await makeTmp(cache, opts)
+ const hash = sri[algo].toString()
+ try {
+ await fs.writeFile(tmp.target, data, { flag: 'wx' })
+ await moveToDestination(tmp, cache, hash, opts)
+ } finally {
+ if (!tmp.moved) {
+ await fs.rm(tmp.target, { recursive: true, force: true })
+ }
+ }
+ }
+ return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// writes proxied to the 'inputStream' that is passed to the Promise
+// 'end' is deferred until content is handled.
+class CacacheWriteStream extends Flush {
+ constructor (cache, opts) {
+ super()
+ this.opts = opts
+ this.cache = cache
+ this.inputStream = new Minipass()
+ this.inputStream.on('error', er => this.emit('error', er))
+ this.inputStream.on('drain', () => this.emit('drain'))
+ this.handleContentP = null
+ }
+
+ write (chunk, encoding, cb) {
+ if (!this.handleContentP) {
+ this.handleContentP = handleContent(
+ this.inputStream,
+ this.cache,
+ this.opts
+ )
+ }
+ return this.inputStream.write(chunk, encoding, cb)
+ }
+
+ flush (cb) {
+ this.inputStream.end(() => {
+ if (!this.handleContentP) {
+ const e = new Error('Cache input stream was empty')
+ e.code = 'ENODATA'
+ // empty streams are probably emitting end right away.
+ // defer this one tick by rejecting a promise on it.
+ return Promise.reject(e).catch(cb)
+ }
+ // eslint-disable-next-line promise/catch-or-return
+ this.handleContentP.then(
+ (res) => {
+ res.integrity && this.emit('integrity', res.integrity)
+ // eslint-disable-next-line promise/always-return
+ res.size !== null && this.emit('size', res.size)
+ cb()
+ },
+ (er) => cb(er)
+ )
+ })
+ }
+}
+
+function writeStream (cache, opts = {}) {
+ return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+ const tmp = await makeTmp(cache, opts)
+ try {
+ const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+ await moveToDestination(
+ tmp,
+ cache,
+ res.integrity,
+ opts
+ )
+ return res
+ } finally {
+ if (!tmp.moved) {
+ await fs.rm(tmp.target, { recursive: true, force: true })
+ }
+ }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+ const outStream = new fsm.WriteStream(tmpTarget, {
+ flags: 'wx',
+ })
+
+ if (opts.integrityEmitter) {
+ // we need to create these all simultaneously since they can fire in any order
+ const [integrity, size] = await Promise.all([
+ events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+ events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+ new Pipeline(inputStream, outStream).promise(),
+ ])
+ return { integrity, size }
+ }
+
+ let integrity
+ let size
+ const hashStream = ssri.integrityStream({
+ integrity: opts.integrity,
+ algorithms: opts.algorithms,
+ size: opts.size,
+ })
+ hashStream.on('integrity', i => {
+ integrity = i
+ })
+ hashStream.on('size', s => {
+ size = s
+ })
+
+ const pipeline = new Pipeline(inputStream, hashStream, outStream)
+ await pipeline.promise()
+ return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+ const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+ await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+ return {
+ target: tmpTarget,
+ moved: false,
+ }
+}
+
+async function moveToDestination (tmp, cache, sri, opts) {
+ const destination = contentPath(cache, sri)
+ const destDir = path.dirname(destination)
+ if (moveOperations.has(destination)) {
+ return moveOperations.get(destination)
+ }
+ moveOperations.set(
+ destination,
+ fs.mkdir(destDir, { recursive: true })
+ .then(async () => {
+ await moveFile(tmp.target, destination, { overwrite: false })
+ tmp.moved = true
+ return tmp.moved
+ })
+ .catch(err => {
+ if (!err.message.startsWith('The destination file exists')) {
+ throw Object.assign(err, { code: 'EEXIST' })
+ }
+ }).finally(() => {
+ moveOperations.delete(destination)
+ })
+
+ )
+ return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+ /* eslint-disable-next-line max-len */
+ const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+ err.expected = expected
+ err.found = found
+ err.code = 'EBADSIZE'
+ return err
+}
+
+function checksumError (expected, found) {
+ const err = new Error(`Integrity check failed:
+ Wanted: ${expected}
+ Found: ${found}`)
+ err.code = 'EINTEGRITY'
+ err.expected = expected
+ err.found = found
+ return err
+}
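+
+// A rough usage sketch (illustrative only; the cache path is hypothetical and
+// someReadable stands in for any readable stream). write() resolves with the
+// computed integrity and size, and write.stream() emits 'integrity' and 'size'
+// once the content has been moved into place:
+//
+//   const write = require('./content/write')
+//   const { integrity, size } = await write('/path/to/cache', Buffer.from('hello'))
+//   const ws = write.stream('/path/to/cache')
+//   ws.on('integrity', sri => console.log('stored as', sri.toString()))
+//   someReadable.pipe(ws)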
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 00000000000000..722a37af5ce157
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,330 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+ appendFile,
+ mkdir,
+ readFile,
+ readdir,
+ rm,
+ writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+ constructor (cache, key) {
+ super(`No cache entry for ${key} found in ${cache}`)
+ this.code = 'ENOENT'
+ this.cache = cache
+ this.key = key
+ }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+ const bucket = bucketPath(cache, key)
+ const entries = await bucketEntries(bucket)
+ const newEntries = []
+ // we loop backwards because the bottom-most result is the newest
+ // since we add new entries with appendFile
+ for (let i = entries.length - 1; i >= 0; --i) {
+ const entry = entries[i]
+ // a null integrity could mean either a delete was appended
+ // or the user has simply stored an index that does not map
+ // to any content. we determine if the user wants to keep the
+ // null integrity based on the validateEntry function passed in options.
+ // if the integrity is null and no validateEntry is provided, we break
+ // as we consider the null integrity to be a deletion of everything
+ // that came before it.
+ if (entry.integrity === null && !opts.validateEntry) {
+ break
+ }
+
+ // if this entry is valid, and it is either the first entry or
+ // the newEntries array doesn't already include an entry that
+ // matches this one based on the provided matchFn, then we add
+ // it to the beginning of our list
+ if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+ (newEntries.length === 0 ||
+ !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+ newEntries.unshift(entry)
+ }
+ }
+
+ const newIndex = '\n' + newEntries.map((entry) => {
+ const stringified = JSON.stringify(entry)
+ const hash = hashEntry(stringified)
+ return `${hash}\t${stringified}`
+ }).join('\n')
+
+ const setup = async () => {
+ const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+ await mkdir(path.dirname(target), { recursive: true })
+ return {
+ target,
+ moved: false,
+ }
+ }
+
+ const teardown = async (tmp) => {
+ if (!tmp.moved) {
+ return rm(tmp.target, { recursive: true, force: true })
+ }
+ }
+
+ const write = async (tmp) => {
+ await writeFile(tmp.target, newIndex, { flag: 'wx' })
+ await mkdir(path.dirname(bucket), { recursive: true })
+ // we use moveFile() from @npmcli/fs directly here because we
+ // want to overwrite the existing file
+ await moveFile(tmp.target, bucket)
+ tmp.moved = true
+ }
+
+ // write the file atomically
+ const tmp = await setup()
+ try {
+ await write(tmp)
+ } finally {
+ await teardown(tmp)
+ }
+
+ // we reverse the list we generated such that the newest
+ // entries come first in order to make looping through them easier
+ // the true passed to formatEntry tells it to keep null
+ // integrity values, if they made it this far it's because
+ // validateEntry returned true, and as such we should return it
+ return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+ const { metadata, size, time } = opts
+ const bucket = bucketPath(cache, key)
+ const entry = {
+ key,
+ integrity: integrity && ssri.stringify(integrity),
+ time: time || Date.now(),
+ size,
+ metadata,
+ }
+ try {
+ await mkdir(path.dirname(bucket), { recursive: true })
+ const stringified = JSON.stringify(entry)
+ // NOTE - Cleverness ahoy!
+ //
+ // This works because it's tremendously unlikely for an entry to corrupt
+ // another while still preserving the string length of the JSON in
+ // question. So, we just slap the length in there and verify it on read.
+ //
+ // Thanks to @isaacs for the whiteboarding session that ended up with
+ // this.
+ await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return undefined
+ }
+
+ throw err
+ }
+ return formatEntry(cache, entry)
+}
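+
+// Illustration of the bucket line format appended above (values are made up):
+// each line is "<sha1 of the JSON>\t<JSON>", e.g.
+//
+//   16a93...\t{"key":"my-key","integrity":"sha512-...","time":1700000000000,"size":5}
+//
+// bucketEntries() below recomputes the sha1 on read and silently drops any
+// line whose hash no longer matches its JSON payload.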
+
+module.exports.find = find
+
+async function find (cache, key) {
+ const bucket = bucketPath(cache, key)
+ try {
+ const entries = await bucketEntries(bucket)
+ return entries.reduce((latest, next) => {
+ if (next && next.key === key) {
+ return formatEntry(cache, next)
+ } else {
+ return latest
+ }
+ }, null)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return null
+ } else {
+ throw err
+ }
+ }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+ if (!opts.removeFully) {
+ return insert(cache, key, null, opts)
+ }
+
+ const bucket = bucketPath(cache, key)
+ return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+ const indexDir = bucketDir(cache)
+ const stream = new Minipass({ objectMode: true })
+
+ // Set all this up to run on the stream and then just return the stream
+ Promise.resolve().then(async () => {
+ const buckets = await readdirOrEmpty(indexDir)
+ await Promise.all(buckets.map(async (bucket) => {
+ const bucketPath = path.join(indexDir, bucket)
+ const subbuckets = await readdirOrEmpty(bucketPath)
+ await Promise.all(subbuckets.map(async (subbucket) => {
+ const subbucketPath = path.join(bucketPath, subbucket)
+
+ // "/cachename//./*"
+ const subbucketEntries = await readdirOrEmpty(subbucketPath)
+ await Promise.all(subbucketEntries.map(async (entry) => {
+ const entryPath = path.join(subbucketPath, entry)
+ try {
+ const entries = await bucketEntries(entryPath)
+ // using a Map here prevents duplicate keys from showing up
+ // twice, I guess?
+ const reduced = entries.reduce((acc, entry) => {
+ acc.set(entry.key, entry)
+ return acc
+ }, new Map())
+ // reduced is a map of key => entry
+ for (const entry of reduced.values()) {
+ const formatted = formatEntry(cache, entry)
+ if (formatted) {
+ stream.write(formatted)
+ }
+ }
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return undefined
+ }
+ throw err
+ }
+ }))
+ }))
+ }))
+ stream.end()
+ return stream
+ }).catch(err => stream.emit('error', err))
+
+ return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+ const entries = await lsStream(cache).collect()
+ return entries.reduce((acc, xs) => {
+ acc[xs.key] = xs
+ return acc
+ }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+ const data = await readFile(bucket, 'utf8')
+ return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+ const entries = []
+ data.split('\n').forEach((entry) => {
+ if (!entry) {
+ return
+ }
+
+ const pieces = entry.split('\t')
+ if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+ // Hash is no good! Corruption or malice? Doesn't matter!
+ // EJECT EJECT
+ return
+ }
+ let obj
+ try {
+ obj = JSON.parse(pieces[1])
+ } catch (_) {
+ // eslint-ignore-next-line no-empty-block
+ }
+ // coverage disabled here, no need to test with an entry that parses to something falsey
+ // istanbul ignore else
+ if (obj) {
+ entries.push(obj)
+ }
+ })
+ return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+ return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+ const hashed = hashKey(key)
+ return path.join.apply(
+ path,
+ [bucketDir(cache)].concat(hashToSegments(hashed))
+ )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+ return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+ return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+ return crypto
+ .createHash(digest)
+ .update(str)
+ .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+ // Treat null digests as deletions. They'll shadow any previous entries.
+ if (!entry.integrity && !keepAll) {
+ return null
+ }
+
+ return {
+ key: entry.key,
+ integrity: entry.integrity,
+ path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+ size: entry.size,
+ time: entry.time,
+ metadata: entry.metadata,
+ }
+}
+
+function readdirOrEmpty (dir) {
+ return readdir(dir).catch((err) => {
+ if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+ return []
+ }
+
+ throw err
+ })
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js
new file mode 100644
index 00000000000000..80ec206c7ecaaa
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+ const { integrity, memoize, size } = opts
+ const memoized = memo.get(cache, key, opts)
+ if (memoized && memoize !== false) {
+ return {
+ metadata: memoized.entry.metadata,
+ data: memoized.data,
+ integrity: memoized.entry.integrity,
+ size: memoized.entry.size,
+ }
+ }
+
+ const entry = await index.find(cache, key, opts)
+ if (!entry) {
+ throw new index.NotFoundError(cache, key)
+ }
+ const data = await read(cache, entry.integrity, { integrity, size })
+ if (memoize) {
+ memo.put(cache, entry, data, opts)
+ }
+
+ return {
+ data,
+ metadata: entry.metadata,
+ size: entry.size,
+ integrity: entry.integrity,
+ }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+ const { integrity, memoize, size } = opts
+ const memoized = memo.get.byDigest(cache, key, opts)
+ if (memoized && memoize !== false) {
+ return memoized
+ }
+
+ const res = await read(cache, key, { integrity, size })
+ if (memoize) {
+ memo.put.byDigest(cache, key, res, opts)
+ }
+ return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+ const stream = new Minipass()
+ stream.on('newListener', function (ev, cb) {
+ ev === 'metadata' && cb(memoized.entry.metadata)
+ ev === 'integrity' && cb(memoized.entry.integrity)
+ ev === 'size' && cb(memoized.entry.size)
+ })
+ stream.end(memoized.data)
+ return stream
+}
+
+function getStream (cache, key, opts = {}) {
+ const { memoize, size } = opts
+ const memoized = memo.get(cache, key, opts)
+ if (memoized && memoize !== false) {
+ return getMemoizedStream(memoized)
+ }
+
+ const stream = new Pipeline()
+ // Set all this up to run on the stream and then just return the stream
+ Promise.resolve().then(async () => {
+ const entry = await index.find(cache, key)
+ if (!entry) {
+ throw new index.NotFoundError(cache, key)
+ }
+
+ stream.emit('metadata', entry.metadata)
+ stream.emit('integrity', entry.integrity)
+ stream.emit('size', entry.size)
+ stream.on('newListener', function (ev, cb) {
+ ev === 'metadata' && cb(entry.metadata)
+ ev === 'integrity' && cb(entry.integrity)
+ ev === 'size' && cb(entry.size)
+ })
+
+ const src = read.readStream(
+ cache,
+ entry.integrity,
+ { ...opts, size: typeof size !== 'number' ? entry.size : size }
+ )
+
+ if (memoize) {
+ const memoStream = new Collect.PassThrough()
+ memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+ stream.unshift(memoStream)
+ }
+ stream.unshift(src)
+ return stream
+ }).catch((err) => stream.emit('error', err))
+
+ return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+ const { memoize } = opts
+ const memoized = memo.get.byDigest(cache, integrity, opts)
+ if (memoized && memoize !== false) {
+ const stream = new Minipass()
+ stream.end(memoized)
+ return stream
+ } else {
+ const stream = read.readStream(cache, integrity, opts)
+ if (!memoize) {
+ return stream
+ }
+
+ const memoStream = new Collect.PassThrough()
+ memoStream.on('collect', data => memo.put.byDigest(
+ cache,
+ integrity,
+ data,
+ opts
+ ))
+ return new Pipeline(stream, memoStream)
+ }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+ const { memoize } = opts
+ const memoized = memo.get(cache, key, opts)
+ if (memoized && memoize !== false) {
+ return Promise.resolve(memoized.entry)
+ } else {
+ return index.find(cache, key)
+ }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+ const entry = await index.find(cache, key, opts)
+ if (!entry) {
+ throw new index.NotFoundError(cache, key)
+ }
+ await read.copy(cache, entry.integrity, dest, opts)
+ return {
+ metadata: entry.metadata,
+ size: entry.size,
+ integrity: entry.integrity,
+ }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+ await read.copy(cache, key, dest, opts)
+ return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js
new file mode 100644
index 00000000000000..c9b0da5f3a271b
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js
new file mode 100644
index 00000000000000..0ff604a479c9c1
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MEMOIZED = new LRU({
+ max: 500,
+ maxSize: 50 * 1024 * 1024, // 50MB
+ ttl: 3 * 60 * 1000, // 3 minutes
+ sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+ const old = {}
+ MEMOIZED.forEach((v, k) => {
+ old[k] = v
+ })
+ MEMOIZED.clear()
+ return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+ pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+ putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+ pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+ return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+ return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+ constructor (obj) {
+ this.obj = obj
+ }
+
+ get (key) {
+ return this.obj[key]
+ }
+
+ set (key, val) {
+ this.obj[key] = val
+ }
+}
+
+function pickMem (opts) {
+ if (!opts || !opts.memoize) {
+ return MEMOIZED
+ } else if (opts.memoize.get && opts.memoize.set) {
+ return opts.memoize
+ } else if (typeof opts.memoize === 'object') {
+ return new ObjProxy(opts.memoize)
+ } else {
+ return MEMOIZED
+ }
+}
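+
+// Sketch of the accepted forms of opts.memoize handled above (illustrative):
+//
+//   pickMem()                        // falsy        -> shared module-level LRU
+//   pickMem({ memoize: true })       // true         -> shared module-level LRU
+//   pickMem({ memoize: new Map() })  // has get/set  -> used directly
+//   pickMem({ memoize: {} })         // plain object -> wrapped in ObjProxy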
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js
new file mode 100644
index 00000000000000..9fc932d5f6dec5
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+ algorithms: ['sha512'],
+ ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+ const { memoize } = opts
+ opts = putOpts(opts)
+ const res = await write(cache, data, opts)
+ const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+ if (memoize) {
+ memo.put(cache, entry, data, opts)
+ }
+
+ return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+ const { memoize } = opts
+ opts = putOpts(opts)
+ let integrity
+ let size
+ let error
+
+ let memoData
+ const pipeline = new Pipeline()
+ // first item in the pipeline is the memoizer, because we need
+ // that to end first and get the collected data.
+ if (memoize) {
+ const memoizer = new PassThrough().on('collect', data => {
+ memoData = data
+ })
+ pipeline.push(memoizer)
+ }
+
+ // contentStream is a write-only stream, not a passthrough;
+ // no data comes out of it.
+ const contentStream = write.stream(cache, opts)
+ .on('integrity', (int) => {
+ integrity = int
+ })
+ .on('size', (s) => {
+ size = s
+ })
+ .on('error', (err) => {
+ error = err
+ })
+
+ pipeline.push(contentStream)
+
+ // last but not least, we write the index and emit hash and size,
+ // and memoize if we're doing that
+ pipeline.push(new Flush({
+ async flush () {
+ if (!error) {
+ const entry = await index.insert(cache, key, integrity, { ...opts, size })
+ if (memoize && memoData) {
+ memo.put(cache, entry, memoData, opts)
+ }
+ pipeline.emit('integrity', integrity)
+ pipeline.emit('size', size)
+ }
+ },
+ }))
+
+ return pipeline
+}
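+
+// A rough usage sketch (illustrative only; the cache path, key, and
+// someReadable are hypothetical). putData() resolves with the integrity of the
+// stored content, and putStream() emits 'integrity' and 'size' after the index
+// entry has been written:
+//
+//   const put = require('./put')
+//   const integrity = await put('/path/to/cache', 'my-key', Buffer.from('hi'))
+//   someReadable
+//     .pipe(put.stream('/path/to/cache', 'my-key'))
+//     .on('integrity', sri => console.log('stored as', sri.toString()))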
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js
new file mode 100644
index 00000000000000..a94760c7cf2430
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+ memo.clearMemoized()
+ return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+ memo.clearMemoized()
+ return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+ memo.clearMemoized()
+ const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+ return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 00000000000000..8500c1c16a429f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
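+
+// Example (illustrative): on Windows a joined pattern such as
+// C:\cache\content-v2\** becomes C:/cache/content-v2/** before being handed to
+// glob, since glob expects '/' separators and treats '\' as an escape character.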
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 00000000000000..445599b5038088
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+ return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
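+
+// Example: hashToSegments('deadbeef1234') => ['de', 'ad', 'beef1234'], which is
+// how hashed keys and digests are fanned out into two levels of subdirectories.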
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 00000000000000..0bf5302136ebeb
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+ const { tmpPrefix } = opts
+ const tmpDir = path.join(cache, 'tmp')
+ await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+ // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+ const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+ return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+ if (!cb) {
+ cb = opts
+ opts = {}
+ }
+ return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js
new file mode 100644
index 00000000000000..62e85c946490fc
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js
@@ -0,0 +1,257 @@
+'use strict'
+
+const {
+ mkdir,
+ readFile,
+ rm,
+ stat,
+ truncate,
+ writeFile,
+} = require('fs/promises')
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+ Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+ concurrency: 20,
+ log: { silly () {} },
+ ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+ opts = verifyOpts(opts)
+ opts.log.silly('verify', 'verifying cache at', cache)
+
+ const steps = [
+ markStartTime,
+ fixPerms,
+ garbageCollect,
+ rebuildIndex,
+ cleanTmp,
+ writeVerifile,
+ markEndTime,
+ ]
+
+ const stats = {}
+ for (const step of steps) {
+ const label = step.name
+ const start = new Date()
+ const s = await step(cache, opts)
+ if (s) {
+ Object.keys(s).forEach((k) => {
+ stats[k] = s[k]
+ })
+ }
+ const end = new Date()
+ if (!stats.runTime) {
+ stats.runTime = {}
+ }
+ stats.runTime[label] = end - start
+ }
+ stats.runTime.total = stats.endTime - stats.startTime
+ opts.log.silly(
+ 'verify',
+ 'verification finished for',
+ cache,
+ 'in',
+ `${stats.runTime.total}ms`
+ )
+ return stats
+}
+
+async function markStartTime (cache, opts) {
+ return { startTime: new Date() }
+}
+
+async function markEndTime (cache, opts) {
+ return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+ opts.log.silly('verify', 'fixing cache permissions')
+ await mkdir(cache, { recursive: true })
+ return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+ opts.log.silly('verify', 'garbage collecting content')
+ const indexStream = index.lsStream(cache)
+ const liveContent = new Set()
+ indexStream.on('data', (entry) => {
+ if (opts.filter && !opts.filter(entry)) {
+ return
+ }
+
+ // integrity is stringified, re-parse it so we can get each hash
+ const integrity = ssri.parse(entry.integrity)
+ for (const algo in integrity) {
+ liveContent.add(integrity[algo].toString())
+ }
+ })
+ await new Promise((resolve, reject) => {
+ indexStream.on('end', resolve).on('error', reject)
+ })
+ const contentDir = contentPath.contentDir(cache)
+ const files = await glob(path.join(contentDir, '**'), {
+ follow: false,
+ nodir: true,
+ nosort: true,
+ })
+ const stats = {
+ verifiedContent: 0,
+ reclaimedCount: 0,
+ reclaimedSize: 0,
+ badContentCount: 0,
+ keptSize: 0,
+ }
+ await pMap(
+ files,
+ async (f) => {
+ const split = f.split(/[/\\]/)
+ const digest = split.slice(split.length - 3).join('')
+ const algo = split[split.length - 4]
+ const integrity = ssri.fromHex(digest, algo)
+ if (liveContent.has(integrity.toString())) {
+ const info = await verifyContent(f, integrity)
+ if (!info.valid) {
+ stats.reclaimedCount++
+ stats.badContentCount++
+ stats.reclaimedSize += info.size
+ } else {
+ stats.verifiedContent++
+ stats.keptSize += info.size
+ }
+ } else {
+ // No entries refer to this content. We can delete.
+ stats.reclaimedCount++
+ const s = await stat(f)
+ await rm(f, { recursive: true, force: true })
+ stats.reclaimedSize += s.size
+ }
+ return stats
+ },
+ { concurrency: opts.concurrency }
+ )
+ return stats
+}
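+
+// Illustration of the digest reconstruction above (hypothetical path): a file at
+//   /path/to/cache/content-v2/sha512/de/ad/beef0123...
+// splits into algo 'sha512' and digest 'deadbeef0123...', and ssri.fromHex()
+// turns that back into an integrity string comparable against liveContent.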
+
+async function verifyContent (filepath, sri) {
+ const contentInfo = {}
+ try {
+ const { size } = await stat(filepath)
+ contentInfo.size = size
+ contentInfo.valid = true
+ await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return { size: 0, valid: false }
+ }
+ if (err.code !== 'EINTEGRITY') {
+ throw err
+ }
+
+ await rm(filepath, { recursive: true, force: true })
+ contentInfo.valid = false
+ }
+ return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+ opts.log.silly('verify', 'rebuilding index')
+ const entries = await index.ls(cache)
+ const stats = {
+ missingContent: 0,
+ rejectedEntries: 0,
+ totalEntries: 0,
+ }
+ const buckets = {}
+ for (const k in entries) {
+ /* istanbul ignore else */
+ if (hasOwnProperty(entries, k)) {
+ const hashed = index.hashKey(k)
+ const entry = entries[k]
+ const excluded = opts.filter && !opts.filter(entry)
+ excluded && stats.rejectedEntries++
+ if (buckets[hashed] && !excluded) {
+ buckets[hashed].push(entry)
+ } else if (buckets[hashed] && excluded) {
+ // skip
+ } else if (excluded) {
+ buckets[hashed] = []
+ buckets[hashed]._path = index.bucketPath(cache, k)
+ } else {
+ buckets[hashed] = [entry]
+ buckets[hashed]._path = index.bucketPath(cache, k)
+ }
+ }
+ }
+ await pMap(
+ Object.keys(buckets),
+ (key) => {
+ return rebuildBucket(cache, buckets[key], stats, opts)
+ },
+ { concurrency: opts.concurrency }
+ )
+ return stats
+}
+
+async function rebuildBucket (cache, bucket, stats, opts) {
+ await truncate(bucket._path)
+ // This needs to be serialized because cacache explicitly
+ // lets very racy bucket conflicts clobber each other.
+ for (const entry of bucket) {
+ const content = contentPath(cache, entry.integrity)
+ try {
+ await stat(content)
+ await index.insert(cache, entry.key, entry.integrity, {
+ metadata: entry.metadata,
+ size: entry.size,
+ time: entry.time,
+ })
+ stats.totalEntries++
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ stats.rejectedEntries++
+ stats.missingContent++
+ } else {
+ throw err
+ }
+ }
+ }
+}
+
+function cleanTmp (cache, opts) {
+ opts.log.silly('verify', 'cleaning tmp directory')
+ return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+ const verifile = path.join(cache, '_lastverified')
+ opts.log.silly('verify', 'writing verifile to ' + verifile)
+ return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+ const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+ return new Date(+data)
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE
new file mode 100644
index 00000000000000..de3226673c3874
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2013 Julian Gruber
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js
new file mode 100644
index 00000000000000..668fb1cb9d45a4
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js
@@ -0,0 +1,202 @@
+var balanced = require('balanced-match');
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
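+
+// Example (illustrative): parseCommaParts('a,{b,c},d') => ['a', '{b,c}', 'd'];
+// the nested braced section stays intact as a single member.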
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
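+
+// Examples (illustrative):
+//   expandTop('a{b,c}d')        => ['abd', 'acd']
+//   expandTop('file{1..3}.txt') => ['file1.txt', 'file2.txt', 'file3.txt']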
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m) return [str];
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ if (/\$$/.test(m.pre)) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre+ '{' + m.body + '}' + post[k];
+ expansions.push(expansion);
+ }
+ } else {
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = [];
+
+ for (var j = 0; j < n.length; j++) {
+ N.push.apply(N, expand(n[j], false));
+ }
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+ }
+
+ return expansions;
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json
new file mode 100644
index 00000000000000..7097d41e39de5d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json
@@ -0,0 +1,46 @@
+{
+ "name": "brace-expansion",
+ "description": "Brace expansion as known from sh/bash",
+ "version": "2.0.1",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/juliangruber/brace-expansion.git"
+ },
+ "homepage": "https://github.com/juliangruber/brace-expansion",
+ "main": "index.js",
+ "scripts": {
+ "test": "tape test/*.js",
+ "gentest": "bash test/generate.sh",
+ "bench": "matcha test/perf/bench.js"
+ },
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ },
+ "devDependencies": {
+ "@c4312/matcha": "^1.3.1",
+ "tape": "^4.6.0"
+ },
+ "keywords": [],
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "license": "MIT",
+ "testling": {
+ "files": "test/*.js",
+ "browsers": [
+ "ie/8..latest",
+ "firefox/20..latest",
+ "firefox/nightly",
+ "chrome/25..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest",
+ "android-browser/4.2..latest"
+ ]
+ }
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE
new file mode 100644
index 00000000000000..ec7df93329abf3
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/README.md b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/README.md
new file mode 100644
index 00000000000000..1bde1494664d4d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/README.md
@@ -0,0 +1,1214 @@
+# Glob
+
+Match files using the patterns the shell uses.
+
+The most correct and second fastest glob implementation in
+JavaScript. (See **Comparison to Other JavaScript Glob
+Implementations** at the bottom of this readme.)
+
+![a fun cartoon logo made of glob characters](https://github.com/isaacs/node-glob/raw/main/logo/glob.png)
+
+## Usage
+
+Install with npm
+
+```
+npm i glob
+```
+
+**Note** the npm package name is _not_ `node-glob`; that's a
+different thing that was abandoned years ago. Just `glob`.
+
+```js
+// load using import
+import { glob, globSync, globStream, globStreamSync, Glob } from 'glob'
+// or using commonjs, that's fine, too
+const {
+ glob,
+ globSync,
+ globStream,
+ globStreamSync,
+ Glob,
+} = require('glob')
+
+// the main glob() and globSync() resolve/return array of filenames
+
+// all js files, but don't look in node_modules
+const jsfiles = await glob('**/*.js', { ignore: 'node_modules/**' })
+
+// pass in a signal to cancel the glob walk
+const stopAfter100ms = await glob('**/*.css', {
+ signal: AbortSignal.timeout(100),
+})
+
+// multiple patterns supported as well
+const images = await glob(['css/*.{png,jpeg}', 'public/*.{png,jpeg}'])
+
+// but of course you can do that with the glob pattern also
+// the sync function is the same, just returns a string[] instead
+// of Promise
+const imagesAlt = globSync('{css,public}/*.{png,jpeg}')
+
+// you can also stream them, this is a Minipass stream
+const filesStream = globStream(['**/*.dat', 'logs/**/*.log'])
+
+// construct a Glob object if you wanna do it that way, which
+// allows for much faster walks if you have to look in the same
+// folder multiple times.
+const g = new Glob('**/foo', {})
+// glob objects are async iterators, can also do globIterate() or
+// g.iterate(), same deal
+for await (const file of g) {
+ console.log('found a foo file:', file)
+}
+// pass a glob as the glob options to reuse its settings and caches
+const g2 = new Glob('**/bar', g)
+// sync iteration works as well
+for (const file of g2) {
+ console.log('found a bar file:', file)
+}
+
+// you can also pass withFileTypes: true to get Path objects
+// these are like a Dirent, but with some more added powers
+// check out http://npm.im/path-scurry for more info on their API
+const g3 = new Glob('**/baz/**', { withFileTypes: true })
+g3.stream().on('data', path => {
+ console.log(
+ 'got a path object',
+ path.fullpath(),
+ path.isDirectory(),
+ path.readdirSync().map(e => e.name)
+ )
+})
+
+// if you use stat:true and withFileTypes, you can sort results
+// by things like modified time, filter by permission mode, etc.
+// All Stats fields will be available in that case. Slightly
+// slower, though.
+// For example:
+const results = await glob('**', { stat: true, withFileTypes: true })
+
+const timeSortedFiles = results
+ .sort((a, b) => a.mtimeMS - b.mtimeMS)
+ .map(path => path.fullpath())
+
+const groupReadableFiles = results
+ .filter(path => path.mode & 0o040)
+ .map(path => path.fullpath())
+
+// custom ignores can be done like this, for example by saying
+// you'll ignore all markdown files, and all folders named 'docs'
+const customIgnoreResults = await glob('**', {
+ ignore: {
+ ignored: p => /\.md$/.test(p.name),
+ childrenIgnored: p => p.isNamed('docs'),
+ },
+})
+
+// another fun use case, only return files with the same name as
+// their parent folder, plus either `.ts` or `.js`
+const folderNamedModules = await glob('**/*.{ts,js}', {
+ ignore: {
+ ignored: p => {
+ const pp = p.parent
+ return !(p.isNamed(pp.name + '.ts') || p.isNamed(pp.name + '.js'))
+ },
+ },
+})
+
+// find all files edited in the last hour, to do this, we ignore
+// all of them that are more than an hour old
+const newFiles = await glob('**', {
+ // need stat so we have mtime
+ stat: true,
+ // only want the files, not the dirs
+ nodir: true,
+ ignore: {
+ ignored: p => {
+ return new Date() - p.mtime > 60 * 60 * 1000
+ },
+ // could add similar childrenIgnored here as well, but
+ // directory mtime is inconsistent across platforms, so
+ // probably better not to, unless you know the system
+ // tracks this reliably.
+ },
+})
+```
+
+**Note** Glob patterns should always use `/` as a path separator,
+even on Windows systems, as `\` is used to escape glob
+characters. If you wish to use `\` as a path separator _instead
+of_ using it as an escape character on Windows platforms, you may
+set `windowsPathsNoEscape:true` in the options. In this mode,
+special glob characters cannot be escaped, making it impossible
+to match a literal `*` `?` and so on in filenames.
+
+## Command Line Interface
+
+```
+$ glob -h
+
+Usage:
+ glob [options] [<pattern> [<pattern> ...]]
+
+Expand the positional glob expression arguments into any matching file system
+paths found.
+
+ -c --cmd=
+ Run the command provided, passing the glob expression
+ matches as arguments.
+
+ -A --all By default, the glob cli command will not expand any
+ arguments that are an exact match to a file on disk.
+
+ This prevents double-expanding, in case the shell
+ expands an argument whose filename is a glob
+ expression.
+
+ For example, if 'app/*.ts' would match 'app/[id].ts',
+ then on Windows powershell or cmd.exe, 'glob app/*.ts'
+ will expand to 'app/[id].ts', as expected. However, in
+ posix shells such as bash or zsh, the shell will first
+ expand 'app/*.ts' to a list of filenames. Then glob
+ will look for a file matching 'app/[id].ts' (ie,
+ 'app/i.ts' or 'app/d.ts'), which is unexpected.
+
+ Setting '--all' prevents this behavior, causing glob to
+ treat ALL patterns as glob expressions to be expanded,
+ even if they are an exact match to a file on disk.
+
+ When setting this option, be sure to enquote arguments
+ so that the shell will not expand them prior to passing
+ them to the glob command process.
+
+ -a --absolute Expand to absolute paths
+ -d --dot-relative Prepend './' on relative matches
+ -m --mark Append a / on any directories matched
+ -x --posix Always resolve to posix style paths, using '/' as the
+ directory separator, even on Windows. Drive letter
+ absolute matches on Windows will be expanded to their
+ full resolved UNC paths, eg instead of 'C:\foo\bar', it
+ will expand to '//?/C:/foo/bar'.
+
+ -f --follow Follow symlinked directories when expanding '**'
+ -R --realpath Call 'fs.realpath' on all of the results. In the case
+ of an entry that cannot be resolved, the entry is
+ omitted. This incurs a slight performance penalty, of
+ course, because of the added system calls.
+
+ -s --stat Call 'fs.lstat' on all entries, whether required or not
+ to determine if it's a valid match.
+
+ -b --match-base Perform a basename-only match if the pattern does not
+ contain any slash characters. That is, '*.js' would be
+ treated as equivalent to '**/*.js', matching js files
+ in all directories.
+
+ --dot Allow patterns to match files/directories that start
+ with '.', even if the pattern does not start with '.'
+
+ --nobrace Do not expand {...} patterns
+ --nocase Perform a case-insensitive match. This defaults to
+ 'true' on macOS and Windows platforms, and false on all
+ others.
+
+ Note: 'nocase' should only be explicitly set when it is
+ known that the filesystem's case sensitivity differs
+ from the platform default. If set 'true' on
+ case-insensitive file systems, then the walk may return
+ more or less results than expected.
+
+ --nodir Do not match directories, only files.
+
+ Note: to *only* match directories, append a '/' at the
+ end of the pattern.
+
+ --noext Do not expand extglob patterns, such as '+(a|b)'
+ --noglobstar Do not expand '**' against multiple path portions. Ie,
+ treat it as a normal '*' instead.
+
+ --windows-path-no-escape
+ Use '\' as a path separator *only*, and *never* as an
+ escape character. If set, all '\' characters are
+ replaced with '/' in the pattern.
+
+ -D --max-depth= Maximum depth to traverse from the current working
+ directory
+
+ -C --cwd= Current working directory to execute/match in
+ -r --root= A string path resolved against the 'cwd', which is used
+ as the starting point for absolute patterns that start
+ with '/' (but not drive letters or UNC paths on
+ Windows).
+
+ Note that this *doesn't* necessarily limit the walk to
+ the 'root' directory, and doesn't affect the cwd
+ starting point for non-absolute patterns. A pattern
+ containing '..' will still be able to traverse out of
+ the root directory, if it is not an actual root
+ directory on the filesystem, and any non-absolute
+ patterns will still be matched in the 'cwd'.
+
+ To start absolute and non-absolute patterns in the same
+ path, you can use '--root=' to set it to the empty
+ string. However, be aware that on Windows systems, a
+ pattern like 'x:/*' or '//host/share/*' will *always*
+ start in the 'x:/' or '//host/share/' directory,
+ regardless of the --root setting.
+
+ --platform= Defaults to the value of 'process.platform' if
+ available, or 'linux' if not. Setting --platform=win32
+ on non-Windows systems may cause strange behavior!
+
+ -i --ignore=
+ Glob patterns to ignore. Can be set multiple times
+ -v --debug Output a huge amount of noisy debug information about
+ patterns as they are parsed and used to match files.
+
+ -h --help Show this usage information
+```
+
+## `glob(pattern: string | string[], options?: GlobOptions) => Promise<string[] | Path[]>`
+
+Perform an asynchronous glob search for the pattern(s) specified.
+Returns
+[Path](https://isaacs.github.io/path-scurry/classes/PathBase)
+objects if the `withFileTypes` option is set to `true`. See below
+for full options field descriptions.
+
+## `globSync(pattern: string | string[], options?: GlobOptions) => string[] | Path[]`
+
+Synchronous form of `glob()`.
+
+Alias: `glob.sync()`
+
+## `globIterate(pattern: string | string[], options?: GlobOptions) => AsyncGenerator`
+
+Return an async iterator for walking glob pattern matches.
+
+Alias: `glob.iterate()`
+
+## `globIterateSync(pattern: string | string[], options?: GlobOptions) => Generator`
+
+Return a sync iterator for walking glob pattern matches.
+
+Alias: `glob.iterate.sync()`, `glob.sync.iterate()`
+
+## `globStream(pattern: string | string[], options?: GlobOptions) => Minipass`
+
+Return a stream that emits all the strings or `Path` objects and
+then emits `end` when completed.
+
+Alias: `glob.stream()`
+
+## `globStreamSync(pattern: string | string[], options?: GlobOptions) => Minipass`
+
+Synchronous form of `globStream()`. Will read all the matches as
+fast as you consume them, even all in a single tick if you
+consume them immediately, but will still respond to backpressure
+if they're not consumed immediately.
+
+Alias: `glob.stream.sync()`, `glob.sync.stream()`
+
+## `hasMagic(pattern: string | string[], options?: GlobOptions) => boolean`
+
+Returns `true` if the provided pattern contains any "magic" glob
+characters, given the options provided.
+
+Brace expansion is not considered "magic" unless the
+`magicalBraces` option is set, as brace expansion just turns one
+string into an array of strings. So a pattern like `'x{a,b}y'`
+would return `false`, because `'xay'` and `'xby'` both do not
+contain any magic glob characters, and it's treated the same as
+if you had called it on `['xay', 'xby']`. When
+`magicalBraces:true` is in the options, brace expansion _is_
+treated as a pattern having magic.
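+
+A short sketch of that behavior (illustrative only):
+
+```js
+const { hasMagic } = require('glob')
+hasMagic('x{a,b}y') // false: brace expansion alone is not "magic"
+hasMagic('x{a,b}y', { magicalBraces: true }) // true
+hasMagic('x*y') // true
+```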
+
+## `escape(pattern: string, options?: GlobOptions) => string`
+
+Escape all magic characters in a glob pattern, so that it will
+only ever match literal strings
+
+If the `windowsPathsNoEscape` option is used, then characters are
+escaped by wrapping in `[]`, because a magic character wrapped in
+a character class can only be satisfied by that exact character.
+
+Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot
+be escaped or unescaped.
+
+## `unescape(pattern: string, options?: GlobOptions) => string`
+
+Un-escape a glob string that may contain some escaped characters.
+
+If the `windowsPathsNoEscape` option is used, then square-brace
+escapes are removed, but not backslash escapes. For example, it
+will turn the string `'[*]'` into `*`, but it will not turn
+`'\\*'` into `'*'`, because `\` is a path separator in
+`windowsPathsNoEscape` mode.
+
+When `windowsPathsNoEscape` is not set, then both brace escapes
+and backslash escapes are removed.
+
+Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot
+be escaped or unescaped.
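+
+A minimal sketch of round-tripping a literal name through `escape()`
+and `unescape()` (the file name is hypothetical):
+
+```js
+import { escape, unescape, glob } from 'glob'
+
+// match exactly the file named 'notes [draft].txt',
+// treating '[' and ']' as literal characters
+const literal = escape('notes [draft].txt')
+const matches = await glob(literal)
+
+// recover the original string
+const original = unescape(literal)
+```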
+
+## Class `Glob`
+
+An object that can perform glob pattern traversals.
+
+### `const g = new Glob(pattern: string | string[], options: GlobOptions)`
+
+Options object is required.
+
+See full options descriptions below.
+
+Note that a previous `Glob` object can be passed as the
+`GlobOptions` to another `Glob` instantiation to re-use settings
+and caches with a new pattern.
+
+Traversal functions can be called multiple times to run the walk
+again.
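+
+For example, a minimal sketch (patterns and options are illustrative):
+
+```js
+import { Glob } from 'glob'
+
+const g = new Glob('**/*.js', { ignore: 'node_modules/**' })
+const results = await g.walk()
+
+// re-use the settings and caches of `g` for a different pattern
+const g2 = new Glob('**/*.ts', g)
+const moreResults = g2.walkSync()
+```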
+
+### `g.stream()`
+
+Stream results asynchronously.
+
+### `g.streamSync()`
+
+Stream results synchronously.
+
+### `g.iterate()`
+
+Default async iteration function. Returns an AsyncGenerator that
+iterates over the results.
+
+### `g.iterateSync()`
+
+Default sync iteration function. Returns a Generator that
+iterates over the results.
+
+### `g.walk()`
+
+Returns a Promise that resolves to the results array.
+
+### `g.walkSync()`
+
+Returns a results array.
+
+### Properties
+
+All options are stored as properties on the `Glob` object.
+
+- `opts` The options provided to the constructor.
+- `patterns` An array of parsed immutable `Pattern` objects.
+
+## Options
+
+Exported as `GlobOptions` TypeScript interface. A `GlobOptions`
+object may be provided to any of the exported methods, and must
+be provided to the `Glob` constructor.
+
+All options are optional, boolean, and false by default, unless
+otherwise noted.
+
+All resolved options are added to the Glob object as properties.
+
+If you are running many `glob` operations, you can pass a Glob
+object as the `options` argument to a subsequent operation to
+share the previously loaded cache.
+
+- `cwd` String path or `file://` string or URL object. The
+ current working directory in which to search. Defaults to
+ `process.cwd()`. See also: "Windows, CWDs, Drive Letters, and
+ UNC Paths", below.
+
+ This option may be either a string path or a `file://` URL
+ object or string.
+
+- `root` A string path resolved against the `cwd` option, which
+ is used as the starting point for absolute patterns that start
+ with `/`, (but not drive letters or UNC paths on Windows).
+
+ Note that this _doesn't_ necessarily limit the walk to the
+ `root` directory, and doesn't affect the cwd starting point for
+ non-absolute patterns. A pattern containing `..` will still be
+ able to traverse out of the root directory, if it is not an
+ actual root directory on the filesystem, and any non-absolute
+ patterns will be matched in the `cwd`. For example, the
+ pattern `/../*` with `{root:'/some/path'}` will return all
+ files in `/some`, not all files in `/some/path`. The pattern
+ `*` with `{root:'/some/path'}` will return all the entries in
+ the cwd, not the entries in `/some/path`.
+
+ To start absolute and non-absolute patterns in the same
+ path, you can use `{root:''}`. However, be aware that on
+ Windows systems, a pattern like `x:/*` or `//host/share/*` will
+ _always_ start in the `x:/` or `//host/share` directory,
+ regardless of the `root` setting.
+
+- `windowsPathsNoEscape` Use `\\` as a path separator _only_, and
+ _never_ as an escape character. If set, all `\\` characters are
+ replaced with `/` in the pattern.
+
+ Note that this makes it **impossible** to match against paths
+ containing literal glob pattern characters, but allows matching
+ with patterns constructed using `path.join()` and
+ `path.resolve()` on Windows platforms, mimicking the (buggy!)
+ behavior of Glob v7 and before on Windows. Please use with
+ caution, and be mindful of [the caveat below about Windows
+ paths](#windows). (For legacy reasons, this is also set if
+ `allowWindowsEscape` is set to the exact value `false`.)
+
+- `dot` Include `.dot` files in normal matches and `globstar`
+ matches. Note that an explicit dot in a portion of the pattern
+ will always match dot files.
+
+- `magicalBraces` Treat brace expansion like `{a,b}` as a "magic"
+ pattern. Has no effect if {@link nobrace} is set.
+
+ Only has effect on the {@link hasMagic} function, no effect on
+ glob pattern matching itself.
+
+- `dotRelative` Prepend all relative path strings with `./` (or
+ `.\` on Windows).
+
+ Without this option, returned relative paths are "bare", so
+ instead of returning `'./foo/bar'`, they are returned as
+ `'foo/bar'`.
+
+ Relative patterns starting with `'../'` are not prepended with
+ `./`, even if this option is set.
+
+- `mark` Add a `/` character to directory matches. Note that this
+ requires additional stat calls.
+
+- `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
+
+- `noglobstar` Do not match `**` against multiple filenames. (Ie,
+ treat it as a normal `*` instead.)
+
+- `noext` Do not match "extglob" patterns such as `+(a|b)`.
+
+- `nocase` Perform a case-insensitive match. This defaults to
+ `true` on macOS and Windows systems, and `false` on all others.
+
+ **Note** `nocase` should only be explicitly set when it is
+ known that the filesystem's case sensitivity differs from the
+ platform default. If set `true` on case-sensitive file
+ systems, or `false` on case-insensitive file systems, then the
+ walk may return more or less results than expected.
+
+- `maxDepth` Specify a number to limit the depth of the directory
+ traversal to this many levels below the `cwd`.
+
+- `matchBase` Perform a basename-only match if the pattern does
+ not contain any slash characters. That is, `*.js` would be
+ treated as equivalent to `**/*.js`, matching all js files in
+ all directories.
+
+- `nodir` Do not match directories, only files. (Note: to match
+ _only_ directories, put a `/` at the end of the pattern.)
+
+- `stat` Call `lstat()` on all entries, whether required or not
+ to determine whether it's a valid match. When used with
+ `withFileTypes`, this means that matches will include data such
+ as modified time, permissions, and so on. Note that this will
+ incur a performance cost due to the added system calls.
+
+- `ignore` string or string[], or an object with `ignore` and
+ `ignoreChildren` methods.
+
+ If a string or string[] is provided, then this is treated as a
+ glob pattern or array of glob patterns to exclude from matches.
+ To ignore all children within a directory, as well as the entry
+ itself, append `'/**'` to the ignore pattern.
+
+ **Note** `ignore` patterns are _always_ in `dot:true` mode,
+ regardless of any other settings.
+
+ If an object is provided that has `ignored(path)` and/or
+ `childrenIgnored(path)` methods, then these methods will be
+ called to determine whether any Path is a match or if its
+ children should be traversed, respectively.
+
+- `follow` Follow symlinked directories when expanding `**`
+ patterns. This can result in a lot of duplicate references in
+ the presence of cyclic links, and make performance quite bad.
+
+ By default, a `**` in a pattern will follow 1 symbolic link if
+ it is not the first item in the pattern, or none if it is the
+ first item in the pattern, following the same behavior as Bash.
+
+- `realpath` Set to true to call `fs.realpath` on all of the
+ results. In the case of an entry that cannot be resolved, the
+ entry is omitted. This incurs a slight performance penalty, of
+ course, because of the added system calls.
+
+- `absolute` Set to true to always receive absolute paths for
+ matched files. Set to `false` to always receive relative paths
+ for matched files.
+
+ By default, when this option is not set, absolute paths are
+ returned for patterns that are absolute, and otherwise paths
+ are returned that are relative to the `cwd` setting.
+
+ This does _not_ make an extra system call to get the realpath,
+ it only does string path resolution.
+
+ `absolute` may not be used along with `withFileTypes`.
+
+- `posix` Set to true to use `/` as the path separator in
+ returned results. On posix systems, this has no effect. On
+ Windows systems, this will return `/` delimited path results,
+ and absolute paths will be returned in their full resolved UNC
+ path form, eg instead of `'C:\\foo\\bar'`, it will return
+ `//?/C:/foo/bar`.
+
+- `platform` Defaults to value of `process.platform` if
+ available, or `'linux'` if not. Setting `platform:'win32'` on
+ non-Windows systems may cause strange behavior.
+
+- `withFileTypes` Return [PathScurry](http://npm.im/path-scurry)
+ `Path` objects instead of strings. These are similar to a
+ NodeJS `Dirent` object, but with additional methods and
+ properties.
+
+ `withFileTypes` may not be used along with `absolute`.
+
+- `signal` An AbortSignal which will cancel the Glob walk when
+ triggered.
+
+- `fs` An override object to pass in custom filesystem methods.
+ See [PathScurry docs](http://npm.im/path-scurry) for what can
+ be overridden.
+
+- `scurry` A [PathScurry](http://npm.im/path-scurry) object used
+ to traverse the file system. If the `nocase` option is set
+ explicitly, then any provided `scurry` object must match this
+ setting.
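+
+A minimal sketch combining several of the options above (the `cwd`
+value and file layout are hypothetical; the `fullpath()` and
+`isFile()` methods on returned `Path` objects come from
+[path-scurry](http://npm.im/path-scurry)):
+
+```js
+import { glob } from 'glob'
+
+const entries = await glob('**/*.test.js', {
+  cwd: 'packages',
+  ignore: ['**/node_modules/**'],
+  nodir: true,
+  withFileTypes: true,
+})
+
+for (const entry of entries) {
+  // Path objects carry Dirent-like type info plus extra methods
+  console.log(entry.fullpath(), entry.isFile())
+}
+```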
+
+## Glob Primer
+
+Much more information about glob pattern expansion can be found
+by running `man bash` and searching for `Pattern Matching`.
+
+"Globs" are the patterns you type when you do stuff like `ls
+*.js` on the command line, or put `build/*` in a `.gitignore`
+file.
+
+Before parsing the path part patterns, braced sections are
+expanded into a set. Braced sections start with `{` and end with
+`}`, with 2 or more comma-delimited sections within. Braced
+sections may contain slash characters, so `a{/b/c,bcd}` would
+expand into `a/b/c` and `abcd`.
+
+The following characters have special magic meaning when used in
+a path portion. With the exception of `**`, none of these match
+path separators (ie, `/` on all platforms, and `\` on Windows).
+
+- `*` Matches 0 or more characters in a single path portion.
+ When alone in a path portion, it must match at least 1
+ character. If `dot:true` is not specified, then `*` will not
+ match against a `.` character at the start of a path portion.
+- `?` Matches 1 character. If `dot:true` is not specified, then
+ `?` will not match against a `.` character at the start of a
+ path portion.
+- `[...]` Matches a range of characters, similar to a RegExp
+ range. If the first character of the range is `!` or `^` then
+ it matches any character not in the range. If the first
+ character is `]`, then it will be considered the same as `\]`,
+ rather than the end of the character class.
+- `!(pattern|pattern|pattern)` Matches anything that does not
+ match any of the patterns provided. May _not_ contain `/`
+ characters. Similar to `*`, if alone in a path portion, then
+ the path portion must have at least one character.
+- `?(pattern|pattern|pattern)` Matches zero or one occurrence of
+ the patterns provided. May _not_ contain `/` characters.
+- `+(pattern|pattern|pattern)` Matches one or more occurrences of
+ the patterns provided. May _not_ contain `/` characters.
+- `*(a|b|c)` Matches zero or more occurrences of the patterns
+ provided. May _not_ contain `/` characters.
+- `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
+ provided. May _not_ contain `/` characters.
+- `**` If a "globstar" is alone in a path portion, then it
+ matches zero or more directories and subdirectories searching
+ for matches. It does not crawl symlinked directories, unless
+ `{follow:true}` is passed in the options object. A pattern
+ like `a/b/**` will only match `a/b` if it is a directory.
+ Follows 1 symbolic link if not the first item in the pattern,
+ or 0 if it is the first item, unless `follow:true` is set, in
+ which case it follows all symbolic links.
+
+`[:class:]` patterns are supported by this implementation, but
+`[=c=]` and `[.symbol.]` style class patterns are not.
+
+### Dots
+
+If a file or directory path portion has a `.` as the first
+character, then it will not match any glob pattern unless that
+pattern's corresponding path part also has a `.` as its first
+character.
+
+For example, the pattern `a/.*/c` would match the file at
+`a/.b/c`. However the pattern `a/*/c` would not, because `*` does
+not start with a dot character.
+
+You can make glob treat dots as normal characters by setting
+`dot:true` in the options.
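+
+For example, assuming a file exists at `a/.b/c`:
+
+```js
+import { glob } from 'glob'
+
+await glob('a/*/c') // [] - '*' does not match the '.b' portion
+await glob('a/*/c', { dot: true }) // [ 'a/.b/c' ]
+```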
+
+### Basename Matching
+
+If you set `matchBase:true` in the options, and the pattern has
+no slashes in it, then it will seek for any file anywhere in the
+tree with a matching basename. For example, `*.js` would match
+`test/simple/basic.js`.
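+
+A minimal sketch (the file layout is hypothetical):
+
+```js
+import { glob } from 'glob'
+
+// with matchBase, '*.js' behaves like '**/*.js'
+const allJs = await glob('*.js', { matchBase: true })
+```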
+
+### Empty Sets
+
+If no matching files are found, then an empty array is returned.
+This differs from the shell, where the pattern itself is
+returned. For example:
+
+```sh
+$ echo a*s*d*f
+a*s*d*f
+```
+
+## Comparisons to other fnmatch/glob implementations
+
+While strict compliance with the existing standards is a
+worthwhile goal, some discrepancies exist between node-glob and
+other implementations, and are intentional.
+
+The double-star character `**` is supported by default, unless
+the `noglobstar` flag is set. This is supported in the manner of
+bsdglob and bash 5, where `**` only has special significance if
+it is the only thing in a path part. That is, `a/**/b` will match
+`a/x/y/b`, but `a/**b` will not.
+
+Note that symlinked directories are not traversed as part of a
+`**`, though their contents may match against subsequent portions
+of the pattern. This prevents infinite loops and duplicates and
+the like. You can force glob to traverse symlinks with `**` by
+setting `{follow:true}` in the options.
+
+There is no equivalent of the `nonull` option. A pattern that
+does not find any matches simply resolves to nothing. (An empty
+array, immediately ended stream, etc.)
+
+If brace expansion is not disabled, then it is performed before
+any other interpretation of the glob pattern. Thus, a pattern
+like `+(a|{b),c)}`, which would not be valid in bash or zsh, is
+expanded **first** into the set of `+(a|b)` and `+(a|c)`, and
+those patterns are checked for validity. Since those two are
+valid, matching proceeds.
+
+The character class patterns `[:class:]` (posix standard named
+classes) style class patterns are supported and unicode-aware,
+but `[=c=]` (locale-specific character collation weight), and
+`[.symbol.]` (collating symbol), are not.
+
+### Repeated Slashes
+
+Unlike Bash and zsh, repeated `/` are always coalesced into a
+single path separator.
+
+### Comments and Negation
+
+Previously, this module let you mark a pattern as a "comment" if
+it started with a `#` character, or a "negated" pattern if it
+started with a `!` character.
+
+These options were deprecated in version 5, and removed in
+version 6.
+
+To specify things that should not match, use the `ignore` option.
+
+## Windows
+
+**Please only use forward-slashes in glob expressions.**
+
+Though Windows uses either `/` or `\` as its path separator, only
+`/` characters are used by this glob implementation. You must use
+forward-slashes **only** in glob expressions. Back-slashes will
+always be interpreted as escape characters, not path separators.
+
+Results from absolute patterns such as `/foo/*` are mounted onto
+the root setting using `path.join`. On Windows, this will by
+default result in `/foo/*` matching `C:\foo\bar.txt`.
+
+To automatically coerce all `\` characters to `/` in pattern
+strings, **thus making it impossible to escape literal glob
+characters**, you may set the `windowsPathsNoEscape` option to
+`true`.
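+
+A minimal sketch of matching a pattern built with `path.join()` on
+Windows (the directory names are hypothetical):
+
+```js
+import { join } from 'node:path'
+import { glob } from 'glob'
+
+// on Windows, join() produces 'src\\**\\*.js'; windowsPathsNoEscape
+// converts those backslashes to '/' instead of treating them as escapes
+const pattern = join('src', '**', '*.js')
+const files = await glob(pattern, { windowsPathsNoEscape: true })
+```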
+
+### Windows, CWDs, Drive Letters, and UNC Paths
+
+On posix systems, when a pattern starts with `/`, any `cwd`
+option is ignored, and the traversal starts at `/`, plus any
+non-magic path portions specified in the pattern.
+
+On Windows systems, the behavior is similar, but the concept of
+an "absolute path" is somewhat more involved.
+
+#### UNC Paths
+
+A UNC path may be used as the start of a pattern on Windows
+platforms. For example, a pattern like: `//?/x:/*` will return
+all file entries in the root of the `x:` drive. A pattern like
+`//ComputerName/Share/*` will return all files in the associated
+share.
+
+UNC path roots are always compared case insensitively.
+
+#### Drive Letters
+
+A pattern starting with a drive letter, like `c:/*`, will search
+in that drive, regardless of any `cwd` option provided.
+
+If the pattern starts with `/`, and is not a UNC path, and there
+is an explicit `cwd` option set with a drive letter, then the
+drive letter in the `cwd` is used as the root of the directory
+traversal.
+
+For example, `glob('/tmp', { cwd: 'c:/any/thing' })` will return
+`['c:/tmp']` as the result.
+
+If an explicit `cwd` option is not provided, and the pattern
+starts with `/`, then the traversal will run on the root of the
+drive provided as the `cwd` option. (That is, it is the result of
+`path.resolve('/')`.)
+
+## Race Conditions
+
+Glob searching, by its very nature, is susceptible to race
+conditions, since it relies on directory walking.
+
+As a result, it is possible that a file that exists when glob
+looks for it may have been deleted or modified by the time it
+returns the result.
+
+By design, this implementation caches all readdir calls that it
+makes, in order to cut down on system overhead. However, this
+also makes it even more susceptible to races, especially if the
+cache object is reused between glob calls.
+
+Users are thus advised not to use a glob result as a guarantee of
+filesystem state in the face of rapid changes. For the vast
+majority of operations, this is never a problem.
+
+### See Also:
+
+- `man sh`
+- `man bash` [Pattern
+ Matching](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html)
+- `man 3 fnmatch`
+- `man 5 gitignore`
+- [minimatch documentation](https://github.com/isaacs/minimatch)
+
+## Glob Logo
+
+Glob's logo was created by [Tanya
+Brassie](http://tanyabrassie.com/). Logo files can be found
+[here](https://github.com/isaacs/node-glob/tree/master/logo).
+
+The logo is licensed under a [Creative Commons
+Attribution-ShareAlike 4.0 International
+License](https://creativecommons.org/licenses/by-sa/4.0/).
+
+## Contributing
+
+Any change to behavior (including bugfixes) must come with a
+test.
+
+Patches that fail tests or reduce performance will be rejected.
+
+```sh
+# to run tests
+npm test
+
+# to re-generate test fixtures
+npm run test-regen
+
+# run the benchmarks
+npm run bench
+
+# to profile javascript
+npm run prof
+```
+
+## Comparison to Other JavaScript Glob Implementations
+
+**tl;dr**
+
+- If you want glob matching that is as faithful as possible to
+ Bash pattern expansion semantics, and as fast as possible
+ within that constraint, _use this module_.
+- If you are reasonably sure that the patterns you will encounter
+ are relatively simple, and want the absolutely fastest glob
+ matcher out there, _use [fast-glob](http://npm.im/fast-glob)_.
+- If you are reasonably sure that the patterns you will encounter
+ are relatively simple, and want the convenience of
+ automatically respecting `.gitignore` files, _use
+ [globby](http://npm.im/globby)_.
+
+There are some other glob matcher libraries on npm, but these
+three are (in my opinion, as of 2023) the best.
+
+---
+
+**full explanation**
+
+Every library reflects a set of opinions and priorities in the
+trade-offs it makes. Other than this library, I can personally
+recommend both [globby](http://npm.im/globby) and
+[fast-glob](http://npm.im/fast-glob), though they differ in their
+benefits and drawbacks.
+
+Both have very nice APIs and are reasonably fast.
+
+`fast-glob` is, as far as I am aware, the fastest glob
+implementation in JavaScript today. However, there are many
+cases where the choices that `fast-glob` makes in pursuit of
+speed mean that its results differ from the results returned by
+Bash and other sh-like shells, which may be surprising.
+
+In my testing, `fast-glob` is around 10-20% faster than this
+module when walking over 200k files nested 4 directories
+deep[1](#fn-webscale). However, there are some inconsistencies
+with Bash matching behavior that this module does not suffer
+from:
+
+- `**` only matches files, not directories
+- `..` path portions are not handled unless they appear at the
+ start of the pattern
+- `./!(<pattern>)` will not match any files that _start_ with
+ `<pattern>`, even if they do not match `<pattern>`. For
+ example, `!(9).txt` will not match `9999.txt`.
+- Some brace patterns in the middle of a pattern will result in
+ failing to find certain matches.
+- Extglob patterns are allowed to contain `/` characters.
+
+Globby exhibits all of the same pattern semantics as fast-glob
+(as it is a wrapper around fast-glob) and is slightly slower than
+node-glob (by about 10-20% in the benchmark test set, or in other
+words, anywhere from 20-50% slower than fast-glob). However, it
+adds some API conveniences that may be worth the costs.
+
+- Support for `.gitignore` and other ignore files.
+- Support for negated globs (ie, patterns starting with `!`
+ rather than using a separate `ignore` option).
+
+The priority of this module is "correctness" in the sense of
+performing a glob pattern expansion as faithfully as possible to
+the behavior of Bash and other sh-like shells, with as much speed
+as possible.
+
+Note that prior versions of `node-glob` are _not_ on this list.
+Former versions of this module are far too slow for any cases
+where performance matters at all, and were designed with APIs
+that are extremely dated by current JavaScript standards.
+
+---
+
+[1]: In the cases where this module
+returns results and `fast-glob` doesn't, it's even faster, of
+course.
+
+![lumpy space princess saying 'oh my GLOB'](https://github.com/isaacs/node-glob/raw/main/oh-my-glob.gif)
+
+### Benchmark Results
+
+First number is time, smaller is better.
+
+Second number is the count of results returned.
+
+```
+--- pattern: '**' ---
+~~ sync ~~
+node fast-glob sync 0m0.598s 200364
+node globby sync 0m0.765s 200364
+node current globSync mjs 0m0.683s 222656
+node current glob syncStream 0m0.649s 222656
+~~ async ~~
+node fast-glob async 0m0.350s 200364
+node globby async 0m0.509s 200364
+node current glob async mjs 0m0.463s 222656
+node current glob stream 0m0.411s 222656
+
+--- pattern: '**/..' ---
+~~ sync ~~
+node fast-glob sync 0m0.486s 0
+node globby sync 0m0.769s 200364
+node current globSync mjs 0m0.564s 2242
+node current glob syncStream 0m0.583s 2242
+~~ async ~~
+node fast-glob async 0m0.283s 0
+node globby async 0m0.512s 200364
+node current glob async mjs 0m0.299s 2242
+node current glob stream 0m0.312s 2242
+
+--- pattern: './**/0/**/0/**/0/**/0/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.490s 10
+node globby sync 0m0.517s 10
+node current globSync mjs 0m0.540s 10
+node current glob syncStream 0m0.550s 10
+~~ async ~~
+node fast-glob async 0m0.290s 10
+node globby async 0m0.296s 10
+node current glob async mjs 0m0.278s 10
+node current glob stream 0m0.302s 10
+
+--- pattern: './**/[01]/**/[12]/**/[23]/**/[45]/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.500s 160
+node globby sync 0m0.528s 160
+node current globSync mjs 0m0.556s 160
+node current glob syncStream 0m0.573s 160
+~~ async ~~
+node fast-glob async 0m0.283s 160
+node globby async 0m0.301s 160
+node current glob async mjs 0m0.306s 160
+node current glob stream 0m0.322s 160
+
+--- pattern: './**/0/**/0/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.502s 5230
+node globby sync 0m0.527s 5230
+node current globSync mjs 0m0.544s 5230
+node current glob syncStream 0m0.557s 5230
+~~ async ~~
+node fast-glob async 0m0.285s 5230
+node globby async 0m0.305s 5230
+node current glob async mjs 0m0.304s 5230
+node current glob stream 0m0.310s 5230
+
+--- pattern: '**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.580s 200023
+node globby sync 0m0.771s 200023
+node current globSync mjs 0m0.685s 200023
+node current glob syncStream 0m0.649s 200023
+~~ async ~~
+node fast-glob async 0m0.349s 200023
+node globby async 0m0.509s 200023
+node current glob async mjs 0m0.427s 200023
+node current glob stream 0m0.388s 200023
+
+--- pattern: '{**/*.txt,**/?/**/*.txt,**/?/**/?/**/*.txt,**/?/**/?/**/?/**/*.txt,**/?/**/?/**/?/**/?/**/*.txt}' ---
+~~ sync ~~
+node fast-glob sync 0m0.589s 200023
+node globby sync 0m0.771s 200023
+node current globSync mjs 0m0.716s 200023
+node current glob syncStream 0m0.684s 200023
+~~ async ~~
+node fast-glob async 0m0.351s 200023
+node globby async 0m0.518s 200023
+node current glob async mjs 0m0.462s 200023
+node current glob stream 0m0.468s 200023
+
+--- pattern: '**/5555/0000/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.496s 1000
+node globby sync 0m0.519s 1000
+node current globSync mjs 0m0.539s 1000
+node current glob syncStream 0m0.567s 1000
+~~ async ~~
+node fast-glob async 0m0.285s 1000
+node globby async 0m0.299s 1000
+node current glob async mjs 0m0.305s 1000
+node current glob stream 0m0.301s 1000
+
+--- pattern: './**/0/**/../[01]/**/0/../**/0/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.484s 0
+node globby sync 0m0.507s 0
+node current globSync mjs 0m0.577s 4880
+node current glob syncStream 0m0.586s 4880
+~~ async ~~
+node fast-glob async 0m0.280s 0
+node globby async 0m0.298s 0
+node current glob async mjs 0m0.327s 4880
+node current glob stream 0m0.324s 4880
+
+--- pattern: '**/????/????/????/????/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.547s 100000
+node globby sync 0m0.673s 100000
+node current globSync mjs 0m0.626s 100000
+node current glob syncStream 0m0.618s 100000
+~~ async ~~
+node fast-glob async 0m0.315s 100000
+node globby async 0m0.414s 100000
+node current glob async mjs 0m0.366s 100000
+node current glob stream 0m0.345s 100000
+
+--- pattern: './{**/?{/**/?{/**/?{/**/?,,,,},,,,},,,,},,,}/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.588s 100000
+node globby sync 0m0.670s 100000
+node current globSync mjs 0m0.717s 200023
+node current glob syncStream 0m0.687s 200023
+~~ async ~~
+node fast-glob async 0m0.343s 100000
+node globby async 0m0.418s 100000
+node current glob async mjs 0m0.519s 200023
+node current glob stream 0m0.451s 200023
+
+--- pattern: '**/!(0|9).txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.573s 160023
+node globby sync 0m0.731s 160023
+node current globSync mjs 0m0.680s 180023
+node current glob syncStream 0m0.659s 180023
+~~ async ~~
+node fast-glob async 0m0.345s 160023
+node globby async 0m0.476s 160023
+node current glob async mjs 0m0.427s 180023
+node current glob stream 0m0.388s 180023
+
+--- pattern: './{*/**/../{*/**/../{*/**/../{*/**/../{*/**,,,,},,,,},,,,},,,,},,,,}/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.483s 0
+node globby sync 0m0.512s 0
+node current globSync mjs 0m0.811s 200023
+node current glob syncStream 0m0.773s 200023
+~~ async ~~
+node fast-glob async 0m0.280s 0
+node globby async 0m0.299s 0
+node current glob async mjs 0m0.617s 200023
+node current glob stream 0m0.568s 200023
+
+--- pattern: './*/**/../*/**/../*/**/../*/**/../*/**/../*/**/../*/**/../*/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.485s 0
+node globby sync 0m0.507s 0
+node current globSync mjs 0m0.759s 200023
+node current glob syncStream 0m0.740s 200023
+~~ async ~~
+node fast-glob async 0m0.281s 0
+node globby async 0m0.297s 0
+node current glob async mjs 0m0.544s 200023
+node current glob stream 0m0.464s 200023
+
+--- pattern: './*/**/../*/**/../*/**/../*/**/../*/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.486s 0
+node globby sync 0m0.513s 0
+node current globSync mjs 0m0.734s 200023
+node current glob syncStream 0m0.696s 200023
+~~ async ~~
+node fast-glob async 0m0.286s 0
+node globby async 0m0.296s 0
+node current glob async mjs 0m0.506s 200023
+node current glob stream 0m0.483s 200023
+
+--- pattern: './0/**/../1/**/../2/**/../3/**/../4/**/../5/**/../6/**/../7/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.060s 0
+node globby sync 0m0.074s 0
+node current globSync mjs 0m0.067s 0
+node current glob syncStream 0m0.066s 0
+~~ async ~~
+node fast-glob async 0m0.060s 0
+node globby async 0m0.075s 0
+node current glob async mjs 0m0.066s 0
+node current glob stream 0m0.067s 0
+
+--- pattern: './**/?/**/?/**/?/**/?/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.568s 100000
+node globby sync 0m0.651s 100000
+node current globSync mjs 0m0.619s 100000
+node current glob syncStream 0m0.617s 100000
+~~ async ~~
+node fast-glob async 0m0.332s 100000
+node globby async 0m0.409s 100000
+node current glob async mjs 0m0.372s 100000
+node current glob stream 0m0.351s 100000
+
+--- pattern: '**/*/**/*/**/*/**/*/**' ---
+~~ sync ~~
+node fast-glob sync 0m0.603s 200113
+node globby sync 0m0.798s 200113
+node current globSync mjs 0m0.730s 222137
+node current glob syncStream 0m0.693s 222137
+~~ async ~~
+node fast-glob async 0m0.356s 200113
+node globby async 0m0.525s 200113
+node current glob async mjs 0m0.508s 222137
+node current glob stream 0m0.455s 222137
+
+--- pattern: './**/*/**/*/**/*/**/*/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.622s 200000
+node globby sync 0m0.792s 200000
+node current globSync mjs 0m0.722s 200000
+node current glob syncStream 0m0.695s 200000
+~~ async ~~
+node fast-glob async 0m0.369s 200000
+node globby async 0m0.527s 200000
+node current glob async mjs 0m0.502s 200000
+node current glob stream 0m0.481s 200000
+
+--- pattern: '**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.588s 200023
+node globby sync 0m0.771s 200023
+node current globSync mjs 0m0.684s 200023
+node current glob syncStream 0m0.658s 200023
+~~ async ~~
+node fast-glob async 0m0.352s 200023
+node globby async 0m0.516s 200023
+node current glob async mjs 0m0.432s 200023
+node current glob stream 0m0.384s 200023
+
+--- pattern: './**/**/**/**/**/**/**/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.589s 200023
+node globby sync 0m0.766s 200023
+node current globSync mjs 0m0.682s 200023
+node current glob syncStream 0m0.652s 200023
+~~ async ~~
+node fast-glob async 0m0.352s 200023
+node globby async 0m0.523s 200023
+node current glob async mjs 0m0.436s 200023
+node current glob stream 0m0.380s 200023
+
+--- pattern: '**/*/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.592s 200023
+node globby sync 0m0.776s 200023
+node current globSync mjs 0m0.691s 200023
+node current glob syncStream 0m0.659s 200023
+~~ async ~~
+node fast-glob async 0m0.357s 200023
+node globby async 0m0.513s 200023
+node current glob async mjs 0m0.471s 200023
+node current glob stream 0m0.424s 200023
+
+--- pattern: '**/*/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.585s 200023
+node globby sync 0m0.766s 200023
+node current globSync mjs 0m0.694s 200023
+node current glob syncStream 0m0.664s 200023
+~~ async ~~
+node fast-glob async 0m0.350s 200023
+node globby async 0m0.514s 200023
+node current glob async mjs 0m0.472s 200023
+node current glob stream 0m0.424s 200023
+
+--- pattern: '**/[0-9]/**/*.txt' ---
+~~ sync ~~
+node fast-glob sync 0m0.544s 100000
+node globby sync 0m0.636s 100000
+node current globSync mjs 0m0.626s 100000
+node current glob syncStream 0m0.621s 100000
+~~ async ~~
+node fast-glob async 0m0.322s 100000
+node globby async 0m0.404s 100000
+node current glob async mjs 0m0.360s 100000
+node current glob stream 0m0.352s 100000
+```
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json
new file mode 100644
index 00000000000000..c15df94a3582bf
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json
@@ -0,0 +1,4 @@
+{
+ "version": "10.3.3",
+ "type": "commonjs"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts
new file mode 100644
index 00000000000000..34e005228653c8
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts
@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+export {};
+//# sourceMappingURL=bin.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts.map
new file mode 100644
index 00000000000000..c10c656ec75109
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"bin.d.ts","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js
new file mode 100755
index 00000000000000..4a8a88f2734d2e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js
@@ -0,0 +1,270 @@
+#!/usr/bin/env node
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const foreground_child_1 = require("foreground-child");
+const fs_1 = require("fs");
+const jackspeak_1 = require("jackspeak");
+const package_json_1 = require("../package.json");
+const index_js_1 = require("./index.js");
+const j = (0, jackspeak_1.jack)({
+ usage: 'glob [options] [<pattern> [<pattern> ...]]',
+})
+ .description(`
+ Glob v${package_json_1.version}
+
+ Expand the positional glob expression arguments into any matching file
+ system paths found.
+ `)
+ .opt({
+ cmd: {
+ short: 'c',
+ hint: 'command',
+ description: `Run the command provided, passing the glob expression
+ matches as arguments.`,
+ },
+})
+ .opt({
+ default: {
+ short: 'p',
+ hint: 'pattern',
+ description: `If no positional arguments are provided, glob will use
+ this pattern`,
+ },
+})
+ .flag({
+ all: {
+ short: 'A',
+ description: `By default, the glob cli command will not expand any
+ arguments that are an exact match to a file on disk.
+
+ This prevents double-expanding, in case the shell expands
+ an argument whose filename is a glob expression.
+
+ For example, if 'app/*.ts' would match 'app/[id].ts', then
+ on Windows powershell or cmd.exe, 'glob app/*.ts' will
+ expand to 'app/[id].ts', as expected. However, in posix
+ shells such as bash or zsh, the shell will first expand
+ 'app/*.ts' to a list of filenames. Then glob will look
+ for a file matching 'app/[id].ts' (ie, 'app/i.ts' or
+ 'app/d.ts'), which is unexpected.
+
+ Setting '--all' prevents this behavior, causing glob
+ to treat ALL patterns as glob expressions to be expanded,
+ even if they are an exact match to a file on disk.
+
+ When setting this option, be sure to enquote arguments
+ so that the shell will not expand them prior to passing
+ them to the glob command process.
+ `,
+ },
+ absolute: {
+ short: 'a',
+ description: 'Expand to absolute paths',
+ },
+ 'dot-relative': {
+ short: 'd',
+ description: `Prepend './' on relative matches`,
+ },
+ mark: {
+ short: 'm',
+ description: `Append a / on any directories matched`,
+ },
+ posix: {
+ short: 'x',
+ description: `Always resolve to posix style paths, using '/' as the
+ directory separator, even on Windows. Drive letter
+ absolute matches on Windows will be expanded to their
+ full resolved UNC paths, eg instead of 'C:\\foo\\bar',
+ it will expand to '//?/C:/foo/bar'.
+ `,
+ },
+ follow: {
+ short: 'f',
+ description: `Follow symlinked directories when expanding '**'`,
+ },
+ realpath: {
+ short: 'R',
+ description: `Call 'fs.realpath' on all of the results. In the case
+ of an entry that cannot be resolved, the entry is
+ omitted. This incurs a slight performance penalty, of
+ course, because of the added system calls.`,
+ },
+ stat: {
+ short: 's',
+ description: `Call 'fs.lstat' on all entries, whether required or not
+ to determine if it's a valid match.`,
+ },
+ 'match-base': {
+ short: 'b',
+ description: `Perform a basename-only match if the pattern does not
+ contain any slash characters. That is, '*.js' would be
+ treated as equivalent to '**/*.js', matching js files
+ in all directories.
+ `,
+ },
+ dot: {
+ description: `Allow patterns to match files/directories that start
+ with '.', even if the pattern does not start with '.'
+ `,
+ },
+ nobrace: {
+ description: 'Do not expand {...} patterns',
+ },
+ nocase: {
+ description: `Perform a case-insensitive match. This defaults to
+ 'true' on macOS and Windows platforms, and false on
+ all others.
+
+ Note: 'nocase' should only be explicitly set when it is
+ known that the filesystem's case sensitivity differs
+ from the platform default. If set 'true' on
+ case-insensitive file systems, then the walk may return
+ more or less results than expected.
+ `,
+ },
+ nodir: {
+ description: `Do not match directories, only files.
+
+ Note: to *only* match directories, append a '/' at the
+ end of the pattern.
+ `,
+ },
+ noext: {
+ description: `Do not expand extglob patterns, such as '+(a|b)'`,
+ },
+ noglobstar: {
+ description: `Do not expand '**' against multiple path portions.
+ Ie, treat it as a normal '*' instead.`,
+ },
+ 'windows-path-no-escape': {
+ description: `Use '\\' as a path separator *only*, and *never* as an
+ escape character. If set, all '\\' characters are
+ replaced with '/' in the pattern.`,
+ },
+})
+ .num({
+ 'max-depth': {
+ short: 'D',
+ description: `Maximum depth to traverse from the current
+ working directory`,
+ },
+})
+ .opt({
+ cwd: {
+ short: 'C',
+ description: 'Current working directory to execute/match in',
+ default: process.cwd(),
+ },
+ root: {
+ short: 'r',
+ description: `A string path resolved against the 'cwd', which is
+ used as the starting point for absolute patterns that
+ start with '/' (but not drive letters or UNC paths
+ on Windows).
+
+ Note that this *doesn't* necessarily limit the walk to
+ the 'root' directory, and doesn't affect the cwd
+ starting point for non-absolute patterns. A pattern
+ containing '..' will still be able to traverse out of
+ the root directory, if it is not an actual root directory
+ on the filesystem, and any non-absolute patterns will
+ still be matched in the 'cwd'.
+
+ To start absolute and non-absolute patterns in the same
+ path, you can use '--root=' to set it to the empty
+ string. However, be aware that on Windows systems, a
+ pattern like 'x:/*' or '//host/share/*' will *always*
+ start in the 'x:/' or '//host/share/' directory,
+ regardless of the --root setting.
+ `,
+ },
+ platform: {
+ description: `Defaults to the value of 'process.platform' if
+ available, or 'linux' if not. Setting --platform=win32
+ on non-Windows systems may cause strange behavior!`,
+ validate: v => new Set([
+ 'aix',
+ 'android',
+ 'darwin',
+ 'freebsd',
+ 'haiku',
+ 'linux',
+ 'openbsd',
+ 'sunos',
+ 'win32',
+ 'cygwin',
+ 'netbsd',
+ ]).has(v),
+ },
+})
+ .optList({
+ ignore: {
+ short: 'i',
+ description: `Glob patterns to ignore`,
+ },
+})
+ .flag({
+ debug: {
+ short: 'v',
+ description: `Output a huge amount of noisy debug information about
+ patterns as they are parsed and used to match files.`,
+ },
+})
+ .flag({
+ help: {
+ short: 'h',
+ description: 'Show this usage information',
+ },
+});
+try {
+ const { positionals, values } = j.parse();
+ if (values.help) {
+ console.log(j.usage());
+ process.exit(0);
+ }
+ if (positionals.length === 0 && !values.default)
+ throw 'No patterns provided';
+ if (positionals.length === 0 && values.default)
+ positionals.push(values.default);
+ const patterns = values.all
+ ? positionals
+ : positionals.filter(p => !(0, fs_1.existsSync)(p));
+ const matches = values.all ? [] : positionals.filter(p => (0, fs_1.existsSync)(p));
+ const stream = (0, index_js_1.globStream)(patterns, {
+ absolute: values.absolute,
+ cwd: values.cwd,
+ dot: values.dot,
+ dotRelative: values['dot-relative'],
+ follow: values.follow,
+ ignore: values.ignore,
+ mark: values.mark,
+ matchBase: values['match-base'],
+ maxDepth: values['max-depth'],
+ nobrace: values.nobrace,
+ nocase: values.nocase,
+ nodir: values.nodir,
+ noext: values.noext,
+ noglobstar: values.noglobstar,
+ platform: values.platform,
+ realpath: values.realpath,
+ root: values.root,
+ stat: values.stat,
+ debug: values.debug,
+ posix: values.posix,
+ });
+ const cmd = values.cmd;
+ if (!cmd) {
+ matches.forEach(m => console.log(m));
+ stream.on('data', f => console.log(f));
+ }
+ else {
+ stream.on('data', f => matches.push(f));
+ stream.on('end', () => (0, foreground_child_1.foregroundChild)(cmd, matches, { shell: true }));
+ }
+}
+catch (e) {
+ console.error(j.usage());
+ console.error(e instanceof Error ? e.message : String(e));
+ process.exit(1);
+}
+//# sourceMappingURL=bin.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js.map
new file mode 100644
index 00000000000000..e189acfd01b1a7
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AACA,uDAAkD;AAClD,2BAA+B;AAC/B,yCAAgC;AAChC,kDAAyC;AACzC,yCAAuC;AAEvC,MAAM,CAAC,GAAG,IAAA,gBAAI,EAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,sBAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,GAAG,CAAC;IACH,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;iCACc;KAC5B;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;sCACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QACjE,QAAQ,EAAE,CAAC,CAAC,EAAE,CACZ,IAAI,GAAG,CAAC;YACN,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;KACZ;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;CACF,CAAC;KACD,IAAI,CAAC;IACJ,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI;IACF,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;QAC7C,MAAM,sBAAsB,CAAA;IAC9B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO;QAC5C,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;IAClC,MAAM,QAAQ,GAAG,MAAM,CAAC,GAAG;QACzB,CAAC,CAAC,WAAW;QACb,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IAC3C,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,IAAA,eAAU,EAAC,CAAC,CAAC,CAAC,CAAA;IACxE,MAAM,MAAM,GAAG,IAAA,qBAAU,EAAC,QAAQ,EAAE;QAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,WAAW,EAAE,MAAM,CAAC,cAAc,CAAC;QACnC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,SAAS,EAAE,MAAM,CAAC,YAAY,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC,WAAW,CAAC;QAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,QAAQ,EAAE,MAAM,CAAC,QAAuC;QACxD,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK,EAAE,MAAM,CAAC
,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;KACpB,CAAC,CAAA;IAEF,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAA;IACtB,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;KACvC;SAAM;QACL,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACvC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,IAAA,kCAAe,EAAC,GAAG,EAAE,OAAO,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;KACvE;CACF;AAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;CAChB","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { version } from '../package.json'\nimport { globStream } from './index.js'\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]',\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .opt({\n default: {\n short: 'p',\n hint: 'pattern',\n description: `If no positional arguments are provided, glob will use\n this pattern`,\n },\n })\n .flag({\n all: {\n short: 'A',\n description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC maths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. 
This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character. If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. 
Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validate: v =>\n new Set([\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ]).has(v),\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n })\n .flag({\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n if (values.help) {\n console.log(j.usage())\n process.exit(0)\n }\n if (positionals.length === 0 && !values.default)\n throw 'No patterns provided'\n if (positionals.length === 0 && values.default)\n positionals.push(values.default)\n const patterns = values.all\n ? positionals\n : positionals.filter(p => !existsSync(p))\n const matches = values.all ? [] : positionals.filter(p => existsSync(p))\n const stream = globStream(patterns, {\n absolute: values.absolute,\n cwd: values.cwd,\n dot: values.dot,\n dotRelative: values['dot-relative'],\n follow: values.follow,\n ignore: values.ignore,\n mark: values.mark,\n matchBase: values['match-base'],\n maxDepth: values['max-depth'],\n nobrace: values.nobrace,\n nocase: values.nocase,\n nodir: values.nodir,\n noext: values.noext,\n noglobstar: values.noglobstar,\n platform: values.platform as undefined | NodeJS.Platform,\n realpath: values.realpath,\n root: values.root,\n stat: values.stat,\n debug: values.debug,\n posix: values.posix,\n })\n\n const cmd = values.cmd\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n stream.on('data', f => matches.push(f))\n stream.on('end', () => foregroundChild(cmd, matches, { shell: true }))\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? e.message : String(e))\n process.exit(1)\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts
new file mode 100644
index 00000000000000..a8b3da7722b652
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts
@@ -0,0 +1,344 @@
+/// <reference types="node" />
+import { Minimatch } from 'minimatch';
+import { Minipass } from 'minipass';
+import { FSOption, Path, PathScurry } from 'path-scurry';
+import { IgnoreLike } from './ignore.js';
+import { Pattern } from './pattern.js';
+export type MatchSet = Minimatch['set'];
+export type GlobParts = Exclude<Minimatch['globParts'], undefined>;
+/**
+ * A `GlobOptions` object may be provided to any of the exported methods, and
+ * must be provided to the `Glob` constructor.
+ *
+ * All options are optional, boolean, and false by default, unless otherwise
+ * noted.
+ *
+ * All resolved options are added to the Glob object as properties.
+ *
+ * If you are running many `glob` operations, you can pass a Glob object as the
+ * `options` argument to a subsequent operation to share the previously loaded
+ * cache.
+ */
+export interface GlobOptions {
+ /**
+ * Set to `true` to always receive absolute paths for
+ * matched files. Set to `false` to always return relative paths.
+ *
+ * When this option is not set, absolute paths are returned for patterns
+ * that are absolute, and otherwise paths are returned that are relative
+ * to the `cwd` setting.
+ *
+ * This does _not_ make an extra system call to get
+ * the realpath, it only does string path resolution.
+ *
+ * Conflicts with {@link withFileTypes}
+ */
+ absolute?: boolean;
+ /**
+ * Set to false to enable {@link windowsPathsNoEscape}
+ *
+ * @deprecated
+ */
+ allowWindowsEscape?: boolean;
+ /**
+ * The current working directory in which to search. Defaults to
+ * `process.cwd()`.
+ *
+ * May be either a string path or a `file://` URL object or string.
+ */
+ cwd?: string | URL;
+ /**
+ * Include `.dot` files in normal matches and `globstar`
+ * matches. Note that an explicit dot in a portion of the pattern
+ * will always match dot files.
+ */
+ dot?: boolean;
+ /**
+ * Prepend all relative path strings with `./` (or `.\` on Windows).
+ *
+ * Without this option, returned relative paths are "bare", so instead of
+ * returning `'./foo/bar'`, they are returned as `'foo/bar'`.
+ *
+ * Relative patterns starting with `'../'` are not prepended with `./`, even
+ * if this option is set.
+ */
+ dotRelative?: boolean;
+ /**
+ * Follow symlinked directories when expanding `**`
+ * patterns. This can result in a lot of duplicate references in
+ * the presence of cyclic links, and make performance quite bad.
+ *
+ * By default, a `**` in a pattern will follow 1 symbolic link if
+ * it is not the first item in the pattern, or none if it is the
+ * first item in the pattern, following the same behavior as Bash.
+ */
+ follow?: boolean;
+ /**
+ * string or string[], or an object with `ignore` and `ignoreChildren`
+ * methods.
+ *
+ * If a string or string[] is provided, then this is treated as a glob
+ * pattern or array of glob patterns to exclude from matches. To ignore all
+ * children within a directory, as well as the entry itself, append `'/**'`
+ * to the ignore pattern.
+ *
+ * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of
+ * any other settings.
+ *
+ * If an object is provided that has `ignored(path)` and/or
+ * `childrenIgnored(path)` methods, then these methods will be called to
+ * determine whether any Path is a match or if its children should be
+ * traversed, respectively.
+ */
+ ignore?: string | string[] | IgnoreLike;
+ /**
+ * Treat brace expansion like `{a,b}` as a "magic" pattern. Has no
+ * effect if {@link nobrace} is set.
+ *
+ * Only has effect on the {@link hasMagic} function.
+ */
+ magicalBraces?: boolean;
+ /**
+ * Add a `/` character to directory matches. Note that this requires
+ * additional stat calls in some cases.
+ */
+ mark?: boolean;
+ /**
+ * Perform a basename-only match if the pattern does not contain any slash
+ * characters. That is, `*.js` would be treated as equivalent to
+ * `**\/*.js`, matching all js files in all directories.
+ */
+ matchBase?: boolean;
+ /**
+ * Limit the directory traversal to a given depth below the cwd.
+ * Note that this does NOT prevent traversal to sibling folders,
+ * root patterns, and so on. It only limits the maximum folder depth
+ * that the walk will descend, relative to the cwd.
+ */
+ maxDepth?: number;
+ /**
+ * Do not expand `{a,b}` and `{1..3}` brace sets.
+ */
+ nobrace?: boolean;
+ /**
+ * Perform a case-insensitive match. This defaults to `true` on macOS and
+ * Windows systems, and `false` on all others.
+ *
+ * **Note** `nocase` should only be explicitly set when it is
+ * known that the filesystem's case sensitivity differs from the
+ * platform default. If set `true` on case-sensitive file
+ * systems, or `false` on case-insensitive file systems, then the
+ * walk may return more or less results than expected.
+ */
+ nocase?: boolean;
+ /**
+ * Do not match directories, only files. (Note: to match
+ * _only_ directories, put a `/` at the end of the pattern.)
+ */
+ nodir?: boolean;
+ /**
+ * Do not match "extglob" patterns such as `+(a|b)`.
+ */
+ noext?: boolean;
+ /**
+ * Do not match `**` against multiple filenames. (Ie, treat it as a normal
+ * `*` instead.)
+ *
+ * Conflicts with {@link matchBase}
+ */
+ noglobstar?: boolean;
+ /**
+ * Defaults to value of `process.platform` if available, or `'linux'` if
+ * not. Setting `platform:'win32'` on non-Windows systems may cause strange
+ * behavior.
+ */
+ platform?: NodeJS.Platform;
+ /**
+ * Set to true to call `fs.realpath` on all of the
+ * results. In the case of an entry that cannot be resolved, the
+ * entry is omitted. This incurs a slight performance penalty, of
+ * course, because of the added system calls.
+ */
+ realpath?: boolean;
+ /**
+ *
+ * A string path resolved against the `cwd` option, which
+ * is used as the starting point for absolute patterns that start
+ * with `/`, (but not drive letters or UNC paths on Windows).
+ *
+ * Note that this _doesn't_ necessarily limit the walk to the
+ * `root` directory, and doesn't affect the cwd starting point for
+ * non-absolute patterns. A pattern containing `..` will still be
+ * able to traverse out of the root directory, if it is not an
+ * actual root directory on the filesystem, and any non-absolute
+ * patterns will be matched in the `cwd`. For example, the
+ * pattern `/../*` with `{root:'/some/path'}` will return all
+ * files in `/some`, not all files in `/some/path`. The pattern
+ * `*` with `{root:'/some/path'}` will return all the entries in
+ * the cwd, not the entries in `/some/path`.
+ *
+ * To start absolute and non-absolute patterns in the same
+ * path, you can use `{root:''}`. However, be aware that on
+ * Windows systems, a pattern like `x:/*` or `//host/share/*` will
+ * _always_ start in the `x:/` or `//host/share` directory,
+ * regardless of the `root` setting.
+ */
+ root?: string;
+ /**
+ * A [PathScurry](http://npm.im/path-scurry) object used
+ * to traverse the file system. If the `nocase` option is set
+ * explicitly, then any provided `scurry` object must match this
+ * setting.
+ */
+ scurry?: PathScurry;
+ /**
+ * Call `lstat()` on all entries, whether required or not to determine
+ * if it's a valid match. When used with {@link withFileTypes}, this means
+ * that matches will include data such as modified time, permissions, and
+ * so on. Note that this will incur a performance cost due to the added
+ * system calls.
+ */
+ stat?: boolean;
+ /**
+ * An AbortSignal which will cancel the Glob walk when
+ * triggered.
+ */
+ signal?: AbortSignal;
+ /**
+ * Use `\\` as a path separator _only_, and
+ * _never_ as an escape character. If set, all `\\` characters are
+ * replaced with `/` in the pattern.
+ *
+ * Note that this makes it **impossible** to match against paths
+ * containing literal glob pattern characters, but allows matching
+ * with patterns constructed using `path.join()` and
+ * `path.resolve()` on Windows platforms, mimicking the (buggy!)
+ * behavior of Glob v7 and before on Windows. Please use with
+ * caution, and be mindful of [the caveat below about Windows
+ * paths](#windows). (For legacy reasons, this is also set if
+ * `allowWindowsEscape` is set to the exact value `false`.)
+ */
+ windowsPathsNoEscape?: boolean;
+ /**
+ * Return [PathScurry](http://npm.im/path-scurry)
+ * `Path` objects instead of strings. These are similar to a
+ * NodeJS `Dirent` object, but with additional methods and
+ * properties.
+ *
+ * Conflicts with {@link absolute}
+ */
+ withFileTypes?: boolean;
+ /**
+ * An fs implementation to override some or all of the defaults. See
+ * http://npm.im/path-scurry for details about what can be overridden.
+ */
+ fs?: FSOption;
+ /**
+ * Just passed along to Minimatch. Note that this makes all pattern
+ * matching operations slower and *extremely* noisy.
+ */
+ debug?: boolean;
+ /**
+ * Return `/` delimited paths, even on Windows.
+ *
+ * On posix systems, this has no effect. But, on Windows, it means that
+ * paths will be `/` delimited, and absolute paths will be their full
+ * resolved UNC forms, eg instead of `'C:\\foo\\bar'`, it would return
+ * `'//?/C:/foo/bar'`
+ */
+ posix?: boolean;
+}
+export type GlobOptionsWithFileTypesTrue = GlobOptions & {
+ withFileTypes: true;
+ absolute?: undefined;
+ mark?: undefined;
+ posix?: undefined;
+};
+export type GlobOptionsWithFileTypesFalse = GlobOptions & {
+ withFileTypes?: false;
+};
+export type GlobOptionsWithFileTypesUnset = GlobOptions & {
+ withFileTypes?: undefined;
+};
+export type Result<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? Path : Opts extends GlobOptionsWithFileTypesFalse ? string : Opts extends GlobOptionsWithFileTypesUnset ? string : string | Path;
+export type Results<Opts> = Result<Opts>[];
+export type FileTypes<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? true : Opts extends GlobOptionsWithFileTypesFalse ? false : Opts extends GlobOptionsWithFileTypesUnset ? false : boolean;
+/**
+ * An object that can perform glob pattern traversals.
+ */
+export declare class Glob<Opts extends GlobOptions> implements GlobOptions {
+ absolute?: boolean;
+ cwd: string;
+ root?: string;
+ dot: boolean;
+ dotRelative: boolean;
+ follow: boolean;
+ ignore?: string | string[] | IgnoreLike;
+ magicalBraces: boolean;
+ mark?: boolean;
+ matchBase: boolean;
+ maxDepth: number;
+ nobrace: boolean;
+ nocase: boolean;
+ nodir: boolean;
+ noext: boolean;
+ noglobstar: boolean;
+ pattern: string[];
+ platform: NodeJS.Platform;
+ realpath: boolean;
+ scurry: PathScurry;
+ stat: boolean;
+ signal?: AbortSignal;
+ windowsPathsNoEscape: boolean;
+ withFileTypes: FileTypes<Opts>;
+ /**
+ * The options provided to the constructor.
+ */
+ opts: Opts;
+ /**
+ * An array of parsed immutable {@link Pattern} objects.
+ */
+ patterns: Pattern[];
+ /**
+ * All options are stored as properties on the `Glob` object.
+ *
+ * See {@link GlobOptions} for full options descriptions.
+ *
+ * Note that a previous `Glob` object can be passed as the
+ * `GlobOptions` to another `Glob` instantiation to re-use settings
+ * and caches with a new pattern.
+ *
+ * Traversal functions can be called multiple times to run the walk
+ * again.
+ */
+ constructor(pattern: string | string[], opts: Opts);
+ /**
+ * Returns a Promise that resolves to the results array.
+ */
+ walk(): Promise<Results<Opts>>;
+ /**
+ * synchronous {@link Glob.walk}
+ */
+ walkSync(): Results<Opts>;
+ /**
+ * Stream results asynchronously.
+ */
+ stream(): Minipass<Result<Opts>, Result<Opts>>;
+ /**
+ * Stream results synchronously.
+ */
+ streamSync(): Minipass<Result<Opts>, Result<Opts>>;
+ /**
+ * Default sync iteration function. Returns a Generator that
+ * iterates over the results.
+ */
+ iterateSync(): Generator<Result<Opts>, void, void>;
+ [Symbol.iterator](): Generator<Result<Opts>, void, void>;
+ /**
+ * Default async iteration function. Returns an AsyncGenerator that
+ * iterates over the results.
+ */
+ iterate(): AsyncGenerator<Result<Opts>, void, void>;
+ [Symbol.asyncIterator](): AsyncGenerator<Result<Opts>, void, void>;
+}
+//# sourceMappingURL=glob.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts.map
new file mode 100644
index 00000000000000..6353d8b3c47126
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IAwHlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC
,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js
new file mode 100644
index 00000000000000..eb37c6b9a6601e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js
@@ -0,0 +1,238 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Glob = void 0;
+const minimatch_1 = require("minimatch");
+const path_scurry_1 = require("path-scurry");
+const url_1 = require("url");
+const pattern_js_1 = require("./pattern.js");
+const walker_js_1 = require("./walker.js");
+// if no process global, just call it linux.
+// so we default to case-sensitive, / separators
+const defaultPlatform = typeof process === 'object' &&
+ process &&
+ typeof process.platform === 'string'
+ ? process.platform
+ : 'linux';
+/**
+ * An object that can perform glob pattern traversals.
+ */
+class Glob {
+ absolute;
+ cwd;
+ root;
+ dot;
+ dotRelative;
+ follow;
+ ignore;
+ magicalBraces;
+ mark;
+ matchBase;
+ maxDepth;
+ nobrace;
+ nocase;
+ nodir;
+ noext;
+ noglobstar;
+ pattern;
+ platform;
+ realpath;
+ scurry;
+ stat;
+ signal;
+ windowsPathsNoEscape;
+ withFileTypes;
+ /**
+ * The options provided to the constructor.
+ */
+ opts;
+ /**
+ * An array of parsed immutable {@link Pattern} objects.
+ */
+ patterns;
+ /**
+ * All options are stored as properties on the `Glob` object.
+ *
+ * See {@link GlobOptions} for full options descriptions.
+ *
+ * Note that a previous `Glob` object can be passed as the
+ * `GlobOptions` to another `Glob` instantiation to re-use settings
+ * and caches with a new pattern.
+ *
+ * Traversal functions can be called multiple times to run the walk
+ * again.
+ */
+ constructor(pattern, opts) {
+ /* c8 ignore start */
+ if (!opts)
+ throw new TypeError('glob options required');
+ /* c8 ignore stop */
+ this.withFileTypes = !!opts.withFileTypes;
+ this.signal = opts.signal;
+ this.follow = !!opts.follow;
+ this.dot = !!opts.dot;
+ this.dotRelative = !!opts.dotRelative;
+ this.nodir = !!opts.nodir;
+ this.mark = !!opts.mark;
+ if (!opts.cwd) {
+ this.cwd = '';
+ }
+ else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
+ opts.cwd = (0, url_1.fileURLToPath)(opts.cwd);
+ }
+ this.cwd = opts.cwd || '';
+ this.root = opts.root;
+ this.magicalBraces = !!opts.magicalBraces;
+ this.nobrace = !!opts.nobrace;
+ this.noext = !!opts.noext;
+ this.realpath = !!opts.realpath;
+ this.absolute = opts.absolute;
+ this.noglobstar = !!opts.noglobstar;
+ this.matchBase = !!opts.matchBase;
+ this.maxDepth =
+ typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
+ this.stat = !!opts.stat;
+ this.ignore = opts.ignore;
+ if (this.withFileTypes && this.absolute !== undefined) {
+ throw new Error('cannot set absolute and withFileTypes:true');
+ }
+ if (typeof pattern === 'string') {
+ pattern = [pattern];
+ }
+ this.windowsPathsNoEscape =
+ !!opts.windowsPathsNoEscape ||
+ opts.allowWindowsEscape === false;
+ if (this.windowsPathsNoEscape) {
+ pattern = pattern.map(p => p.replace(/\\/g, '/'));
+ }
+ if (this.matchBase) {
+ if (opts.noglobstar) {
+ throw new TypeError('base matching requires globstar');
+ }
+ pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
+ }
+ this.pattern = pattern;
+ this.platform = opts.platform || defaultPlatform;
+ this.opts = { ...opts, platform: this.platform };
+ if (opts.scurry) {
+ this.scurry = opts.scurry;
+ if (opts.nocase !== undefined &&
+ opts.nocase !== opts.scurry.nocase) {
+ throw new Error('nocase option contradicts provided scurry option');
+ }
+ }
+ else {
+ const Scurry = opts.platform === 'win32'
+ ? path_scurry_1.PathScurryWin32
+ : opts.platform === 'darwin'
+ ? path_scurry_1.PathScurryDarwin
+ : opts.platform
+ ? path_scurry_1.PathScurryPosix
+ : path_scurry_1.PathScurry;
+ this.scurry = new Scurry(this.cwd, {
+ nocase: opts.nocase,
+ fs: opts.fs,
+ });
+ }
+ this.nocase = this.scurry.nocase;
+ // If you do nocase:true on a case-sensitive file system, then
+ // we need to use regexps instead of strings for non-magic
+ // path portions, because statting `aBc` won't return results
+ // for the file `AbC` for example.
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
+ const mmo = {
+ // default nocase based on platform
+ ...opts,
+ dot: this.dot,
+ matchBase: this.matchBase,
+ nobrace: this.nobrace,
+ nocase: this.nocase,
+ nocaseMagicOnly,
+ nocomment: true,
+ noext: this.noext,
+ nonegate: true,
+ optimizationLevel: 2,
+ platform: this.platform,
+ windowsPathsNoEscape: this.windowsPathsNoEscape,
+ debug: !!this.opts.debug,
+ };
+ const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo));
+ const [matchSet, globParts] = mms.reduce((set, m) => {
+ set[0].push(...m.set);
+ set[1].push(...m.globParts);
+ return set;
+ }, [[], []]);
+ this.patterns = matchSet.map((set, i) => {
+ return new pattern_js_1.Pattern(set, globParts[i], 0, this.platform);
+ });
+ }
+ async walk() {
+ // Walkers always return array of Path objects, so we just have to
+ // coerce them into the right shape. It will have already called
+ // realpath() if the option was set to do so, so we know that's cached.
+ // start out knowing the cwd, at least
+ return [
+ ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
+ ...this.opts,
+ maxDepth: this.maxDepth !== Infinity
+ ? this.maxDepth + this.scurry.cwd.depth()
+ : Infinity,
+ platform: this.platform,
+ nocase: this.nocase,
+ }).walk()),
+ ];
+ }
+ walkSync() {
+ return [
+ ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
+ ...this.opts,
+ maxDepth: this.maxDepth !== Infinity
+ ? this.maxDepth + this.scurry.cwd.depth()
+ : Infinity,
+ platform: this.platform,
+ nocase: this.nocase,
+ }).walkSync(),
+ ];
+ }
+ stream() {
+ return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
+ ...this.opts,
+ maxDepth: this.maxDepth !== Infinity
+ ? this.maxDepth + this.scurry.cwd.depth()
+ : Infinity,
+ platform: this.platform,
+ nocase: this.nocase,
+ }).stream();
+ }
+ streamSync() {
+ return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
+ ...this.opts,
+ maxDepth: this.maxDepth !== Infinity
+ ? this.maxDepth + this.scurry.cwd.depth()
+ : Infinity,
+ platform: this.platform,
+ nocase: this.nocase,
+ }).streamSync();
+ }
+ /**
+ * Default sync iteration function. Returns a Generator that
+ * iterates over the results.
+ */
+ iterateSync() {
+ return this.streamSync()[Symbol.iterator]();
+ }
+ [Symbol.iterator]() {
+ return this.iterateSync();
+ }
+ /**
+ * Default async iteration function. Returns an AsyncGenerator that
+ * iterates over the results.
+ */
+ iterate() {
+ return this.stream()[Symbol.asyncIterator]();
+ }
+ [Symbol.asyncIterator]() {
+ return this.iterate();
+ }
+}
+exports.Glob = Glob;
+//# sourceMappingURL=glob.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js.map
new file mode 100644
index 00000000000000..7a7a9b28627480
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../../src/glob.ts"],"names":[],"mappings":";;;AAAA,yCAAuD;AAEvD,6CAOoB;AACpB,6BAAmC;AAEnC,6CAAsC;AACtC,2CAAoD;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAgTb;;GAEG;AACH,MAAa,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAE9B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;SACd;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YACpE,IAAI,CAAC,GAAG,GAAG,IAAA,mBAAa,EAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACnC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAE7B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;SAC9D;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC/B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;SACpB;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAoB,CAAC,kBAAkB,KAAK,KAAK,CAAA;QAEpD,IAAI,IAAI,CAAC,oBAAoB,EAAE;YAC7B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;SAClD;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;aACvD;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;SAChE;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC;gBACA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;aACpE;SACF;aAAM;YACL,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO;gBACvB,CAAC,CAAC,6BAAe;gBACjB,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBAC5B,CAAC,CAAC,8BAAgB;oBAClB,CAAC,CAAC,IAAI,CAAC,QAAQ;wBACf,CAAC,CAAC,6BAAe;wBACjB,CAAC,CAAC
,wBAAU,CAAA;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;SACH;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,qBAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,OAAO,IAAI,oBAAO,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACzD,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;oBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;aACpB,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBACxB,CAAC,CAAC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACd,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF;AArQD,oBAqQC","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { fileURLToPath } from 'url'\nimport { 
IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignore` and `ignoreChildren`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. 
To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. 
The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result = Opts extends GlobOptionsWithFileTypesTrue\n ? Path\n : Opts extends GlobOptionsWithFileTypesFalse\n ? string\n : Opts extends GlobOptionsWithFileTypesUnset\n ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes = Opts extends GlobOptionsWithFileTypesTrue\n ? true\n : Opts extends GlobOptionsWithFileTypesFalse\n ? 
false\n : Opts extends GlobOptionsWithFileTypesUnset\n ? false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as GlobOptions).allowWindowsEscape === false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32'\n ? PathScurryWin32\n : opts.platform === 'darwin'\n ? PathScurryDarwin\n : opts.platform\n ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []]\n )\n this.patterns = matchSet.map((set, i) => {\n return new Pattern(set, globParts[i], 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity\n ? this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts
new file mode 100644
index 00000000000000..8aec3bd9725175
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts
@@ -0,0 +1,14 @@
+import { GlobOptions } from './glob.js';
+/**
+ * Return true if the patterns provided contain any magic glob characters,
+ * given the options provided.
+ *
+ * Brace expansion is not considered "magic" unless the `magicalBraces` option
+ * is set, as brace expansion just turns one string into an array of strings.
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
+ * `'xby'` both do not contain any magic glob characters, and it's treated the
+ * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
+ * is in the options, brace expansion _is_ treated as a pattern having magic.
+ */
+export declare const hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
+//# sourceMappingURL=has-magic.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts.map
new file mode 100644
index 00000000000000..dd5053f80b44c3
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js
new file mode 100644
index 00000000000000..0918bd57e0f1c2
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.hasMagic = void 0;
+const minimatch_1 = require("minimatch");
+/**
+ * Return true if the patterns provided contain any magic glob characters,
+ * given the options provided.
+ *
+ * Brace expansion is not considered "magic" unless the `magicalBraces` option
+ * is set, as brace expansion just turns one string into an array of strings.
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
+ * `'xby'` both do not contain any magic glob characters, and it's treated the
+ * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
+ * is in the options, brace expansion _is_ treated as a pattern having magic.
+ */
+const hasMagic = (pattern, options = {}) => {
+ if (!Array.isArray(pattern)) {
+ pattern = [pattern];
+ }
+ for (const p of pattern) {
+ if (new minimatch_1.Minimatch(p, options).hasMagic())
+ return true;
+ }
+ return false;
+};
+exports.hasMagic = hasMagic;
+//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js.map
new file mode 100644
index 00000000000000..9b73cfad7d05e4
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"has-magic.js","sourceRoot":"","sources":["../../../src/has-magic.ts"],"names":[],"mappings":";;;AAAA,yCAAqC;AAGrC;;;;;;;;;;GAUG;AACI,MAAM,QAAQ,GAAG,CACtB,OAA0B,EAC1B,UAAuB,EAAE,EAChB,EAAE;IACX,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC3B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;KACpB;IACD,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE;QACvB,IAAI,IAAI,qBAAS,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE;YAAE,OAAO,IAAI,CAAA;KACtD;IACD,OAAO,KAAK,CAAA;AACd,CAAC,CAAA;AAXY,QAAA,QAAQ,YAWpB","sourcesContent":["import { Minimatch } from 'minimatch'\nimport { GlobOptions } from './glob.js'\n\n/**\n * Return true if the patterns provided contain any magic glob characters,\n * given the options provided.\n *\n * Brace expansion is not considered \"magic\" unless the `magicalBraces` option\n * is set, as brace expansion just turns one string into an array of strings.\n * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and\n * `'xby'` both do not contain any magic glob characters, and it's treated the\n * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`\n * is in the options, brace expansion _is_ treated as a pattern having magic.\n */\nexport const hasMagic = (\n pattern: string | string[],\n options: GlobOptions = {}\n): boolean => {\n if (!Array.isArray(pattern)) {\n pattern = [pattern]\n }\n for (const p of pattern) {\n if (new Minimatch(p, options).hasMagic()) return true\n }\n return false\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts
new file mode 100644
index 00000000000000..e9d74f3b5e1291
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts
@@ -0,0 +1,20 @@
+import { Minimatch } from 'minimatch';
+import { Path } from 'path-scurry';
+import { GlobWalkerOpts } from './walker.js';
+export interface IgnoreLike {
+ ignored?: (p: Path) => boolean;
+ childrenIgnored?: (p: Path) => boolean;
+}
+/**
+ * Class used to process ignored patterns
+ */
+export declare class Ignore implements IgnoreLike {
+ relative: Minimatch[];
+ relativeChildren: Minimatch[];
+ absolute: Minimatch[];
+ absoluteChildren: Minimatch[];
+ constructor(ignored: string[], { nobrace, nocase, noext, noglobstar, platform, }: GlobWalkerOpts);
+ ignored(p: Path): boolean;
+ childrenIgnored(p: Path): boolean;
+}
+//# sourceMappingURL=ignore.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts.map
new file mode 100644
index 00000000000000..3d604838d1eed2
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"ignore.d.ts","sourceRoot":"","sources":["../../../src/ignore.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AACrC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAElC,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;IAC9B,eAAe,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;CACvC;AASD;;GAEG;AACH,qBAAa,MAAO,YAAW,UAAU;IACvC,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;IAC7B,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;gBAG3B,OAAO,EAAE,MAAM,EAAE,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAA0B,GAC3B,EAAE,cAAc;IAiDnB,OAAO,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;IAczB,eAAe,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;CAWlC"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js
new file mode 100644
index 00000000000000..0cbcca335e1cca
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js
@@ -0,0 +1,103 @@
+"use strict";
+// give it a pattern, and it'll be able to tell you if
+// a given path should be ignored.
+// Ignoring a path ignores its children if the pattern ends in /**
+// Ignores are always parsed in dot:true mode
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Ignore = void 0;
+const minimatch_1 = require("minimatch");
+const pattern_js_1 = require("./pattern.js");
+const defaultPlatform = typeof process === 'object' &&
+ process &&
+ typeof process.platform === 'string'
+ ? process.platform
+ : 'linux';
+/**
+ * Class used to process ignored patterns
+ */
+class Ignore {
+ relative;
+ relativeChildren;
+ absolute;
+ absoluteChildren;
+ constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
+ this.relative = [];
+ this.absolute = [];
+ this.relativeChildren = [];
+ this.absoluteChildren = [];
+ const mmopts = {
+ dot: true,
+ nobrace,
+ nocase,
+ noext,
+ noglobstar,
+ optimizationLevel: 2,
+ platform,
+ nocomment: true,
+ nonegate: true,
+ };
+ // this is a little weird, but it gives us a clean set of optimized
+ // minimatch matchers, without getting tripped up if one of them
+ // ends in /** inside a brace section, and it's only inefficient at
+ // the start of the walk, not along it.
+ // It'd be nice if the Pattern class just had a .test() method, but
+ // handling globstars is a bit of a pita, and that code already lives
+ // in minimatch anyway.
+ // Another way would be if maybe Minimatch could take its set/globParts
+ // as an option, and then we could at least just use Pattern to test
+ // for absolute-ness.
+ // Yet another way, Minimatch could take an array of glob strings, and
+ // a cwd option, and do the right thing.
+ for (const ign of ignored) {
+ const mm = new minimatch_1.Minimatch(ign, mmopts);
+ for (let i = 0; i < mm.set.length; i++) {
+ const parsed = mm.set[i];
+ const globParts = mm.globParts[i];
+ const p = new pattern_js_1.Pattern(parsed, globParts, 0, platform);
+ const m = new minimatch_1.Minimatch(p.globString(), mmopts);
+ const children = globParts[globParts.length - 1] === '**';
+ const absolute = p.isAbsolute();
+ if (absolute)
+ this.absolute.push(m);
+ else
+ this.relative.push(m);
+ if (children) {
+ if (absolute)
+ this.absoluteChildren.push(m);
+ else
+ this.relativeChildren.push(m);
+ }
+ }
+ }
+ }
+ ignored(p) {
+ const fullpath = p.fullpath();
+ const fullpaths = `${fullpath}/`;
+ const relative = p.relative() || '.';
+ const relatives = `${relative}/`;
+ for (const m of this.relative) {
+ if (m.match(relative) || m.match(relatives))
+ return true;
+ }
+ for (const m of this.absolute) {
+ if (m.match(fullpath) || m.match(fullpaths))
+ return true;
+ }
+ return false;
+ }
+ childrenIgnored(p) {
+ const fullpath = p.fullpath() + '/';
+ const relative = (p.relative() || '.') + '/';
+ for (const m of this.relativeChildren) {
+ if (m.match(relative))
+ return true;
+ }
+ for (const m of this.absoluteChildren) {
+ if (m.match(fullpath))
+ return true;
+ }
+ return false;
+ }
+}
+exports.Ignore = Ignore;
+//# sourceMappingURL=ignore.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js.map
new file mode 100644
index 00000000000000..7595b4c68f79ed
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"ignore.js","sourceRoot":"","sources":["../../../src/ignore.ts"],"names":[],"mappings":";AAAA,sDAAsD;AACtD,kCAAkC;AAClC,kEAAkE;AAClE,6CAA6C;;;AAE7C,yCAAqC;AAErC,6CAAsC;AAQtC,MAAM,eAAe,GACnB,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ;IAClC,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAEb;;GAEG;AACH,MAAa,MAAM;IACjB,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAC7B,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAE7B,YACE,OAAiB,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAAQ,GAAG,eAAe,GACX;QAEjB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,MAAM,MAAM,GAAG;YACb,GAAG,EAAE,IAAI;YACT,OAAO;YACP,MAAM;YACN,KAAK;YACL,UAAU;YACV,iBAAiB,EAAE,CAAC;YACpB,QAAQ;YACR,SAAS,EAAE,IAAI;YACf,QAAQ,EAAE,IAAI;SACf,CAAA;QAED,mEAAmE;QACnE,gEAAgE;QAChE,mEAAmE;QACnE,uCAAuC;QACvC,mEAAmE;QACnE,qEAAqE;QACrE,uBAAuB;QACvB,uEAAuE;QACvE,oEAAoE;QACpE,qBAAqB;QACrB,sEAAsE;QACtE,wCAAwC;QACxC,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;YACzB,MAAM,EAAE,GAAG,IAAI,qBAAS,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACtC,MAAM,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;gBACxB,MAAM,SAAS,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;gBACjC,MAAM,CAAC,GAAG,IAAI,oBAAO,CAAC,MAAM,EAAE,SAAS,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAA;gBACrD,MAAM,CAAC,GAAG,IAAI,qBAAS,CAAC,CAAC,CAAC,UAAU,EAAE,EAAE,MAAM,CAAC,CAAA;gBAC/C,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAA;gBACzD,MAAM,QAAQ,GAAG,CAAC,CAAC,UAAU,EAAE,CAAA;gBAC/B,IAAI,QAAQ;oBAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;oBAC9B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;gBAC1B,IAAI,QAAQ,EAAE;oBACZ,IAAI,QAAQ;wBAAE,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;wBACtC,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;iBACnC;aACF;SACF;IACH,CAAC;IAED,OAAO,CAAC,CAAO;QACb,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAA;QAC7B,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAA;QACpC,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE;YAC7B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;SACzD;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE;YAC7B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;SACzD;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,eAAe,CAAC,CAAO;QACrB,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,GAAG,GAAG,CAAA;QACnC,MAAM,QAAQ,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,GAAG,CAAA;QAC5C,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACrC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,OAAO,IAAI,CAAA;SACnC;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACrC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,IAAI,CAAA;SAC5B;QACD,OAAO,KAAK,CAAA;IACd,CAAC;CACF;AAxFD,wBAwFC","sourcesContent":["// give it a pattern, and it'll be able to tell you if\n// a given path should be ignored.\n// Ignoring a path ignores its children if the pattern ends in /**\n// Ignores are always parsed in dot:true mode\n\nimport { Minimatch } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\nexport interface IgnoreLike {\n ignored?: (p: Path) => boolean\n childrenIgnored?: (p: Path) => boolean\n}\n\nconst defaultPlatform: NodeJS.Platform =\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ? 
process.platform\n : 'linux'\n\n/**\n * Class used to process ignored patterns\n */\nexport class Ignore implements IgnoreLike {\n relative: Minimatch[]\n relativeChildren: Minimatch[]\n absolute: Minimatch[]\n absoluteChildren: Minimatch[]\n\n constructor(\n ignored: string[],\n {\n nobrace,\n nocase,\n noext,\n noglobstar,\n platform = defaultPlatform,\n }: GlobWalkerOpts\n ) {\n this.relative = []\n this.absolute = []\n this.relativeChildren = []\n this.absoluteChildren = []\n const mmopts = {\n dot: true,\n nobrace,\n nocase,\n noext,\n noglobstar,\n optimizationLevel: 2,\n platform,\n nocomment: true,\n nonegate: true,\n }\n\n // this is a little weird, but it gives us a clean set of optimized\n // minimatch matchers, without getting tripped up if one of them\n // ends in /** inside a brace section, and it's only inefficient at\n // the start of the walk, not along it.\n // It'd be nice if the Pattern class just had a .test() method, but\n // handling globstars is a bit of a pita, and that code already lives\n // in minimatch anyway.\n // Another way would be if maybe Minimatch could take its set/globParts\n // as an option, and then we could at least just use Pattern to test\n // for absolute-ness.\n // Yet another way, Minimatch could take an array of glob strings, and\n // a cwd option, and do the right thing.\n for (const ign of ignored) {\n const mm = new Minimatch(ign, mmopts)\n for (let i = 0; i < mm.set.length; i++) {\n const parsed = mm.set[i]\n const globParts = mm.globParts[i]\n const p = new Pattern(parsed, globParts, 0, platform)\n const m = new Minimatch(p.globString(), mmopts)\n const children = globParts[globParts.length - 1] === '**'\n const absolute = p.isAbsolute()\n if (absolute) this.absolute.push(m)\n else this.relative.push(m)\n if (children) {\n if (absolute) this.absoluteChildren.push(m)\n else this.relativeChildren.push(m)\n }\n }\n }\n }\n\n ignored(p: Path): boolean {\n const fullpath = p.fullpath()\n const fullpaths = `${fullpath}/`\n const relative = p.relative() || '.'\n const relatives = `${relative}/`\n for (const m of this.relative) {\n if (m.match(relative) || m.match(relatives)) return true\n }\n for (const m of this.absolute) {\n if (m.match(fullpath) || m.match(fullpaths)) return true\n }\n return false\n }\n\n childrenIgnored(p: Path): boolean {\n const fullpath = p.fullpath() + '/'\n const relative = (p.relative() || '.') + '/'\n for (const m of this.relativeChildren) {\n if (m.match(relative)) return true\n }\n for (const m of this.absoluteChildren) {\n if (m.match(fullpath)) true\n }\n return false\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts
new file mode 100644
index 00000000000000..669bf12e6d5916
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts
@@ -0,0 +1,95 @@
+import { Minipass } from 'minipass';
+import { Path } from 'path-scurry';
+import type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset } from './glob.js';
+import { Glob } from './glob.js';
+/**
+ * Syncronous form of {@link globStream}. Will read all the matches as fast as
+ * you consume them, even all in a single tick if you consume them immediately,
+ * but will still respond to backpressure if they're not consumed immediately.
+ */
+export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass<Path, Path>;
+export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass<string, string>;
+export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesUnset): Minipass<string, string>;
+export declare function globStreamSync(pattern: string | string[], options: GlobOptions): Minipass<Path, Path> | Minipass<string, string>;
+/**
+ * Return a stream that emits all the strings or `Path` objects and
+ * then emits `end` when completed.
+ */
+export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass<string, string>;
+export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass<Path, Path>;
+export declare function globStream(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Minipass<string, string>;
+export declare function globStream(pattern: string | string[], options: GlobOptions): Minipass<Path, Path> | Minipass<string, string>;
+/**
+ * Synchronous form of {@link glob}
+ */
+export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): string[];
+export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Path[];
+export declare function globSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): string[];
+export declare function globSync(pattern: string | string[], options: GlobOptions): Path[] | string[];
+/**
+ * Perform an asynchronous glob search for the pattern(s) specified. Returns
+ * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the
+ * {@link withFileTypes} option is set to `true`. See {@link GlobOptions} for
+ * full option descriptions.
+ */
+declare function glob_(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Promise<string[]>;
+declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Promise<Path[]>;
+declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Promise<string[]>;
+declare function glob_(pattern: string | string[], options: GlobOptions): Promise<Path[] | string[]>;
+/**
+ * Return a sync iterator for walking glob pattern matches.
+ */
+export declare function globIterateSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Generator<string, void, void>;
+export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Generator<Path, void, void>;
+export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Generator<string, void, void>;
+export declare function globIterateSync(pattern: string | string[], options: GlobOptions): Generator<Path, void, void> | Generator<string, void, void>;
+/**
+ * Return an async iterator for walking glob pattern matches.
+ */
+export declare function globIterate(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): AsyncGenerator<string, void, void>;
+export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): AsyncGenerator<Path, void, void>;
+export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): AsyncGenerator<string, void, void>;
+export declare function globIterate(pattern: string | string[], options: GlobOptions): AsyncGenerator<Path, void, void> | AsyncGenerator<string, void, void>;
+export declare const streamSync: typeof globStreamSync;
+export declare const stream: typeof globStream & {
+ sync: typeof globStreamSync;
+};
+export declare const iterateSync: typeof globIterateSync;
+export declare const iterate: typeof globIterate & {
+ sync: typeof globIterateSync;
+};
+export declare const sync: typeof globSync & {
+ stream: typeof globStreamSync;
+ iterate: typeof globIterateSync;
+};
+export { escape, unescape } from 'minimatch';
+export { Glob } from './glob.js';
+export type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset, } from './glob.js';
+export { hasMagic } from './has-magic.js';
+export type { IgnoreLike } from './ignore.js';
+export type { MatchStream } from './walker.js';
+export declare const glob: typeof glob_ & {
+ glob: typeof glob_;
+ globSync: typeof globSync;
+ sync: typeof globSync & {
+ stream: typeof globStreamSync;
+ iterate: typeof globIterateSync;
+ };
+ globStream: typeof globStream;
+ stream: typeof globStream & {
+ sync: typeof globStreamSync;
+ };
+ globStreamSync: typeof globStreamSync;
+ streamSync: typeof globStreamSync;
+ globIterate: typeof globIterate;
+ iterate: typeof globIterate & {
+ sync: typeof globIterateSync;
+ };
+ globIterateSync: typeof globIterateSync;
+ iterateSync: typeof globIterateSync;
+ Glob: typeof Glob;
+ hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
+ escape: (s: string, { windowsPathsNoEscape, }?: Pick<GlobOptions, "windowsPathsNoEscape"> | undefined) => string;
+ unescape: (s: string, { windowsPathsNoEscape, }?: Pick<GlobOptions, "windowsPathsNoEscape"> | undefined) => string;
+};
+//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
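
The index.d.ts added above is the public surface of this vendored glob build: each entry point is overloaded so that withFileTypes: true produces path-scurry Path objects while the default form produces plain strings. A minimal consumer sketch of that behaviour, assuming the vendored build is imported as the ordinary glob package (hypothetical usage, not taken from this diff):

// Hypothetical consumer sketch exercising the overloads declared in index.d.ts above.
// Assumption: the vendored build resolves as the regular 'glob' import.
import { glob, globSync, globStream } from 'glob';

async function demo(): Promise<void> {
  // Default options: matches are returned as strings.
  const names: string[] = await glob('src/**/*.ts', { ignore: ['**/node_modules/**'] });
  console.log(names.length, 'matches');

  // withFileTypes: true switches every entry point to path-scurry Path objects.
  const paths = await glob('src/**/*.ts', { withFileTypes: true });
  for (const p of paths) console.log(p.fullpath(), p.isDirectory());

  // The sync and streaming forms follow the same overload pattern.
  const readmes: string[] = globSync('**/README.md', { nocase: true });
  console.log(readmes);
  globStream('**/*.js', { nodir: true }).on('data', (f: string) => console.log(f));
}

demo().catch(console.error);
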
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts.map
new file mode 100644
index 00000000000000..4e9ba085ce45b2
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,KAAK,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,EAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAGhC;;;;GAIG;AACH,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;GAEG;AACH,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,IAAI,EAAE,CAAA;AACT,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,IAAI,EAAE,GAAG,MAAM,EAAE,CAAA;AAQpB;;;;;GAKG;AACH,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAClB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,OAAO,CAAC,IAAI,EAAE,GAAG,MAAM,EAAE,CAAC,CAAA;AAQ7B;;GAEG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAC9B,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAQ9D;;GAEG;AACH,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACnC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AASxE,eAAO,MAAM,UAAU,uBAAiB,CAAA;AACxC,eAAO,MAAM,MAAM;;CAAsD,CAAA;AACzE,eAAO,MAAM,WAAW,wBAAkB,CAAA;AAC1C,eAAO
,MAAM,OAAO;;CAElB,CAAA;AACF,eAAO,MAAM,IAAI;;;CAGf,CAAA;AAGF,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,YAAY,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,YAAY,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AAG9C,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;CAgBf,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js
new file mode 100644
index 00000000000000..71c31c03dd339b
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js
@@ -0,0 +1,68 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.glob = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.globIterate = exports.globIterateSync = exports.globSync = exports.globStream = exports.globStreamSync = void 0;
+const minimatch_1 = require("minimatch");
+const glob_js_1 = require("./glob.js");
+const has_magic_js_1 = require("./has-magic.js");
+function globStreamSync(pattern, options = {}) {
+ return new glob_js_1.Glob(pattern, options).streamSync();
+}
+exports.globStreamSync = globStreamSync;
+function globStream(pattern, options = {}) {
+ return new glob_js_1.Glob(pattern, options).stream();
+}
+exports.globStream = globStream;
+function globSync(pattern, options = {}) {
+ return new glob_js_1.Glob(pattern, options).walkSync();
+}
+exports.globSync = globSync;
+async function glob_(pattern, options = {}) {
+ return new glob_js_1.Glob(pattern, options).walk();
+}
+function globIterateSync(pattern, options = {}) {
+ return new glob_js_1.Glob(pattern, options).iterateSync();
+}
+exports.globIterateSync = globIterateSync;
+function globIterate(pattern, options = {}) {
+ return new glob_js_1.Glob(pattern, options).iterate();
+}
+exports.globIterate = globIterate;
+// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
+exports.streamSync = globStreamSync;
+exports.stream = Object.assign(globStream, { sync: globStreamSync });
+exports.iterateSync = globIterateSync;
+exports.iterate = Object.assign(globIterate, {
+ sync: globIterateSync,
+});
+exports.sync = Object.assign(globSync, {
+ stream: globStreamSync,
+ iterate: globIterateSync,
+});
+/* c8 ignore start */
+var minimatch_2 = require("minimatch");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } });
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } });
+var glob_js_2 = require("./glob.js");
+Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } });
+var has_magic_js_2 = require("./has-magic.js");
+Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } });
+/* c8 ignore stop */
+exports.glob = Object.assign(glob_, {
+ glob: glob_,
+ globSync,
+ sync: exports.sync,
+ globStream,
+ stream: exports.stream,
+ globStreamSync,
+ streamSync: exports.streamSync,
+ globIterate,
+ iterate: exports.iterate,
+ globIterateSync,
+ iterateSync: exports.iterateSync,
+ Glob: glob_js_1.Glob,
+ hasMagic: has_magic_js_1.hasMagic,
+ escape: minimatch_1.escape,
+ unescape: minimatch_1.unescape,
+});
+exports.glob.glob = exports.glob;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
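
The compiled index.js above wires up the aliases with Object.assign, so the same functions are reachable under several names (glob.sync, glob.stream.sync, glob.sync.stream, and so on) and glob.glob points back at the assembled glob function itself. A small hypothetical check of those equivalences, under the same import assumption as the sketch above:

// Hypothetical check of the alias wiring built by index.js above.
import { glob } from 'glob';

// Object.assign returns its first argument, so these are the same function objects:
console.log(glob.sync === glob.globSync);                 // true
console.log(glob.stream.sync === glob.sync.stream);       // true: both are globStreamSync
console.log(glob.iterate.sync === glob.globIterateSync);  // true
console.log(glob.glob === glob);                          // true: self-reference assigned last
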
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js.map
new file mode 100644
index 00000000000000..060338fbd1b94b
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":";;;AAAA,yCAA4C;AAS5C,uCAAgC;AAChC,iDAAyC;AAuBzC,SAAgB,cAAc,CAC5B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,UAAU,EAAE,CAAA;AAChD,CAAC;AALD,wCAKC;AAsBD,SAAgB,UAAU,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;AAC5C,CAAC;AALD,gCAKC;AAqBD,SAAgB,QAAQ,CACtB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;AAC9C,CAAC;AALD,4BAKC;AAwBD,KAAK,UAAU,KAAK,CAClB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAA;AAC1C,CAAC;AAqBD,SAAgB,eAAe,CAC7B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,WAAW,EAAE,CAAA;AACjD,CAAC;AALD,0CAKC;AAqBD,SAAgB,WAAW,CACzB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,OAAO,EAAE,CAAA;AAC7C,CAAC;AALD,kCAKC;AAED,iEAAiE;AACpD,QAAA,UAAU,GAAG,cAAc,CAAA;AAC3B,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,cAAc,EAAE,CAAC,CAAA;AAC5D,QAAA,WAAW,GAAG,eAAe,CAAA;AAC7B,QAAA,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;IAChD,IAAI,EAAE,eAAe;CACtB,CAAC,CAAA;AACW,QAAA,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;IAC1C,MAAM,EAAE,cAAc;IACtB,OAAO,EAAE,eAAe;CACzB,CAAC,CAAA;AAEF,qBAAqB;AACrB,uCAA4C;AAAnC,mGAAA,MAAM,OAAA;AAAE,qGAAA,QAAQ,OAAA;AACzB,qCAAgC;AAAvB,+FAAA,IAAI,OAAA;AAOb,+CAAyC;AAAhC,wGAAA,QAAQ,OAAA;AAGjB,oBAAoB;AAEP,QAAA,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;IACvC,IAAI,EAAE,KAAK;IACX,QAAQ;IACR,IAAI,EAAJ,YAAI;IACJ,UAAU;IACV,MAAM,EAAN,cAAM;IACN,cAAc;IACd,UAAU,EAAV,kBAAU;IACV,WAAW;IACX,OAAO,EAAP,eAAO;IACP,eAAe;IACf,WAAW,EAAX,mBAAW;IACX,IAAI,EAAJ,cAAI;IACJ,QAAQ,EAAR,uBAAQ;IACR,MAAM,EAAN,kBAAM;IACN,QAAQ,EAAR,oBAAQ;CACT,CAAC,CAAA;AACF,YAAI,CAAC,IAAI,GAAG,YAAI,CAAA","sourcesContent":["import { escape, unescape } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nimport { Glob } from './glob.js'\nimport { hasMagic } from './has-magic.js'\n\n/**\n * Syncronous form of {@link globStream}. 
Will read all the matches as fast as\n * you consume them, even all in a single tick if you consume them immediately,\n * but will still respond to backpressure if they're not consumed immediately.\n */\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesUnset\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions\n): Minipass | Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).streamSync()\n}\n\n/**\n * Return a stream that emits all the strings or `Path` objects and\n * then emits `end` when completed.\n */\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions\n): Minipass | Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).stream()\n}\n\n/**\n * Synchronous form of {@link glob}\n */\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Path[]\nexport function globSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions\n): Path[] | string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).walkSync()\n}\n\n/**\n * Perform an asynchronous glob search for the pattern(s) specified. Returns\n * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the\n * {@link withFileTypes} option is set to `true`. 
See {@link GlobOptions} for\n * full option descriptions.\n */\nasync function glob_(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).walk()\n}\n\n/**\n * Return a sync iterator for walking glob pattern matches.\n */\nexport function globIterateSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions\n): Generator | Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).iterateSync()\n}\n\n/**\n * Return an async iterator for walking glob pattern matches.\n */\nexport function globIterate(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions\n): AsyncGenerator | AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions = {}\n) {\n return new Glob(pattern, options).iterate()\n}\n\n// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc\nexport const streamSync = globStreamSync\nexport const stream = Object.assign(globStream, { sync: globStreamSync })\nexport const iterateSync = globIterateSync\nexport const iterate = Object.assign(globIterate, {\n sync: globIterateSync,\n})\nexport const sync = Object.assign(globSync, {\n stream: globStreamSync,\n iterate: globIterateSync,\n})\n\n/* c8 ignore start */\nexport { escape, unescape } from 'minimatch'\nexport { Glob } from './glob.js'\nexport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nexport { hasMagic } from './has-magic.js'\nexport type { IgnoreLike } from './ignore.js'\nexport type { MatchStream } from './walker.js'\n/* c8 ignore stop */\n\nexport const glob = Object.assign(glob_, {\n glob: glob_,\n globSync,\n sync,\n globStream,\n stream,\n globStreamSync,\n streamSync,\n globIterate,\n iterate,\n globIterateSync,\n iterateSync,\n Glob,\n hasMagic,\n escape,\n unescape,\n})\nglob.glob = glob\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts
new file mode 100644
index 00000000000000..109cc4e7a5dae3
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts
@@ -0,0 +1,77 @@
+/// <reference types="node" />
+import { GLOBSTAR } from 'minimatch';
+export type MMPattern = string | RegExp | typeof GLOBSTAR;
+export type PatternList = [p: MMPattern, ...rest: MMPattern[]];
+export type UNCPatternList = [
+ p0: '',
+ p1: '',
+ p2: string,
+ p3: string,
+ ...rest: MMPattern[]
+];
+export type DrivePatternList = [p0: string, ...rest: MMPattern[]];
+export type AbsolutePatternList = [p0: '', ...rest: MMPattern[]];
+export type GlobList = [p: string, ...rest: string[]];
+/**
+ * An immutable-ish view on an array of glob parts and their parsed
+ * results
+ */
+export declare class Pattern {
+ #private;
+ readonly length: number;
+ constructor(patternList: MMPattern[], globList: string[], index: number, platform: NodeJS.Platform);
+ /**
+ * The first entry in the parsed list of patterns
+ */
+ pattern(): MMPattern;
+ /**
+ * true of if pattern() returns a string
+ */
+ isString(): boolean;
+ /**
+ * true of if pattern() returns GLOBSTAR
+ */
+ isGlobstar(): boolean;
+ /**
+ * true if pattern() returns a regexp
+ */
+ isRegExp(): boolean;
+ /**
+ * The /-joined set of glob parts that make up this pattern
+ */
+ globString(): string;
+ /**
+ * true if there are more pattern parts after this one
+ */
+ hasMore(): boolean;
+ /**
+ * The rest of the pattern after this part, or null if this is the end
+ */
+ rest(): Pattern | null;
+ /**
+ * true if the pattern represents a //unc/path/ on windows
+ */
+ isUNC(): boolean;
+ /**
+ * True if the pattern starts with a drive letter on Windows
+ */
+ isDrive(): boolean;
+ /**
+ * True if the pattern is rooted on an absolute path
+ */
+ isAbsolute(): boolean;
+ /**
+ * consume the root of the pattern, and return it
+ */
+ root(): string;
+ /**
+ * Check to see if the current globstar pattern is allowed to follow
+ * a symbolic link.
+ */
+ checkFollowGlobstar(): boolean;
+ /**
+ * Mark that the current globstar pattern is following a symbolic link
+ */
+ markFollowGlobstar(): boolean;
+}
+//# sourceMappingURL=pattern.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts.map
new file mode 100644
index 00000000000000..48430f63db0947
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"pattern.d.ts","sourceRoot":"","sources":["../../../src/pattern.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AACpC,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,QAAQ,CAAA;AAGzD,MAAM,MAAM,WAAW,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAC9D,MAAM,MAAM,cAAc,GAAG;IAC3B,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,MAAM;IACV,EAAE,EAAE,MAAM;IACV,GAAG,IAAI,EAAE,SAAS,EAAE;CACrB,CAAA;AACD,MAAM,MAAM,gBAAgB,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AACjE,MAAM,MAAM,mBAAmB,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAChE,MAAM,MAAM,QAAQ,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;AAMrD;;;GAGG;AACH,qBAAa,OAAO;;IAIlB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAA;gBAUrB,WAAW,EAAE,SAAS,EAAE,EACxB,QAAQ,EAAE,MAAM,EAAE,EAClB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,CAAC,QAAQ;IA6D3B;;OAEG;IACH,OAAO,IAAI,SAAS;IAIpB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAGnB;;OAEG;IACH,UAAU,IAAI,OAAO;IAGrB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAInB;;OAEG;IACH,UAAU,IAAI,MAAM;IAUpB;;OAEG;IACH,OAAO,IAAI,OAAO;IAIlB;;OAEG;IACH,IAAI,IAAI,OAAO,GAAG,IAAI;IAetB;;OAEG;IACH,KAAK,IAAI,OAAO;IAoBhB;;OAEG;IACH,OAAO,IAAI,OAAO;IAelB;;OAEG;IACH,UAAU,IAAI,OAAO;IAUrB;;OAEG;IACH,IAAI,IAAI,MAAM;IAOd;;;OAGG;IACH,mBAAmB,IAAI,OAAO;IAQ9B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAM9B"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js
new file mode 100644
index 00000000000000..181371293d8605
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js
@@ -0,0 +1,219 @@
+"use strict";
+// this is just a very light wrapper around 2 arrays with an offset index
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Pattern = void 0;
+const minimatch_1 = require("minimatch");
+const isPatternList = (pl) => pl.length >= 1;
+const isGlobList = (gl) => gl.length >= 1;
+/**
+ * An immutable-ish view on an array of glob parts and their parsed
+ * results
+ */
+class Pattern {
+ #patternList;
+ #globList;
+ #index;
+ length;
+ #platform;
+ #rest;
+ #globString;
+ #isDrive;
+ #isUNC;
+ #isAbsolute;
+ #followGlobstar = true;
+ constructor(patternList, globList, index, platform) {
+ if (!isPatternList(patternList)) {
+ throw new TypeError('empty pattern list');
+ }
+ if (!isGlobList(globList)) {
+ throw new TypeError('empty glob list');
+ }
+ if (globList.length !== patternList.length) {
+ throw new TypeError('mismatched pattern list and glob list lengths');
+ }
+ this.length = patternList.length;
+ if (index < 0 || index >= this.length) {
+ throw new TypeError('index out of range');
+ }
+ this.#patternList = patternList;
+ this.#globList = globList;
+ this.#index = index;
+ this.#platform = platform;
+ // normalize root entries of absolute patterns on initial creation.
+ if (this.#index === 0) {
+ // c: => ['c:/']
+ // C:/ => ['C:/']
+ // C:/x => ['C:/', 'x']
+ // //host/share => ['//host/share/']
+ // //host/share/ => ['//host/share/']
+ // //host/share/x => ['//host/share/', 'x']
+ // /etc => ['/', 'etc']
+ // / => ['/']
+ if (this.isUNC()) {
+ // '' / '' / 'host' / 'share'
+ const [p0, p1, p2, p3, ...prest] = this.#patternList;
+ const [g0, g1, g2, g3, ...grest] = this.#globList;
+ if (prest[0] === '') {
+ // ends in /
+ prest.shift();
+ grest.shift();
+ }
+ const p = [p0, p1, p2, p3, ''].join('/');
+ const g = [g0, g1, g2, g3, ''].join('/');
+ this.#patternList = [p, ...prest];
+ this.#globList = [g, ...grest];
+ this.length = this.#patternList.length;
+ }
+ else if (this.isDrive() || this.isAbsolute()) {
+ const [p1, ...prest] = this.#patternList;
+ const [g1, ...grest] = this.#globList;
+ if (prest[0] === '') {
+ // ends in /
+ prest.shift();
+ grest.shift();
+ }
+ const p = p1 + '/';
+ const g = g1 + '/';
+ this.#patternList = [p, ...prest];
+ this.#globList = [g, ...grest];
+ this.length = this.#patternList.length;
+ }
+ }
+ }
+ /**
+ * The first entry in the parsed list of patterns
+ */
+ pattern() {
+ return this.#patternList[this.#index];
+ }
+ /**
+ * true of if pattern() returns a string
+ */
+ isString() {
+ return typeof this.#patternList[this.#index] === 'string';
+ }
+ /**
+ * true of if pattern() returns GLOBSTAR
+ */
+ isGlobstar() {
+ return this.#patternList[this.#index] === minimatch_1.GLOBSTAR;
+ }
+ /**
+ * true if pattern() returns a regexp
+ */
+ isRegExp() {
+ return this.#patternList[this.#index] instanceof RegExp;
+ }
+ /**
+ * The /-joined set of glob parts that make up this pattern
+ */
+ globString() {
+ return (this.#globString =
+ this.#globString ||
+ (this.#index === 0
+ ? this.isAbsolute()
+ ? this.#globList[0] + this.#globList.slice(1).join('/')
+ : this.#globList.join('/')
+ : this.#globList.slice(this.#index).join('/')));
+ }
+ /**
+ * true if there are more pattern parts after this one
+ */
+ hasMore() {
+ return this.length > this.#index + 1;
+ }
+ /**
+ * The rest of the pattern after this part, or null if this is the end
+ */
+ rest() {
+ if (this.#rest !== undefined)
+ return this.#rest;
+ if (!this.hasMore())
+ return (this.#rest = null);
+ this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
+ this.#rest.#isAbsolute = this.#isAbsolute;
+ this.#rest.#isUNC = this.#isUNC;
+ this.#rest.#isDrive = this.#isDrive;
+ return this.#rest;
+ }
+ /**
+ * true if the pattern represents a //unc/path/ on windows
+ */
+ isUNC() {
+ const pl = this.#patternList;
+ return this.#isUNC !== undefined
+ ? this.#isUNC
+ : (this.#isUNC =
+ this.#platform === 'win32' &&
+ this.#index === 0 &&
+ pl[0] === '' &&
+ pl[1] === '' &&
+ typeof pl[2] === 'string' &&
+ !!pl[2] &&
+ typeof pl[3] === 'string' &&
+ !!pl[3]);
+ }
+ // pattern like C:/...
+ // split = ['C:', ...]
+ // XXX: would be nice to handle patterns like `c:*` to test the cwd
+ // in c: for *, but I don't know of a way to even figure out what that
+ // cwd is without actually chdir'ing into it?
+ /**
+ * True if the pattern starts with a drive letter on Windows
+ */
+ isDrive() {
+ const pl = this.#patternList;
+ return this.#isDrive !== undefined
+ ? this.#isDrive
+ : (this.#isDrive =
+ this.#platform === 'win32' &&
+ this.#index === 0 &&
+ this.length > 1 &&
+ typeof pl[0] === 'string' &&
+ /^[a-z]:$/i.test(pl[0]));
+ }
+ // pattern = '/' or '/...' or '/x/...'
+ // split = ['', ''] or ['', ...] or ['', 'x', ...]
+ // Drive and UNC both considered absolute on windows
+ /**
+ * True if the pattern is rooted on an absolute path
+ */
+ isAbsolute() {
+ const pl = this.#patternList;
+ return this.#isAbsolute !== undefined
+ ? this.#isAbsolute
+ : (this.#isAbsolute =
+ (pl[0] === '' && pl.length > 1) ||
+ this.isDrive() ||
+ this.isUNC());
+ }
+ /**
+ * consume the root of the pattern, and return it
+ */
+ root() {
+ const p = this.#patternList[0];
+ return typeof p === 'string' && this.isAbsolute() && this.#index === 0
+ ? p
+ : '';
+ }
+ /**
+ * Check to see if the current globstar pattern is allowed to follow
+ * a symbolic link.
+ */
+ checkFollowGlobstar() {
+ return !(this.#index === 0 ||
+ !this.isGlobstar() ||
+ !this.#followGlobstar);
+ }
+ /**
+ * Mark that the current globstar pattern is following a symbolic link
+ */
+ markFollowGlobstar() {
+ if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
+ return false;
+ this.#followGlobstar = false;
+ return true;
+ }
+}
+exports.Pattern = Pattern;
+//# sourceMappingURL=pattern.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js.map
new file mode 100644
index 00000000000000..ba5293ff9f2489
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"pattern.js","sourceRoot":"","sources":["../../../src/pattern.ts"],"names":[],"mappings":";AAAA,yEAAyE;;;AAEzE,yCAAoC;AAgBpC,MAAM,aAAa,GAAG,CAAC,EAAe,EAAqB,EAAE,CAC3D,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAChB,MAAM,UAAU,GAAG,CAAC,EAAY,EAAkB,EAAE,CAAC,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAEnE;;;GAGG;AACH,MAAa,OAAO;IACT,YAAY,CAAa;IACzB,SAAS,CAAU;IACnB,MAAM,CAAQ;IACd,MAAM,CAAQ;IACd,SAAS,CAAiB;IACnC,KAAK,CAAiB;IACtB,WAAW,CAAS;IACpB,QAAQ,CAAU;IAClB,MAAM,CAAU;IAChB,WAAW,CAAU;IACrB,eAAe,GAAY,IAAI,CAAA;IAE/B,YACE,WAAwB,EACxB,QAAkB,EAClB,KAAa,EACb,QAAyB;QAEzB,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,EAAE;YAC/B,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;SAC1C;QACD,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;YACzB,MAAM,IAAI,SAAS,CAAC,iBAAiB,CAAC,CAAA;SACvC;QACD,IAAI,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,MAAM,EAAE;YAC1C,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC,CAAA;SACrE;QACD,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAA;QAChC,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,MAAM,EAAE;YACrC,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;SAC1C;QACD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAA;QAC/B,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QAEzB,mEAAmE;QACnE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;YACrB,gBAAgB;YAChB,iBAAiB;YACjB,uBAAuB;YACvB,oCAAoC;YACpC,qCAAqC;YACrC,2CAA2C;YAC3C,uBAAuB;YACvB,aAAa;YACb,IAAI,IAAI,CAAC,KAAK,EAAE,EAAE;gBAChB,6BAA6B;gBAC7B,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACpD,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACjD,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE;oBACnB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;iBACd;gBACD,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;aACvC;iBAAM,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE;gBAC9C,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACxC,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACrC,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE;oBACnB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;iBACd;gBACD,MAAM,CAAC,GAAI,EAAa,GAAG,GAAG,CAAA;gBAC9B,MAAM,CAAC,GAAG,EAAE,GAAG,GAAG,CAAA;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;aACvC;SACF;IACH,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACvC,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,OAAO,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,QAAQ,CAAA;IAC3D,CAAC;IACD;;OAEG;IACH,UAAU;QACR,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,oBAAQ,CAAA;IACpD,CAAC;IACD;;OAEG;IACH,QAAQ;QACN,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,MAAM,CAAA;IACzD,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,IAAI,CAAC,WAAW;YACtB,IAAI,CAAC,WAAW;gBAChB,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC;oBAChB,CAAC,CAAC,IAAI,CAAC,UAAU,EAAE;wBACjB,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;wBACvD,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC;oBAC5B,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACrD,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM
,GAAG,CAAC,CAAA;IACtC,CAAC;IAED;;OAEG;IACH,IAAI;QACF,IAAI,IAAI,CAAC,KAAK,KAAK,SAAS;YAAE,OAAO,IAAI,CAAC,KAAK,CAAA;QAC/C,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YAAE,OAAO,CAAC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,CAAA;QAC/C,IAAI,CAAC,KAAK,GAAG,IAAI,OAAO,CACtB,IAAI,CAAC,YAAY,EACjB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,MAAM,GAAG,CAAC,EACf,IAAI,CAAC,SAAS,CACf,CAAA;QACD,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;QACzC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAC/B,IAAI,CAAC,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QACnC,OAAO,IAAI,CAAC,KAAK,CAAA;IACnB,CAAC;IAED;;OAEG;IACH,KAAK;QACH,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,SAAS;YAC9B,CAAC,CAAC,IAAI,CAAC,MAAM;YACb,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM;gBACV,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;oBACP,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;IAChB,CAAC;IAED,sBAAsB;IACtB,sBAAsB;IACtB,mEAAmE;IACnE,sEAAsE;IACtE,6CAA6C;IAC7C;;OAEG;IACH,OAAO;QACL,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,QAAQ,KAAK,SAAS;YAChC,CAAC,CAAC,IAAI,CAAC,QAAQ;YACf,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ;gBACZ,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,IAAI,CAAC,MAAM,GAAG,CAAC;oBACf,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAChC,CAAC;IAED,sCAAsC;IACtC,kDAAkD;IAClD,oDAAoD;IACpD;;OAEG;IACH,UAAU;QACR,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS;YACnC,CAAC,CAAC,IAAI,CAAC,WAAW;YAClB,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW;gBACf,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;oBAC/B,IAAI,CAAC,OAAO,EAAE;oBACd,IAAI,CAAC,KAAK,EAAE,CAAC,CAAA;IACrB,CAAC;IAED;;OAEG;IACH,IAAI;QACF,MAAM,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAA;QAC9B,OAAO,OAAO,CAAC,KAAK,QAAQ,IAAI,IAAI,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YACpE,CAAC,CAAC,CAAC;YACH,CAAC,CAAC,EAAE,CAAA;IACR,CAAC;IAED;;;OAGG;IACH,mBAAmB;QACjB,OAAO,CAAC,CACN,IAAI,CAAC,MAAM,KAAK,CAAC;YACjB,CAAC,IAAI,CAAC,UAAU,EAAE;YAClB,CAAC,IAAI,CAAC,eAAe,CACtB,CAAA;IACH,CAAC;IAED;;OAEG;IACH,kBAAkB;QAChB,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,eAAe;YAClE,OAAO,KAAK,CAAA;QACd,IAAI,CAAC,eAAe,GAAG,KAAK,CAAA;QAC5B,OAAO,IAAI,CAAA;IACb,CAAC;CACF;AAnOD,0BAmOC","sourcesContent":["// this is just a very light wrapper around 2 arrays with an offset index\n\nimport { GLOBSTAR } from 'minimatch'\nexport type MMPattern = string | RegExp | typeof GLOBSTAR\n\n// an array of length >= 1\nexport type PatternList = [p: MMPattern, ...rest: MMPattern[]]\nexport type UNCPatternList = [\n p0: '',\n p1: '',\n p2: string,\n p3: string,\n ...rest: MMPattern[]\n]\nexport type DrivePatternList = [p0: string, ...rest: MMPattern[]]\nexport type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]\nexport type GlobList = [p: string, ...rest: string[]]\n\nconst isPatternList = (pl: MMPattern[]): pl is PatternList =>\n pl.length >= 1\nconst isGlobList = (gl: string[]): gl is GlobList => gl.length >= 1\n\n/**\n * An immutable-ish view on an array of glob parts and their parsed\n * results\n */\nexport class Pattern {\n readonly #patternList: PatternList\n readonly #globList: GlobList\n readonly #index: number\n readonly length: number\n readonly #platform: NodeJS.Platform\n #rest?: Pattern | null\n #globString?: string\n #isDrive?: boolean\n #isUNC?: boolean\n #isAbsolute?: boolean\n #followGlobstar: boolean = true\n\n 
constructor(\n patternList: MMPattern[],\n globList: string[],\n index: number,\n platform: NodeJS.Platform\n ) {\n if (!isPatternList(patternList)) {\n throw new TypeError('empty pattern list')\n }\n if (!isGlobList(globList)) {\n throw new TypeError('empty glob list')\n }\n if (globList.length !== patternList.length) {\n throw new TypeError('mismatched pattern list and glob list lengths')\n }\n this.length = patternList.length\n if (index < 0 || index >= this.length) {\n throw new TypeError('index out of range')\n }\n this.#patternList = patternList\n this.#globList = globList\n this.#index = index\n this.#platform = platform\n\n // normalize root entries of absolute patterns on initial creation.\n if (this.#index === 0) {\n // c: => ['c:/']\n // C:/ => ['C:/']\n // C:/x => ['C:/', 'x']\n // //host/share => ['//host/share/']\n // //host/share/ => ['//host/share/']\n // //host/share/x => ['//host/share/', 'x']\n // /etc => ['/', 'etc']\n // / => ['/']\n if (this.isUNC()) {\n // '' / '' / 'host' / 'share'\n const [p0, p1, p2, p3, ...prest] = this.#patternList\n const [g0, g1, g2, g3, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = [p0, p1, p2, p3, ''].join('/')\n const g = [g0, g1, g2, g3, ''].join('/')\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n } else if (this.isDrive() || this.isAbsolute()) {\n const [p1, ...prest] = this.#patternList\n const [g1, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = (p1 as string) + '/'\n const g = g1 + '/'\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n }\n }\n }\n\n /**\n * The first entry in the parsed list of patterns\n */\n pattern(): MMPattern {\n return this.#patternList[this.#index]\n }\n\n /**\n * true of if pattern() returns a string\n */\n isString(): boolean {\n return typeof this.#patternList[this.#index] === 'string'\n }\n /**\n * true of if pattern() returns GLOBSTAR\n */\n isGlobstar(): boolean {\n return this.#patternList[this.#index] === GLOBSTAR\n }\n /**\n * true if pattern() returns a regexp\n */\n isRegExp(): boolean {\n return this.#patternList[this.#index] instanceof RegExp\n }\n\n /**\n * The /-joined set of glob parts that make up this pattern\n */\n globString(): string {\n return (this.#globString =\n this.#globString ||\n (this.#index === 0\n ? this.isAbsolute()\n ? this.#globList[0] + this.#globList.slice(1).join('/')\n : this.#globList.join('/')\n : this.#globList.slice(this.#index).join('/')))\n }\n\n /**\n * true if there are more pattern parts after this one\n */\n hasMore(): boolean {\n return this.length > this.#index + 1\n }\n\n /**\n * The rest of the pattern after this part, or null if this is the end\n */\n rest(): Pattern | null {\n if (this.#rest !== undefined) return this.#rest\n if (!this.hasMore()) return (this.#rest = null)\n this.#rest = new Pattern(\n this.#patternList,\n this.#globList,\n this.#index + 1,\n this.#platform\n )\n this.#rest.#isAbsolute = this.#isAbsolute\n this.#rest.#isUNC = this.#isUNC\n this.#rest.#isDrive = this.#isDrive\n return this.#rest\n }\n\n /**\n * true if the pattern represents a //unc/path/ on windows\n */\n isUNC(): boolean {\n const pl = this.#patternList\n return this.#isUNC !== undefined\n ? 
this.#isUNC\n : (this.#isUNC =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n pl[0] === '' &&\n pl[1] === '' &&\n typeof pl[2] === 'string' &&\n !!pl[2] &&\n typeof pl[3] === 'string' &&\n !!pl[3])\n }\n\n // pattern like C:/...\n // split = ['C:', ...]\n // XXX: would be nice to handle patterns like `c:*` to test the cwd\n // in c: for *, but I don't know of a way to even figure out what that\n // cwd is without actually chdir'ing into it?\n /**\n * True if the pattern starts with a drive letter on Windows\n */\n isDrive(): boolean {\n const pl = this.#patternList\n return this.#isDrive !== undefined\n ? this.#isDrive\n : (this.#isDrive =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n this.length > 1 &&\n typeof pl[0] === 'string' &&\n /^[a-z]:$/i.test(pl[0]))\n }\n\n // pattern = '/' or '/...' or '/x/...'\n // split = ['', ''] or ['', ...] or ['', 'x', ...]\n // Drive and UNC both considered absolute on windows\n /**\n * True if the pattern is rooted on an absolute path\n */\n isAbsolute(): boolean {\n const pl = this.#patternList\n return this.#isAbsolute !== undefined\n ? this.#isAbsolute\n : (this.#isAbsolute =\n (pl[0] === '' && pl.length > 1) ||\n this.isDrive() ||\n this.isUNC())\n }\n\n /**\n * consume the root of the pattern, and return it\n */\n root(): string {\n const p = this.#patternList[0]\n return typeof p === 'string' && this.isAbsolute() && this.#index === 0\n ? p\n : ''\n }\n\n /**\n * Check to see if the current globstar pattern is allowed to follow\n * a symbolic link.\n */\n checkFollowGlobstar(): boolean {\n return !(\n this.#index === 0 ||\n !this.isGlobstar() ||\n !this.#followGlobstar\n )\n }\n\n /**\n * Mark that the current globstar pattern is following a symbolic link\n */\n markFollowGlobstar(): boolean {\n if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)\n return false\n this.#followGlobstar = false\n return true\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts
new file mode 100644
index 00000000000000..ccedfbf2820f7d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts
@@ -0,0 +1,59 @@
+import { MMRegExp } from 'minimatch';
+import { Path } from 'path-scurry';
+import { Pattern } from './pattern.js';
+import { GlobWalkerOpts } from './walker.js';
+/**
+ * A cache of which patterns have been processed for a given Path
+ */
+export declare class HasWalkedCache {
+ store: Map<string, Set<string>>;
+ constructor(store?: Map<string, Set<string>>);
+ copy(): HasWalkedCache;
+ hasWalked(target: Path, pattern: Pattern): boolean | undefined;
+ storeWalked(target: Path, pattern: Pattern): void;
+}
+/**
+ * A record of which paths have been matched in a given walk step,
+ * and whether they only are considered a match if they are a directory,
+ * and whether their absolute or relative path should be returned.
+ */
+export declare class MatchRecord {
+ store: Map<Path, number>;
+ add(target: Path, absolute: boolean, ifDir: boolean): void;
+ entries(): [Path, boolean, boolean][];
+}
+/**
+ * A collection of patterns that must be processed in a subsequent step
+ * for a given path.
+ */
+export declare class SubWalks {
+ store: Map<Path, Pattern[]>;
+ add(target: Path, pattern: Pattern): void;
+ get(target: Path): Pattern[];
+ entries(): [Path, Pattern[]][];
+ keys(): Path[];
+}
+/**
+ * The class that processes patterns for a given path.
+ *
+ * Handles child entry filtering, and determining whether a path's
+ * directory contents must be read.
+ */
+export declare class Processor {
+ hasWalkedCache: HasWalkedCache;
+ matches: MatchRecord;
+ subwalks: SubWalks;
+ patterns?: Pattern[];
+ follow: boolean;
+ dot: boolean;
+ opts: GlobWalkerOpts;
+ constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache);
+ processPatterns(target: Path, patterns: Pattern[]): this;
+ subwalkTargets(): Path[];
+ child(): Processor;
+ filterEntries(parent: Path, entries: Path[]): Processor;
+ testGlobstar(e: Path, pattern: Pattern, rest: Pattern | null, absolute: boolean): void;
+ testRegExp(e: Path, p: MMRegExp, rest: Pattern | null, absolute: boolean): void;
+ testString(e: Path, p: string, rest: Pattern | null, absolute: boolean): void;
+}
+//# sourceMappingURL=processor.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts.map
new file mode 100644
index 00000000000000..ca6c63ca264b27
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"processor.d.ts","sourceRoot":"","sources":["../../../src/processor.ts"],"names":[],"mappings":"AAEA,OAAO,EAAY,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC9C,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAa,OAAO,EAAE,MAAM,cAAc,CAAA;AACjD,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C;;GAEG;AACH,qBAAa,cAAc;IACzB,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC,CAAA;gBACnB,KAAK,GAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAa;IAGvD,IAAI;IAGJ,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAGxC,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;CAM3C;AAED;;;;GAIG;AACH,qBAAa,WAAW;IACtB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAY;IACpC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO;IAMnD,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE;CAOtC;AAED;;;GAGG;AACH,qBAAa,QAAQ;IACnB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAY;IACvC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAWlC,GAAG,CAAC,MAAM,EAAE,IAAI,GAAG,OAAO,EAAE;IAS5B,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE;IAG9B,IAAI,IAAI,IAAI,EAAE;CAGf;AAED;;;;;GAKG;AACH,qBAAa,SAAS;IACpB,cAAc,EAAE,cAAc,CAAA;IAC9B,OAAO,cAAoB;IAC3B,QAAQ,WAAiB;IACzB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,EAAE,OAAO,CAAA;IACZ,IAAI,EAAE,cAAc,CAAA;gBAER,IAAI,EAAE,cAAc,EAAE,cAAc,CAAC,EAAE,cAAc;IASjE,eAAe,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE;IAwGjD,cAAc,IAAI,IAAI,EAAE;IAIxB,KAAK;IAQL,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,GAAG,SAAS;IAqBvD,YAAY,CACV,CAAC,EAAE,IAAI,EACP,OAAO,EAAE,OAAO,EAChB,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IA8CnB,UAAU,CACR,CAAC,EAAE,IAAI,EACP,CAAC,EAAE,QAAQ,EACX,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IAUnB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,GAAG,IAAI,EAAE,QAAQ,EAAE,OAAO;CASvE"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js
new file mode 100644
index 00000000000000..bd067e9b9033dc
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js
@@ -0,0 +1,309 @@
+"use strict";
+// synchronous utility for filtering entries and calculating subwalks
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0;
+const minimatch_1 = require("minimatch");
+/**
+ * A cache of which patterns have been processed for a given Path
+ */
+class HasWalkedCache {
+ store;
+ constructor(store = new Map()) {
+ this.store = store;
+ }
+ copy() {
+ return new HasWalkedCache(new Map(this.store));
+ }
+ hasWalked(target, pattern) {
+ return this.store.get(target.fullpath())?.has(pattern.globString());
+ }
+ storeWalked(target, pattern) {
+ const fullpath = target.fullpath();
+ const cached = this.store.get(fullpath);
+ if (cached)
+ cached.add(pattern.globString());
+ else
+ this.store.set(fullpath, new Set([pattern.globString()]));
+ }
+}
+exports.HasWalkedCache = HasWalkedCache;
+/**
+ * A record of which paths have been matched in a given walk step,
+ * and whether they only are considered a match if they are a directory,
+ * and whether their absolute or relative path should be returned.
+ */
+class MatchRecord {
+ store = new Map();
+ add(target, absolute, ifDir) {
+ const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
+ const current = this.store.get(target);
+ this.store.set(target, current === undefined ? n : n & current);
+ }
+ // match, absolute, ifdir
+ entries() {
+ return [...this.store.entries()].map(([path, n]) => [
+ path,
+ !!(n & 2),
+ !!(n & 1),
+ ]);
+ }
+}
+exports.MatchRecord = MatchRecord;
+/**
+ * A collection of patterns that must be processed in a subsequent step
+ * for a given path.
+ */
+class SubWalks {
+ store = new Map();
+ add(target, pattern) {
+ if (!target.canReaddir()) {
+ return;
+ }
+ const subs = this.store.get(target);
+ if (subs) {
+ if (!subs.find(p => p.globString() === pattern.globString())) {
+ subs.push(pattern);
+ }
+ }
+ else
+ this.store.set(target, [pattern]);
+ }
+ get(target) {
+ const subs = this.store.get(target);
+ /* c8 ignore start */
+ if (!subs) {
+ throw new Error('attempting to walk unknown path');
+ }
+ /* c8 ignore stop */
+ return subs;
+ }
+ entries() {
+ return this.keys().map(k => [k, this.store.get(k)]);
+ }
+ keys() {
+ return [...this.store.keys()].filter(t => t.canReaddir());
+ }
+}
+exports.SubWalks = SubWalks;
+/**
+ * The class that processes patterns for a given path.
+ *
+ * Handles child entry filtering, and determining whether a path's
+ * directory contents must be read.
+ */
+class Processor {
+ hasWalkedCache;
+ matches = new MatchRecord();
+ subwalks = new SubWalks();
+ patterns;
+ follow;
+ dot;
+ opts;
+ constructor(opts, hasWalkedCache) {
+ this.opts = opts;
+ this.follow = !!opts.follow;
+ this.dot = !!opts.dot;
+ this.hasWalkedCache = hasWalkedCache
+ ? hasWalkedCache.copy()
+ : new HasWalkedCache();
+ }
+ processPatterns(target, patterns) {
+ this.patterns = patterns;
+ const processingSet = patterns.map(p => [target, p]);
+ // map of paths to the magic-starting subwalks they need to walk
+ // first item in patterns is the filter
+ for (let [t, pattern] of processingSet) {
+ this.hasWalkedCache.storeWalked(t, pattern);
+ const root = pattern.root();
+ const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
+ // start absolute patterns at root
+ if (root) {
+ t = t.resolve(root === '/' && this.opts.root !== undefined
+ ? this.opts.root
+ : root);
+ const rest = pattern.rest();
+ if (!rest) {
+ this.matches.add(t, true, false);
+ continue;
+ }
+ else {
+ pattern = rest;
+ }
+ }
+ if (t.isENOENT())
+ continue;
+ let p;
+ let rest;
+ let changed = false;
+ while (typeof (p = pattern.pattern()) === 'string' &&
+ (rest = pattern.rest())) {
+ const c = t.resolve(p);
+ // we can be reasonably sure that .. is a readable dir
+ if (c.isUnknown() && p !== '..')
+ break;
+ t = c;
+ pattern = rest;
+ changed = true;
+ }
+ p = pattern.pattern();
+ rest = pattern.rest();
+ if (changed) {
+ if (this.hasWalkedCache.hasWalked(t, pattern))
+ continue;
+ this.hasWalkedCache.storeWalked(t, pattern);
+ }
+ // now we have either a final string for a known entry,
+ // more strings for an unknown entry,
+ // or a pattern starting with magic, mounted on t.
+ if (typeof p === 'string') {
+ // must be final entry
+ if (!rest) {
+ const ifDir = p === '..' || p === '' || p === '.';
+ this.matches.add(t.resolve(p), absolute, ifDir);
+ }
+ else {
+ this.subwalks.add(t, pattern);
+ }
+ continue;
+ }
+ else if (p === minimatch_1.GLOBSTAR) {
+ // if no rest, match and subwalk pattern
+ // if rest, process rest and subwalk pattern
+ // if it's a symlink, but we didn't get here by way of a
+ // globstar match (meaning it's the first time THIS globstar
+ // has traversed a symlink), then we follow it. Otherwise, stop.
+ if (!t.isSymbolicLink() ||
+ this.follow ||
+ pattern.checkFollowGlobstar()) {
+ this.subwalks.add(t, pattern);
+ }
+ const rp = rest?.pattern();
+ const rrest = rest?.rest();
+ if (!rest || ((rp === '' || rp === '.') && !rrest)) {
+ // only HAS to be a dir if it ends in **/ or **/.
+ // but ending in ** will match files as well.
+ this.matches.add(t, absolute, rp === '' || rp === '.');
+ }
+ else {
+ if (rp === '..') {
+ // this would mean you're matching **/.. at the fs root,
+ // and no thanks, I'm not gonna test that specific case.
+ /* c8 ignore start */
+ const tp = t.parent || t;
+ /* c8 ignore stop */
+ if (!rrest)
+ this.matches.add(tp, absolute, true);
+ else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
+ this.subwalks.add(tp, rrest);
+ }
+ }
+ }
+ }
+ else if (p instanceof RegExp) {
+ this.subwalks.add(t, pattern);
+ }
+ }
+ return this;
+ }
+ subwalkTargets() {
+ return this.subwalks.keys();
+ }
+ child() {
+ return new Processor(this.opts, this.hasWalkedCache);
+ }
+ // return a new Processor containing the subwalks for each
+ // child entry, and a set of matches, and
+ // a hasWalkedCache that's a copy of this one
+ // then we're going to call
+ filterEntries(parent, entries) {
+ const patterns = this.subwalks.get(parent);
+ // put matches and entry walks into the results processor
+ const results = this.child();
+ for (const e of entries) {
+ for (const pattern of patterns) {
+ const absolute = pattern.isAbsolute();
+ const p = pattern.pattern();
+ const rest = pattern.rest();
+ if (p === minimatch_1.GLOBSTAR) {
+ results.testGlobstar(e, pattern, rest, absolute);
+ }
+ else if (p instanceof RegExp) {
+ results.testRegExp(e, p, rest, absolute);
+ }
+ else {
+ results.testString(e, p, rest, absolute);
+ }
+ }
+ }
+ return results;
+ }
+ testGlobstar(e, pattern, rest, absolute) {
+ if (this.dot || !e.name.startsWith('.')) {
+ if (!pattern.hasMore()) {
+ this.matches.add(e, absolute, false);
+ }
+ if (e.canReaddir()) {
+ // if we're in follow mode or it's not a symlink, just keep
+ // testing the same pattern. If there's more after the globstar,
+ // then this symlink consumes the globstar. If not, then we can
+ // follow at most ONE symlink along the way, so we mark it, which
+ // also checks to ensure that it wasn't already marked.
+ if (this.follow || !e.isSymbolicLink()) {
+ this.subwalks.add(e, pattern);
+ }
+ else if (e.isSymbolicLink()) {
+ if (rest && pattern.checkFollowGlobstar()) {
+ this.subwalks.add(e, rest);
+ }
+ else if (pattern.markFollowGlobstar()) {
+ this.subwalks.add(e, pattern);
+ }
+ }
+ }
+ }
+ // if the NEXT thing matches this entry, then also add
+ // the rest.
+ if (rest) {
+ const rp = rest.pattern();
+ if (typeof rp === 'string' &&
+ // dots and empty were handled already
+ rp !== '..' &&
+ rp !== '' &&
+ rp !== '.') {
+ this.testString(e, rp, rest.rest(), absolute);
+ }
+ else if (rp === '..') {
+ /* c8 ignore start */
+ const ep = e.parent || e;
+ /* c8 ignore stop */
+ this.subwalks.add(ep, rest);
+ }
+ else if (rp instanceof RegExp) {
+ this.testRegExp(e, rp, rest.rest(), absolute);
+ }
+ }
+ }
+ testRegExp(e, p, rest, absolute) {
+ if (!p.test(e.name))
+ return;
+ if (!rest) {
+ this.matches.add(e, absolute, false);
+ }
+ else {
+ this.subwalks.add(e, rest);
+ }
+ }
+ testString(e, p, rest, absolute) {
+ // should never happen?
+ if (!e.isNamed(p))
+ return;
+ if (!rest) {
+ this.matches.add(e, absolute, false);
+ }
+ else {
+ this.subwalks.add(e, rest);
+ }
+ }
+}
+exports.Processor = Processor;
+//# sourceMappingURL=processor.js.map
\ No newline at end of file
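
The MatchRecord added above packs two booleans into a small bitmask and ANDs repeated additions together, so a path is only reported with its absolute path (or as a directory-only match) if every pattern that matched it agreed. A minimal standalone sketch of that encoding follows; it is not part of the vendored code, and plain string keys stand in for path-scurry Path objects purely for illustration.

// Illustrative sketch only -- restates the MatchRecord flag logic
// from processor.js above, with string keys instead of Path objects.
'use strict';

const store = new Map();

function add(target, absolute, ifDir) {
  // bit 2: report the absolute path; bit 1: only a match if it is a directory
  const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
  const current = store.get(target);
  // AND with any earlier flags: a later relative or non-dir match clears them
  store.set(target, current === undefined ? n : n & current);
}

add('src/index.js', true, false);  // first pattern: absolute match
add('src/index.js', false, false); // second pattern: relative match

// -> [ [ 'src/index.js', false, false ] ]
console.log([...store.entries()].map(([path, n]) => [path, !!(n & 2), !!(n & 1)]));
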
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js.map b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js.map
new file mode 100644
index 00000000000000..bcbac1f723f983
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"processor.js","sourceRoot":"","sources":["../../../src/processor.ts"],"names":[],"mappings":";AAAA,qEAAqE;;;AAErE,yCAA8C;AAK9C;;GAEG;AACH,MAAa,cAAc;IACzB,KAAK,CAA0B;IAC/B,YAAY,QAAkC,IAAI,GAAG,EAAE;QACrD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IACD,IAAI;QACF,OAAO,IAAI,cAAc,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IAChD,CAAC;IACD,SAAS,CAAC,MAAY,EAAE,OAAgB;QACtC,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,EAAE,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACrE,CAAC;IACD,WAAW,CAAC,MAAY,EAAE,OAAgB;QACxC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAClC,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA;QACvC,IAAI,MAAM;YAAE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;;YACvC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;IAChE,CAAC;CACF;AAjBD,wCAiBC;AAED;;;;GAIG;AACH,MAAa,WAAW;IACtB,KAAK,GAAsB,IAAI,GAAG,EAAE,CAAA;IACpC,GAAG,CAAC,MAAY,EAAE,QAAiB,EAAE,KAAc;QACjD,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;QAC9C,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAA;IACjE,CAAC;IACD,yBAAyB;IACzB,OAAO;QACL,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC;YAClD,IAAI;YACJ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YACT,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;SACV,CAAC,CAAA;IACJ,CAAC;CACF;AAfD,kCAeC;AAED;;;GAGG;AACH,MAAa,QAAQ;IACnB,KAAK,GAAyB,IAAI,GAAG,EAAE,CAAA;IACvC,GAAG,CAAC,MAAY,EAAE,OAAgB;QAChC,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE;YACxB,OAAM;SACP;QACD,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,KAAK,OAAO,CAAC,UAAU,EAAE,CAAC,EAAE;gBAC5D,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACnB;SACF;;YAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,CAAC,CAAA;IAC1C,CAAC;IACD,GAAG,CAAC,MAAY;QACd,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,qBAAqB;QACrB,IAAI,CAAC,IAAI,EAAE;YACT,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAA;SACnD;QACD,oBAAoB;QACpB,OAAO,IAAI,CAAA;IACb,CAAC;IACD,OAAO;QACL,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAc,CAAC,CAAC,CAAA;IAClE,CAAC;IACD,IAAI;QACF,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC,CAAA;IAC3D,CAAC;CACF;AA5BD,4BA4BC;AAED;;;;;GAKG;AACH,MAAa,SAAS;IACpB,cAAc,CAAgB;IAC9B,OAAO,GAAG,IAAI,WAAW,EAAE,CAAA;IAC3B,QAAQ,GAAG,IAAI,QAAQ,EAAE,CAAA;IACzB,QAAQ,CAAY;IACpB,MAAM,CAAS;IACf,GAAG,CAAS;IACZ,IAAI,CAAgB;IAEpB,YAAY,IAAoB,EAAE,cAA+B;QAC/D,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,cAAc,GAAG,cAAc;YAClC,CAAC,CAAC,cAAc,CAAC,IAAI,EAAE;YACvB,CAAC,CAAC,IAAI,cAAc,EAAE,CAAA;IAC1B,CAAC;IAED,eAAe,CAAC,MAAY,EAAE,QAAmB;QAC/C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,MAAM,aAAa,GAAsB,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAA;QAEvE,gEAAgE;QAChE,uCAAuC;QAEvC,KAAK,IAAI,CAAC,CAAC,EAAE,OAAO,CAAC,IAAI,aAAa,EAAE;YACtC,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAE3C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YAC3B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,KAAK,CAAA;YAErE,kCAAkC;YAClC,IAAI,IAAI,EAAE;gBACR,CAAC,GAAG,CAAC,
CAAC,OAAO,CACX,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,KAAK,SAAS;oBAC1C,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI;oBAChB,CAAC,CAAC,IAAI,CACT,CAAA;gBACD,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,IAAI,EAAE;oBACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;oBAChC,SAAQ;iBACT;qBAAM;oBACL,OAAO,GAAG,IAAI,CAAA;iBACf;aACF;YAED,IAAI,CAAC,CAAC,QAAQ,EAAE;gBAAE,SAAQ;YAE1B,IAAI,CAAY,CAAA;YAChB,IAAI,IAAoB,CAAA;YACxB,IAAI,OAAO,GAAG,KAAK,CAAA;YACnB,OACE,OAAO,CAAC,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,QAAQ;gBAC3C,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,EACvB;gBACA,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;gBACtB,sDAAsD;gBACtD,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,IAAI;oBAAE,MAAK;gBACtC,CAAC,GAAG,CAAC,CAAA;gBACL,OAAO,GAAG,IAAI,CAAA;gBACd,OAAO,GAAG,IAAI,CAAA;aACf;YACD,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;YACrB,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YACrB,IAAI,OAAO,EAAE;gBACX,IAAI,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC;oBAAE,SAAQ;gBACvD,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;aAC5C;YAED,uDAAuD;YACvD,qCAAqC;YACrC,kDAAkD;YAClD,IAAI,OAAO,CAAC,KAAK,QAAQ,EAAE;gBACzB,sBAAsB;gBACtB,IAAI,CAAC,IAAI,EAAE;oBACT,MAAM,KAAK,GAAG,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,CAAA;oBACjD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;iBAChD;qBAAM;oBACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;gBACD,SAAQ;aACT;iBAAM,IAAI,CAAC,KAAK,oBAAQ,EAAE;gBACzB,wCAAwC;gBACxC,4CAA4C;gBAC5C,wDAAwD;gBACxD,4DAA4D;gBAC5D,gEAAgE;gBAChE,IACE,CAAC,CAAC,CAAC,cAAc,EAAE;oBACnB,IAAI,CAAC,MAAM;oBACX,OAAO,CAAC,mBAAmB,EAAE,EAC7B;oBACA,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;gBACD,MAAM,EAAE,GAAG,IAAI,EAAE,OAAO,EAAE,CAAA;gBAC1B,MAAM,KAAK,GAAG,IAAI,EAAE,IAAI,EAAE,CAAA;gBAC1B,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;oBAClD,iDAAiD;oBACjD,6CAA6C;oBAC7C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,CAAA;iBACvD;qBAAM;oBACL,IAAI,EAAE,KAAK,IAAI,EAAE;wBACf,wDAAwD;wBACxD,wDAAwD;wBACxD,qBAAqB;wBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;wBACxB,oBAAoB;wBACpB,IAAI,CAAC,KAAK;4BAAE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAA;6BAC3C,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE,EAAE,KAAK,CAAC,EAAE;4BAClD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,KAAK,CAAC,CAAA;yBAC7B;qBACF;iBACF;aACF;iBAAM,IAAI,CAAC,YAAY,MAAM,EAAE;gBAC9B,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;aAC9B;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAED,cAAc;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAA;IAC7B,CAAC;IAED,KAAK;QACH,OAAO,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;IACtD,CAAC;IAED,0DAA0D;IAC1D,yCAAyC;IACzC,6CAA6C;IAC7C,2BAA2B;IAC3B,aAAa,CAAC,MAAY,EAAE,OAAe;QACzC,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QAC1C,yDAAyD;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,EAAE,CAAA;QAC5B,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE;YACvB,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;gBAC9B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,CAAA;gBACrC,MAAM,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;gBAC3B,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,KAAK,oBAAQ,EAAE;oBAClB,OAAO,CAAC,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACjD;qBAAM,IAAI,CAAC,YAAY,MAAM,EAAE;oBAC9B,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACzC;qBAAM;oBACL,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;iBACzC;aACF;SACF;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED,YAAY,CACV,CAAO,EACP,OAAgB,EAChB,IAAoB,EACpB,QAAiB;QAEjB,IAAI,IAAI,CAAC
,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACvC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;gBACtB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;aACrC;YACD,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE;gBAClB,2DAA2D;gBAC3D,gEAAgE;gBAChE,+DAA+D;gBAC/D,iEAAiE;gBACjE,uDAAuD;gBACvD,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,cAAc,EAAE,EAAE;oBACtC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;iBAC9B;qBAAM,IAAI,CAAC,CAAC,cAAc,EAAE,EAAE;oBAC7B,IAAI,IAAI,IAAI,OAAO,CAAC,mBAAmB,EAAE,EAAE;wBACzC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;qBAC3B;yBAAM,IAAI,OAAO,CAAC,kBAAkB,EAAE,EAAE;wBACvC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;qBAC9B;iBACF;aACF;SACF;QACD,sDAAsD;QACtD,YAAY;QACZ,IAAI,IAAI,EAAE;YACR,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,CAAA;YACzB,IACE,OAAO,EAAE,KAAK,QAAQ;gBACtB,sCAAsC;gBACtC,EAAE,KAAK,IAAI;gBACX,EAAE,KAAK,EAAE;gBACT,EAAE,KAAK,GAAG,EACV;gBACA,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;aAC9C;iBAAM,IAAI,EAAE,KAAK,IAAI,EAAE;gBACtB,qBAAqB;gBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;gBACxB,oBAAoB;gBACpB,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,IAAI,CAAC,CAAA;aAC5B;iBAAM,IAAI,EAAE,YAAY,MAAM,EAAE;gBAC/B,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;aAC9C;SACF;IACH,CAAC;IAED,UAAU,CACR,CAAO,EACP,CAAW,EACX,IAAoB,EACpB,QAAiB;QAEjB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;YAAE,OAAM;QAC3B,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACrC;aAAM;YACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;SAC3B;IACH,CAAC;IAED,UAAU,CAAC,CAAO,EAAE,CAAS,EAAE,IAAoB,EAAE,QAAiB;QACpE,uBAAuB;QACvB,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;YAAE,OAAM;QACzB,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;SACrC;aAAM;YACL,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;SAC3B;IACH,CAAC;CACF;AApOD,8BAoOC","sourcesContent":["// synchronous utility for filtering entries and calculating subwalks\n\nimport { GLOBSTAR, MMRegExp } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { MMPattern, Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\n/**\n * A cache of which patterns have been processed for a given Path\n */\nexport class HasWalkedCache {\n store: Map>\n constructor(store: Map> = new Map()) {\n this.store = store\n }\n copy() {\n return new HasWalkedCache(new Map(this.store))\n }\n hasWalked(target: Path, pattern: Pattern) {\n return this.store.get(target.fullpath())?.has(pattern.globString())\n }\n storeWalked(target: Path, pattern: Pattern) {\n const fullpath = target.fullpath()\n const cached = this.store.get(fullpath)\n if (cached) cached.add(pattern.globString())\n else this.store.set(fullpath, new Set([pattern.globString()]))\n }\n}\n\n/**\n * A record of which paths have been matched in a given walk step,\n * and whether they only are considered a match if they are a directory,\n * and whether their absolute or relative path should be returned.\n */\nexport class MatchRecord {\n store: Map = new Map()\n add(target: Path, absolute: boolean, ifDir: boolean) {\n const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0)\n const current = this.store.get(target)\n this.store.set(target, current === undefined ? 
n : n & current)\n }\n // match, absolute, ifdir\n entries(): [Path, boolean, boolean][] {\n return [...this.store.entries()].map(([path, n]) => [\n path,\n !!(n & 2),\n !!(n & 1),\n ])\n }\n}\n\n/**\n * A collection of patterns that must be processed in a subsequent step\n * for a given path.\n */\nexport class SubWalks {\n store: Map = new Map()\n add(target: Path, pattern: Pattern) {\n if (!target.canReaddir()) {\n return\n }\n const subs = this.store.get(target)\n if (subs) {\n if (!subs.find(p => p.globString() === pattern.globString())) {\n subs.push(pattern)\n }\n } else this.store.set(target, [pattern])\n }\n get(target: Path): Pattern[] {\n const subs = this.store.get(target)\n /* c8 ignore start */\n if (!subs) {\n throw new Error('attempting to walk unknown path')\n }\n /* c8 ignore stop */\n return subs\n }\n entries(): [Path, Pattern[]][] {\n return this.keys().map(k => [k, this.store.get(k) as Pattern[]])\n }\n keys(): Path[] {\n return [...this.store.keys()].filter(t => t.canReaddir())\n }\n}\n\n/**\n * The class that processes patterns for a given path.\n *\n * Handles child entry filtering, and determining whether a path's\n * directory contents must be read.\n */\nexport class Processor {\n hasWalkedCache: HasWalkedCache\n matches = new MatchRecord()\n subwalks = new SubWalks()\n patterns?: Pattern[]\n follow: boolean\n dot: boolean\n opts: GlobWalkerOpts\n\n constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache) {\n this.opts = opts\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.hasWalkedCache = hasWalkedCache\n ? hasWalkedCache.copy()\n : new HasWalkedCache()\n }\n\n processPatterns(target: Path, patterns: Pattern[]) {\n this.patterns = patterns\n const processingSet: [Path, Pattern][] = patterns.map(p => [target, p])\n\n // map of paths to the magic-starting subwalks they need to walk\n // first item in patterns is the filter\n\n for (let [t, pattern] of processingSet) {\n this.hasWalkedCache.storeWalked(t, pattern)\n\n const root = pattern.root()\n const absolute = pattern.isAbsolute() && this.opts.absolute !== false\n\n // start absolute patterns at root\n if (root) {\n t = t.resolve(\n root === '/' && this.opts.root !== undefined\n ? this.opts.root\n : root\n )\n const rest = pattern.rest()\n if (!rest) {\n this.matches.add(t, true, false)\n continue\n } else {\n pattern = rest\n }\n }\n\n if (t.isENOENT()) continue\n\n let p: MMPattern\n let rest: Pattern | null\n let changed = false\n while (\n typeof (p = pattern.pattern()) === 'string' &&\n (rest = pattern.rest())\n ) {\n const c = t.resolve(p)\n // we can be reasonably sure that .. is a readable dir\n if (c.isUnknown() && p !== '..') break\n t = c\n pattern = rest\n changed = true\n }\n p = pattern.pattern()\n rest = pattern.rest()\n if (changed) {\n if (this.hasWalkedCache.hasWalked(t, pattern)) continue\n this.hasWalkedCache.storeWalked(t, pattern)\n }\n\n // now we have either a final string for a known entry,\n // more strings for an unknown entry,\n // or a pattern starting with magic, mounted on t.\n if (typeof p === 'string') {\n // must be final entry\n if (!rest) {\n const ifDir = p === '..' 
|| p === '' || p === '.'\n this.matches.add(t.resolve(p), absolute, ifDir)\n } else {\n this.subwalks.add(t, pattern)\n }\n continue\n } else if (p === GLOBSTAR) {\n // if no rest, match and subwalk pattern\n // if rest, process rest and subwalk pattern\n // if it's a symlink, but we didn't get here by way of a\n // globstar match (meaning it's the first time THIS globstar\n // has traversed a symlink), then we follow it. Otherwise, stop.\n if (\n !t.isSymbolicLink() ||\n this.follow ||\n pattern.checkFollowGlobstar()\n ) {\n this.subwalks.add(t, pattern)\n }\n const rp = rest?.pattern()\n const rrest = rest?.rest()\n if (!rest || ((rp === '' || rp === '.') && !rrest)) {\n // only HAS to be a dir if it ends in **/ or **/.\n // but ending in ** will match files as well.\n this.matches.add(t, absolute, rp === '' || rp === '.')\n } else {\n if (rp === '..') {\n // this would mean you're matching **/.. at the fs root,\n // and no thanks, I'm not gonna test that specific case.\n /* c8 ignore start */\n const tp = t.parent || t\n /* c8 ignore stop */\n if (!rrest) this.matches.add(tp, absolute, true)\n else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {\n this.subwalks.add(tp, rrest)\n }\n }\n }\n } else if (p instanceof RegExp) {\n this.subwalks.add(t, pattern)\n }\n }\n\n return this\n }\n\n subwalkTargets(): Path[] {\n return this.subwalks.keys()\n }\n\n child() {\n return new Processor(this.opts, this.hasWalkedCache)\n }\n\n // return a new Processor containing the subwalks for each\n // child entry, and a set of matches, and\n // a hasWalkedCache that's a copy of this one\n // then we're going to call\n filterEntries(parent: Path, entries: Path[]): Processor {\n const patterns = this.subwalks.get(parent)\n // put matches and entry walks into the results processor\n const results = this.child()\n for (const e of entries) {\n for (const pattern of patterns) {\n const absolute = pattern.isAbsolute()\n const p = pattern.pattern()\n const rest = pattern.rest()\n if (p === GLOBSTAR) {\n results.testGlobstar(e, pattern, rest, absolute)\n } else if (p instanceof RegExp) {\n results.testRegExp(e, p, rest, absolute)\n } else {\n results.testString(e, p, rest, absolute)\n }\n }\n }\n return results\n }\n\n testGlobstar(\n e: Path,\n pattern: Pattern,\n rest: Pattern | null,\n absolute: boolean\n ) {\n if (this.dot || !e.name.startsWith('.')) {\n if (!pattern.hasMore()) {\n this.matches.add(e, absolute, false)\n }\n if (e.canReaddir()) {\n // if we're in follow mode or it's not a symlink, just keep\n // testing the same pattern. If there's more after the globstar,\n // then this symlink consumes the globstar. If not, then we can\n // follow at most ONE symlink along the way, so we mark it, which\n // also checks to ensure that it wasn't already marked.\n if (this.follow || !e.isSymbolicLink()) {\n this.subwalks.add(e, pattern)\n } else if (e.isSymbolicLink()) {\n if (rest && pattern.checkFollowGlobstar()) {\n this.subwalks.add(e, rest)\n } else if (pattern.markFollowGlobstar()) {\n this.subwalks.add(e, pattern)\n }\n }\n }\n }\n // if the NEXT thing matches this entry, then also add\n // the rest.\n if (rest) {\n const rp = rest.pattern()\n if (\n typeof rp === 'string' &&\n // dots and empty were handled already\n rp !== '..' 
&&\n rp !== '' &&\n rp !== '.'\n ) {\n this.testString(e, rp, rest.rest(), absolute)\n } else if (rp === '..') {\n /* c8 ignore start */\n const ep = e.parent || e\n /* c8 ignore stop */\n this.subwalks.add(ep, rest)\n } else if (rp instanceof RegExp) {\n this.testRegExp(e, rp, rest.rest(), absolute)\n }\n }\n }\n\n testRegExp(\n e: Path,\n p: MMRegExp,\n rest: Pattern | null,\n absolute: boolean\n ) {\n if (!p.test(e.name)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n\n testString(e: Path, p: string, rest: Pattern | null, absolute: boolean) {\n // should never happen?\n if (!e.isNamed(p)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n}\n"]}
\ No newline at end of file
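
HasWalkedCache, also added in processor.js above, records which (path, pattern) pairs a walk step has already processed so they are not processed again; a Processor takes an optional existing cache and works on a copy of it. Below is a rough, self-contained sketch of that bookkeeping, using tiny stand-ins for the path-scurry Path and glob Pattern objects (only fullpath() and globString() are assumed here).

// Illustrative sketch only -- mirrors HasWalkedCache from processor.js above.
'use strict';

class HasWalkedCache {
  constructor(store = new Map()) { this.store = store; }
  // shallow copy: the Map is new, but the per-path Sets are shared
  copy() { return new HasWalkedCache(new Map(this.store)); }
  hasWalked(target, pattern) {
    return this.store.get(target.fullpath())?.has(pattern.globString());
  }
  storeWalked(target, pattern) {
    const fullpath = target.fullpath();
    const cached = this.store.get(fullpath);
    if (cached) cached.add(pattern.globString());
    else this.store.set(fullpath, new Set([pattern.globString()]));
  }
}

// Minimal stand-ins for path-scurry Path and glob Pattern instances.
const target = name => ({ fullpath: () => name });
const pattern = glob => ({ globString: () => glob });

const cache = new HasWalkedCache();
const src = target('/repo/src');
const star = pattern('**/*.js');

cache.storeWalked(src, star);
console.log(cache.hasWalked(src, star));                 // true
console.log(cache.copy().hasWalked(src, star));          // true (copy sees it too)
console.log(cache.hasWalked(target('/repo/lib'), star)); // false
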
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts
new file mode 100644
index 00000000000000..5c1a0414971b3a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.d.ts
@@ -0,0 +1,96 @@
+/// <reference types="node" />
+/**
+ * Single-use utility classes to provide functionality to the {@link Glob}
+ * methods.
+ *
+ * @module
+ */
+import { Minipass } from 'minipass';
+import { Path } from 'path-scurry';
+import { IgnoreLike } from './ignore.js';
+import { Pattern } from './pattern.js';
+import { Processor } from './processor.js';
+export interface GlobWalkerOpts {
+ absolute?: boolean;
+ allowWindowsEscape?: boolean;
+ cwd?: string | URL;
+ dot?: boolean;
+ dotRelative?: boolean;
+ follow?: boolean;
+ ignore?: string | string[] | IgnoreLike;
+ mark?: boolean;
+ matchBase?: boolean;
+ maxDepth?: number;
+ nobrace?: boolean;
+ nocase?: boolean;
+ nodir?: boolean;
+ noext?: boolean;
+ noglobstar?: boolean;
+ platform?: NodeJS.Platform;
+ posix?: boolean;
+ realpath?: boolean;
+ root?: string;
+ stat?: boolean;
+ signal?: AbortSignal;
+ windowsPathsNoEscape?: boolean;
+ withFileTypes?: boolean;
+}
+export type GWOFileTypesTrue = GlobWalkerOpts & {
+ withFileTypes: true;
+};
+export type GWOFileTypesFalse = GlobWalkerOpts & {
+ withFileTypes: false;
+};
+export type GWOFileTypesUnset = GlobWalkerOpts & {
+ withFileTypes?: undefined;
+};
+export type Result<O extends GlobWalkerOpts> = O extends GWOFileTypesTrue ? Path : O extends GWOFileTypesFalse ? string : O extends GWOFileTypesUnset ? string : Path | string;
+export type Matches<O extends GlobWalkerOpts> = O extends GWOFileTypesTrue ? Set<Path> : O extends GWOFileTypesFalse ? Set<string> : O extends GWOFileTypesUnset ? Set<string> : Set<Path | string>;
+export type MatchStream<O extends GlobWalkerOpts> = O extends GWOFileTypesTrue ? Minipass<Path, Path> : O extends GWOFileTypesFalse ? Minipass<string, string> : O extends GWOFileTypesUnset ? Minipass<string, string> : Minipass<Path | string, Path | string>;
+/**
+ * basic walking utilities that all the glob walker types use
+ */
+export declare abstract class GlobUtil<O extends GlobWalkerOpts = GlobWalkerOpts> {
+ #private;
+ path: Path;
+ patterns: Pattern[];
+ opts: O;
+ seen: Set<Path>;
+ paused: boolean;
+ aborted: boolean;
+ signal?: AbortSignal;
+ maxDepth: number;
+ constructor(patterns: Pattern[], path: Path, opts: O);
+ pause(): void;
+ resume(): void;
+ onResume(fn: () => any): void;
+ matchCheck(e: Path, ifDir: boolean): Promise<Path | undefined>;
+ matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined;
+ matchCheckSync(e: Path, ifDir: boolean): Path | undefined;
+ abstract matchEmit(p: Result<O>): void;
+ abstract matchEmit(p: string | Path): void;
+ matchFinish(e: Path, absolute: boolean): void;
+ match(e: Path, absolute: boolean, ifDir: boolean): Promise<void>;
+ matchSync(e: Path, absolute: boolean, ifDir: boolean): void;
+ walkCB(target: Path, patterns: Pattern[], cb: () => any): void;
+ walkCB2(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any;
+ walkCB3(target: Path, entries: Path[], processor: Processor, cb: () => any): void;
+ walkCBSync(target: Path, patterns: Pattern[], cb: () => any): void;
+ walkCB2Sync(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any;
+ walkCB3Sync(target: Path, entries: Path[], processor: Processor, cb: () => any): void;
+}
+export declare class GlobWalker<O extends GlobWalkerOpts = GlobWalkerOpts> extends GlobUtil<O> {
+ matches: O extends GWOFileTypesTrue ? Set<Path> : O extends GWOFileTypesFalse ? Set<string> : O extends GWOFileTypesUnset ? Set<string> : Set<Path | string>;
+ constructor(patterns: Pattern[], path: Path, opts: O);
+ matchEmit(e: Result